├── acc
│   ├── acc_lstm_Shakespeare.txt
│   ├── acc_resnet18_Cifar.txt
│   ├── acc_resnet18_MNIST.txt
│   ├── acc_cnn_MNIST.txt
│   ├── acc_mobilenet_MNIST.txt
│   ├── acc_cnn_FMNIST.txt
│   └── acc_resnet18_FMNIST.txt
├── log
│   ├── log_resnet18_Cifar.txt
│   ├── log_resnet18_MNIST.txt
│   ├── log_lstm_Shakespeare.txt
│   ├── log_cnn_MNIST.txt
│   └── log_cnn_FMNIST.txt
├── bash
│   ├── ip.txt
│   ├── kill.sh
│   ├── setup.sh
│   └── deploy.sh
├── model
│   ├── lstm.py
│   ├── resnet.py
│   ├── cnn.py
│   └── mobilenet.py
├── noniid
│   ├── temp
│   │   ├── Shakespeare
│   │   │   ├── 1.txt
│   │   │   └── 0.txt
│   │   ├── Cifar
│   │   │   ├── Cifar_label_noniid_users3_data200_unbalance0.6.csv
│   │   │   ├── Cifar_quantity_noniid_users3_data200.csv
│   │   │   ├── Cifar_iid_users3_data500.csv
│   │   │   ├── Cifar_label_noniid_users3_data500_unbalance0.6.csv
│   │   │   └── Cifar_quantity_noniid_users3_data500.csv
│   │   ├── MNIST
│   │   │   ├── MNIST_label_noniid_users3_data200_unbalance0.6.csv
│   │   │   ├── MNIST_iid_users3_data500.csv
│   │   │   ├── MNIST_label_noniid_users3_data500_unbalance0.6.csv
│   │   │   └── MNIST_quantity_noniid_users3_data500.csv
│   │   └── FMNIST
│   │       ├── FMNIST_label_noniid_users3_data500_unbalance0.6.csv
│   │       ├── FMNIST_iid_users3_data500.csv
│   │       └── FMNIST_quantity_noniid_users3_data500.csv
│   ├── setting.py
│   ├── file_flow.py
│   └── data_noniid
│       ├── Shakespeare_noniid.py
│       ├── FMNIST_noniid.py
│       ├── Cifar_noniid.py
│       └── MNIST_noniid.py
├── data
│   └── Shakespeare
│       └── Shakespeare.txt
├── init
│   ├── init_mobilenet.py
│   ├── init_lstm.py
│   ├── init_resnet18.py
│   └── init_cnn.py
├── plot.py
├── train
│   ├── train_cnn.py
│   ├── train_mobilenet.py
│   ├── train_resnet18.py
│   └── train_lstm.py
├── main.py
├── README.md
└── FL_models
    ├── FedAvg_ray.py
    └── FedAvg.py

--------------------------------------------------------------------------------
/acc/acc_lstm_Shakespeare.txt:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/acc/acc_resnet18_Cifar.txt:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/acc/acc_resnet18_MNIST.txt:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/log/log_resnet18_Cifar.txt:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/acc/acc_cnn_MNIST.txt:
--------------------------------------------------------------------------------
1 | EPOCH=001,Accuracy= 94.380%
2 | 
--------------------------------------------------------------------------------
/acc/acc_mobilenet_MNIST.txt:
--------------------------------------------------------------------------------
1 | EPOCH=001,Accuracy= 97.490%
2 | 
--------------------------------------------------------------------------------
/bash/ip.txt:
--------------------------------------------------------------------------------
1 | 192.168.0.108
2 | 192.168.0.109
3 | 192.168.0.110
--------------------------------------------------------------------------------
/acc/acc_cnn_FMNIST.txt:
--------------------------------------------------------------------------------
1 | EPOCH=001,Accuracy= 20.190%
2 | EPOCH=002,Accuracy= 42.570%
3 | EPOCH=003,Accuracy= 29.230%
4 | 
--------------------------------------------------------------------------------
/log/log_resnet18_MNIST.txt:
--------------------------------------------------------------------------------
1 | 001 00001 |Loss: 2.411 | Acc: 11.719%
2 | 001 00002 |Loss: 2.374 | Acc: 15.625%
3 | 001 00003 |Loss: 2.290 | Acc: 17.879%
4 | 
--------------------------------------------------------------------------------
/log/log_lstm_Shakespeare.txt:
--------------------------------------------------------------------------------
1 | 001 0000001 |Loss: 3.871 | Acc: 1.562%
2 | 001 0000002 |Loss: 4.022 | Acc: 5.859%
3 | 001 0000003 |Loss: 3.988 | Acc: 4.427%
4 | 
--------------------------------------------------------------------------------
/bash/kill.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | pid=$(lsof -t -i:29500 | tail -1)
4 | if [ -z "$pid" ]
5 | then
6 |     echo "No process to kill on port 29500!"
7 |     exit 0
8 | fi
9 | kill -9 "$pid"
10 | 
11 | echo "Process $pid killed!"
12 | exit 0
--------------------------------------------------------------------------------
/log/log_cnn_MNIST.txt:
--------------------------------------------------------------------------------
1 | 001 00001 |Loss: 2.300 | Acc: 14.844%
2 | 001 00002 |Loss: 2.303 | Acc: 12.891%
3 | 001 00003 |Loss: 2.305 | Acc: 11.198%
4 | 001 00004 |Loss: 2.303 | Acc: 10.600%
5 | 002 00005 |Loss: 2.297 | Acc: 12.500%
6 | 002 00006 |Loss: 2.294 | Acc: 14.062%
7 | 002 00007 |Loss: 2.294 | Acc: 13.542%
8 | 002 00008 |Loss: 2.290 | Acc: 14.400%
9 | 
--------------------------------------------------------------------------------
/log/log_cnn_FMNIST.txt:
--------------------------------------------------------------------------------
1 | 001 00001 |Loss: 2.308 | Acc: 10.156%
2 | 001 00002 |Loss: 2.296 | Acc: 11.719%
3 | 001 00003 |Loss: 2.283 | Acc: 11.818%
4 | 002 00004 |Loss: 2.181 | Acc: 22.656%
5 | 002 00005 |Loss: 2.192 | Acc: 17.969%
6 | 002 00006 |Loss: 2.132 | Acc: 20.000%
7 | 003 00007 |Loss: 1.783 | Acc: 42.188%
8 | 003 00008 |Loss: 1.782 | Acc: 38.281%
9 | 003 00009 |Loss: 1.780 | Acc: 36.970%
10 | 004 00010 |Loss: 2.459 | Acc: 30.469%
11 | 004 00011 |Loss: 2.104 | Acc: 35.547%
12 | 004 00012 |Loss: 2.062 | Acc: 33.333%
13 | 
--------------------------------------------------------------------------------
/model/lstm.py:
--------------------------------------------------------------------------------
1 | import torch.nn as nn
2 | 
3 | 
4 | class RNN(nn.Module):
5 |     def __init__(self, input_size, output_size, hidden_size, num_layers):
6 |         super(RNN, self).__init__()
7 |         self.embedding = nn.Embedding(input_size, input_size)
8 |         self.rnn = nn.LSTM(input_size=input_size, hidden_size=hidden_size, num_layers=num_layers)
9 |         self.decoder = nn.Linear(hidden_size, output_size)
10 | 
11 |     def forward(self, input_seq, hidden_state):
12 |         embedding = self.embedding(input_seq)
13 |         output, hidden_state = self.rnn(embedding, hidden_state)
14 |         output = self.decoder(output)
15 |         # detach so the next chunk's backward pass stops here (truncated BPTT)
16 |         return output, (hidden_state[0].detach(), hidden_state[1].detach())
17 | 
18 |     def set_weights(self, weights):
19 |         self.load_state_dict(weights)
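The detach() in RNN.forward is what lets the Shakespeare model train chunk by chunk (truncated backpropagation through time). A minimal smoke test of the module; vocab_size = 65 is an illustrative value, not taken from the corpus:

import torch
from model.lstm import RNN

vocab_size = 65
rnn = RNN(vocab_size, vocab_size, hidden_size=512, num_layers=3)

seq = torch.randint(0, vocab_size, (100, 1))   # (seq_len, batch=1) of char ids
out, hidden = rnn(seq, None)                   # nn.LSTM accepts hidden=None
assert out.shape == (100, 1, vocab_size)
# hidden was detached inside forward, so backpropagating through the next
# chunk stops at this boundary instead of unrolling the whole history
out, hidden = rnn(seq, hidden)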
--------------------------------------------------------------------------------
/noniid/temp/Shakespeare/1.txt:
--------------------------------------------------------------------------------
1 | nventory to particularise their abundance; our
2 | sufferance is a gain to them Let us revenge this with
3 | our pikes, ere we become rakes: for the gods know I
4 | speak this in hunger for bread, not in thirst for revenge.
5 | 
6 | Second Citizen:
7 | Would you proceed especially against Caius Marcius?
8 | 
9 | All:
10 | Against him first: he's a very dog to the commonalty.
11 | 
12 | Second Citizen:
13 | Consider you what services he has done for his country?
14 | 
15 | First Citizen:
16 | Very well; and could be content to give him good
17 | report fort, but that he pays himself with being proud.
18 | 
19 | Second Citizen:
20 | Nay, but speak not maliciously.
21 | 
22 | First Citizen:
23 | I say unto you, what he hath done famously, he did
24 | it to that end: though soft-conscienced men can be
25 | content to say it was for his country he did it to
26 | please his mother and to be partly
--------------------------------------------------------------------------------
/bash/setup.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | # One terminal per node: ssh in, free the rendezvous port, then start the
4 | # worker for that rank. Each here-document must be closed with its
5 | # "remotessh" delimiter, otherwise every following line is fed to the
6 | # remote shell.
7 | 
8 | gnome-terminal --window -x bash -c \
9 | "\
10 | ssh -t qty-tp@192.168.1.110 << remotessh
11 | # becoming root here (su - root) would need an interactive prompt or expect
12 | cd Semester2/Federated_learning
13 | sh bash/kill.sh
14 | python main.py --rank 0
15 | remotessh
16 | exec bash;\
17 | "
18 | 
19 | gnome-terminal --window -x bash -c \
20 | "\
21 | ssh -t pi@192.168.1.101 << remotessh
22 | cd qitianyu/Federated_learning
23 | sh bash/kill.sh
24 | python main.py --rank 1
25 | remotessh
26 | exec bash;\
27 | "
28 | 
29 | gnome-terminal --window -x bash -c \
30 | "\
31 | ssh -t pi@192.168.1.103 << remotessh
32 | cd qitianyu/Federated_learning
33 | sh bash/kill.sh
34 | python main.py --rank 2
35 | remotessh
36 | exec bash;\
37 | "
38 | 
39 | gnome-terminal --window -x bash -c \
40 | "\
41 | ssh -t pi@192.168.1.104 << remotessh
42 | cd qitianyu/Federated_learning
43 | sh bash/kill.sh
44 | python main.py --rank 3
45 | remotessh
46 | exec bash;\
47 | "
--------------------------------------------------------------------------------
/noniid/temp/Shakespeare/0.txt:
--------------------------------------------------------------------------------
1 | First Citizen:
2 | Before we proceed any further, hear me speak.
3 | 
4 | All:
5 | Speak, speak.
6 | 
7 | First Citizen:
8 | You are all resolved rather to die than to famish?
9 | 
10 | All:
11 | Resolved. resolved.
12 | 
13 | First Citizen:
14 | First, you know Caius Marcius is chief enemy to the people.
15 | 
16 | All:
17 | We know't, we know't.
18 | 
19 | First Citizen:
20 | Let us kill him, and we'll have corn at our own price.
21 | Is't a verdict?
22 | 
23 | All:
24 | No more talking on't; let it be done: away, away!
25 | 
26 | Second Citizen:
27 | One word, good citizens.
28 | 
29 | First Citizen:
30 | We are accounted poor citizens, the patricians good.
31 | What authority surfeits on would relieve us: if they
32 | would yield us but the superfluity, while it were
33 | wholesome, we might guess they relieved us humanely;
34 | but they think we are too dear: the leanness that
35 | afflicts us, the object of our misery, is as an
36 | 
--------------------------------------------------------------------------------
/acc/acc_resnet18_FMNIST.txt:
--------------------------------------------------------------------------------
1 | EPOCH=001,Accuracy= 89.430%
2 | EPOCH=002,Accuracy= 91.220%
3 | EPOCH=003,Accuracy= 90.970%
4 | EPOCH=004,Accuracy= 91.010%
5 | EPOCH=005,Accuracy= 92.300%
6 | EPOCH=006,Accuracy= 92.580%
7 | EPOCH=007,Accuracy= 92.290%
8 | EPOCH=008,Accuracy= 93.030%
9 | EPOCH=009,Accuracy= 91.850%
10 | EPOCH=010,Accuracy= 92.180%
11 | EPOCH=011,Accuracy= 92.160%
12 | EPOCH=012,Accuracy= 92.470%
13 | EPOCH=013,Accuracy= 92.510%
14 | EPOCH=014,Accuracy= 93.090%
15 | EPOCH=015,Accuracy= 93.170%
16 | EPOCH=016,Accuracy= 92.920%
17 | EPOCH=017,Accuracy= 92.730%
18 | EPOCH=018,Accuracy= 92.600%
19 | EPOCH=019,Accuracy= 92.580%
20 | EPOCH=020,Accuracy= 92.980%
21 | EPOCH=021,Accuracy= 92.640%
22 | EPOCH=022,Accuracy= 92.660%
23 | EPOCH=023,Accuracy= 93.260%
24 | EPOCH=024,Accuracy= 93.420%
25 | EPOCH=025,Accuracy= 93.430%
26 | EPOCH=026,Accuracy= 93.850%
27 | EPOCH=027,Accuracy= 93.820%
28 | EPOCH=028,Accuracy= 93.910%
29 | EPOCH=029,Accuracy= 93.950%
30 | EPOCH=030,Accuracy= 94.030%
31 | EPOCH=031,Accuracy= 94.000%
32 | EPOCH=032,Accuracy= 94.010%
33 | 
--------------------------------------------------------------------------------
/noniid/setting.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | from data_noniid.Cifar_noniid import get_dataset_cifar10_noniid
3 | from data_noniid.MNIST_noniid import get_dataset_mnist_noniid
4 | from data_noniid.FMNIST_noniid import get_dataset_fmnist_noniid
5 | from data_noniid.Shakespeare_noniid import divide_in_txt
6 | from file_flow import user_noniid_in_file
7 | 
8 | 
9 | def main():
10 |     parser = argparse.ArgumentParser(description='NonIID')
11 |     parser.add_argument("-d", "--data", help="Cifar or MNIST or FMNIST", type=str, default='Cifar')
12 |     parser.add_argument("-nm", "--noniid_model", help="quantity_noniid or label_noniid or iid / Shakespeare: iid or noniid",
13 |                         type=str, default='iid')
14 |     parser.add_argument("-nu", "--num_users", help="The number of clients", type=int, default=2)
15 |     parser.add_argument("-ts", "--total_samples", help="The total number of samples per client", type=int, default=20000)
16 |     parser.add_argument("-ru", "--rate_unbalance", help="The non-IID unbalance rate (<=1.0)", type=float, default=0.6)
17 |     parser.add_argument("-nc", "--num_class", help="The number of classes per client for non-IID (<=10)", type=int, default=2)
18 |     args = parser.parse_args()
19 | 
20 |     if args.data == 'Cifar':
21 |         dict_users_train = get_dataset_cifar10_noniid(args)
22 |         user_noniid_in_file(dict_users_train, args)
23 |     if args.data == 'MNIST':
24 |         dict_users_train = get_dataset_mnist_noniid(args)
25 |         user_noniid_in_file(dict_users_train, args)
26 |     if args.data == 'FMNIST':
27 |         dict_users_train = get_dataset_fmnist_noniid(args)
28 |         user_noniid_in_file(dict_users_train, args)
29 |     if args.data == 'Shakespeare':
30 |         divide_in_txt(args)
31 | 
32 | 
33 | if __name__ == "__main__":
34 |     main()
35 | 
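setting.py is normally driven from the command line, e.g. python setting.py -d MNIST -nm label_noniid -nu 3 -ts 200 -ru 0.6, which produces the cached CSVs under noniid/temp/. An equivalent programmatic sketch, run from inside noniid/ and assuming the generators only read the argparse fields defined above:

from argparse import Namespace
from data_noniid.MNIST_noniid import get_dataset_mnist_noniid
from file_flow import user_noniid_in_file

args = Namespace(data='MNIST', noniid_model='label_noniid', num_users=3,
                 total_samples=200, rate_unbalance=0.6, num_class=2)
user_noniid_in_file(get_dataset_mnist_noniid(args), args)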
--------------------------------------------------------------------------------
/data/Shakespeare/Shakespeare.txt:
--------------------------------------------------------------------------------
1 | First Citizen:
2 | Before we proceed any further, hear me speak.
3 | 
4 | All:
5 | Speak, speak.
6 | 
7 | First Citizen:
8 | You are all resolved rather to die than to famish?
9 | 
10 | All:
11 | Resolved. resolved.
12 | 
13 | First Citizen:
14 | First, you know Caius Marcius is chief enemy to the people.
15 | 
16 | All:
17 | We know't, we know't.
18 | 
19 | First Citizen:
20 | Let us kill him, and we'll have corn at our own price.
21 | Is't a verdict?
22 | 
23 | All:
24 | No more talking on't; let it be done: away, away!
25 | 
26 | Second Citizen:
27 | One word, good citizens.
28 | 
29 | First Citizen:
30 | We are accounted poor citizens, the patricians good.
31 | What authority surfeits on would relieve us: if they
32 | would yield us but the superfluity, while it were
33 | wholesome, we might guess they relieved us humanely;
34 | but they think we are too dear: the leanness that
35 | afflicts us, the object of our misery, is as an
36 | inventory to particularise their abundance; our
37 | sufferance is a gain to them Let us revenge this with
38 | our pikes, ere we become rakes: for the gods know I
39 | speak this in hunger for bread, not in thirst for revenge.
40 | 
41 | Second Citizen:
42 | Would you proceed especially against Caius Marcius?
43 | 
44 | All:
45 | Against him first: he's a very dog to the commonalty.
46 | 
47 | Second Citizen:
48 | Consider you what services he has done for his country?
49 | 
50 | First Citizen:
51 | Very well; and could be content to give him good
52 | report fort, but that he pays himself with being proud.
53 | 
54 | Second Citizen:
55 | Nay, but speak not maliciously.
56 | 
57 | First Citizen:
58 | I say unto you, what he hath done famously, he did
59 | it to that end: though soft-conscienced men can be
60 | content to say it was for his country he did it to
61 | please his mother and to be partly proud; which he
62 | is, even till the altitude of his virtue.
63 | 
64 | Second Citizen:
65 | What he cannot help in his nature, you account a
66 | vice in him. You must in no way say he is covetous.
67 | 
--------------------------------------------------------------------------------
/init/init_mobilenet.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import torchvision
3 | import torchvision.transforms as transforms
4 | import torch.optim as optim
5 | from model.resnet import *
6 | from model.mobilenet import *
7 | from torchsummary import summary
8 | from noniid.file_flow import select_trainset
9 | 
10 | 
11 | def normalize_data_mnist():
12 |     """
13 |     Return the normalization transform (MNIST and FMNIST);
14 |     the repeat-to-3-channels Lambda is left commented out here
15 |     """
16 |     transform = transforms.Compose([
17 |         transforms.ToTensor(),
18 |         # transforms.Lambda(tmp_func),
19 |         # transforms.Normalize(mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5))
20 |     ])
21 | 
22 |     return transform
23 | 
24 | 
25 | def load_data(args):
26 |     """
27 |     Get the train and test dataloaders
28 |     """
29 |     transform_mnist = normalize_data_mnist()
30 | 
31 |     if args.data == 'MNIST':
32 |         trainset = torchvision.datasets.MNIST(root='./data', train=True, download=False, transform=transform_mnist)
33 |         testset = torchvision.datasets.MNIST(root='./data', train=False, download=False, transform=transform_mnist)
34 |         trainset_select = select_trainset(trainset, args)
35 |     if args.data == 'FMNIST':
36 |         trainset = torchvision.datasets.FashionMNIST(root='./data', train=True, download=False, transform=transform_mnist)
37 |         testset = torchvision.datasets.FashionMNIST(root='./data', train=False, download=False, transform=transform_mnist)
38 |         trainset_select = select_trainset(trainset, args)
39 | 
40 |     trainloader = torch.utils.data.DataLoader(trainset_select, batch_size=args.batchsize, shuffle=True, num_workers=2)
41 |     testloader = torch.utils.data.DataLoader(testset, batch_size=100, shuffle=False, num_workers=2)
42 | 
43 |     return trainloader, testloader, len(trainset_select), len(testset)
44 | 
45 | 
46 | def init(args):
47 |     """
48 |     Make the net/device/data/criterion/optimizer
49 |     """
50 |     # device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
51 |     device = torch.device("cpu")
52 |     trainloader, testloader, train_data_num, test_data_num = load_data(args)
53 | 
54 |     net = mobilenetv2().to(device)
55 | 
56 |     # Define loss function and optimizer
57 |     criterion = nn.CrossEntropyLoss()
58 |     optimizer = optim.SGD(net.parameters(), lr=args.learning_rate, momentum=0.9, weight_decay=5e-4)
59 |     return device, trainloader, testloader, net, criterion, optimizer, train_data_num, test_data_num
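init() above expects an argparse-style object; a sketch of driving it directly with a Namespace, run from the repo root. The fields are the ones read by init/load_data plus those select_trainset needs to locate a cached split (here the MNIST_iid_users3_data500.csv shipped under noniid/temp/):

from argparse import Namespace
from init.init_mobilenet import init

args = Namespace(data='MNIST', batchsize=128, learning_rate=0.01,
                 noniid_model='iid', num_users=3, total_samples=500, idx_user=1)
device, trainloader, testloader, net, criterion, optimizer, n_train, n_test = init(args)
print(type(net).__name__, n_train, n_test)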
--------------------------------------------------------------------------------
/plot.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | import argparse
3 | import matplotlib.pyplot as plt
4 | 
5 | def per2float(per):
6 |     """
7 |     Strip the trailing '%' and convert to float
8 |     """
9 |     return float(per[:-1])
10 | 
11 | def GetList(train_consult, test_consult):
12 |     """
13 |     Parse the train/test logs into x (epochs), loss, and accuracy lists
14 |     """
15 |     x = []
16 |     # x_test = []
17 |     train_loss = []
18 |     train_acc = []
19 |     test_acc = []
20 |     for i in range(len(train_consult)-1):
21 |         if train_consult.iloc[i, 0] + 1 == train_consult.iloc[i+1,0] or i == len(train_consult)-2:
22 |             if train_consult.iloc[i, 0] > test_consult.iloc[len(test_consult)-1, 1]:
23 |                 break
24 |             x.append(train_consult.iloc[i, 0])
25 |             train_loss.append(train_consult.iloc[i, 4])
26 |             train_acc.append(per2float(train_consult.iloc[i, 7]))
27 |             test_acc.append(per2float(test_consult.iloc[train_consult.iloc[i, 0]-1, 4]))
28 |     return x, train_loss, train_acc, test_acc
29 | 
30 | def draw(x, train_loss, train_acc, test_acc, args):
31 |     title = args.model + '---' + args.data
32 | 
33 |     plt.figure()
34 |     plt.suptitle(title)
35 | 
36 |     plt.subplot(2, 1, 1)
37 |     plt.grid()
38 |     plt.plot(x, train_loss)
39 |     plt.xlabel('Epochs')
40 |     plt.ylabel('Loss')
41 | 
42 |     plt.subplot(2, 1, 2)
43 |     plt.grid()
44 |     plt.plot(x, train_acc, label='train')
45 |     plt.plot(x, test_acc, label='test')
46 |     plt.legend()
47 |     plt.xlabel('Epochs')
48 |     plt.ylabel('Acc')
49 | 
50 |     plt.tight_layout()
51 | 
52 |     plt.show()
53 | 
54 | def main():
55 |     parser = argparse.ArgumentParser(description='Plot')
56 |     parser.add_argument("-m", "--model", help="resnet18 or cnn or lstm", type=str, default='cnn')
57 |     parser.add_argument("-d", "--data", help="Cifar or MNIST or FMNIST or Shakespeare", type=str, default='MNIST')
58 |     args = parser.parse_args()
59 | 
60 |     train_path = "./log/log_" + args.model + "_" + args.data + ".txt"
61 |     test_path = "./acc/acc_" + args.model + "_" + args.data + ".txt"
62 | 
63 |     train_consult = pd.read_csv(train_path, sep=r"\s", header=None, engine='python',
64 |                                 names=['0', '1', '2', '3', '4', '5', '6', '7'])
65 |     test_consult = pd.read_csv(test_path, sep=r"\s|,|=", header=None, engine='python',
66 |                                names=['0', '1', '2', '3', '4'])
67 | 
68 | 
69 |     x, train_loss, train_acc, test_acc = GetList(train_consult, test_consult)
70 |     draw(x, train_loss, train_acc, test_acc, args)
71 | 
72 |     print(train_consult)
73 | 
74 | 
75 | if __name__ == "__main__":
76 |     main()
77 | 
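plot.py relies on those regex separators to line up the columns; an illustration of how one acc line splits (re.split is roughly what pandas' python engine does with sep=r"\s|,|="; the empty string produced by "= " is why the percentage lands in column 4, the value GetList reads):

import re

line = "EPOCH=001,Accuracy= 94.380%"
print(re.split(r"\s|,|=", line))
# ['EPOCH', '001', 'Accuracy', '', '94.380%'] -> the epoch sits in column 1
# and the percentage string in column 4, which per2float() turns into 94.38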
--------------------------------------------------------------------------------
/init/init_lstm.py:
--------------------------------------------------------------------------------
1 | from model.lstm import *
2 | import torch
3 | import torch.optim as optim
4 | 
5 | 
6 | def load_data(args):
7 |     """
8 |     Load data from txt (or make non-IID data)
9 |     """
10 |     data_path = './data/' + args.data + '/' + args.data + '.txt'
11 |     data_idx_path = './noniid/temp/Shakespeare/' + str(args.idx_user) + '.txt'
12 |     data = open(data_path, 'r').read()
13 |     data_idx = open(data_idx_path, 'r').read()
14 |     chars = sorted(list(set(data)))
15 |     data_size, vocab_size = len(data), len(chars)
16 |     chars_idx = sorted(list(set(data_idx)))
17 |     data_idx_size, vocab_idx_size = len(data_idx), len(chars_idx)
18 | 
19 |     print("-----------load data...-----------------")
20 |     print("Data has {} characters, {} unique".format(data_size, vocab_size))
21 |     print("Client {} has {} characters, {} unique".format(args.idx_user-1, data_idx_size, vocab_idx_size))
22 |     print("----------------------------------------")
23 | 
24 |     # device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
25 |     device = torch.device("cpu")
26 | 
27 |     # char to index and index to char maps
28 |     char_to_ix = {ch: i for i, ch in enumerate(chars)}
29 |     ix_to_char = {i: ch for i, ch in enumerate(chars)}
30 | 
31 |     data_train_select = list(data_idx)
32 |     data_test_select = list(data)
33 | 
34 |     data_train = data_train_select
35 |     data_test = data_test_select[int(data_size * 0.9): data_size-1]
36 | 
37 |     train_num = len(data_train)
38 |     test_num = len(data_test)
39 | 
40 |     for i, ch in enumerate(data_train):
41 |         data_train[i] = char_to_ix[ch]
42 |     for i, ch in enumerate(data_test):
43 |         data_test[i] = char_to_ix[ch]
44 | 
45 |     data_train = torch.tensor(data_train).to(device)
46 |     data_test = torch.tensor(data_test).to(device)
47 |     data_train = torch.unsqueeze(data_train, dim=1)
48 |     data_test = torch.unsqueeze(data_test, dim=1)
49 | 
50 |     return data_train, data_test, data_size, vocab_size, char_to_ix, ix_to_char, device, train_num, test_num
51 | 
52 | 
53 | def init(args):
54 |     """
55 |     Make the net/device/data/criterion/optimizer/train_num/test_num
56 |     """
57 |     hidden_size = 512   # size of hidden state
58 |     num_layers = 3      # num of layers in LSTM layer stack
59 | 
60 |     data_train, data_test, data_size, vocab_size, char_to_ix, ix_to_char, device, train_num, test_num = load_data(args)
61 | 
62 |     rnn = RNN(vocab_size, vocab_size, hidden_size, num_layers).to(device)
63 | 
64 |     criterion = nn.CrossEntropyLoss()
65 |     optimizer = optim.Adam(rnn.parameters(), lr=args.learning_rate)
66 | 
67 |     return device, rnn, data_train, data_test, criterion, optimizer, train_num, test_num
68 | 
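The char_to_ix / ix_to_char maps built in load_data are plain enumerations of the sorted character set; a tiny self-contained round trip:

text = "hello"
chars = sorted(set(text))                          # ['e', 'h', 'l', 'o']
char_to_ix = {ch: i for i, ch in enumerate(chars)}
ix_to_char = {i: ch for i, ch in enumerate(chars)}
ids = [char_to_ix[c] for c in text]                # [1, 0, 2, 2, 3]
assert "".join(ix_to_char[i] for i in ids) == text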
--------------------------------------------------------------------------------
/model/resnet.py:
--------------------------------------------------------------------------------
1 | import torch.nn as nn
2 | import torch.nn.functional as F
3 | 
4 | 
5 | class ResidualBlock(nn.Module):
6 |     def __init__(self, inchannel, outchannel, stride=1):
7 |         super(ResidualBlock, self).__init__()
8 |         self.left = nn.Sequential(
9 |             nn.Conv2d(inchannel, outchannel, kernel_size=3, stride=stride, padding=1, bias=False),
10 |             nn.BatchNorm2d(outchannel),
11 |             nn.ReLU(inplace=True),
12 |             nn.Conv2d(outchannel, outchannel, kernel_size=3, stride=1, padding=1, bias=False),
13 |             nn.BatchNorm2d(outchannel)
14 |         )
15 |         self.shortcut = nn.Sequential()
16 |         if stride != 1 or inchannel != outchannel:
17 |             self.shortcut = nn.Sequential(
18 |                 nn.Conv2d(inchannel, outchannel, kernel_size=1, stride=stride, bias=False),
19 |                 nn.BatchNorm2d(outchannel)
20 |             )
21 | 
22 |     def forward(self, x):
23 |         out = self.left(x)
24 |         out += self.shortcut(x)
25 |         out = F.relu(out)
26 |         return out
27 | 
28 | 
29 | class ResNet(nn.Module):
30 |     def __init__(self, ResidualBlock, num_classes=10):
31 |         super(ResNet, self).__init__()
32 |         self.inchannel = 64  # 64 channels enter the residual stack
33 |         self.conv1 = nn.Sequential(
34 |             nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False),  # 3-channel (RGB) input
35 |             nn.BatchNorm2d(64),
36 |             nn.ReLU(),
37 |         )
38 |         self.layer1 = self.make_layer(ResidualBlock, 64, 2, stride=1)
39 |         self.layer2 = self.make_layer(ResidualBlock, 128, 2, stride=2)
40 |         self.layer3 = self.make_layer(ResidualBlock, 256, 2, stride=2)
41 |         self.layer4 = self.make_layer(ResidualBlock, 512, 2, stride=2)
42 |         self.fc = nn.Linear(512, num_classes)
43 | 
44 |     def make_layer(self, block, channels, num_blocks, stride):
45 |         strides = [stride] + [1] * (num_blocks - 1)  # strides=[1,1] or [2,1]
46 |         layers = []
47 |         for stride in strides:
48 |             layers.append(block(self.inchannel, channels, stride))
49 |             self.inchannel = channels
50 |         return nn.Sequential(*layers)
51 | 
52 |     def forward(self, x):
53 |         out = self.conv1(x)
54 |         out = self.layer1(out)
55 |         out = self.layer2(out)
56 |         out = self.layer3(out)
57 |         out = self.layer4(out)
58 |         out = F.avg_pool2d(out, 4)
59 |         out = out.view(out.size(0), -1)
60 |         out = self.fc(out)
61 |         return out
62 | 
63 |     def set_weights(self, weights):
64 |         self.load_state_dict(weights)
65 | 
66 | 
67 | def ResNet18():
68 |     return ResNet(ResidualBlock)
69 | 
--------------------------------------------------------------------------------
/model/cnn.py:
--------------------------------------------------------------------------------
1 | import torch.nn as nn
2 | import torch.nn.functional as F
3 | import torch
4 | 
5 | 
6 | class CNN(nn.Module):
7 |     def __init__(self):
8 |         super(CNN, self).__init__()
9 |         self.conv1 = nn.Conv2d(1, 32, 3, 1)
10 |         self.conv2 = nn.Conv2d(32, 64, 3, 1)
11 |         self.dropout1 = nn.Dropout(0.25)
12 |         self.dropout2 = nn.Dropout(0.5)
13 |         self.fc1 = nn.Linear(9216, 128)
14 |         self.fc2 = nn.Linear(128, 10)
15 | 
16 |     def forward(self, x):
17 |         x = self.conv1(x)
18 |         x = F.relu(x)
19 |         x = self.conv2(x)
20 |         x = F.relu(x)
21 |         x = F.max_pool2d(x, 2)
22 |         x = self.dropout1(x)
23 |         x = torch.flatten(x, 1)
24 |         x = self.fc1(x)
25 |         x = F.relu(x)
26 |         x = self.dropout2(x)
27 |         x = self.fc2(x)
28 |         output = F.log_softmax(x, dim=1)
29 |         return output
30 | 
31 |     def set_weights(self, weights):
32 |         self.load_state_dict(weights)
33 | 
34 | 
35 | class CNN4Cifar(nn.Module):
36 |     def __init__(self):
37 |         super(CNN4Cifar, self).__init__()
38 |         self.conv1 = nn.Conv2d(3, 6, 5, 1)
39 |         self.conv2 = nn.Conv2d(6, 16, 5, 1)
40 |         self.pool = torch.nn.MaxPool2d(kernel_size=2, stride=2)
41 |         self.fc1 = torch.nn.Linear(16*5*5, 120)
42 |         self.fc2 = torch.nn.Linear(120, 84)
43 |         self.fc3 = torch.nn.Linear(84, 10)
44 | 
45 |     def forward(self, x):
46 |         x = self.pool(F.relu(self.conv1(x)))
47 |         x = self.pool(F.relu(self.conv2(x)))
48 |         x = x.view(-1, 16*5*5)
49 |         x = F.relu(self.fc1(x))
50 |         x = F.relu(self.fc2(x))
51 |         x = self.fc3(x)
52 |         output = F.log_softmax(x, dim=1)
53 |         return output
54 | 
55 |     def set_weights(self, weights):
56 |         self.load_state_dict(weights)
57 | 
58 | 
59 | class CNN4lite(nn.Module):
60 |     def __init__(self):
61 |         super(CNN4lite, self).__init__()
62 |         self.conv1 = nn.Conv2d(1, 4, 3, 1)
63 |         self.conv2 = nn.Conv2d(4, 12, 3, 1)
64 |         self.dropout1 = nn.Dropout(0.25)
65 |         self.dropout2 = nn.Dropout(0.5)
66 |         self.fc1 = nn.Linear(1728, 128)
67 |         self.fc2 = nn.Linear(128, 10)
68 | 
69 |     def forward(self, x):
70 |         x = self.conv1(x)
71 |         x = F.relu(x)
72 |         x = self.conv2(x)
73 |         x = F.relu(x)
74 |         x = F.max_pool2d(x, 2)
75 |         x = self.dropout1(x)
76 |         x = torch.flatten(x, 1)
77 |         x = self.fc1(x)
78 |         x = F.relu(x)
79 |         x = self.dropout2(x)
80 |         x = self.fc2(x)
81 |         output = F.log_softmax(x, dim=1)
82 |         return output
83 | 
84 |     def set_weights(self, weights):
85 |         self.load_state_dict(weights)
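The hard-coded Linear sizes in cnn.py follow from the conv/pool arithmetic on 28x28 inputs: two 3x3 convs take 28 -> 26 -> 24, the 2x2 pool halves that to 12, so CNN flattens 64 * 12 * 12 = 9216 features and CNN4lite 12 * 12 * 12 = 1728. A quick shape check:

import torch
from model.cnn import CNN, CNN4lite

x = torch.randn(1, 1, 28, 28)
assert CNN()(x).shape == (1, 10)
assert CNN4lite()(x).shape == (1, 10)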
--------------------------------------------------------------------------------
/train/train_cnn.py:
--------------------------------------------------------------------------------
1 | from init.init_cnn import init
2 | import torch
3 | 
4 | 
5 | def train_cnn(args):
6 |     device, trainloader, testloader, net, criterion, optimizer, _, _ = init(args)
7 | 
8 |     best_acc = 85  # initialize best test accuracy
9 |     pre_epoch = 0
10 | 
11 |     print("Start Training: "+args.model+"--"+args.data)
12 |     with open("./acc/"+"acc_"+args.model+"_"+args.data+".txt", "w") as f:
13 |         with open("./log/"+"log_"+args.model+"_"+args.data+".txt", "w") as f2:
14 |             for epoch in range(pre_epoch, args.EPOCH):
15 |                 print('\nEpoch: %d' % (epoch + 1))
16 |                 net.train()
17 |                 sum_loss = 0.0
18 |                 correct = 0.0
19 |                 total = 0.0
20 |                 for i, data in enumerate(trainloader, 0):
21 |                     length = len(trainloader)
22 |                     inputs, labels = data
23 |                     inputs, labels = inputs.to(device), labels.to(device)
24 |                     optimizer.zero_grad()
25 | 
26 |                     # forward + backward
27 |                     outputs = net(inputs)
28 |                     loss = criterion(outputs, labels)
29 |                     loss.backward()
30 |                     optimizer.step()
31 | 
32 |                     # print loss and accuracy once per training batch
33 |                     sum_loss += loss.item()
34 |                     _, predicted = torch.max(outputs.data, 1)  # index of the per-row maximum
35 |                     total += labels.size(0)
36 |                     correct += predicted.eq(labels.data).cpu().sum()
37 |                     print('[epoch:%d, iter:%d] Loss: %.03f | Acc: %.3f%% '
38 |                           % (epoch + 1, (i + 1 + epoch * length), sum_loss / (i + 1), 100. * correct / total))
39 |                     f2.write('%03d %05d |Loss: %.03f | Acc: %.3f%% '
40 |                              % (epoch + 1, (i + 1 + epoch * length), sum_loss / (i + 1), 100. * correct / total))
41 |                     f2.write('\n')
42 |                     f2.flush()
43 | 
44 |                 # evaluate test accuracy after every epoch
45 |                 print("Waiting Test!")
46 |                 with torch.no_grad():
47 |                     correct = 0
48 |                     total = 0
49 |                     for data in testloader:
50 |                         net.eval()
51 |                         images, labels = data
52 |                         images, labels = images.to(device), labels.to(device)
53 |                         outputs = net(images)
54 |                         # take the class with the highest score (index into outputs.data)
55 |                         _, predicted = torch.max(outputs.data, 1)
56 |                         total += labels.size(0)
57 |                         correct += (predicted == labels).sum()
58 |                     print('Test accuracy: %.3f%%' % (100. * correct / total))
59 |                     acc = 100. * correct / total
60 |                     # write each test result to the acc file as it is produced
61 |                     # print('Saving model......')
62 |                     # torch.save(net.state_dict(), '%s/net_%03d.pth' % (args.outf, epoch + 1))
63 |                     f.write("EPOCH=%03d,Accuracy= %.3f%%" % (epoch + 1, acc))
64 |                     f.write('\n')
65 |                     f.flush()
66 | 
67 |     print("Training Finished, TotalEPOCH=%d" % args.EPOCH)
--------------------------------------------------------------------------------
/train/train_mobilenet.py:
--------------------------------------------------------------------------------
1 | from init.init_mobilenet import init
2 | import torch
3 | 
4 | 
5 | def train_mobilenet(args):
6 |     device, trainloader, testloader, net, criterion, optimizer, _, _ = init(args)
7 | 
8 |     best_acc = 85  # initialize best test accuracy
9 |     pre_epoch = 0
10 | 
11 |     print("Start Training: "+args.model+"--"+args.data)
12 |     with open("./acc/"+"acc_"+args.model+"_"+args.data+".txt", "w") as f:
13 |         with open("./log/"+"log_"+args.model+"_"+args.data+".txt", "w") as f2:
14 |             for epoch in range(pre_epoch, args.EPOCH):
15 |                 print('\nEpoch: %d' % (epoch + 1))
16 |                 net.train()
17 |                 sum_loss = 0.0
18 |                 correct = 0.0
19 |                 total = 0.0
20 |                 for i, data in enumerate(trainloader, 0):
21 |                     length = len(trainloader)
22 |                     inputs, labels = data
23 |                     inputs, labels = inputs.to(device), labels.to(device)
24 |                     optimizer.zero_grad()
25 | 
26 |                     # forward + backward
27 |                     outputs = net(inputs)
28 |                     loss = criterion(outputs, labels)
29 |                     loss.backward()
30 |                     optimizer.step()
31 | 
32 |                     # print loss and accuracy once per training batch
33 |                     sum_loss += loss.item()
34 |                     _, predicted = torch.max(outputs.data, 1)  # index of the per-row maximum
35 |                     total += labels.size(0)
36 |                     correct += predicted.eq(labels.data).cpu().sum()
37 |                     print('[epoch:%d, iter:%d] Loss: %.03f | Acc: %.3f%% '
38 |                           % (epoch + 1, (i + 1 + epoch * length), sum_loss / (i + 1), 100. * correct / total))
39 |                     f2.write('%03d %05d |Loss: %.03f | Acc: %.3f%% '
40 |                              % (epoch + 1, (i + 1 + epoch * length), sum_loss / (i + 1), 100. * correct / total))
41 |                     f2.write('\n')
42 |                     f2.flush()
43 | 
44 |                 # evaluate test accuracy after every epoch
45 |                 print("Waiting Test!")
46 |                 with torch.no_grad():
47 |                     correct = 0
48 |                     total = 0
49 |                     for data in testloader:
50 |                         net.eval()
51 |                         images, labels = data
52 |                         images, labels = images.to(device), labels.to(device)
53 |                         outputs = net(images)
54 |                         # take the class with the highest score (index into outputs.data)
55 |                         _, predicted = torch.max(outputs.data, 1)
56 |                         total += labels.size(0)
57 |                         correct += (predicted == labels).sum()
58 |                     print('Test accuracy: %.3f%%' % (100. * correct / total))
59 |                     acc = 100. * correct / total
60 |                     # write each test result to the acc file as it is produced
61 |                     # print('Saving model......')
62 |                     # torch.save(net.state_dict(), '%s/net_%03d.pth' % (args.outf, epoch + 1))
63 |                     f.write("EPOCH=%03d,Accuracy= %.3f%%" % (epoch + 1, acc))
64 |                     f.write('\n')
65 |                     f.flush()
66 | 
67 |     print("Training Finished, TotalEPOCH=%d" % args.EPOCH)
--------------------------------------------------------------------------------
/train/train_resnet18.py:
--------------------------------------------------------------------------------
1 | from init.init_resnet18 import init
2 | import torch
3 | 
4 | 
5 | def train_resnet18(args):
6 |     device, trainloader, testloader, net, criterion, optimizer, _, _ = init(args)
7 | 
8 |     best_acc = 85  # initialize best test accuracy
9 |     pre_epoch = 0
10 | 
11 |     print("Start Training: "+args.model+"--"+args.data)
12 |     with open("./acc/"+"acc_"+args.model+"_"+args.data+".txt", "w") as f:
13 |         with open("./log/"+"log_"+args.model+"_"+args.data+".txt", "w") as f2:
14 |             for epoch in range(pre_epoch, args.EPOCH):
15 |                 print('\nEpoch: %d' % (epoch + 1))
16 |                 net.train()
17 |                 sum_loss = 0.0
18 |                 correct = 0.0
19 |                 total = 0.0
20 |                 for i, data in enumerate(trainloader, 0):
21 |                     length = len(trainloader)
22 |                     inputs, labels = data
23 |                     inputs, labels = inputs.to(device), labels.to(device)
24 |                     optimizer.zero_grad()
25 | 
26 |                     # forward + backward
27 |                     outputs = net(inputs)
28 |                     loss = criterion(outputs, labels)
29 |                     loss.backward()
30 |                     optimizer.step()
31 | 
32 |                     # print loss and accuracy once per training batch
33 |                     sum_loss += loss.item()
34 |                     _, predicted = torch.max(outputs.data, 1)  # index of the per-row maximum
35 |                     total += labels.size(0)
36 |                     correct += predicted.eq(labels.data).cpu().sum()
37 |                     print('[epoch:%d, iter:%d] Loss: %.03f | Acc: %.3f%% '
38 |                           % (epoch + 1, (i + 1 + epoch * length), sum_loss / (i + 1), 100. * correct / total))
39 |                     f2.write('%03d %05d |Loss: %.03f | Acc: %.3f%% '
40 |                              % (epoch + 1, (i + 1 + epoch * length), sum_loss / (i + 1), 100. * correct / total))
41 |                     f2.write('\n')
42 |                     f2.flush()
43 | 
44 |                 # evaluate test accuracy after every epoch
45 |                 print("Waiting Test!")
46 |                 with torch.no_grad():
47 |                     correct = 0
48 |                     total = 0
49 |                     for data in testloader:
50 |                         net.eval()
51 |                         images, labels = data
52 |                         images, labels = images.to(device), labels.to(device)
53 |                         outputs = net(images)
54 |                         # take the class with the highest score (index into outputs.data)
55 |                         _, predicted = torch.max(outputs.data, 1)
56 |                         total += labels.size(0)
57 |                         correct += (predicted == labels).sum()
58 |                     print('Test accuracy: %.3f%%' % (100. * correct / total))
59 |                     acc = 100. * correct / total
60 |                     # write each test result to the acc file as it is produced
61 |                     # print('Saving model......')
62 |                     # torch.save(net.state_dict(), '%s/net_%03d.pth' % (args.outf, epoch + 1))
63 |                     f.write("EPOCH=%03d,Accuracy= %.3f%%" % (epoch + 1, acc))
64 |                     f.write('\n')
65 |                     f.flush()
66 | 
67 |     print("Training Finished, TotalEPOCH=%d" % args.EPOCH)
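train_cnn.py, train_mobilenet.py and train_resnet18.py above differ only in the init module they import and the function name; a sketch of the shared loop they could delegate to (train_generic is a hypothetical helper, not a function in this repo, and logging is omitted for brevity):

def train_generic(args, init_fn):
    device, trainloader, testloader, net, criterion, optimizer, _, _ = init_fn(args)
    for epoch in range(args.EPOCH):
        net.train()
        for inputs, labels in trainloader:
            inputs, labels = inputs.to(device), labels.to(device)
            optimizer.zero_grad()
            loss = criterion(net(inputs), labels)
            loss.backward()
            optimizer.step()

# usage sketch:
#   from init.init_cnn import init
#   train_generic(args, init_fn=init)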
--------------------------------------------------------------------------------
/bash/deploy.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | USER=pi
4 | PASSWORD=raspberry
5 | SERVER=qty-tp
6 | 
7 | # nameworker=(raspi8 raspi9 raspi10)
8 | # IP_raspi8=192.168.0.104
9 | # IP_raspi9=192.168.0.101
10 | # IP_raspi10=192.168.0.103
11 | 
12 | SRCDIR_root=/Users/qitianyu/Master/Semester1/Federated_learning/
13 | dir1=FL_models
14 | dir2=model
15 | dir3=noniid
16 | dir4=train
17 | dir5=init
18 | dir6=bash
19 | file1=main.py
20 | 
21 | DESDIR=/home/pi/qitianyu/Federated_learning/
22 | DESDIR1=/home/jason/qitianyu/Federated_learning/
23 | SERVER_DESDIR=/home/qty-tp/qitianyu/Federated_learning/
24 | FILENAME=./bash/ip.txt
25 | 
26 | for_in_file(){
27 | 
28 |     scp -v -r ${dir1} qty-tp@192.168.0.105:${SERVER_DESDIR}
29 |     scp -v -r ${dir2} qty-tp@192.168.0.105:${SERVER_DESDIR}
30 |     scp -v -r ${dir3} qty-tp@192.168.0.105:${SERVER_DESDIR}
31 |     scp -v -r ${dir4} qty-tp@192.168.0.105:${SERVER_DESDIR}
32 |     scp -v -r ${dir5} qty-tp@192.168.0.105:${SERVER_DESDIR}
33 |     scp -v -r ${dir6} qty-tp@192.168.0.105:${SERVER_DESDIR}
34 |     scp ${file1} qty-tp@192.168.0.105:${SERVER_DESDIR}
35 |     echo "Deploy for 192.168.0.105 is done"
36 |     sleep 1
37 | 
38 |     # For the Raspberry Pis:
39 |     for ip in `cat $FILENAME`
40 |     do
41 |         echo $ip
42 |         scp -v -r ${dir1} ${USER}@${ip}:${DESDIR}
43 |         scp -v -r ${dir2} ${USER}@${ip}:${DESDIR}
44 |         scp -v -r ${dir3} ${USER}@${ip}:${DESDIR}
45 |         scp -v -r ${dir4} ${USER}@${ip}:${DESDIR}
46 |         scp -v -r ${dir5} ${USER}@${ip}:${DESDIR}
47 |         scp -v -r ${dir6} ${USER}@${ip}:${DESDIR}
48 |         scp ${file1} ${USER}@${ip}:${DESDIR}
49 |         echo "Deploy for $ip is done"
50 |         sleep 1
51 |     done
52 | 
53 |     scp -v -r ${dir1} jason@192.168.0.104:${DESDIR1}
54 |     scp -v -r ${dir2} jason@192.168.0.104:${DESDIR1}
55 |     scp -v -r ${dir3} jason@192.168.0.104:${DESDIR1}
56 |     scp -v -r ${dir4} jason@192.168.0.104:${DESDIR1}
57 |     scp -v -r ${dir5} jason@192.168.0.104:${DESDIR1}
58 |     scp -v -r ${dir6} jason@192.168.0.104:${DESDIR1}
59 |     scp ${file1} jason@192.168.0.104:${DESDIR1}
60 |     echo "Deploy for 192.168.0.104 is done"
61 |     sleep 1
62 | 
63 |     scp -v -r ${dir1} jason@192.168.0.114:${DESDIR1}
64 |     scp -v -r ${dir2} jason@192.168.0.114:${DESDIR1}
65 |     scp -v -r ${dir3} jason@192.168.0.114:${DESDIR1}
66 |     scp -v -r ${dir4} jason@192.168.0.114:${DESDIR1}
67 |     scp -v -r ${dir5} jason@192.168.0.114:${DESDIR1}
68 |     scp -v -r ${dir6} jason@192.168.0.114:${DESDIR1}
69 |     scp ${file1} jason@192.168.0.114:${DESDIR1}
70 |     echo "Deploy for 192.168.0.114 is done"
71 |     sleep 1
72 | 
73 |     # # For qty-TP:
74 |     # scp -v -r ${dir1} ${SERVER}@192.168.1.110:${SERVER_DESDIR}
75 |     # scp -v -r ${dir2} ${SERVER}@192.168.1.110:${SERVER_DESDIR}
76 |     # scp -v -r ${dir3} ${SERVER}@192.168.1.110:${SERVER_DESDIR}
77 |     # scp -v -r ${dir4} ${SERVER}@192.168.1.110:${SERVER_DESDIR}
78 |     # scp -v -r ${dir5} ${SERVER}@192.168.1.110:${SERVER_DESDIR}
79 |     # scp -v -r ${dir6} ${SERVER}@192.168.1.110:${SERVER_DESDIR}
80 |     # scp ${file1} ${SERVER}@192.168.1.110:${SERVER_DESDIR}
81 |     # echo "Deploy for Server is done"
82 |     # sleep 1
83 | 
84 |     # # For qty-desk:
85 |     # scp -v -r bash qty@192.168.1.109:/home/qty/Semester2
86 |     # echo "Deploy for bash is done"
87 |     # sleep 1
88 | }
89 | 
90 | cd ${SRCDIR_root}&&
91 | echo "Ready to deploy..."&&
92 | sleep 1
93 | 
94 | for_in_file
95 | 
96 | echo "All deployment and configuration completed successfully!"
97 | 
--------------------------------------------------------------------------------
/noniid/file_flow.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | import torch
3 | import numpy as np
4 | import math
5 | from torch.utils.data import TensorDataset
6 | 
7 | 
8 | def split_integer(m, n):
9 |     assert n > 0
10 |     quotient = int(m / n)
11 |     remainder = m % n
12 |     if remainder > 0:
13 |         return [quotient] * (n - remainder) + [quotient + 1] * remainder
14 |     if remainder < 0:
15 |         return [quotient - 1] * -remainder + [quotient] * (n + remainder)
16 |     return [quotient] * n
17 | 
18 | 
19 | def test_label(dict_user_train, train_dataset, args):
20 |     sum_data = []
21 |     for i in range(len(dict_user_train)):
22 |         test_num = np.zeros((10,), dtype=int)
23 |         for j in range(len(dict_user_train[i])):
24 |             idxs = dict_user_train[i][j]
25 |             label = train_dataset[idxs][1]
26 |             test_num[label] += 1
27 |         sum_data.append(sum(test_num))
28 |         print('----------', end=' ')
29 |         print('Client :', i, ', Sum data is', sum(test_num), end=' ')
30 |         print('----------')
31 |         print(test_num)
32 | 
33 | 
34 | def user_noniid_in_file(dict_users_train, args):
35 |     if args.noniid_model == 'label_noniid':
36 |         file_name = './temp/' + args.data + '/' + args.data + '_' + args.noniid_model + '_users' + str(args.num_users) +\
37 |                     '_data' + str(args.total_samples) + '_unbalance' + str(args.rate_unbalance) + '.csv'
38 |     if args.noniid_model == 'quantity_noniid':
39 |         file_name = './temp/' + args.data + '/' + args.data + '_' + args.noniid_model + '_users' + str(args.num_users) +\
40 |                     '_data' + str(args.total_samples) + '.csv'
41 |     if args.noniid_model == 'iid':
42 |         file_name = './temp/' + args.data + '/' + args.data + '_' + args.noniid_model + '_users' + str(args.num_users) +\
43 |                     '_data' + str(args.total_samples) + '.csv'
44 |     frame = pd.DataFrame.from_dict(dict_users_train, orient='index')
45 | 
46 |     frame.to_csv(file_name)
47 | 
48 | 
49 | def user_out_file(args):
50 |     if args.noniid_model == 'label_noniid':
51 |         file_name = './noniid/temp/' + args.data + '/' + args.data + '_' + args.noniid_model + '_users' + str(args.num_users) +\
52 |                     '_data' + str(args.total_samples) + '_unbalance' + str(args.rate_unbalance) + '.csv'
53 |     if args.noniid_model == 'quantity_noniid':
54 |         file_name = './noniid/temp/' + args.data + '/' + args.data + '_' + args.noniid_model + '_users' + str(args.num_users) +\
55 |                     '_data' + str(args.total_samples) + '.csv'
56 |     if args.noniid_model == 'iid':
57 |         file_name = './noniid/temp/' + args.data + '/' + args.data + '_' + args.noniid_model + '_users' + str(args.num_users) +\
58 |                     '_data' + str(args.total_samples) + '.csv'
59 |     frame = pd.read_csv(file_name)
60 |     train_idx = []
61 |     for i in range(frame.shape[1]-1):
62 |         if math.isnan(frame.iloc[args.idx_user-1, i+1]):
63 |             break
64 |         train_idx.append(frame.iloc[args.idx_user-1, i+1])
65 | 
66 |     return train_idx
67 | 
68 | 
69 | def select_trainset(trainset, args):
70 |     train_idx = user_out_file(args)
71 |     train_select_list = []
72 |     train_label_list = []
73 |     for i in range(len(train_idx)):
74 |         train_select_list.append(trainset[int(train_idx[i])][0])  # index by the stored sample id, not the loop counter
75 |         train_label_list.append(trainset[int(train_idx[i])][1])
76 |     train_select_tens = torch.stack(train_select_list)
77 |     train_label_tens = torch.tensor(train_label_list)
78 |     trainset_select = TensorDataset(train_select_tens, train_label_tens)
79 | 
80 |     return trainset_select
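split_integer above is the basis for every even-ish partition in this module: it hands out m items over n users so the sizes differ by at most one, with the remainder at the tail. A worked example (run from the repo root):

from noniid.file_flow import split_integer

print(split_integer(10, 3))   # [3, 3, 4]
print(split_integer(9, 3))    # [3, 3, 3]
assert sum(split_integer(10, 3)) == 10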
--------------------------------------------------------------------------------
/init/init_resnet18.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import torchvision
3 | import torchvision.transforms as transforms
4 | import torch.optim as optim
5 | from model.resnet import *
6 | from model.cnn import *
7 | from torchsummary import summary
8 | from noniid.file_flow import select_trainset
9 | 
10 | 
11 | def tmp_func(x):
12 |     return x.repeat(3, 1, 1)
13 | 
14 | 
15 | def normalize_data_cifar():
16 |     """
17 |     Return the train/test normalization transforms (CIFAR)
18 |     """
19 |     transform_train = transforms.Compose([
20 |         transforms.RandomCrop(32, padding=4),  # pad 4 pixels on each side, then randomly crop back to 32x32
21 |         transforms.RandomHorizontalFlip(),  # flip horizontally with probability 0.5
22 |         transforms.ToTensor(),
23 |         # The mean and variance of R, G, and B for each level of normalization
24 |         transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
25 |     ])
26 | 
27 |     transform_test = transforms.Compose([
28 |         transforms.ToTensor(),
29 |         transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
30 |     ])
31 |     return transform_train, transform_test
32 | 
33 | 
34 | def normalize_data_mnist():
35 |     """
36 |     Return the normalization transform (MNIST and FMNIST):
37 |     repeat the gray image to 3 channels
38 |     """
39 |     transform = transforms.Compose([
40 |         transforms.ToTensor(),
41 |         transforms.Lambda(tmp_func),
42 |         transforms.Normalize(mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5))
43 |     ])
44 | 
45 |     return transform
46 | 
47 | 
48 | def load_data(args):
49 |     """
50 |     Get the train and test dataloaders
51 |     """
52 |     transform_train_cifar, transform_test_cifar = normalize_data_cifar()
53 |     transform_mnist = normalize_data_mnist()
54 | 
55 |     if args.data == 'Cifar':
56 |         trainset = torchvision.datasets.CIFAR10(root='./data', train=True, download=False, transform=transform_train_cifar)
57 |         testset = torchvision.datasets.CIFAR10(root='./data', train=False, download=False, transform=transform_test_cifar)
58 |         trainset_select = select_trainset(trainset, args)
59 |     if args.data == 'MNIST':
60 |         trainset = torchvision.datasets.MNIST(root='./data', train=True, download=False, transform=transform_mnist)
61 |         testset = torchvision.datasets.MNIST(root='./data', train=False, download=False, transform=transform_mnist)
62 |         trainset_select = select_trainset(trainset, args)
63 |     if args.data == 'FMNIST':
64 |         trainset = torchvision.datasets.FashionMNIST(root='./data', train=True, download=False, transform=transform_mnist)
65 |         testset = torchvision.datasets.FashionMNIST(root='./data', train=False, download=False, transform=transform_mnist)
66 |         trainset_select = select_trainset(trainset, args)
67 | 
68 |     trainloader = torch.utils.data.DataLoader(trainset_select, batch_size=args.batchsize, shuffle=True, num_workers=2)
69 |     testloader = torch.utils.data.DataLoader(testset, batch_size=100, shuffle=False, num_workers=2)
70 | 
71 |     return trainloader, testloader, len(trainset_select), len(testset)
72 | 
73 | 
74 | def init(args):
75 |     """
76 |     Make the net/device/data/criterion/optimizer
77 |     """
78 |     # device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
79 |     device = torch.device("cpu")
80 |     trainloader, testloader, train_data_num, test_data_num = load_data(args)
81 | 
82 |     net = ResNet18().to(device)
83 | 
84 |     # Define loss function and optimizer
85 |     criterion = nn.CrossEntropyLoss()
86 |     optimizer = optim.SGD(net.parameters(), lr=args.learning_rate, momentum=0.9, weight_decay=5e-4)
87 |     return device, trainloader, testloader, net, criterion, optimizer, train_data_num, test_data_num
88 | 
--------------------------------------------------------------------------------
/init/init_cnn.py:
--------------------------------------------------------------------------------
1 | import torchvision
2 | import torchvision.transforms as transforms
3 | import torch.optim as optim
4 | from model.resnet import *
5 | from model.cnn import *
6 | from torchsummary import summary
7 | from noniid.file_flow import select_trainset
8 | 
9 | 
10 | def tmp_func(x):
11 |     return x.repeat(3, 1, 1)
12 | 
13 | 
14 | def normalize_data_cifar():
15 |     """
16 |     Return the train/test normalization transforms (CIFAR)
17 |     """
18 |     transform_train = transforms.Compose([
19 |         transforms.RandomCrop(32, padding=4),  # pad 4 pixels on each side, then randomly crop back to 32x32
20 |         transforms.RandomHorizontalFlip(),  # flip horizontally with probability 0.5
21 |         transforms.ToTensor(),
22 |         # The mean and variance of R, G, and B for each level of normalization
23 |         transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
24 |     ])
25 | 
26 |     transform_test = transforms.Compose([
27 |         transforms.ToTensor(),
28 |         transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
29 |     ])
30 |     return transform_train, transform_test
31 | 
32 | 
33 | def normalize_data_mnist():
34 |     """
35 |     Return the normalization transform (MNIST and FMNIST);
36 |     the repeat-to-3-channels step is commented out for the 1-channel CNNs
37 |     """
38 |     transform = transforms.Compose([
39 |         transforms.ToTensor(),
40 |         # transforms.Lambda(tmp_func),
41 |         # transforms.Normalize(mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5))
42 |     ])
43 | 
44 |     return transform
45 | 
46 | 
47 | def load_data(args):
48 |     """
49 |     Get the train and test dataloaders
50 |     """
51 |     transform_train_cifar, transform_test_cifar = normalize_data_cifar()
52 |     transform_mnist = normalize_data_mnist()
53 | 
54 |     if args.data == 'Cifar':
55 |         trainset = torchvision.datasets.CIFAR10(root='./data', train=True, download=False, transform=transform_train_cifar)
56 |         testset = torchvision.datasets.CIFAR10(root='./data', train=False, download=False, transform=transform_test_cifar)
57 |         trainset_select = select_trainset(trainset, args)
58 |     if args.data == 'MNIST':
59 |         trainset = torchvision.datasets.MNIST(root='./data', train=True, download=False, transform=transform_mnist)
60 |         testset = torchvision.datasets.MNIST(root='./data', train=False, download=False, transform=transform_mnist)
61 |         trainset_select = select_trainset(trainset, args)
62 |     if args.data == 'FMNIST':
63 |         trainset = torchvision.datasets.FashionMNIST(root='./data', train=True, download=False, transform=transform_mnist)
64 |         testset = torchvision.datasets.FashionMNIST(root='./data', train=False, download=False, transform=transform_mnist)
65 |         trainset_select = select_trainset(trainset, args)
66 | 
67 |     trainloader = torch.utils.data.DataLoader(trainset_select, batch_size=args.batchsize, shuffle=True, num_workers=2)
68 |     testloader = torch.utils.data.DataLoader(testset, batch_size=100, shuffle=False, num_workers=2)
69 | 
70 |     return trainloader, testloader, len(trainset_select), len(testset)
71 | 
72 | 
73 | def init(args):
74 |     """
75 |     Make the net/device/data/criterion/optimizer
76 |     """
77 |     # device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
78 |     device = torch.device("cpu")
79 |     trainloader, testloader, train_data_num, test_data_num = load_data(args)
80 | 
81
| if args.data != 'Cifar': 82 | net = CNN4lite().to(device) 83 | if args.data == 'Cifar': 84 | net = CNN4Cifar().to(device) 85 | 86 | # Define loss functions and optimization 87 | criterion = nn.CrossEntropyLoss() 88 | optimizer = optim.SGD(net.parameters(), lr=args.learning_rate, momentum=0.9, weight_decay=5e-4) 89 | return device, trainloader, testloader, net, criterion, optimizer, train_data_num, test_data_num 90 | -------------------------------------------------------------------------------- /noniid/temp/Cifar/Cifar_label_noniid_users3_data200_unbalance0.6.csv: -------------------------------------------------------------------------------- 1 | ,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199 2 | 0,32194,48069,40733,31165,29735,6380,20355,7859,24285,47215,49993,15100,2173,30553,29121,49503,13332,34349,41849,3285,39786,22869,18503,49471,41970,32361,13936,32711,16439,44121,33880,46230,47973,21287,35913,24301,801,6185,48540,20918,15446,17450,37656,26715,3460,28260,20192,44254,42035,9147,16859,11285,3453,4312,20083,44441,18156,49541,3421,41082,48230,41969,12624,49754,13188,42232,4999,27379,26516,37789,28102,38073,22958,32287,3933,13058,15969,15691,13632,44091,40255,25888,20461,8998,17625,23282,23258,10805,33802,35621,45463,34872,17428,33668,10246,10497,46656,17142,2529,1734,18063,5243,22983,12949,45483,29068,35674,16929,3555,14425,12064,41138,48293,12412,10479,1021,45856,49643,6130,37494,44589,42276,45655,18276,10169,48636,40003,42287,7148,49977,25817,23832,11130,39737,26026,4607,43789,21959,22658,25881,44418,13884,10819,5951,29226,14379,11954,12350,41654,48799,6525,45993,47641,42941,44873,27917,33487,5893,265,9911,30932,13580,18547,48590,15033,16966,19122,40101,37793,43042,44287,38663,45197,3928,39984,14295,46072,5611,37730,16901,10161,3088,3697,35961,12902,29401,32337,39572,9434,35746,27162,6264,40970,47767,37074,40117,49601,26822,29985,39570 3 | 
1,27338,38519,2459,43709,3564,29927,27444,14052,49303,5874,23662,40815,10797,47288,15966,22818,27132,4797,47423,9745,17325,18084,21018,1676,9313,16402,11934,17206,27110,32049,33043,6496,1938,1723,25074,2326,34407,30960,43744,29001,8911,45764,33015,5739,28290,1225,23223,11626,46511,18290,3153,14841,21915,10668,42524,22355,44593,21610,11083,40051,13796,5497,9855,1956,23370,23418,22347,31270,28506,9320,28569,24786,15796,14191,42812,10347,9964,20690,29081,23988,21160,19513,6789,39908,17360,41676,11248,10876,31590,15455,10658,17435,45116,46227,48435,1697,24133,25590,23225,35288,46898,27908,27176,27850,26644,24020,46297,3665,33087,34037,3362,25920,34009,28817,10777,15466,23126,49734,12461,32775,35796,28703,13289,12640,18013,32401,33502,44967,15572,5470,14434,37955,48013,23010,3295,41702,3336,39719,22112,23528,23854,20349,38898,26416,19277,48136,49525,9253,34032,10202,38422,18022,13521,39767,22052,7088,12994,46154,38575,11154,21396,21927,21244,46064,8817,15459,6653,47466,40893,85,8277,46896,17494,27451,38249,8681,36565,19816,5220,45572,44169,30984,21852,12759,34848,10546,26031,37474,972,29778,47383,6564,42268,21107,36895,14144,47835,10643,22341,48357 4 | 2,32613,26177,8185,24476,47560,25624,44307,36981,8882,2035,27802,12929,43797,32806,36021,3935,17700,15662,15814,39387,27482,23288,27309,44648,15160,43154,8843,28377,14688,46733,49367,28984,14575,35272,25541,44800,15741,30459,22456,5221,20573,17635,25675,40219,14690,43810,42575,33827,42795,46384,4884,49495,2906,28550,3198,40951,1015,36954,7908,26501,1176,42712,27473,46271,7001,11438,33402,1259,25828,36044,15073,10529,47082,3715,40440,24141,42440,12502,14446,6409,28239,35586,12024,28269,25124,42161,2754,14037,3240,17654,9028,32745,37094,37349,9843,43560,7943,49637,44289,30178,38538,12272,19739,8608,2278,38703,31316,26861,14742,22846,8904,24468,28197,31739,16995,23290,19263,34569,11581,33388,27481,7885,12572,34438,24026,46391,47356,19582,49630,8508,43083,30278,48304,2256,16350,47898,34487,43060,26796,38833,37,47092,17378,33755,17314,26670,1469,41150,8694,37055,4857,12522,18374,10350,48521,20108,6770,12309,18739,21389,38964,22005,26048,23421,9497,49946,25232,6569,7059,22498,4301,33080,33581,21570,44803,17299,6897,48143,15947,8860,22202,45132,26307,43090,23879,45811,31840,20096,13210,9353,14036,40943,27812,20282,15395,6049,40268,20498,4839,27429 5 | -------------------------------------------------------------------------------- /noniid/temp/MNIST/MNIST_label_noniid_users3_data200_unbalance0.6.csv: -------------------------------------------------------------------------------- 1 | ,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199 2 | 
0,35463,36312,27484,32834,48274,36198,14559,22757,19604,46603,59,21601,51936,15962,46127,42288,43972,25244,24904,13556,45964,27754,5885,27880,30905,7079,6846,1755,26221,24360,29521,15815,45764,7987,54996,57872,53376,20437,21695,19863,16314,53249,58823,59501,31517,38011,28705,47363,25438,1580,32372,48374,22918,50451,7024,26752,43136,45305,58535,27117,26077,54618,9244,56481,30687,48451,15257,57125,8072,28177,46858,42733,14719,45571,1231,29984,20720,51113,31319,5968,29090,23091,55683,52944,32907,32011,57237,43687,15956,58748,20308,13116,10860,26837,51582,29018,48,11123,4568,14058,39045,46491,32587,56845,42611,4018,56840,40139,33056,10729,14909,29981,8384,21752,41940,41442,24576,47564,3139,4712,44571,53230,51953,24526,4030,40271,32752,46991,32500,31754,36866,8191,28591,7206,24685,977,51232,44498,18849,498,51370,36701,59785,23343,21565,31686,21488,43995,56182,32211,56617,44929,6322,47719,37436,45649,48372,54136,10866,45891,27678,38942,58384,11872,40089,6335,44007,32919,21118,35569,34594,31781,27650,36845,6924,44945,33883,1713,31205,20198,12899,22604,8125,16883,56141,50714,23400,51588,33890,35561,15616,29665,48559,53860,10054,50902,54884,51488,56872,45019 3 | 1,10340,3286,21932,52133,13830,15169,9310,21640,34345,35720,40362,43887,20611,46984,50685,56810,48184,39052,24424,37869,5843,41256,5902,11997,58896,39079,42219,30872,33878,17973,43200,23357,20458,22569,19222,42355,13704,5870,16503,56305,12194,49692,48833,45404,56855,729,33922,15027,11578,531,44341,22729,18947,27845,52019,33622,38944,50734,53715,5688,59117,32241,9281,51620,20173,1902,42700,50811,33692,1957,44409,33873,18006,50426,23230,30465,46356,45657,32877,11180,47747,59967,46495,35096,31023,6952,37778,22557,14872,8047,45301,8671,23257,35598,58222,12431,37321,26518,16422,34458,21992,44990,1308,6879,1050,14793,21152,48840,16464,11930,9321,15299,52734,7903,2800,50725,15023,30215,7442,851,26217,14683,51288,52260,2254,37487,53260,39375,45214,1212,5960,30666,20219,1004,22472,45821,56406,40709,42956,31793,10177,41043,34035,51199,52344,7958,13068,9746,30602,2963,23605,2382,22532,2833,23408,32103,50532,32742,38544,17743,57665,8659,53782,16141,25179,51341,58779,33270,40146,36221,3526,13171,1780,2687,28813,43754,17765,47312,29306,42408,59803,17641,26532,48509,24953,6582,44839,16040,32018,40488,46191,3438,56126,47212,55586,125,33018,37032,49366,56613 4 | 2,662,2935,43662,24225,47422,33237,9048,53174,293,48111,2604,4685,11279,11278,34759,40464,45559,35554,29133,29974,14484,27311,53608,32212,46687,55129,20022,12445,35571,47101,46821,27930,14286,25641,57291,9300,37443,41781,9182,56437,20714,29166,1196,27041,18617,33355,41574,39743,21130,29115,36252,6451,14428,32707,57489,34370,18446,55795,37114,54500,8224,36061,7289,2494,59029,12191,44503,50588,3392,41963,30538,17370,56329,15200,50760,26489,32266,56601,9382,26964,54190,31800,36199,54684,14553,14413,15820,36233,15757,32971,15235,18015,20518,55473,25502,33483,42149,47703,10784,59050,14081,24198,51379,48294,5823,36775,28944,54105,40090,23087,28282,11299,18010,35279,21044,51937,56513,34167,37172,23764,26751,59587,5815,35685,49475,59355,6639,21668,12093,49839,51743,7139,35051,54951,277,58619,10072,31354,35597,40360,27150,19693,58568,1478,43826,19508,3112,3357,52812,40686,13838,52387,506,59982,42080,16430,59098,2059,20142,50289,58839,47358,6219,8100,19134,55111,11828,33615,37217,51857,37235,23850,22869,33693,35791,53993,27208,35205,49550,48433,33100,1898,44250,59310,55134,44365,26657,38347,54610,21745,9068,39508,31268,23019,38694,8797,44715,40616,57297,5039 5 | 
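The CSV caches above are what user_out_file() reads back at training time: one row per client, the first column a client id and the remaining columns the assigned sample indices (padded with blanks, hence the isnan check). A sketch of inspecting one, assuming it is run from the repo root against the MNIST file shown above:

import pandas as pd

frame = pd.read_csv('./noniid/temp/MNIST/MNIST_label_noniid_users3_data200_unbalance0.6.csv')
row = frame.iloc[0, 1:]          # sample indices assigned to client 0
print(len(row.dropna()))         # 200 indices for this cache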
-------------------------------------------------------------------------------- /noniid/data_noniid/Shakespeare_noniid.py: --------------------------------------------------------------------------------
1 | import pandas as pd
2 | from noniid.file_flow import split_integer
3 | import shutil
4 | import os
5 | pd.set_option('display.max_columns', None)
6 | 
7 | 
8 | def sum_idx_list(lst):
9 |     sum_list = [0]   # prefix sums: per-user lengths -> slice boundaries
10 |     for i in range(len(lst)):
11 |         total = 0
12 |         for j in range(i+1):
13 |             total += lst[j]
14 |         sum_list.append(total)
15 |     return sum_list
16 | 
17 | 
18 | def assign_list(length, n):
19 |     assignment = []   # one user id per speaker role, in contiguous blocks
20 |     count_list = split_integer(length, n)
21 |     for i in range(len(count_list)):
22 |         for j in range(count_list[i]):
23 |             assignment.append(i)
24 |     return assignment
25 | 
26 | 
27 | def get_sort_data(args):
28 |     # data_path = '../shakespeare.txt'
29 |     data_path = '../data/Shakespeare/Shakespeare.txt'
30 |     data = open(data_path, 'r').read()
31 |     data_size = len(data)
32 |     data = list(data)
33 |     data = data[0: int(data_size*0.9)]   # first 90% of the characters = train split
34 | 
35 |     delete_list = []   # indices of redundant blank lines (three newlines in a row)
36 |     for i in range(1, len(data)-1):
37 |         if data[i] == '\n' and data[i-1] == '\n' and data[i+1] == '\n':
38 |             delete_list.append(i)
39 |     x = 0
40 |     for y in delete_list:
41 |         data.pop(y-x)
42 |         x += 1
43 | 
44 |     name_list = []
45 |     idx_list = [0]
46 |     part_list = []
47 |     name = ""
48 |     part = ""
49 |     name_flag = 1
50 | 
51 |     for i in range(len(data)):
52 |         if data[i - 1] == ':' and data[i] == '\n':
53 |             if name_flag == 1:
54 |                 name_flag = 0
55 |                 name_list.append(name)
56 |                 name = ""
57 |         if name_flag == 1:
58 |             name += data[i]
59 |         if data[i] == '\n' and data[i - 1] == '\n':
60 |             name_flag = 1
61 |             idx_list.append(i)
62 |     idx_list.append(len(data))
63 | 
64 |     for i in range(len(idx_list) - 1):
65 |         temp = data[idx_list[i]:idx_list[i + 1]]
66 |         for j in range(len(temp)):
67 |             part += temp[j]
68 |         part_list.append(part)
69 |         part = ""
70 | 
71 |     df = pd.DataFrame({'Name': name_list, 'Part': part_list, 'idx': idx_list[0:len(idx_list) - 1]})
72 | 
73 |     df_sort = df.sort_values(by=['Name'])
74 |     user_list = assign_list(len(name_list), args.num_users)
75 |     df_sort['User'] = user_list
76 |     return df_sort
77 | 
78 | 
79 | def get_iid_data(args):
80 |     data_path = '../data/Shakespeare/Shakespeare.txt'
81 |     data = open(data_path, 'r').read()
82 |     data_size = len(data)
83 |     data = list(data)
84 |     data = data[0: int(data_size*0.9)]
85 |     length_list = split_integer(len(data), args.num_users)
86 |     return data, length_list
87 | 
88 | 
89 | def divide_in_txt(args):
90 |     '''
91 |     Divide the Shakespeare data among the users.
92 |     The train/test split is 9:1.
93 |     You can change the ratio by editing the 0.9 factor
94 |     in get_iid_data and get_sort_data.
95 |     '''
96 |     shutil.rmtree('./temp/Shakespeare/')
97 |     os.mkdir('./temp/Shakespeare/')
98 | 
99 |     if args.noniid_model == 'noniid':
100 |         dataframe_sort = get_sort_data(args)
101 | 
102 |         for i in range(args.num_users):
103 |             df = dataframe_sort[dataframe_sort['User'] == i]
104 |             df = df.sort_values(by=['idx'])
105 | 
106 |             data_path = './temp/Shakespeare/' + str(i) + '.txt'
107 | 
108 |             with open(data_path, "w") as f:
109 |                 for j in range(df.shape[0]):
110 |                     f.write(df.iloc[j][1])
111 | 
112 | 
113 |     if args.noniid_model == 'iid':
114 |         data, length_list = get_iid_data(args)
115 |         sum_list = sum_idx_list(length_list)
116 | 
117 |         for i in range(args.num_users):
118 |             data_path = './temp/Shakespeare/' + str(i) + '.txt'
119 |             with open(data_path, "w") as f:
120 |                 for item in data[sum_list[i]:sum_list[i+1]]:   # end-exclusive; the old -1 dropped each shard's last character
121 |                     f.write(item)
122 | 
123 | 
124 |     print('Divided Shakespeare successfully!')
125 | 
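``split_integer`` comes from ``noniid/file_flow.py``, which is not included in this excerpt. From its call sites above it must split a total into ``n`` near-equal integer parts that sum back to the total. A minimal stand-in with that assumed contract, showing how the helpers compose:

```python
# Stand-in for noniid.file_flow.split_integer; the real implementation is
# not shown in this excerpt, only the contract below is assumed.
def split_integer(total, n):
    base, rem = divmod(total, n)
    return [base + 1 if i < rem else base for i in range(n)]

assert split_integer(5, 2) == [3, 2]
# assign_list(5, 2) -> [0, 0, 0, 1, 1]: sorted speaker roles are dealt out
# to users in contiguous blocks.
# sum_idx_list([3, 2]) -> [0, 3, 5]: per-user counts become the slice
# boundaries used by the iid character split.
```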
-------------------------------------------------------------------------------- /train/train_lstm.py: --------------------------------------------------------------------------------
1 | from init.init_lstm import init
2 | import numpy as np
3 | import torch
4 | import torch.nn.functional as F
5 | from torch.distributions import Categorical
6 | 
7 | 
8 | def train_lstm(args):
9 |     device, rnn, data_train, data_test, criterion, optimizer, _, _ = init(args)
10 |     pre_epoch = 0
11 |     n_iter = 0
12 | 
13 |     print("Start Training: " + args.model + "--" + args.data)
14 |     with open("./acc/"+"acc_"+args.model+"_"+args.data+".txt", "w") as f:
15 |         with open("./log/"+"log_"+args.model+"_"+args.data+".txt", "w") as f2:
16 | 
17 |             # each epoch: train
18 |             for epoch in range(pre_epoch, args.EPOCH):
19 |                 print('\nEpoch: %d' % (epoch + 1))
20 | 
21 |                 data_ptr = np.random.randint(100)
22 |                 n = 0
23 |                 sum_loss = 0
24 |                 correct = 0.0
25 |                 total = 0.0
26 |                 hidden_state = None
27 | 
28 |                 while True:
29 |                     input_seq = data_train[data_ptr: data_ptr + args.batchsize]
30 |                     target_seq = data_train[data_ptr + 1: data_ptr + args.batchsize + 1]
31 |                     input_seq, target_seq = input_seq.to(device), target_seq.to(device)
32 |                     optimizer.zero_grad()
33 | 
34 |                     # forward + backward
35 |                     output, hidden_state = rnn(input_seq, hidden_state)
36 |                     loss = criterion(torch.squeeze(output), torch.squeeze(target_seq))
37 |                     loss.backward()
38 |                     optimizer.step()
39 | 
40 |                     # loss + acc
41 |                     sum_loss += loss.item()
42 |                     _, predicted = torch.max(torch.squeeze(output).data, 1)
43 |                     total += torch.squeeze(target_seq).size(0)
44 |                     correct += predicted.eq(torch.squeeze(target_seq).data).cpu().sum()
45 |                     print('[epoch:%d, iter:%d] Loss: %.03f | Acc: %.3f%% '
46 |                           % (epoch + 1, n_iter + 1, sum_loss / (n + 1), 100. * correct / total))
47 |                     f2.write('%03d %07d |Loss: %.03f | Acc: %.3f%% '
48 |                              % (epoch + 1, n_iter + 1, sum_loss / (n + 1), 100. * correct / total))
49 |                     f2.write('\n')
50 |                     f2.flush()
51 | 
52 |                     data_ptr += args.batchsize
53 |                     n += 1
54 |                     n_iter += 1
55 | 
56 |                     if data_ptr + args.batchsize + 1 > data_train.size(0):
57 |                         break
58 | 
59 |                 # after epoch: test
60 |                 print("Waiting Test!")
61 |                 with torch.no_grad():
62 |                     data_ptr = 0
63 |                     hidden_state = None
64 |                     sum_correct = 0
65 |                     sum_test = 0
66 | 
67 |                     # random character from data to begin
68 |                     rand_index = np.random.randint(100)
69 | 
70 |                     while True:
71 |                         input_seq = data_test[rand_index + data_ptr: rand_index + data_ptr + 1]
72 |                         target_seq = data_test[rand_index + data_ptr + 1: rand_index + data_ptr + 2]
73 |                         output, hidden_state = rnn(input_seq, hidden_state)
74 | 
75 |                         output = F.softmax(torch.squeeze(output), dim=0)
76 |                         dist = Categorical(output)
77 |                         index = dist.sample()
78 | 
79 |                         if index.item() == target_seq[0][0]:
80 |                             sum_correct += 1
81 |                         sum_test += 1
82 |                         data_ptr += 1
83 | 
84 |                         if data_ptr > data_test.size(0) - rand_index - 2:
85 |                             break
86 |                     print('Test accuracy: %.3f%%' % (100. * sum_correct / sum_test))
87 |                     acc = 100. * sum_correct / sum_test
88 |                     f.write("EPOCH=%03d,Accuracy= %.3f%%" % (epoch + 1, acc))
89 |                     f.write('\n')
90 |                     f.flush()
91 | 
92 |     print("Training Finished, TotalEPOCH=%d" % args.EPOCH)
-------------------------------------------------------------------------------- /main.py: --------------------------------------------------------------------------------
1 | import argparse
2 | from train.train_resnet18 import train_resnet18
3 | from train.train_cnn import train_cnn
4 | from train.train_lstm import train_lstm
5 | from train.train_mobilenet import train_mobilenet
6 | import FL_models.FedAvg as FL
7 | import FL_models.HierFL as HFL
8 | import FL_models.HFL_drl as HFL_drl
9 | import FL_models.FedAvg_ray as FL_ray
10 | import FL_models.HFL_ray as HFL_ray
11 | 
12 | def main():
13 |     """
14 |     for example:
15 |     python main.py -m resnet18 -d Cifar -bs 128 -e 200
16 |     python main.py -m lstm -d Shakespeare -bs 128 -e 20
17 |     """
18 |     parser = argparse.ArgumentParser(description='Federated Learning')
19 |     # parser.add_argument('--outf', default='./model/', help='folder to output images and model checkpoints')  # path to save output results
20 |     # parser.add_argument('--net', default='./model/Resnet18.pth', help="path to net (to continue training)")  # model path when resuming training
21 |     '''Dataset, model, batch size, epoch'''
22 |     parser.add_argument("-m", "--model", help="resnet18 or lstm or cnn or mobilenet", type=str, default='cnn')
23 |     parser.add_argument("-d", "--data", help="Cifar or MNIST or FMNIST or Shakespeare", type=str, default='MNIST')
24 |     parser.add_argument("-bs", "--batchsize", help="the batch size of each epoch", type=int, default=128)
25 |     parser.add_argument("-e", "--EPOCH", help="the number of epochs", type=int, default=10)
26 |     parser.add_argument("-lr", "--learning_rate", help="learning rate", type=float, default=0.01)
27 |     parser.add_argument("-nm", "--noniid_model", help="quantity_noniid or label_noniid or iid", type=str, default='iid')
28 |     parser.add_argument("-iu", "--idx_user", help="Select the client number (<=num_users)", type=int, default=0)
29 |     parser.add_argument("-nu", "--num_users", help="The number of clients", type=int, default=2)
30 |     parser.add_argument("-ts", "--total_samples", help="The total samples of each client", type=int, default=25000)
31 |     parser.add_argument("-ru", "--rate_unbalance", help="The proportion of noniid (<=1.0)", type=float, default=0.6)
32 |     parser.add_argument("-nc", "--num_class", help="The number of noniid classes per client (<=10)", type=int, default=2)
33 | 
34 |     '''Federated Learning'''
35 |     parser.add_argument("-fm", "--FL_model", help="the model of FL: FL/HFL/HFL_drl/FL_ray/HFL_ray", type=str, default="HFL_ray")
36 |     parser.add_argument("-p", "--port", help="the port used for rpc initialization", type=str, default="29500")
37 |     parser.add_argument("-a", "--addr", help="the addr used for the server", type=str, default="192.168.0.105")
38 |     parser.add_argument("-r", "--rank", help="rank of this process", type=int, default=0)
39 |     parser.add_argument("-tn", "--topo_num", help="the num of nodes per level of the topo", type=int, nargs='+', default=[3, 2, 2])
40 |     parser.add_argument("-ws", "--world_size", help="number of processes in the group", type=int, default=3)
41 |     parser.add_argument("-ew", "--epoch_worker", help="the num of epochs each worker runs", type=int, default=3)
42 |     parser.add_argument("-ee", "--epoch_edge", help="the num of epochs each edge runs", type=int, default=2)
43 | 
44 |     '''DRL'''
45 |     parser.add_argument("-tf", "--traj_fre", help="The frequency of trajectory collection", type=int, default=5)
46 |     parser.add_argument("-ea", "--epoch_agent", help="The number of epochs for the agent to learn", type=int, default=10)
47 |     parser.add_argument("-g", "--greedy", help="the epsilon of the greedy policy", type=float, default=0.01)
48 |     args = parser.parse_args()
49 | 
50 |     """
51 |     for single-worker training (the traditional code path)
52 |     """
53 |     # if args.model == 'resnet18':
54 |     #     train_resnet18(args)
55 |     # if args.model == 'cnn':
56 |     #     train_cnn(args)
57 |     # if args.model == 'lstm':
58 |     #     train_lstm(args)
59 |     # if args.model == 'mobilenet':
60 |     #     train_mobilenet(args)
61 | 
62 |     '''
63 |     This part is for RPC training, including the following:
64 |     FedAvg:
65 |         1 server and k workers
66 |     HierFedAvg:
67 |         1 server and n clients and k workers
68 |     '''
69 |     if args.FL_model == 'FL':
70 |         FL.run_worker(args)
71 |     elif args.FL_model == 'HFL':
72 |         HFL.run_worker(args)
73 |     elif args.FL_model == 'HFL_drl':
74 |         HFL_drl.run_worker(args)
75 |     elif args.FL_model == 'FL_ray':
76 |         FL_ray.run_worker(args)
77 |     elif args.FL_model == 'HFL_ray':
78 |         HFL_ray.run_worker(args)
79 | 
80 | 
81 | if __name__ == "__main__":
82 |     main()
83 | 
-------------------------------------------------------------------------------- /noniid/temp/Cifar/Cifar_quantity_noniid_users3_data200.csv: --------------------------------------------------------------------------------
1 | ,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269
2 | 
0,12767,34259,48148,27813,49799,25787,32831,48209,44454,33117,1887,25763,49263,34700,22395,40081,24566,16577,2753,3062,3485,2862,40595,32937,12283,22636,29925,28814,18248,41018,1328,49756,38996,12819,8800,5836,35718,9523,42523,24208,18584,44976,21106,46466,20806,1059,41989,10518,8385,42351,20378,41918,27847,14690,8488,41582,46174,30655,29471,21753,16356,13760,4395,22005,24659,18773,2416,30290,29508,34299,4788,47029,30506,29450,34391,29758,6730,38361,42901,1053,34965,23582,38773,45575,11408,25761,27933,4937,41142,47396,43561,10106,43491,40672,1054,267,48858,21493,4059,19794,35776,8311,31828,19064,32577,47094,25632,11253,29287,21361,28922,9483,32271,45240,39658,27919,12688,20636,25945,38868,47641,44410,30420,12112,11715,43578,40068,42362,43785,20430,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 3 | 1,41851,13326,49128,13522,22167,14520,47651,25746,527,15886,8387,45656,7663,47799,5082,42069,22258,34417,5210,16831,11966,37239,41254,44466,22163,26787,24446,2283,21333,36500,42879,45118,47495,42237,28640,30104,22564,33963,47088,26405,25998,10567,45021,2491,28095,30286,47958,11538,15828,7942,975,46832,6177,35723,44334,13639,19359,9348,46771,29354,38999,10462,32155,33387,11998,44323,25633,47897,49855,27540,5797,39325,35299,25006,42029,25547,23372,47892,34623,45914,32095,22498,41301,34770,27105,9389,47510,21978,7362,39505,23329,22173,40088,5754,27193,19170,14816,48780,32300,38101,4424,3015,17931,19401,11072,8407,932,40783,43056,32386,34084,31132,9280,20776,35775,35034,5565,19160,24303,5613,35615,6810,44650,5474,16404,29093,16945,32958,5898,37593,42660.0,34662.0,26135.0,19731.0,30011.0,26634.0,30828.0,32385.0,35838.0,31159.0,25876.0,12432.0,21814.0,29047.0,19535.0,48258.0,3348.0,39465.0,33045.0,16667.0,17364.0,4460.0,45449.0,35650.0,8277.0,2554.0,7.0,35768.0,19507.0,11926.0,32234.0,11141.0,32726.0,6508.0,14260.0,21515.0,4191.0,43872.0,44833.0,16178.0,25521.0,29915.0,38523.0,7066.0,40990.0,37798.0,456.0,9469.0,30018.0,49434.0,3090.0,28932.0,36918.0,6643.0,31566.0,5286.0,29025.0,14387.0,16519.0,18943.0,27707.0,30392.0,19243.0,27237.0,6708.0,38300.0,43008.0,2768.0,29167.0,12001.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4 | 
2,14591,30029,34173,13894,1473,32775,26361,11939,10811,9121,14350,33694,11487,45827,19493,8160,16541,22962,3136,29735,6085,40022,44653,10812,2633,40442,18953,14855,43847,17534,7740,31969,17732,3064,43935,45655,22788,38725,48899,42539,24484,20811,5438,46928,25071,10325,201,26794,42341,590,28873,3233,20442,26899,34393,7633,19337,47727,31850,9716,46082,29709,40281,12399,34996,28551,31478,15173,48093,19579,18285,34203,32792,2744,36583,17415,5281,4306,17302,4703,21906,16904,1962,17646,18769,15298,21921,35124,25825,2254,37429,14108,10565,25378,20820,45754,8974,28996,36054,22914,26974,2947,13601,43493,16411,39187,49742,3490,13887,36843,12006,40835,24630,28793,47928,34556,24084,37075,15307,17145,42142,26894,48605,45496,46824,26979,42087,17483,8473,34306,36740.0,1925.0,37688.0,3300.0,21327.0,26033.0,14673.0,15242.0,24546.0,36362.0,31062.0,21011.0,45469.0,7227.0,36392.0,38954.0,26996.0,39880.0,21428.0,21928.0,43248.0,14908.0,21910.0,39558.0,34565.0,3747.0,48967.0,37316.0,49380.0,14437.0,32780.0,167.0,2872.0,36841.0,2222.0,2146.0,31753.0,33715.0,39867.0,48929.0,14236.0,14838.0,12168.0,23764.0,7257.0,39189.0,42850.0,37865.0,33256.0,45978.0,46995.0,4324.0,1760.0,5268.0,12441.0,5020.0,24198.0,9275.0,29259.0,2891.0,49391.0,41838.0,3844.0,7546.0,14372.0,15511.0,5766.0,2263.0,38920.0,8469.0,1806.0,26851.0,19259.0,29892.0,49925.0,32568.0,8593.0,44130.0,22076.0,27363.0,12984.0,6321.0,2008.0,30197.0,33389.0,11527.0,36953.0,6099.0,2942.0,3144.0,35707.0,27640.0,1138.0,43725.0,40606.0,35995.0,2689.0,31915.0,3720.0,5217.0,22976.0,35992.0,507.0,10065.0,30509.0,39015.0,45695.0,19567.0,31258.0,36341.0,33521.0,3969.0,30035.0,47405.0,33794.0,12873.0,4068.0,24042.0,17052.0,26822.0,16910.0,23651.0,7111.0,21496.0,11213.0,9676.0,2214.0,3941.0,44547.0,10751.0,25988.0,42395.0,30934.0,6016.0,20825.0,21786.0,3234.0,25963.0,34549.0,9959.0 5 | -------------------------------------------------------------------------------- /model/mobilenet.py: -------------------------------------------------------------------------------- 1 | import torch.nn as nn 2 | import math 3 | import torch 4 | 5 | def conv_bn(inp, oup, stride): 6 | return nn.Sequential( 7 | nn.Conv2d(inp, oup, 3, stride, 1, bias=False), 8 | nn.BatchNorm2d(oup), 9 | nn.ReLU6(inplace=True) 10 | ) 11 | 12 | 13 | def conv_1x1_bn(inp, oup): 14 | return nn.Sequential( 15 | nn.Conv2d(inp, oup, 1, 1, 0, bias=False), 16 | nn.BatchNorm2d(oup), 17 | nn.ReLU6(inplace=True) 18 | ) 19 | 20 | 21 | class InvertedResidual(nn.Module): 22 | def __init__(self, inp, oup, stride, expand_ratio): 23 | super(InvertedResidual, self).__init__() 24 | self.stride = stride 25 | assert stride in [1, 2] 26 | 27 | hidden_dim = round(inp * expand_ratio) 28 | self.use_res_connect = self.stride == 1 and inp == oup 29 | self.expand_ratio = expand_ratio 30 | if expand_ratio == 1: 31 | self.conv = nn.Sequential( 32 | # dw 33 | nn.Conv2d(hidden_dim, hidden_dim, 3, stride, 1, groups=hidden_dim, bias=False), 34 | nn.BatchNorm2d(hidden_dim), 35 | nn.ReLU6(inplace=True), 36 | # pw-linear 37 | nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False), 38 | nn.BatchNorm2d(oup), 39 | ) 40 | else: 41 | self.conv = nn.Sequential( 42 | # pw 43 | nn.Conv2d(inp, hidden_dim, 1, 1, 0, bias=False), 44 | nn.BatchNorm2d(hidden_dim), 45 | nn.ReLU6(inplace=True), 46 | # dw 47 | nn.Conv2d(hidden_dim, hidden_dim, 3, stride, 1, groups=hidden_dim, bias=False), 48 | nn.BatchNorm2d(hidden_dim), 49 | nn.ReLU6(inplace=True), 50 | # pw-linear 51 | nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False), 52 | nn.BatchNorm2d(oup), 53 | ) 54 | 55 | def 
forward(self, x): 56 | if self.use_res_connect: 57 | return x + self.conv(x) 58 | else: 59 | return self.conv(x) 60 | 61 | 62 | class MobileNetV2(nn.Module): 63 | def __init__(self, n_class=10, input_size=28, width_mult=1., dropout=0.0): 64 | super(MobileNetV2, self).__init__() 65 | block = InvertedResidual 66 | input_channel = 32 67 | last_channel = 1280 68 | interverted_residual_setting = [ 69 | # t, c, n, s 70 | [1, 16, 1, 1], 71 | [6, 24, 2, 2], 72 | [6, 32, 3, 2], 73 | [6, 64, 4, 2], 74 | [6, 96, 3, 1], 75 | [6, 160, 3, 2], 76 | [6, 320, 1, 1], 77 | ] 78 | 79 | # building first layer 80 | # assert input_size % 32 == 0 81 | input_channel = int(input_channel * width_mult) 82 | self.last_channel = int(last_channel * width_mult) if width_mult > 1.0 else last_channel 83 | self.features = [conv_bn(1, input_channel, 2)] 84 | # building inverted residual blocks 85 | for t, c, n, s in interverted_residual_setting: 86 | output_channel = int(c * width_mult) 87 | for i in range(n): 88 | if i == 0: 89 | self.features.append(block(input_channel, output_channel, s, expand_ratio=t)) 90 | else: 91 | self.features.append(block(input_channel, output_channel, 1, expand_ratio=t)) 92 | input_channel = output_channel 93 | # building last several layers 94 | self.features.append(conv_1x1_bn(input_channel, self.last_channel)) 95 | # self.features.append(nn.AvgPool2d(input_size // 32)) 96 | # make it nn.Sequential 97 | self.features = nn.Sequential(*self.features) 98 | 99 | # building classifier 100 | self.classifier = nn.Sequential( 101 | nn.Dropout(dropout), 102 | nn.Linear(self.last_channel, n_class), 103 | ) 104 | 105 | self._initialize_weights() 106 | 107 | def forward(self, x): 108 | x = self.features(x) 109 | x = x.mean([2, 3]) 110 | x = self.classifier(x) 111 | return x 112 | 113 | def _initialize_weights(self): 114 | for m in self.modules(): 115 | if isinstance(m, nn.Conv2d): 116 | n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels 117 | m.weight.data.normal_(0, math.sqrt(2. / n)) 118 | if m.bias is not None: 119 | m.bias.data.zero_() 120 | elif isinstance(m, nn.BatchNorm2d): 121 | m.weight.data.fill_(1) 122 | m.bias.data.zero_() 123 | elif isinstance(m, nn.Linear): 124 | n = m.weight.size(1) 125 | m.weight.data.normal_(0, 0.01) 126 | m.bias.data.zero_() 127 | 128 | def set_weights(self, weights): 129 | self.load_state_dict(weights) 130 | 131 | 132 | def mobilenetv2(**kwargs): 133 | """ 134 | Constructs a MobileNetV2 model. 
135 | """ 136 | model = MobileNetV2(**kwargs) 137 | return model -------------------------------------------------------------------------------- /noniid/data_noniid/FMNIST_noniid.py: -------------------------------------------------------------------------------- 1 | import torchvision 2 | import numpy as np 3 | import sys 4 | sys.path.append("..") 5 | import init.init_resnet18 as init_res 6 | from noniid.file_flow import split_integer, test_label 7 | 8 | 9 | def get_dataset_fmnist_noniid(args): 10 | transform_train_mnist = init_res.normalize_data_mnist() 11 | 12 | train_dataset = torchvision.datasets.FashionMNIST('../data', train=True, download=False,\ 13 | transform=transform_train_mnist) 14 | 15 | if args.noniid_model == 'label_noniid': 16 | data_users_train = fmnist_extr_label_noniid(train_dataset, args) 17 | if args.noniid_model == 'quantity_noniid': 18 | data_users_train = fmnist_extr_quantity_noniid(train_dataset, args) 19 | if args.noniid_model == 'iid': 20 | data_users_train = fmnist_extr_iid(train_dataset, args) 21 | return data_users_train 22 | 23 | 24 | def fmnist_extr_label_noniid(train_dataset, args): 25 | dict_users_train = {i: np.array([]) for i in range(args.num_users)} 26 | idxs = np.arange(60000) 27 | labels = np.array(train_dataset.targets) 28 | 29 | # probability 30 | prop_equal = 10 * (1-args.rate_unbalance)/(10-args.num_class) 31 | num_per_equal = int(args.total_samples * prop_equal / 10) 32 | assert ((1-args.rate_unbalance)/(10-args.num_class) <= args.rate_unbalance/args.num_class) 33 | 34 | # verb and sort labels 35 | idxs_labels = np.vstack((idxs, labels)) 36 | idxs_labels = idxs_labels[:, idxs_labels[1, :].argsort()] 37 | 38 | # shuffle 39 | for i in range(10): 40 | np.random.shuffle(idxs_labels[0][6000 * i:6000 * (i+1)]) 41 | idxs = idxs_labels[0, :] 42 | 43 | # equal part 44 | for i in range(args.num_users): 45 | for j in range(10): 46 | dict_users_train[i] = np.concatenate((dict_users_train[i], idxs[j * 6000 + i * num_per_equal: j * 6000 + (1 + i) * num_per_equal]), axis=0) 47 | 48 | # noniid init part 49 | residue_noniid = 60000 - num_per_equal * 10 * args.num_users 50 | num_samples = int((args.total_samples - num_per_equal*10) / args.num_class) 51 | num_shards_train, num_imgs_train = int(residue_noniid / num_samples), num_samples 52 | assert (args.num_class * args.num_users <= num_shards_train) 53 | assert (args.num_class <= 10) 54 | idx_shard = [i for i in range(num_shards_train)] 55 | 56 | # delete rank 57 | delete_rank = np.array([]) 58 | delete_res = residue_noniid - num_shards_train * num_samples 59 | surplus_list = split_integer(delete_res,10) 60 | for i in range(10): 61 | delete_rank = np.append(delete_rank, np.arange(i * 6000, args.num_users * num_per_equal + i * 6000, 1)) 62 | for i in range(len(surplus_list)): 63 | delete_rank = np.append(delete_rank, np.arange((i + 1) * 6000 - surplus_list[i], (i + 1) * 6000, 1)) 64 | delete_rank = delete_rank.astype(int) 65 | idxs_labels = np.delete(idxs_labels, delete_rank, axis=1) 66 | idxs = idxs_labels[0, :] 67 | 68 | # divide and assign 69 | for i in range(args.num_users): 70 | rand_set = set(np.random.choice(idx_shard, args.num_class, replace=False)) 71 | idx_shard = list(set(idx_shard) - rand_set) 72 | for rand in rand_set: 73 | dict_users_train[i] = np.concatenate((dict_users_train[i], idxs[rand*num_imgs_train:(rand+1)*num_imgs_train]), axis=0) 74 | 75 | # change type of dict_user_train 76 | for i in range(len(dict_users_train)): 77 | dict_users_train[i] = dict_users_train[i].astype(int) 78 | 79 | 
test_label(dict_users_train, train_dataset, args) 80 | return dict_users_train 81 | 82 | 83 | def fmnist_extr_quantity_noniid(train_dataset, args): 84 | dict_users_train = {i: np.array([]) for i in range(args.num_users)} 85 | idxs = np.arange(60000) 86 | labels = np.array(train_dataset.targets) 87 | 88 | # verb and sort labels 89 | idxs_labels = np.vstack((idxs, labels)) 90 | idxs_labels = idxs_labels[:, idxs_labels[1, :].argsort()] 91 | 92 | # shuffle 93 | for i in range(10): 94 | np.random.shuffle(idxs_labels[0][6000 * i:6000 * (i+1)]) 95 | idxs = idxs_labels[0, :] 96 | 97 | # equal difference compute 98 | equal_diff = 2 * args.total_samples * args.num_users / (args.num_users * (args.num_users + 3)) 99 | num_list, num_per_user_list = [], [] 100 | assert (args.num_users > 1) 101 | for i in range(args.num_users - 1): 102 | num_list.append(round(equal_diff*(i+2),-1)) 103 | num_list.append(args.num_users * args.total_samples - sum(num_list)) 104 | for i in range(len(num_list)): 105 | num_per_user_list.append(int(num_list[i]/10)) 106 | 107 | # equal part 108 | for i in range(args.num_users): 109 | for j in range(10): 110 | dict_users_train[i] = np.concatenate((dict_users_train[i], idxs[j * 6000 + i * num_per_user_list[i]: j * 6000 + (1 + i) * num_per_user_list[i]]), axis=0) 111 | 112 | # change type of dict_user_train 113 | for i in range(len(dict_users_train)): 114 | dict_users_train[i] = dict_users_train[i].astype(int) 115 | 116 | test_label(dict_users_train, train_dataset, args) 117 | return dict_users_train 118 | 119 | 120 | def fmnist_extr_iid(train_dataset, args): 121 | dict_users_train = {i: np.array([]) for i in range(args.num_users)} 122 | idxs = np.arange(60000) 123 | labels = np.array(train_dataset.targets) 124 | 125 | # verb and sort labels 126 | idxs_labels = np.vstack((idxs, labels)) 127 | idxs_labels = idxs_labels[:, idxs_labels[1, :].argsort()] 128 | 129 | # shuffle 130 | for i in range(10): 131 | np.random.shuffle(idxs_labels[0][6000 * i:6000 * (i+1)]) 132 | idxs = idxs_labels[0, :] 133 | 134 | num_per_user = int(args.total_samples/10) 135 | 136 | for i in range(args.num_users): 137 | for j in range(10): 138 | dict_users_train[i] = np.concatenate((dict_users_train[i], idxs[j * 6000 + i * num_per_user: j * 6000 + (1 + i) * num_per_user]), axis=0) 139 | 140 | # change type of dict_user_train 141 | for i in range(len(dict_users_train)): 142 | dict_users_train[i] = dict_users_train[i].astype(int) 143 | 144 | test_label(dict_users_train, train_dataset, args) 145 | return dict_users_train 146 | -------------------------------------------------------------------------------- /noniid/data_noniid/Cifar_noniid.py: -------------------------------------------------------------------------------- 1 | import torchvision 2 | import numpy as np 3 | import sys 4 | sys.path.append("..") 5 | import init.init_resnet18 as init_res 6 | from noniid.file_flow import split_integer, test_label 7 | 8 | 9 | def get_dataset_cifar10_noniid(args): 10 | transform_train_cifar, transform_test_cifar = init_res.normalize_data_cifar() 11 | 12 | train_dataset = torchvision.datasets.CIFAR10('../data', train=True, download=False,\ 13 | transform=transform_train_cifar) 14 | 15 | if args.noniid_model == 'label_noniid': 16 | data_users_train = cifar_extr_label_noniid(train_dataset, args) 17 | if args.noniid_model == 'quantity_noniid': 18 | data_users_train = cifar_extr_quantity_noniid(train_dataset, args) 19 | if args.noniid_model == 'iid': 20 | data_users_train = cifar_extr_iid(train_dataset, args) 21 | return 
data_users_train 22 | 23 | 24 | def cifar_extr_label_noniid(train_dataset, args): 25 | dict_users_train = {i: np.array([]) for i in range(args.num_users)} 26 | idxs = np.arange(50000) 27 | labels = np.array(train_dataset.targets) 28 | 29 | # probability 30 | prop_equal = 10 * (1-args.rate_unbalance)/(10-args.num_class) 31 | num_per_equal = int(args.total_samples * prop_equal / 10) 32 | assert ((1-args.rate_unbalance)/(10-args.num_class) <= args.rate_unbalance/args.num_class) 33 | 34 | # verb and sort labels 35 | idxs_labels = np.vstack((idxs, labels)) 36 | idxs_labels = idxs_labels[:, idxs_labels[1, :].argsort()] 37 | 38 | # shuffle 39 | for i in range(10): 40 | np.random.shuffle(idxs_labels[0][5000 * i:5000 * (i+1)]) 41 | idxs = idxs_labels[0, :] 42 | 43 | # equal part 44 | for i in range(args.num_users): 45 | for j in range(10): 46 | dict_users_train[i] = np.concatenate((dict_users_train[i], idxs[j * 5000 + i * num_per_equal: j * 5000 + (1 + i) * num_per_equal]), axis=0) 47 | 48 | # noniid init part 49 | residue_noniid = 50000 - num_per_equal * 10 * args.num_users 50 | num_samples = int((args.total_samples - num_per_equal*10) / args.num_class) 51 | num_shards_train, num_imgs_train = int(residue_noniid / num_samples), num_samples 52 | assert (args.num_class * args.num_users <= num_shards_train) 53 | assert (args.num_class <= 10) 54 | idx_shard = [i for i in range(num_shards_train)] 55 | 56 | # delete rank 57 | delete_rank = np.array([]) 58 | delete_res = residue_noniid - num_shards_train * num_samples 59 | surplus_list = split_integer(delete_res,10) 60 | for i in range(10): 61 | delete_rank = np.append(delete_rank, np.arange(i * 5000, args.num_users * num_per_equal + i * 5000, 1)) 62 | for i in range(len(surplus_list)): 63 | delete_rank = np.append(delete_rank, np.arange((i + 1) * 5000 - surplus_list[i], (i + 1) * 5000, 1)) 64 | delete_rank = delete_rank.astype(int) 65 | idxs_labels = np.delete(idxs_labels, delete_rank, axis=1) 66 | idxs = idxs_labels[0, :] 67 | 68 | # divide and assign 69 | for i in range(args.num_users): 70 | rand_set = set(np.random.choice(idx_shard, args.num_class, replace=False)) 71 | idx_shard = list(set(idx_shard) - rand_set) 72 | for rand in rand_set: 73 | dict_users_train[i] = np.concatenate((dict_users_train[i], idxs[rand*num_imgs_train:(rand+1)*num_imgs_train]), axis=0) 74 | 75 | # change type of dict_user_train 76 | for i in range(len(dict_users_train)): 77 | dict_users_train[i] = dict_users_train[i].astype(int) 78 | 79 | test_label(dict_users_train, train_dataset, args) 80 | return dict_users_train 81 | 82 | 83 | def cifar_extr_quantity_noniid(train_dataset, args): 84 | dict_users_train = {i: np.array([]) for i in range(args.num_users)} 85 | idxs = np.arange(50000) 86 | labels = np.array(train_dataset.targets) 87 | 88 | # verb and sort labels 89 | idxs_labels = np.vstack((idxs, labels)) 90 | idxs_labels = idxs_labels[:, idxs_labels[1, :].argsort()] 91 | 92 | # shuffle 93 | for i in range(10): 94 | np.random.shuffle(idxs_labels[0][5000 * i:5000 * (i+1)]) 95 | idxs = idxs_labels[0, :] 96 | 97 | # equal difference compute 98 | equal_diff = 2 * args.total_samples * args.num_users / (args.num_users * (args.num_users + 3)) 99 | num_list, num_per_user_list = [], [] 100 | assert (args.num_users > 1) 101 | for i in range(args.num_users - 1): 102 | num_list.append(round(equal_diff*(i+2),-1)) 103 | num_list.append(args.num_users * args.total_samples - sum(num_list)) 104 | for i in range(len(num_list)): 105 | num_per_user_list.append(int(num_list[i]/10)) 106 | 107 | # 
equal part
108 |     for i in range(args.num_users):
109 |         for j in range(10):
110 |             dict_users_train[i] = np.concatenate((dict_users_train[i], idxs[j * 5000 + i * num_per_user_list[i]: j * 5000 + (1 + i) * num_per_user_list[i]]), axis=0)
111 | 
112 |     # change type of dict_users_train
113 |     for i in range(len(dict_users_train)):
114 |         dict_users_train[i] = dict_users_train[i].astype(int)
115 | 
116 |     test_label(dict_users_train, train_dataset, args)
117 |     return dict_users_train
118 | 
119 | 
120 | def cifar_extr_iid(train_dataset, args):
121 |     dict_users_train = {i: np.array([]) for i in range(args.num_users)}
122 |     idxs = np.arange(50000)
123 |     labels = np.array(train_dataset.targets)
124 | 
125 |     # vstack and sort labels
126 |     idxs_labels = np.vstack((idxs, labels))
127 |     idxs_labels = idxs_labels[:, idxs_labels[1, :].argsort()]
128 | 
129 |     # shuffle
130 |     for i in range(10):
131 |         np.random.shuffle(idxs_labels[0][5000 * i:5000 * (i+1)])
132 |     idxs = idxs_labels[0, :]
133 | 
134 |     num_per_user = int(args.total_samples/10)
135 | 
136 |     for i in range(args.num_users):
137 |         for j in range(10):
138 |             dict_users_train[i] = np.concatenate((dict_users_train[i], idxs[j * 5000 + i * num_per_user: j * 5000 + (1 + i) * num_per_user]), axis=0)
139 | 
140 |     # change type of dict_users_train
141 |     for i in range(len(dict_users_train)):
142 |         dict_users_train[i] = dict_users_train[i].astype(int)
143 | 
144 |     test_label(dict_users_train, train_dataset, args)
145 |     return dict_users_train
146 | 
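The label_noniid bookkeeping above (and its MNIST/FMNIST twins) is easier to follow with concrete numbers. A quick sanity check, using the parameters implied by the ``users3_data500_unbalance0.6`` split file names; the variable names mirror the code above:

```python
# Worked example of cifar_extr_label_noniid's arithmetic (values assumed from
# the *_users3_data500_unbalance0.6.csv file names: nu=3, ts=500, ru=0.6, nc=2).
num_users, total_samples, rate_unbalance, num_class = 3, 500, 0.6, 2

prop_equal = 10 * (1 - rate_unbalance) / (10 - num_class)             # 0.5
num_per_equal = int(total_samples * prop_equal / 10)                  # 25 per class
num_samples = int((total_samples - num_per_equal * 10) / num_class)   # 125 per shard

# Every client first gets 25 samples of each of the 10 classes (250 total),
# then num_class=2 random shards of 125 samples each, drawn from single
# classes, so its label histogram looks like [25 x 8 classes, 150 x 2 classes].
assert num_per_equal * 10 + num_class * num_samples == total_samples  # 250 + 250
```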
-------------------------------------------------------------------------------- /README.md: --------------------------------------------------------------------------------
1 | # Federated_Learning 💻
2 | This project includes common test models for federated learning, such as CNN, ResNet18 and LSTM, selected through command-line arguments. It can also handle common non-IID data. We can change the parameters to change the model and dataset. Here is the related introduction.
3 | 
4 | ---
5 | 
6 | 
7 | ## Network Models 🔑
8 | 
9 | This project contains network models commonly used in FL: ResNet18, CNN and LSTM.
10 | - **Resnet18**
11 | 
12 | A ***Residual Neural Network*** (ResNet) is an artificial neural network (ANN). Typical ResNet models are implemented with double- or triple-layer skips that contain nonlinearities (ReLU) and batch normalization in between. An additional weight matrix may be used to learn the skip weights; such models are known as HighwayNets.
13 | 
14 | > Working with datasets: Cifar10, MNIST, FMNIST.
15 | 
16 | - **CNN**
17 | 
18 | In deep learning, a **Convolutional Neural Network** (CNN) is a class of artificial neural network most commonly applied to analyze visual imagery. CNNs are a specialized type of neural network that uses convolution in place of general matrix multiplication in at least one of its layers.
19 | 
20 | > Working with datasets: Cifar10, MNIST, FMNIST.
21 | 
22 | - **LSTM**
23 | 
24 | **Long short-term memory** (LSTM) is an artificial recurrent neural network (RNN) architecture used in the field of deep learning. LSTM networks are well-suited to classifying, processing and making predictions based on time series data.
25 | 
26 | > Working with datasets: Shakespeare.
27 | 
28 | ## Datasets 📝
29 | 
30 | - Cifar10: Consists of 60000 32x32 color images in 10 classes, with 6000 images per class. There are 50000 training images and 10000 test images.
31 | - MNIST: Consists of 70000 28x28 gray images in 10 classes. There are 60000 training images and 10000 test images.
32 | - FashionMNIST: Consists of 70000 28x28 gray images in 10 classes, with 7000 images per class. There are 60000 training images and 10000 test images.
33 | - Shakespeare: A single txt file whose text is split across 1146 local devices.
34 | 
35 | All datasets will be saved at ``/data/``.
36 | 
37 | ***The models and the datasets must match!!! Otherwise an error will occur.*** ❗️
38 | 
39 | ## Environment 🐍
40 | 
41 | - Python >= 3.6.0
42 | - Pytorch >= 1.7.0
43 | - Torchvision >= 0.8.0
44 | 
45 | ## Train ⏳
46 | 
47 | ### Parameter Introduction
48 | 
49 | #### 1. setting.py ⚙️
50 | 
51 | This file is used to assign the datasets. The address is ``/noniid/setting.py``. Run ``python setting.py`` before formal training; the assignment result will be saved at ``/noniid/temp/``. Here are the parameters.
52 | 
53 | - ``--data``: Select the dataset which will be assigned, including ``Cifar``, ``MNIST``, ``FMNIST``.
54 | - ``--noniid_model``: There are 3 modes of noniid setting, including ``iid``, ``label_noniid`` and ``quantity_noniid``. ``iid`` means each client has the same distribution of data. ``label_noniid`` means each client has data with different labels. ``quantity_noniid`` means each client has a different amount of data.
55 | - ``--num_users``: Select the number of clients.
56 | - ``--total_samples``: In the ``iid`` and ``label_noniid`` modes, this is the number of samples per client. In the ``quantity_noniid`` mode, it is the mean number of samples across clients.
57 | - ``--rate_unbalance``: In ``label_noniid``, this is the offset of the data labels: a number less than 1 representing the proportion of noniid data.
58 | - ``--num_class``: In ``label_noniid``, this is the number of labels per client.
59 | 
60 | If you don't have the datasets yet, set ``download=True`` in ``/noniid/data_noniid/Cifar_noniid.py/get_dataset_cifar10_noniid``, ``/noniid/data_noniid/MNIST_noniid.py/get_dataset_mnist_noniid`` and ``/noniid/data_noniid/FMNIST_noniid.py/get_dataset_fmnist_noniid``.
61 | 
62 | #### 2. main.py
63 | 
64 | This file is used to train on the datasets. The information from training will be saved at ``/log/`` and ``/acc/``. Here are the parameters.
65 | 
66 | - ``--model``: Select the network model, including ``resnet18``, ``cnn`` and ``lstm``.
67 | - ``--data``: Select the dataset which will be trained, including ``Cifar``, ``MNIST``, ``FMNIST``.
68 | - ``--batchsize``: The batch size of each epoch, preferably a multiple of 128.
69 | - ``--EPOCH``: The number of epochs.
70 | - ``--noniid_model``: There are 3 modes of noniid setting, including ``iid``, ``label_noniid`` and ``quantity_noniid``.
71 | - ``--num_users``: Select the number of clients.
72 | - ``--total_samples``: In the ``iid`` and ``label_noniid`` modes, this is the number of samples per client. In the ``quantity_noniid`` mode, it is the mean number of samples across clients.
73 | - ``--rate_unbalance``: In ``label_noniid``, this is the offset of the data labels: a number less than 1 representing the proportion of noniid data.
74 | - ``--num_class``: In ``label_noniid``, this is the number of labels per client.
75 | - ``--idx_user``: The index of the client we want to train.
76 | 
77 | Some parameters here must match those used in ``setting.py`` so that the run picks up the data just processed.
78 | 
79 | ---
80 | 
81 | ### Examples 🙋
82 | 
83 | 🔸 **(a)** Slice dataset ``Cifar`` to make the ``iid`` data with ``3`` clients, ``1000`` data per client.
84 | 
85 | ```python /noniid/setting.py -d Cifar -nm iid -nu 3 -ts 1000```
86 | 
87 | 🔸 **(b)** Slice dataset ``MNIST`` to make the ``label_noniid`` data with ``4`` clients, ``1000`` data per client, ``0.6`` rate of unbalance and ``2`` related labels.
88 | 
89 | ```python /noniid/setting.py -d MNIST -nm label_noniid -nu 4 -ts 1000 -ru 0.6 -nc 2```
90 | 
91 | In this way we get per-class sample counts like ``[50 50 300 50 300 50 50 50 50 50]``.
92 | 
93 | 🔸 **(c)** Slice dataset ``FMNIST`` to make the ``quantity_noniid`` data with ``3`` clients, on average ``500`` data per client.
94 | 
95 | ```python /noniid/setting.py -d FMNIST -nm quantity_noniid -nu 3 -ts 500```
96 | 
97 | In this way we get per-class counts of ``0:[33 33 33 33 33 33 33 33 33 33]`` ``1:[50 50 50 50 50 50 50 50 50 50]`` ``2:[67 67 67 67 67 67 67 67 67 67]``: the client sizes form an arithmetic progression with common difference ``equal_diff = 2*ts*nu/(nu*(nu+3))`` (about 167 here), rounded to 330, 500 and 670 samples in total, i.e. 33, 50 and 67 per class.
98 | 
99 | 🔸 **(d)** Train client ``1`` from (c) with the ``cnn`` model, ``128`` batch size, ``1000`` epochs.
100 | 
101 | ```python main.py -m cnn -d FMNIST -bs 128 -e 1000 -nm quantity_noniid -nu 3 -ts 500 -iu 1```
102 | 
103 | ---
104 | 
105 | ## Federated Learning
106 | 
107 | ### FL
108 | 
109 | If you want to run FL, set the corresponding arguments in ``main.py`` (``--FL_model``, ``--addr``, ``--port``, ``--rank``, ``--world_size``). You can run this code on different machines; a launch sketch follows below.
110 | 
111 | ---
112 | 
113 | _**You can ask me about the results via email. The project is still being updated...**_
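One plausible launch sequence for the FL section above. It assumes, from ``main.py``'s arguments and its "1 server and k workers" comment, that rank ``0`` acts as the server and that all processes share the server's address, port and ``world_size``; the exact roles are defined in ``FL_models/FedAvg.py`` and may differ:

```# hypothetical 3-process FedAvg launch, one command per machine```
```python main.py -fm FL -m cnn -d MNIST -r 0 -ws 3 -a 192.168.0.105 -p 29500```
```python main.py -fm FL -m cnn -d MNIST -r 1 -ws 3 -a 192.168.0.105 -p 29500```
```python main.py -fm FL -m cnn -d MNIST -r 2 -ws 3 -a 192.168.0.105 -p 29500```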
-------------------------------------------------------------------------------- /noniid/data_noniid/MNIST_noniid.py: --------------------------------------------------------------------------------
1 | import torchvision
2 | import numpy as np
3 | import sys
4 | sys.path.append("..")
5 | import init.init_resnet18 as init_res
6 | from noniid.file_flow import split_integer, test_label
7 | 
8 | 
9 | def get_dataset_mnist_noniid(args):
10 |     transform_train_mnist = init_res.normalize_data_mnist()
11 | 
12 |     train_dataset = torchvision.datasets.MNIST('../data', train=True, download=False,\
13 |                                                transform=transform_train_mnist)
14 | 
15 |     if args.noniid_model == 'label_noniid':
16 |         data_users_train = mnist_extr_label_noniid(train_dataset, args)
17 |     if args.noniid_model == 'quantity_noniid':
18 |         data_users_train = mnist_extr_quantity_noniid(train_dataset, args)
19 |     if args.noniid_model == 'iid':
20 |         data_users_train = mnist_extr_iid(train_dataset, args)
21 |     return data_users_train
22 | 
23 | 
24 | def sum_list(lst, n):
25 |     total = 0
26 |     for i in range(n):
27 |         total += lst[i]
28 |     return total
29 | 
30 | 
31 | def regular_mnist(idxs_labels):   # rebalance MNIST to exactly 6000 samples per class
32 |     num_label = np.array([5923, 6742, 5958, 6131, 5842, 5421, 5918, 6265, 5851, 5949])   # raw per-class counts
33 | 
34 |     # shuffle
35 |     for i in range(10):
36 |         np.random.shuffle(idxs_labels[0][sum_list(num_label, i):sum_list(num_label, i+1)])
37 | 
38 |     # cut surplus
39 |     surplus_arr = np.hstack((idxs_labels[:,11923:12665], idxs_labels[:,24623:24754], idxs_labels[:,47935:48200]))
40 | 
41 |     # piece together
42 |     return_arr = np.array([[], []], dtype='int32')
43 |     counter = 0
44 |     for i in range(10):
45 |         if num_label[i] < 6000:
46 |             return_arr = np.hstack((return_arr, idxs_labels[:, sum_list(num_label, i):sum_list(num_label, i+1)]))
47 |             return_arr = np.hstack((return_arr, surplus_arr[:, counter:counter + 6000 - num_label[i]]))
48 |             counter += 6000 - num_label[i]
49 |         if num_label[i] > 6000:
50 |             return_arr = np.hstack((return_arr, idxs_labels[:, sum_list(num_label, i):6000 + sum_list(num_label, i)]))
51 |     print(return_arr.shape)
52 | 
53 |     return return_arr
54 | 
55 | 
56 | def mnist_extr_label_noniid(train_dataset, args):
57 |     dict_users_train = {i: np.array([]) for i in range(args.num_users)}
58 |     idxs = np.arange(60000)
59 |     labels = np.array(train_dataset.targets)
60 | 
61 |     # probability
62 |     prop_equal = 10 * (1-args.rate_unbalance)/(10-args.num_class)
63 |     num_per_equal = int(args.total_samples * prop_equal / 10)
64 |     assert ((1-args.rate_unbalance)/(10-args.num_class) <= args.rate_unbalance/args.num_class)
65 | 
66 |     # vstack and sort labels
67 |     idxs_labels = np.vstack((idxs, labels))
68 |     idxs_labels = idxs_labels[:, idxs_labels[1, :].argsort()]
69 | 
70 |     # regular data
71 |     idxs_labels = regular_mnist(idxs_labels)
72 |     idxs = idxs_labels[0, :]
73 | 
74 |     # equal part
75 |     for i in range(args.num_users):
76 |         for j in range(10):
77 |             dict_users_train[i] = np.concatenate((dict_users_train[i], idxs[j * 6000 + i * num_per_equal: j * 6000 + (1 + i) * num_per_equal]), axis=0)
78 | 
79 |     # noniid init parts to shard
80 |     residue_noniid = 60000 - num_per_equal * 10 * args.num_users
81 |     num_samples = int((args.total_samples - num_per_equal*10) / args.num_class)
82 |     num_shards_train, num_imgs_train = int(residue_noniid / num_samples), num_samples
83 |     assert (args.num_class * args.num_users <= num_shards_train)
84 |     assert (args.num_class <= 10)
85 |     idx_shard = [i for i in range(num_shards_train)]
86 | 
87 |     # delete rank
88 |     delete_rank = np.array([])
89 |     delete_res = residue_noniid - num_shards_train * num_samples
90 |     surplus_list = split_integer(delete_res,10)
91 |     for i in range(10):
92 |         delete_rank = np.append(delete_rank, np.arange(i * 6000, args.num_users * num_per_equal + i * 6000, 1))
93 |     for i in range(len(surplus_list)):
94 |         delete_rank = np.append(delete_rank, np.arange((i + 1) * 6000 - surplus_list[i], (i + 1) * 6000, 1))
95 |     delete_rank = delete_rank.astype(int)
96 |     idxs_labels = np.delete(idxs_labels, delete_rank, axis=1)
97 |     idxs = idxs_labels[0, :]
98 | 
99 |     # divide and assign
100 |     for i in range(args.num_users):
101 |         rand_set = set(np.random.choice(idx_shard, args.num_class, replace=False))
102 |         idx_shard = list(set(idx_shard) - rand_set)
103 |         for rand in rand_set:
104 |             dict_users_train[i] = np.concatenate((dict_users_train[i], idxs[rand*num_imgs_train:(rand+1)*num_imgs_train]), axis=0)
105 | 
106 |     # change type of dict_users_train
107 |     for i in range(len(dict_users_train)):
108 |         dict_users_train[i] = dict_users_train[i].astype(int)
109 | 
110 |     test_label(dict_users_train, train_dataset, args)
111 |     return dict_users_train
112 | 
113 | 
114 | def mnist_extr_quantity_noniid(train_dataset, args):
115 |     dict_users_train = {i: np.array([]) for i in range(args.num_users)}
116 |     idxs = np.arange(60000)
117 |     labels = np.array(train_dataset.targets)
118 | 
119 |     # vstack and sort labels
120 |     idxs_labels = np.vstack((idxs, labels))
121 |     idxs_labels = idxs_labels[:, idxs_labels[1, :].argsort()]
122 | 
123 |     # regular data
124 |     idxs_labels = regular_mnist(idxs_labels)
125 |     idxs = idxs_labels[0, :]
126 | 
127 |     # equal difference compute
128 |     equal_diff = 2 * args.total_samples * args.num_users / (args.num_users * (args.num_users + 3))
129 |     num_list, num_per_user_list = [], []
130 |     assert (args.num_users > 1)
131 |     for i in range(args.num_users - 1):
132 |         num_list.append(round(equal_diff*(i+2),-1))
133 |     num_list.append(args.num_users * args.total_samples - sum(num_list))
134 |     for i in range(len(num_list)):
135 |         num_per_user_list.append(int(num_list[i]/10))
136 | 
137 |     # equal part
138 |     for i in range(args.num_users):
139 |         for j in range(10):
140 |             dict_users_train[i] = 
np.concatenate((dict_users_train[i], idxs[j * 6000 + i * num_per_user_list[i]: j * 6000 + (1 + i) * num_per_user_list[i]]), axis=0) 141 | 142 | # change type of dict_user_train 143 | for i in range(len(dict_users_train)): 144 | dict_users_train[i] = dict_users_train[i].astype(int) 145 | 146 | test_label(dict_users_train, train_dataset, args) 147 | return dict_users_train 148 | 149 | 150 | def mnist_extr_iid(train_dataset, args): 151 | dict_users_train = {i: np.array([]) for i in range(args.num_users)} 152 | idxs = np.arange(60000) 153 | labels = np.array(train_dataset.targets) 154 | 155 | # verb and sort labels 156 | idxs_labels = np.vstack((idxs, labels)) 157 | idxs_labels = idxs_labels[:, idxs_labels[1, :].argsort()] 158 | 159 | # regular data 160 | idxs_labels = regular_mnist(idxs_labels) 161 | idxs = idxs_labels[0, :] 162 | 163 | num_per_user = int(args.total_samples/10) 164 | 165 | for i in range(args.num_users): 166 | for j in range(10): 167 | dict_users_train[i] = np.concatenate((dict_users_train[i], idxs[j * 6000 + i * num_per_user: j * 6000 + (1 + i) * num_per_user]), axis=0) 168 | 169 | # change type of dict_user_train 170 | for i in range(len(dict_users_train)): 171 | dict_users_train[i] = dict_users_train[i].astype(int) 172 | 173 | test_label(dict_users_train, train_dataset, args) 174 | return dict_users_train 175 | -------------------------------------------------------------------------------- /noniid/temp/Cifar/Cifar_iid_users3_data500.csv: -------------------------------------------------------------------------------- 1 | ,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499 2 | 
0,199,16501,47106,36038,9379,43698,10332,30152,45445,22142,39966,40843,40548,31488,49410,9124,11535,44165,19804,24257,17396,27873,23804,44785,23776,28584,3318,18953,40928,42231,30604,39691,22608,16564,13841,47849,17247,29364,32508,26027,47882,37999,31249,21023,29378,29173,12056,5426,42852,30913,45322,4011,29794,10039,28615,12378,29905,38327,28981,24206,32601,21418,9048,28634,23345,46268,19668,40297,917,14165,25224,32679,49539,14978,9007,1571,45427,37320,15801,19457,12167,45099,10747,48003,49759,32492,20680,20327,9411,47774,23139,11361,25731,12264,79,5076,11133,42758,12620,1242,48344,48250,673,3259,22228,8037,23343,1409,28994,24993,36307,35597,43287,18184,42615,8081,46673,5111,41925,43057,57,1727,11197,17085,39233,23716,2007,30251,4183,41972,42168,39308,32330,35870,19918,24902,28124,13114,27325,12380,957,20102,3372,787,42107,9440,4656,37339,37353,21363,40226,46561,11477,29177,9710,20413,38360,1815,23859,21574,31633,14282,49600,28819,17195,28808,8760,29084,10192,1772,46977,29324,3447,12113,46880,5097,28472,9145,26556,32541,36518,29255,33345,25688,4108,43779,3087,46659,18462,40532,15383,5072,12007,7213,27831,26554,42351,32527,30653,24598,22274,44916,4998,21323,14036,46250,3560,5051,38964,27199,32095,31515,21570,39284,30474,15201,23276,18052,49622,8247,20585,44040,25423,21016,4715,3236,46476,43885,6410,30151,9506,7383,45397,4985,9845,15348,3802,33226,48018,8688,15487,868,25250,7559,2210,33369,14649,31735,4973,10877,31240,12119,15393,20860,48935,34635,38632,21325,45290,12908,25980,26548,25677,46911,6689,16653,2335,17916,44538,3767,37815,17495,44089,34444,8601,39660,46381,21318,13532,46532,16377,41210,37936,17971,6697,24303,2410,14707,2579,24838,34884,16494,48573,10509,13203,167,40857,83,26836,26969,39710,35817,15110,19138,22839,49818,33726,2152,2115,24649,25828,200,11987,31895,42968,12632,29994,3266,44862,22968,33787,11363,46587,48203,30800,11397,37332,33687,18722,8466,10649,40675,5134,40993,20036,31315,24660,40206,48142,556,38747,40782,23878,5424,48201,33536,32772,40202,20333,5474,35473,7193,46996,30455,44221,5976,34446,17737,37802,39985,13942,27634,7809,12229,20251,24613,46228,38474,35392,15194,22569,32187,43964,33804,18620,44326,21814,11648,45336,30484,25575,5357,31736,11520,1716,28182,42771,1125,6287,21136,38703,21995,1549,28366,20347,1173,43568,32298,7686,15344,48133,34137,48877,16159,16443,16738,43982,7693,39154,21862,35468,20812,47708,32543,44129,25093,2065,7287,30273,43622,16532,39393,36933,48213,32622,3324,45809,30686,30529,11010,40947,25691,3950,48821,36560,34135,48125,28929,21226,49127,6862,32475,36390,17116,42922,27243,30423,31188,32086,30448,36118,1171,43997,45261,42704,27755,3137,18850,4520,6706,43614,31010,26218,42047,11703,11085,8725,10080,31713,3312,30678,45388,40237,48151,306,37634,35330,32164,49904,31096,4169,45435,18982,28901,11918,14765,45048,36050,15752,47296,7704,4131,7838,49194,30729,44462,20827,19805,37536,43405 3 | 
1,39153,18215,46108,39447,44956,3352,43092,49371,32903,7757,940,614,24639,29295,44464,38898,15435,48979,40485,14666,21815,10729,35683,11514,6711,46419,41741,26764,10989,42892,37079,27026,23848,31658,3607,34745,41609,2613,29717,4854,27986,17998,17488,1871,3964,41238,16903,308,2142,38796,10895,15633,24263,29828,49658,8566,44423,13940,18754,48030,31830,45968,22943,39234,7605,9365,33237,32959,9054,41627,42916,16989,8692,4517,20856,36611,41248,25061,36791,30014,35789,4206,25135,43969,29807,10795,48749,38818,14844,578,41736,35453,10832,2887,16985,493,20391,26742,41908,5125,33474,31850,44730,16041,15654,46242,38129,10241,29989,15563,1267,36116,28878,46832,22104,44938,20149,5410,12180,5922,2317,34398,25757,7521,3440,40918,37745,34827,14314,1209,25650,46235,15310,36197,27764,16165,22006,4634,19453,14174,6392,33498,44495,36385,18239,17217,9933,43593,2870,38055,20569,31936,26202,35479,3870,23430,44191,36990,28891,22793,39313,26116,20714,32649,21207,36554,7367,13064,49742,8918,44313,7063,22638,19726,4720,16243,46371,7220,49825,8193,31825,34360,38994,41349,24634,10452,18373,8259,31050,8380,45380,27118,2337,38193,36601,37091,16164,30424,47518,37209,2461,33664,7851,4447,15415,13871,23252,24419,7787,13746,38755,39348,4561,21978,16587,37960,345,4969,43957,41691,10154,33947,11565,17510,27105,25635,15267,9782,16112,9761,27647,44737,43567,16264,5669,16908,45046,45688,39849,15990,11129,4666,46779,33401,18011,23853,24737,26145,22183,28766,1713,36629,7090,34972,13150,13405,36354,47225,28336,40938,42293,8412,28961,36227,31883,47485,14681,47911,27701,27925,35752,23207,49422,38241,23665,45583,19757,15894,46997,34216,10566,6352,17278,48704,42846,2984,45941,48474,15602,6577,3388,18764,38840,28825,29382,40331,28660,40538,23803,37732,45774,35904,46677,4417,29282,18895,41341,15664,17759,16508,46856,29292,22439,19322,25035,7959,46273,36749,12692,36880,25709,3710,23516,40568,4404,45823,6300,37045,18265,6712,9275,8697,36257,5711,46846,31448,47873,5167,31139,8651,23066,11466,14547,46377,132,37487,44014,48983,12641,920,17997,42951,6827,41473,1369,15579,15208,8604,19396,10562,6261,22894,46691,45722,33775,12980,20392,38425,26866,27172,35869,36654,45999,39421,16537,43322,4485,36619,26422,7618,11561,42114,33082,21692,14897,13014,27054,48717,38232,43844,14641,37595,46270,43894,43915,366,23794,35277,48579,6216,11977,5312,43278,31915,19995,41048,30294,32203,33446,36024,42236,2176,35210,49501,41247,44310,38384,20662,29317,11798,39089,8424,23031,25123,42807,23085,43899,12585,27515,30835,19706,19141,36799,25566,33410,48286,2285,49657,35036,39981,10171,37692,25744,5360,1183,475,25902,49681,29873,26806,8807,10769,16975,27311,38687,46579,2757,46965,2991,47511,5691,6937,41504,47435,26780,34427,31403,24253,18019,38267,15287,31039,31774,24960,28684,27341,38516,23287,3928,36875,37242,15687,25897,32921,49779,22038,36695,31566,5140,13588,20713,31453,13690,16681,16602,9025,17192,47069,1117,40257 4 | 
2,21305,9157,25712,28794,32617,7520,44037,46678,36100,12807,27323,4599,40867,37165,11400,21166,34101,25422,23449,36470,5481,1824,3940,39392,10455,7991,9375,35662,34454,30029,22408,11080,41222,39241,32326,32033,27048,3880,39170,29587,39574,35750,36464,3962,24844,8695,48643,15087,20434,11356,26465,15609,38183,19866,27905,45195,13713,848,7379,14995,43547,879,28344,37849,22675,35339,23832,24381,7983,9932,49286,39769,19616,44051,48269,35404,5420,28027,43786,33188,4334,34554,9744,24762,24654,275,34595,45910,32681,9894,15559,33970,15042,29226,43050,25689,6701,6191,20168,9459,33449,44482,36006,16570,11528,34614,38063,49972,47180,21849,11873,40148,12044,31636,31058,20660,15182,8166,29995,8534,13154,5384,2193,1745,39390,35596,7106,5550,18138,8065,37268,24623,25671,29354,42854,6011,13195,21623,32956,47194,38058,48472,46491,14435,29433,3193,16462,18837,44335,42891,4760,18662,22105,37513,28330,15641,36946,33238,16864,174,26453,23922,8356,25570,11874,40459,32509,12546,49832,6574,21420,10085,6071,44142,39150,25384,28242,5277,9970,6716,3248,5106,3772,9005,21744,27441,31676,14330,30754,21832,6783,12552,5733,32998,16120,23857,9841,29464,44252,6151,33266,26540,32094,42216,9364,30802,4763,36600,22820,35797,21951,29218,7269,9010,49023,25403,31060,21929,44519,6840,24529,28271,33862,22371,14484,12204,18365,14560,30551,26897,28904,43239,3533,45729,31902,18705,58,2245,35077,14618,28319,19194,1396,24548,7054,13664,10167,32120,37340,4962,30847,17871,22342,9649,26237,34704,1636,42317,11185,22174,31222,11077,38253,12655,11375,21035,25722,23055,43088,44497,48724,16236,25202,4160,32556,33567,32481,49294,34565,24226,43693,30642,18399,20672,3280,29276,22840,23774,6575,27257,10482,27974,4641,18634,13980,42580,45043,19319,42943,10060,39542,17476,9864,30259,42619,11318,7648,8734,40549,26379,38061,29391,40666,4605,16762,4062,21004,28992,40824,11144,33704,45300,17389,43388,40261,39516,36192,15489,3682,11041,33733,29898,6224,12339,9785,20504,27010,39527,19762,18334,12000,11602,45350,25396,32853,15904,26513,20135,45582,45836,37230,18255,11608,42118,29208,24988,1113,11527,13182,21735,22772,43094,11950,5336,40243,28853,28496,47572,45643,47844,24012,13706,15413,22601,40999,49801,15511,11840,44922,27637,15355,49760,43316,17640,15564,7219,48815,40977,38195,34372,47356,28152,10988,39717,37190,23744,21820,15332,12,45256,6717,4658,49567,4851,12600,21374,23321,3258,2820,25533,6796,10069,43001,111,19656,1617,3384,5910,28125,8831,41509,49905,20822,17683,28450,19413,48836,14348,19123,43344,2109,43350,18221,31250,46540,7630,860,46707,5949,11598,23565,39579,46091,47652,1177,20237,31675,48964,39970,49379,37387,10246,48461,2609,8501,32549,46189,9146,18947,49502,208,13399,6708,9026,14723,37427,19199,40144,48648,44949,27005,48260,34134,29609,49147,23410,48996,14847,34232,23242,36064,45160,30565,25308,16152,36321,2097,45378,29221,28560,22651,46072,36705,46298,16917,47148,32838,2047,41630,4649 5 | -------------------------------------------------------------------------------- /noniid/temp/Cifar/Cifar_label_noniid_users3_data500_unbalance0.6.csv: -------------------------------------------------------------------------------- 1 | 
,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499 2 | 
0,31927,44785,39138,23822,605,36621,30709,39301,8359,21633,10516,26723,19988,16219,9014,34381,25581,47106,20180,11218,24644,23297,18383,7859,3891,27405,31152,31945,7176,14092,7607,2390,49613,48312,893,26093,10790,49283,65,38031,25389,21309,22479,13763,30560,31522,35945,30014,18754,6733,43527,49573,19845,34609,25911,44177,49457,44140,13141,3254,42605,41990,18477,2033,34974,35267,12892,35643,15189,25446,16221,2041,8750,42345,36138,24073,36633,38649,19101,28529,8319,11337,342,13581,17821,43472,3190,41330,25378,7615,10101,39289,47840,45236,43779,6229,39885,44802,33043,9307,15399,26929,37731,26819,4715,17787,26653,15624,5514,30289,22259,14744,14397,37602,2905,28666,20373,2297,14474,1784,29698,9963,4382,5178,6560,39090,45678,42177,35801,27032,20322,18581,38210,35501,25655,9634,27892,49014,49638,1636,2167,24452,9677,21543,35879,17670,20733,16748,908,9748,38355,11687,5374,47354,11363,46550,31984,38156,8527,44077,12781,40142,16489,14873,43864,19562,29802,33309,12447,2777,16670,2159,5680,2100,9653,31873,17439,16477,1488,16877,15271,7186,37266,49738,5101,11390,15551,33902,29740,16000,31822,3553,9517,36342,41973,43587,14582,35922,28670,44610,48647,36375,6848,35210,16086,31313,42683,28682,13929,20728,30294,8358,7361,17519,47389,161,25013,2577,17533,15237,9960,8636,13684,10336,47947,17982,34628,44881,11422,30351,39564,26790,37314,45241,26691,19953,44056,23444,42767,29002,21129,38159,45167,32588,10698,19474,23429,33937,48345,23229,11724,32917,21400,1938,35759,7290,25627,4392,36474,39187,15858,34465,11531,4720,25664,36077,6556,49178,21832,8609,39461,38761,42029,19902,31404,24958,8193,17018,20390,17828,18487,9248,33709,48561,22025,3134,47812,30571,6385,15217,42783,47942,17814,34858,42670,29638,16635,6607,47933,1100,39398,25676,47553,25103,19820,24652,13463,9073,23136,17835,30361,43262,42930,43767,8880,10488,38984,19447,4439,2432,5209,27064,42590,31016,19566,31141,8255,3762,17641,8056,37417,9304,26862,7427,34294,32392,27779,203,2254,5407,17300,19780,17578,6758,44322,7665,44852,43233,20522,17337,7566,494,548,18780,12342,23122,41461,5354,46855,24666,17036,26900,24961,882,21287,39374,14282,3105,18157,5838,44047,42657,3785,34184,25384,36344,13478,1777,45203,27264,33602,35363,8395,26442,14017,39687,49730,2393,22974,46242,17117,16439,25984,4771,48531,24019,19981,16693,2484,13901,9463,26119,26181,16080,34218,23388,4608,8776,37754,16279,24496,29345,44797,43665,1768,28158,43658,1533,30940,38830,8219,34600,5184,1084,14029,46326,796,16319,14266,35870,46522,14280,32526,30493,32467,38731,36398,2971,22953,33741,47411,14078,19158,48790,6265,40327,19729,5871,1745,29448,8798,37164,31926,46766,4090,11825,14620,33519,6774,35168,26768,19551,9558,16694,37932,43312,7552,8043,43287,37336,27163,17347,1415,37315,31475,23950,24789,46293,34482,43822,9921,2202,39882,33134,9716,36122,46686,35226,35161,25427,39935,26593,11813,41721,12362,3259,10263,9577,26855,41 3 | 
1,23781,5050,42869,36180,38043,7360,27323,39780,12749,15542,20379,15571,24104,38681,35326,43292,38847,1540,32107,5549,13467,34766,29497,2156,13613,22900,44185,44777,18331,38183,29953,45283,3899,30821,29188,24486,29986,44309,20736,28981,38841,17580,21234,160,21706,14851,38691,49998,6241,29616,2007,7962,49536,42564,22373,46016,11288,27368,28301,38476,4604,45119,9446,11106,25398,41385,40911,28802,6799,31550,30690,12212,45223,19916,17598,41705,9927,4402,37161,24017,46989,41715,18769,40400,26183,31022,47219,12032,49574,35913,3679,16904,5725,28839,33955,35040,15682,38825,31499,6309,14704,705,17550,2078,8491,4280,9260,34464,10059,20956,13038,45491,20253,3377,10299,8987,20890,11279,23078,41023,38496,41648,30341,28971,43502,43852,40269,46143,25634,42405,28946,11083,20059,13100,31021,17934,32137,37860,1875,14034,6248,42533,2482,2500,3995,33161,39591,48135,19610,11753,39791,39295,15991,23917,47934,11310,22039,26190,48352,6824,10052,2252,34762,3203,25469,15517,18494,17310,46600,15498,35117,33548,48665,36908,23539,31265,42612,46353,19791,47039,39443,18711,5995,10435,21742,45054,44076,32241,37935,45171,2049,32000,45808,42114,19346,30053,8946,7836,46620,23356,46985,5949,12861,32618,30299,17653,15328,42280,33165,42162,6055,38808,32183,1236,21058,23871,45017,45325,36670,9324,20668,602,46736,35491,43674,664,29221,42264,25590,35030,14847,29956,33208,7764,12159,1622,11552,28907,36413,2206,22033,33966,25486,24173,19086,41776,46426,8725,17229,8327,33962,43399,11560,37790,11322,48301,18587,27671,15362,4000,2579,48724,14700,30819,26185,46065,16150,7755,31355,7846,462,47333,11928,33567,24663,2877,29204,42919,20084,15736,45903,46867,5090,37706,38840,31292,31185,10995,23619,39783,37364,17392,4451,22416,20066,4330,19540,39779,47029,28902,12005,46440,22911,14015,3746,20483,46246,26705,25432,3152,32764,6058,36002,14246,13689,14522,28218,30196,7017,45627,32431,19483,2901,47463,33609,13610,12205,32559,30742,20416,44164,36669,30042,2532,11034,18399,11922,30689,1199,13251,21449,5411,31994,49927,21554,8671,16969,47173,30372,22757,4004,28889,27690,11790,27016,14987,27125,17795,1204,20122,48730,18758,22893,47783,42537,17332,10555,24037,40697,11351,48797,31555,34803,8223,49826,27324,43775,626,5828,48013,36693,37181,41772,5037,20242,1329,38342,40482,34758,22907,18009,16637,16903,24094,13578,1260,9718,34567,22904,27855,39396,13902,45413,31048,15494,42201,40432,37065,41258,48136,18864,16733,9657,32479,24713,19814,36322,14654,38190,14136,26421,49761,1481,3375,25748,38154,19945,21439,11475,7462,1039,376,31970,47754,3607,20574,43996,45358,45387,9966,20039,10487,2053,16137,8190,42166,40621,29917,5673,18105,25538,35996,25243,11838,17174,11908,12991,9611,5969,20852,31165,40740,44122,47102,37962,34231,35446,24367,15548,5988,21310,13751,23163,12461,48958,19662,8258,21087,9193,38023,35934,28681,25419,1988,29,12344,5266,41806,36068,4868,25644,48197,40644,28126,4673,32002,7093,32874,27813,5972 4 | 
2,13710,3918,13229,29336,34040,22200,14413,35354,32662,2478,32220,13084,36049,9057,16245,21674,47763,28017,27961,12936,44074,39076,19216,35256,36311,43359,35567,18048,16368,19448,33974,26614,22522,126,49491,24595,22983,61,9317,22265,5666,578,482,19619,11532,176,10747,37320,9884,33452,25593,28872,5096,5992,12694,12465,38398,48025,8377,35486,5215,17070,28114,34305,11629,29503,1409,39508,36305,37811,2470,17657,7184,29678,10959,27525,7924,28437,20625,10752,21864,12528,20339,11639,36476,12517,7355,27013,13590,15265,19068,23986,36431,1703,25535,48132,26904,2082,45931,22826,14105,10834,4328,45913,20060,22443,25672,4063,49990,17800,41688,33209,9041,6021,42319,32784,11118,28797,17420,856,25335,28823,15342,12645,43386,15939,30482,3896,3580,32530,38629,49920,450,13721,49271,47757,29127,39445,27528,6333,7157,3576,32863,42691,5503,44734,569,5156,47659,42384,8703,7403,6572,43986,6273,6548,22926,6829,25967,1917,16621,27095,22277,47707,39858,17554,11412,32993,16663,48463,7958,44478,2134,27376,12512,15647,23727,19394,15997,32073,39799,38803,43884,44130,12227,48060,30562,31224,8076,21107,30319,49246,48857,21613,17942,32440,47263,17157,38212,27054,6740,32200,30807,29437,27854,24451,42884,14767,29475,31372,13233,44458,25721,12400,34441,13041,12318,44414,16090,3874,46128,19131,6474,17390,13874,31291,31012,22805,13867,19505,37762,30697,26530,30775,8788,40622,28046,29721,16592,17470,45418,36143,30546,46166,8758,13298,13078,45388,8644,36115,40238,49037,26420,14191,13977,25116,44453,15325,6283,10730,23131,27998,20618,46726,10259,43706,24645,34273,19008,49141,45722,22078,9581,32487,15391,33559,14757,49335,23852,39400,15359,37892,45999,7895,10916,12397,37744,23330,33374,49396,25594,22454,28296,2834,19200,28916,19235,29614,18152,5991,47864,3946,41195,48357,24327,5617,5652,16811,6335,39251,2715,19162,22291,27763,11053,8751,15072,25080,42812,32561,2449,5024,22892,41754,33805,14977,27672,34113,24784,4901,32269,1840,34044,23490,17202,47856,44753,18401,38265,29644,22179,41319,14261,5767,42355,28496,5842,42061,38938,16665,20417,15547,33755,386,5579,41846,44559,5328,34367,32343,36535,30255,5160,18010,17129,30883,7618,41332,1489,7922,7684,31764,48803,10422,48642,2650,9732,26387,12031,34031,26685,25729,30496,4064,17952,12596,49145,29572,10647,37759,48607,47297,49829,47165,27134,18029,9356,12374,33192,32709,5489,48448,17596,18820,18610,41088,47552,33213,20717,14749,19308,49305,48400,32828,37887,32606,40128,26461,30920,44431,32435,42232,26725,13635,12491,15651,10012,12004,33541,32868,35094,26502,28252,48184,33141,39900,20921,41320,41303,15060,1536,1802,41830,556,48337,17204,16390,95,26413,7036,48526,6517,1126,18826,38864,6932,45793,14999,5694,7254,31580,34288,11675,15071,6025,7232,43069,36613,23036,25678,31780,31598,37134,48802,28322,40202,15612,13452,39969,12799,41326,41004,40946,33366,2992,42923,1596,31279,11098,39343,23375,49929,48022,27595,18129,6095,41331,4605,4452,42610 5 | -------------------------------------------------------------------------------- /noniid/temp/MNIST/MNIST_iid_users3_data500.csv: -------------------------------------------------------------------------------- 1 | 
,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499 2 | 
0,49596,14179,59012,17535,32690,40162,57020,7427,39570,16485,28771,28814,40266,42488,2147,43685,30787,38993,56781,20681,37377,22706,40563,24809,56158,3434,54808,10045,43778,26843,4284,5344,57407,20161,10861,39735,45083,31943,47722,37183,17539,27280,6958,4181,53937,49280,57650,43662,34730,55967,394,9960,1354,59044,33942,23160,8112,17465,26951,27136,36315,25414,11229,4291,4953,15929,37547,29214,55787,57430,17782,7748,2762,59478,36110,37972,48112,7789,50731,39937,24738,57437,6698,31432,17042,9599,4920,59498,22659,56965,38928,5876,36356,315,45893,21476,2426,52780,20643,17638,33785,1894,17684,22062,8883,5647,22036,47114,862,57455,55508,2065,9682,9374,38935,10962,18535,46266,1397,8922,22713,52643,33261,12621,35670,19755,4071,1048,9211,51214,21053,619,34056,3179,1602,42789,38560,19693,50966,37412,9252,18489,55969,28210,35509,2792,44549,34371,8742,12844,44608,52818,56730,16649,48014,16260,26433,25861,26165,12246,46838,59480,8227,24539,28264,43476,18244,36262,40401,6427,48917,1583,10206,4256,43637,39136,25728,52164,40185,13787,44969,8808,9648,3793,6251,13124,43080,19215,53740,20989,25829,59263,41994,7387,42469,25208,9140,56574,59100,37209,48546,23055,842,39690,48728,35256,45915,30602,58300,6210,564,57965,1440,26618,33552,29367,14957,47252,4493,10908,10311,13463,18551,12867,31340,22768,53222,36641,22434,33265,28442,34953,10191,55930,39833,26506,47921,9940,33872,5033,12187,49274,36970,3732,24365,41664,6556,34780,59063,21627,38604,21252,6018,46290,34276,19173,47204,40347,36173,15284,31953,35207,42935,29915,55204,37438,37817,16671,38848,1978,29344,55376,49631,40887,25315,32248,16655,2268,25002,43442,24361,28395,29830,15873,42663,58537,27317,10831,3204,49760,25598,25973,47769,59269,19497,50460,6290,54970,23927,13699,4620,5849,2320,26439,29015,5920,31371,39260,48740,30115,40917,44128,45910,50108,48827,2756,51400,59035,39431,4683,15831,785,15756,49275,31167,12977,43260,33207,56345,48883,17120,51697,23667,33518,56767,21599,27530,31623,48051,41642,1041,41380,37475,7385,30878,48492,17075,47564,39356,22618,8511,31423,38621,47351,3718,10309,58090,48914,29103,446,20765,48422,30718,7418,34704,30008,41161,5472,30432,30271,36069,3368,23803,24743,17248,2825,42451,17383,55094,3442,2676,43911,13933,3863,37001,52760,52960,21871,6087,28723,54115,53549,58676,57663,25058,56920,13319,27728,16809,47181,34108,39068,37456,13895,12173,49332,6957,38841,57304,33883,18521,18378,58305,11070,49901,32105,29700,36681,49095,48289,6017,45311,43824,6390,6284,352,3658,28999,55395,44221,25607,17013,43609,17162,38351,48192,49578,9861,3480,40698,31306,539,25527,57655,32147,18619,38685,28364,4370,32544,50088,30470,40469,41377,1038,41855,54989,45308,40647,18153,2613,16270,56788,39034,44895,42524,56135,9998,26683,53337,7239,14988,47185,42209,23710,17989,16137,24756,19,7518,13897,8358,8409,15832,20767,21756,47459,29072,19914,10516,56090,43660,28424,10143,18506,319,58703,16704,5924,47755 3 | 
1,35064,41412,28667,41445,41211,32494,38138,50617,33172,15814,8110,3131,27145,41557,8294,52847,45112,58769,29599,8961,3590,18868,34070,19490,42862,28855,8763,57746,10407,11129,11605,10488,43941,59331,51029,18892,55177,5202,54783,18599,19902,29177,3128,27462,40760,1626,75,26095,28125,17327,40909,49194,40287,55825,47035,39711,22192,27759,36608,29068,8156,57749,45167,26535,27242,41449,13260,13487,46507,32807,42179,33441,4066,59166,3487,25352,40987,20865,27531,276,43714,15138,42506,22222,959,9264,32133,43577,59697,9856,2688,10239,40352,58891,24888,26495,56955,44210,12634,25800,1572,25507,57105,28559,4697,4997,22067,29167,57262,52812,49168,27587,12263,55526,18632,22019,25677,34202,57894,57387,46997,36100,12679,37274,46966,28920,13082,8670,50700,47649,57358,36009,32768,35373,57496,2357,47731,41301,14914,40044,32981,29004,26477,59641,52188,31063,29990,37613,1679,28059,19829,12350,42681,14619,45164,37280,14810,18044,35015,37468,36855,24333,18030,43043,3291,54648,37848,11747,31108,33609,35486,23779,49207,2146,6089,9180,41428,48499,24969,53803,8075,42150,19905,35336,40223,42962,46424,57868,5852,30717,47628,15474,32434,47329,54736,26739,34788,56398,44231,9529,7578,11641,51182,33671,13564,6329,41426,19810,29584,14187,49138,11764,42507,46374,12291,29538,21611,18345,28049,5002,32448,16290,39607,1172,661,16526,3117,39117,48215,19907,48782,5631,30198,53965,33803,19478,43732,58420,50531,13125,58,58795,39404,42330,20702,45788,34449,56659,26758,8061,48348,39972,4177,33303,53715,42590,57055,39271,20854,8479,39126,55248,57242,21210,31013,18329,23392,6655,58646,13843,14144,36609,50959,3467,37086,42698,25954,45091,47952,33892,44003,54883,23597,18364,55639,32828,28952,54189,51762,49759,35082,1111,26856,11725,42762,33448,20547,6555,45155,46244,14778,55616,26983,41519,2741,11976,49711,58904,16821,5837,45965,14960,34765,57514,19385,27685,33035,32312,14708,41351,9174,42729,8076,29217,21404,22616,24377,45324,11253,22323,431,22851,26657,59073,2930,26065,58435,32670,26077,45376,22986,20383,4980,27991,44697,52197,44635,10435,37581,54885,48988,25850,853,23951,56074,43796,17935,24505,13794,19791,45382,19642,58635,3909,18025,40397,57230,10590,24138,23246,34344,9224,37922,54497,52925,37596,57949,3249,12678,58185,15305,35203,56844,58836,48893,5146,32497,6862,43470,41328,29908,35675,43381,27758,37875,52861,53183,46770,19327,48648,27636,36121,3959,43268,37891,7142,46528,43749,32199,30874,54056,33387,44458,29988,15096,18338,13866,9334,45737,21965,56907,22837,5800,49279,30843,10482,21125,15941,20491,47851,53854,38942,57277,56985,10963,28851,54047,44144,4761,34901,415,33452,39322,43051,7182,38807,5140,17179,32573,49225,13304,40997,24825,8111,29475,43751,44022,27440,12320,58372,14117,15899,2821,29822,10597,44948,36777,6010,17708,32672,20202,16006,39713,47703,1272,33236,31866,8626,33576,22795,3044,35603,31842,28592,50752,59174,2274,40458,37965,28055,40487,27909,52827,37352,3090,44776,25824,344,10539,9933 4 | 
2,2081,16151,7183,9034,26343,12497,27712,15103,42925,28002,57536,1590,43468,43932,41635,12872,31995,56163,37153,53444,36628,53380,32609,16787,53670,8754,54303,10806,6326,17187,18505,29732,51290,27328,45596,48267,16961,50205,24896,20895,210,11620,24525,5115,2839,45235,13730,30473,56724,25773,37289,22349,3394,18316,20108,58207,35826,12236,41555,47982,22545,11765,11533,59354,18421,33487,48600,6654,49768,609,56000,46936,10448,2123,57203,44648,29087,55324,19150,35595,871,41715,13080,15300,29567,51563,7553,20898,56584,47551,14725,6747,15308,23835,38311,12590,25600,58203,35600,57037,22047,12161,36423,15712,2454,24728,23491,1624,53668,55316,13857,54768,18851,28944,44611,11632,25510,24557,8878,28335,52153,55308,14868,47623,43743,50221,35035,41687,32250,12223,29906,21511,24551,57624,9509,21251,23229,45387,49189,31360,30791,40318,3444,34724,11446,52834,43513,37838,48951,56819,34048,28441,2771,52227,8762,15163,11949,5088,21098,4864,35563,54730,37896,670,9182,7501,49081,50995,14026,10450,47157,47123,29104,8756,10677,51616,3033,41773,16111,26909,12437,52327,51987,21812,37186,11645,55133,51816,51790,36169,32703,22149,10483,38445,36427,2481,21306,41407,42545,6230,15871,21517,6009,29792,41691,36674,49206,46324,53295,4768,6833,23151,53985,41796,23685,50889,20711,26132,3217,9431,7914,3439,36733,59227,19514,57043,1079,48876,27800,27738,7442,33638,4439,2874,25313,17842,33011,37080,15525,39983,11283,13548,33971,26790,28866,46712,36222,54869,28615,55529,50831,14543,47528,12856,37380,24630,24859,39625,48197,34128,27802,48680,33537,12937,52326,3266,35707,34021,1997,31053,53978,6026,30393,9344,13681,19398,13321,16406,58632,29712,31768,34626,191,6068,15197,5925,18418,42940,30371,14199,38408,3527,43212,28899,37448,54880,23518,50316,13185,26413,45914,54298,30466,21294,5772,53777,54227,48301,53818,27161,51662,43399,13511,52518,22872,55645,15220,52263,29209,18388,50810,27268,52674,44237,57463,57587,35184,26159,9316,17528,28758,2789,18058,15370,38084,19559,53840,26344,3932,6373,47808,9967,34413,3779,48083,829,21862,7901,42350,20736,41174,5825,52460,47439,38794,44552,21123,38269,19050,38857,52176,31523,30165,49590,29931,15554,54542,8377,39752,44865,37389,4358,4876,42892,14475,24571,22743,45329,33307,52969,2344,17557,21111,11838,35696,20295,18682,17833,14850,11207,28053,21258,16015,7423,56453,56729,16369,17807,13815,4897,16951,43656,2958,43558,19911,9594,29131,48104,34946,36696,44206,45082,21962,50625,37783,47726,53508,40882,28871,8285,27562,31972,41611,16398,3032,25592,34787,15458,24311,22507,29198,58856,48528,35987,17999,25275,34150,46942,29858,43921,53663,32503,55451,16282,26100,46897,6275,16828,15766,12594,19696,42946,18138,10588,34694,18043,6036,44075,58152,38795,25224,31942,53831,50754,36196,3998,15309,21158,24305,16918,56726,56829,35341,4747,48097,28183,19913,26877,55159,56527,6589,51099,17410,49541,26643,17455,22655,46490,33226,36832,54592,45671,33990,20330,59243,51209,49442,36044,39762,26973 5 | -------------------------------------------------------------------------------- /noniid/temp/FMNIST/FMNIST_label_noniid_users3_data500_unbalance0.6.csv: -------------------------------------------------------------------------------- 1 | 
,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499 2 | 
0,13530,3070,59749,53837,47407,38409,20027,17,31552,26243,56434,40313,11937,36396,22709,1329,1589,53168,50795,47182,23592,2772,29725,34504,52745,36922,40239,2265,55144,9105,53792,53266,40268,2789,24769,55475,50266,55340,11389,34962,34617,17272,38094,29167,45435,11084,2684,15327,5163,51254,29250,8357,48142,53283,53550,16779,42978,26026,19499,39901,7890,6725,12583,19611,21686,46226,749,6624,20023,53938,16892,54141,36045,58994,14814,3508,48321,407,13408,39376,16313,35913,1653,35740,3535,6947,10460,26557,18611,41621,53007,15673,30327,23678,21944,9774,51308,8959,49078,7150,6458,30858,12301,45584,34113,50998,27916,2714,20917,39446,9441,34884,2773,46758,54132,27144,8624,16195,52594,37336,49453,51841,6039,42731,18758,45863,48895,39174,54652,39840,9471,13805,51410,12432,32311,57556,29399,59343,39772,51995,39370,53653,46119,7847,11263,22897,52510,22234,7673,2432,58343,31083,11242,10751,38123,14321,22167,30415,54330,23397,10338,2951,24227,41287,31846,44467,52157,5068,40299,29807,48942,57344,42738,10402,51550,35691,26456,46629,12310,8477,55263,4205,26483,32395,38528,12783,38441,4566,21786,52446,22882,17604,56999,16226,47520,50473,58376,30049,5343,41251,28117,53322,39438,36829,7577,41935,36859,58581,45396,59000,2205,41867,20510,10742,3670,38894,15453,37090,57443,58682,10941,34662,37706,41344,5972,7407,2890,2288,13338,24493,5539,4526,58984,10039,54385,21539,55653,23927,19980,56119,17535,9384,15729,52801,38899,56865,16809,43472,7455,58918,18608,22249,46374,38612,39589,26857,14053,39404,21843,29905,58360,58792,21044,8371,26219,31856,35603,37359,6971,38838,434,39076,40305,49793,19667,28634,8342,36320,4827,47228,36553,22745,2365,54433,47758,29128,43487,2282,10263,34442,21068,7839,32524,27340,42986,16760,40258,57608,38909,57786,43939,1528,47434,45667,17412,10787,5115,4306,55715,4385,19655,5327,1659,17702,55349,11849,48210,33569,24950,10732,37785,46090,11125,3837,52024,51250,13445,23590,56252,1576,20721,13817,26047,38296,36127,24301,2179,29236,33448,57552,31139,36686,859,3139,33193,36519,4286,7013,869,25649,24167,15458,14483,43420,13214,38087,45035,6484,36477,5181,50522,22510,28340,6950,43695,34626,15702,1555,4741,29770,9148,35303,646,42730,795,1037,22094,35286,3007,43755,17148,39073,19294,54904,23243,24594,58509,9828,29835,21186,58859,43039,29179,12270,40831,36459,12608,31724,13928,30389,54093,51695,41267,3770,41531,2478,44227,3057,19230,20135,38579,5688,52637,38513,53129,10432,5330,3098,13098,10567,11090,54134,16309,43318,43525,46578,51414,47106,53529,20991,17318,24895,28485,33303,52968,47253,49915,11603,55204,55520,32470,32703,26018,25184,53509,2299,41092,19486,5538,30936,39739,12682,21601,41472,39809,15180,53628,3885,15935,10605,22399,37997,26016,17933,54628,50496,41423,32758,22460,57990,53843,47140,1917,24219,47696,40444,42656,13187,54953,842,51653,51989,41736,3186,31839,42551,58832,55685,2419,1318,24949,49402,58616,30026,17836,35551,1268,23866,14203,44845 3 | 
1,6395,24238,27994,29197,38719,27332,47509,24707,3358,17602,59085,54318,39387,14359,29180,39990,46680,43681,17397,43836,21249,18050,19143,25501,25324,24801,27705,26426,36832,30199,29080,29291,6468,20974,36286,872,6132,43271,33909,27187,58081,39448,4658,57209,44674,21785,7491,50662,28959,21726,33423,53511,12412,32568,16472,13306,8156,33211,19157,18100,28791,32415,38527,15330,15831,24481,43417,8364,27941,55586,47162,53001,11126,40786,28082,8967,58617,44228,4999,47809,30860,1270,50770,13054,17452,56304,26710,14126,44287,38867,41806,6513,22758,22082,3696,7916,58691,48615,17840,27669,20790,31449,32893,18823,13769,37565,4973,13645,20823,11643,6300,14139,59611,27731,45946,51812,13118,4152,59630,56597,21340,14122,21220,56820,52454,23166,8707,46759,40520,49892,6772,25461,41383,32402,17806,4707,40770,33981,58531,32523,10912,6273,19827,47556,51280,57117,29152,14183,50209,47296,46644,22108,44010,127,19984,48441,14475,39706,48269,12915,14682,45606,1147,42632,19804,19135,56584,38799,37661,47824,18619,14974,10472,48910,23053,32040,53346,14304,46007,10883,53520,51213,26061,53590,48393,21936,21042,15202,57617,14655,36042,57158,32335,57619,16987,42309,7805,13732,9535,21986,37922,55255,16024,9107,47232,47378,58505,55396,18124,47305,5514,47641,54790,11205,46586,23042,38334,34754,49260,27002,8960,7462,19501,39383,6450,54697,1255,20462,19574,4832,37824,27464,51029,29222,5004,3287,38546,4528,27432,58377,36869,46213,18836,53033,37603,23834,5129,6877,5454,7369,10132,39628,45971,49588,7675,56888,44749,36591,21111,27191,46023,47513,38880,50445,25924,28111,6236,49123,21036,26717,41521,43311,18431,29267,24706,53947,23541,52048,11109,4423,42905,44792,40729,24357,25978,29424,4855,15351,51670,56582,47986,45153,163,1502,53636,34936,32306,27016,35005,49212,21448,41849,24086,7101,18493,14670,27307,42847,56800,22438,12961,22311,52976,50911,54503,23143,53558,2229,56695,1335,18294,12246,6675,51441,49351,4607,27527,7127,32948,3382,44700,32718,22908,36025,195,55309,15773,43838,4434,50450,29613,17864,909,30602,40464,23080,27359,23861,52748,49087,57006,43326,47355,57791,5061,1574,33471,41290,46991,17738,56424,59831,40877,58949,56422,22449,18901,55055,28896,31117,42151,18342,48132,27418,37761,41085,26984,12318,8516,57361,25208,27693,24426,41751,41140,43632,5421,7415,35860,13606,31337,32616,54917,34312,5446,28231,45349,7760,4605,14569,27963,8293,45651,11162,16353,18806,39228,25020,39325,49385,52253,39054,27776,1943,44101,11994,7102,45274,35233,30121,54780,11632,49917,42711,49968,55995,4403,50831,13633,51028,31323,8425,17905,8617,40573,42493,25556,8847,19118,6996,20596,24682,38408,42977,2553,34511,52168,58864,6612,32722,30138,7156,21854,28713,55038,39803,24470,1713,56570,46430,45266,22860,20762,29715,50826,56109,38258,36015,22637,29593,18020,42598,3449,52924,43390,30538,46378,23441,32507,9747,53164,22759,26582,37430,48718,26835,30363,34122,21791,9482,6857,12900,13327,19633,56892,11753,47736,46774,35502,10116 4 | 
2,24919,29911,15098,32202,45607,36791,54508,40175,23058,12494,58086,7465,23011,4889,59671,52715,28665,56131,34555,59590,41255,42394,14749,34649,59593,57201,53192,2438,37181,32515,2377,26881,21108,29803,29820,2176,36303,45495,34899,1161,28179,10551,21131,1665,43267,8873,48637,45074,54345,48744,58534,23667,6654,37703,3685,45195,41439,28489,3427,18072,22571,10006,47203,41765,30621,37013,26233,19077,218,48791,30311,52960,37066,51361,42671,8074,38228,44235,26631,14130,4113,38371,47092,37458,18044,14660,631,10812,9780,28893,43940,51059,30732,28748,6037,55176,31619,54497,44892,18946,23695,25123,5420,8480,5203,40895,3338,56998,16183,28850,32977,7275,53949,28855,4460,27570,16624,31991,47465,8305,10381,13570,13877,33540,17373,9364,15267,19010,28194,5385,10861,9752,36399,32101,8829,50363,5858,42544,57286,26782,52550,46985,25381,54977,25412,27337,29330,1250,22020,22785,51412,4814,52802,48709,52591,19342,41302,58337,53573,51299,34791,49487,45661,52321,157,6942,33184,56081,55784,29216,40300,20394,26120,25593,39375,18242,56604,36249,13953,26242,24400,31357,58090,3517,44147,23552,10717,41811,2869,10196,158,54169,33583,31307,57599,51233,53681,5452,58016,26151,23791,46775,58461,16537,57852,32279,33704,5258,27238,9099,1712,2840,29571,36875,7595,57674,18359,17608,14305,34543,23776,30045,38384,10287,41278,11895,18147,22058,18339,37294,39317,37535,7682,40253,50957,21026,8207,15081,11772,37004,19786,54823,48758,46380,52274,28913,30486,9186,19567,47540,20707,36623,56105,5171,57580,11845,51961,4791,139,47536,45175,6252,18066,26607,41983,20231,37193,27704,41035,47311,24751,30130,45771,50436,16555,22556,27736,32016,34071,44734,55063,59617,14134,56187,31654,39290,40284,43029,3607,27870,17486,55284,17860,59752,8853,50082,59460,2366,39281,44277,52587,11983,3194,433,24193,52358,53162,33265,12438,50561,7659,6604,59450,30633,18509,21120,2140,43126,30084,3588,56905,12730,54512,45659,9072,58215,34489,26828,31933,18741,49809,7919,32600,57614,40294,7521,32322,20492,20769,55451,28651,39203,55411,878,6924,15970,2685,49613,19373,20627,39350,9678,15760,8180,42492,29385,41643,27518,8988,32574,41525,31760,57485,54909,56571,241,32597,31208,10024,25432,35379,2709,36592,49819,14260,24456,32004,18770,8099,5811,13345,56194,41739,7630,9176,14552,33051,30929,23760,12359,51056,38017,41638,1747,11727,55537,31526,41178,45008,11286,55767,10778,34359,659,28902,35127,23460,48134,20174,44858,47072,18931,30813,21305,15302,55105,1898,11020,57494,47969,24864,9698,35556,38489,29151,40096,24278,56310,59745,23300,40626,54995,59290,4141,14762,53084,55960,51850,29842,5731,24840,24876,49934,58273,4523,59899,10669,50819,57561,54689,55222,29692,54766,53721,54890,15648,25182,6745,26316,29833,8731,38443,33494,53446,7076,56614,58986,59763,51288,17633,7197,4393,30797,53378,18643,14806,58894,32152,1416,35626,57806,50118,58061,22179,46537,3997,45953,3181,6957,25229,54458,19506,53518,8963,17092,41360,17844,23433,46492,27381 5 | -------------------------------------------------------------------------------- /noniid/temp/FMNIST/FMNIST_iid_users3_data500.csv: -------------------------------------------------------------------------------- 1 | 
,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499 2 | 
0,41105,43720,51138,5099,59846,32704,46481,52931,23279,29717,14695,13364,45430,13602,37597,13530,39785,14680,17706,36026,1489,36573,58217,1071,5774,6239,3874,12478,14944,6085,36436,58497,46883,28237,29149,14313,35113,5141,41160,2198,5309,55293,50059,43811,36424,639,19765,26757,59974,56271,58245,18281,34380,16340,24335,9456,55760,21192,52092,13495,54704,46748,24706,51719,22725,39628,45186,24911,33061,14899,30115,6675,49659,33757,58528,41065,9779,27832,30489,47024,11161,45979,33060,26730,3799,27556,39644,38439,16075,20366,58943,47489,4262,10391,34202,26401,20164,31072,13080,25074,15145,40838,8416,36980,16097,49579,12234,21687,33072,19150,32585,45770,12315,54644,48163,41632,45202,14571,59200,43076,42289,3510,29240,44551,47653,26629,49437,10042,38568,1244,7285,42400,23260,14870,22274,12034,55588,29127,27118,16682,41854,34927,16779,23249,7421,35003,54117,34616,59944,34025,36084,58177,488,1130,56858,48383,58734,14499,13813,53756,3956,12785,32693,15396,39184,25728,23189,3007,32239,20638,40430,56062,19160,20376,8005,39195,47500,5212,19067,25195,15440,11211,53413,15468,8112,1674,49565,33056,20991,2034,46628,26428,36672,45835,45037,1150,56165,23757,26414,7772,47085,13751,58961,45203,26423,25432,36387,15148,24489,33384,39927,31664,19595,339,49345,59738,54897,31913,22715,14984,37360,55543,59990,30804,625,42140,26873,24468,51665,31782,24053,41437,2391,20077,5281,5418,33556,22111,37912,22841,19463,52347,56646,15054,56979,36806,53515,16496,58413,9525,52336,12800,47300,21522,39629,45860,46589,55035,1458,54978,41795,21584,42744,4931,18673,53774,47077,1235,27011,42237,33248,27083,16605,26736,20517,41163,12450,12198,22188,47390,17359,36584,50483,40765,8819,23124,23027,45805,11656,28796,9290,6212,28350,32592,5789,20978,3715,33150,25022,13415,30415,23241,20710,9278,12640,38547,36589,8008,32160,14646,56607,10315,34824,42056,53204,9200,8445,4430,56985,20811,2256,53612,14184,41029,24203,35352,19322,1087,33974,7679,23165,5068,57123,50172,36378,20310,26248,36825,25578,55667,22663,9597,38845,8441,56834,52722,40022,7648,17783,35936,12752,44709,38528,20607,22258,49016,11213,35934,29961,21828,48005,12372,57079,52165,31157,6791,14795,30053,13162,2450,26969,5718,45591,43040,15683,43775,3494,22490,55858,48640,58263,41060,45038,27816,2075,36067,52374,23582,15789,45773,19131,44341,53048,41575,14927,26772,51165,7052,5613,57487,42769,42168,31739,8079,25809,33635,8094,42833,28295,33958,26853,21185,1317,34132,4816,35097,2220,24606,53426,58893,59830,14085,4102,48335,34716,16072,46240,30247,13025,22533,48416,18526,40659,39696,55592,47691,44480,7571,14040,34861,53762,17988,56141,50591,31483,6562,36001,30687,11809,39765,9482,12215,29011,55934,19580,18551,59545,24471,14109,57193,35585,34324,28402,40479,53353,36517,19621,45966,33461,11637,30338,57770,18656,8563,822,48994,59647,31526,30393,17869,4209,16760,7110,4605,55160,39957,21063,51809,40262,28962,16375,38858,59313,8816,30576,30885,2064,40514,54260,10237 3 | 
1,40180,15231,27698,7010,4932,36725,34184,16280,58763,4621,38389,49680,44774,26942,12585,38539,24796,30900,12250,16115,41438,41434,28857,10363,7390,15199,16769,14749,21945,19141,20795,28425,1,5266,13151,17104,26575,40518,6366,33801,1634,38933,46282,48133,55640,33128,17146,2479,12683,44157,3436,4763,20977,43412,12115,10777,7511,14422,47358,38493,32966,25234,43810,10264,48164,7135,51389,29613,47767,799,59918,34567,34979,34463,4905,27436,13158,3910,3693,45217,8610,38301,3473,46703,26887,48509,37517,36885,8135,47953,35822,59283,23900,52068,19846,54511,2184,10437,51254,53379,27758,12637,42604,16235,23898,48207,44608,13268,25136,8665,57070,32440,27250,58781,24774,34945,9346,13453,6231,12365,14544,13320,22641,10216,33932,21860,58935,32204,33777,48052,14018,43136,22593,19703,13478,44794,20520,7048,9664,18100,6725,2963,29819,5955,38010,52721,59534,21570,33319,15664,10009,37370,4481,24102,25901,46234,15137,40919,13098,2059,52962,53448,31971,49434,20187,30846,47925,40467,17915,37629,10674,36932,40884,22427,33623,22468,46376,56166,15595,56563,59980,31074,26731,38696,6369,56353,45732,35768,6227,58195,56146,55904,12207,38406,2349,43204,25736,6337,29081,21378,35258,13865,7575,5420,37492,59044,677,14122,2182,54685,54001,59322,28850,8728,38473,27916,25456,20707,36377,52974,17861,14070,50137,5195,45936,34102,32574,26577,11375,46115,26480,24455,33312,36618,21418,51869,33382,51502,4370,28389,42806,48019,56558,788,48092,5968,19370,57254,47484,43494,10334,9889,25124,17948,47001,35443,34083,17289,18256,16113,172,32974,15799,47207,51973,18520,38022,13805,35540,15093,52429,15541,4339,52175,54246,36555,3896,55097,13031,27823,14413,16354,55629,4201,57959,36984,35980,34646,37223,3014,16718,39256,37523,22567,41436,58590,13122,57848,14098,21497,27930,27165,11778,13987,37214,10306,56159,29148,11022,29609,22455,602,58983,22538,48046,19751,45606,8397,59987,13090,20814,11496,48172,44527,35049,1472,4158,59772,49264,36086,43337,43227,45236,20929,16714,53719,52999,41640,56761,16818,40042,27925,10269,16366,15840,24064,59544,8203,52619,48979,7506,41336,3886,30924,27619,13584,6557,46228,22713,35832,7092,40737,18367,21079,13746,6135,21870,13223,27700,4154,58155,57439,11250,19155,24195,26195,15907,8880,6405,48888,17733,7329,32530,15962,8968,31648,26257,28412,55603,13728,8111,57368,25344,50646,35715,59210,34635,32934,24532,10084,33010,5928,21600,46772,50756,24026,6276,31703,4705,59146,49041,47266,13992,4469,4806,49372,29383,16919,54337,12212,59288,8365,32355,5920,1759,27646,59202,49180,25181,15660,41412,55787,27676,26596,20412,21698,39418,29028,24498,22598,17575,37591,44988,3328,26402,7226,27442,653,29246,30240,6455,28367,53912,35872,37604,6818,55671,23888,2326,45357,36236,39299,54243,26117,7504,10040,13069,19477,5241,6770,56193,37603,15774,12890,28703,1663,19704,47047,19054,52610,30076,19843,33221,28175,50165,42520,17389,2851,38602,55777,37335,24000,10918,21157,42691,39797,36421,2160,49618,57609 4 | 
2,54709,38558,58253,17871,53350,40858,12929,23088,38079,59105,50396,30135,43198,10682,55911,26763,47107,30593,10636,32938,24851,18478,35321,21247,3373,7395,48197,4835,14997,25873,25946,18461,57859,42296,4061,21321,36537,25126,19576,9257,14052,50251,50200,18061,48143,57976,8790,20611,55578,53523,19508,39185,33171,28448,12548,57459,8656,53597,52976,14249,10339,17923,39136,54528,20776,35607,3071,59788,33697,40617,11008,14050,50333,10387,33478,47517,23676,33909,38850,6330,14191,19020,5762,50965,22183,22418,17325,47845,49608,51795,53055,1566,38008,25116,10335,54237,28570,17959,27361,16329,25238,25094,57753,10785,49194,34905,9314,57291,24030,16591,19547,11950,35340,58957,35843,18708,30384,53826,40638,51303,27979,36288,44672,9267,28503,50349,13073,28561,55556,3119,23847,38109,27197,24824,54208,35920,52522,1452,8641,14376,19563,11306,9447,45075,35177,38910,41075,42190,52917,32568,51238,52318,39880,8340,37763,39521,19754,14386,40093,26739,9983,19553,40854,21607,23445,18755,13275,34734,31341,16461,9431,26945,47625,48021,50199,14842,9550,20244,52416,42755,18286,45937,44433,36711,23704,16161,4003,1270,39779,44516,25230,36459,59935,44239,55901,30620,58509,4727,46623,11264,20545,18042,22474,55472,57626,4828,27519,19922,18989,54216,14375,29520,40801,43416,31051,12357,22689,21115,1917,43730,14283,15521,10497,40349,45825,33806,5332,30741,5211,27245,22148,1824,53906,55382,40108,41858,28548,54751,28771,33732,31728,16618,18139,45453,10457,1405,57593,50905,8853,59234,30831,51575,2681,13611,29927,52196,49779,56100,52547,13,37563,2522,58642,54286,35689,37675,30528,20040,7180,53294,25744,34168,20486,29382,28930,20393,35925,16616,49606,28887,31162,384,11682,2143,39212,36029,3831,26592,3961,29268,56949,48831,39417,10488,50555,19441,4440,29217,20512,9173,53485,50703,50677,2843,43528,4150,485,56343,32434,53830,36770,54189,42076,38466,31240,46966,1725,7609,10797,51299,22569,8869,33184,7417,11839,13803,353,6898,55676,4478,17539,29778,36804,5319,58046,9876,29572,36559,43003,20098,19967,12265,20314,56173,44280,23620,14581,40299,2021,29440,32580,45693,14573,54542,45150,49103,52445,51171,15635,49250,50890,9467,25958,33104,21060,54084,55974,8473,9004,29709,30536,36008,1697,18564,46734,20640,47224,53847,13042,47872,23926,42581,48284,47110,43795,4298,24668,10589,607,50899,40414,49537,21588,44029,28800,36506,3638,40923,17494,14872,32583,44387,9376,45793,23274,37869,37096,48874,17362,5704,55514,9569,7146,22528,50277,33351,46826,46295,41327,13284,29088,3527,22401,28717,16014,12416,50384,16221,16761,22125,33453,25564,1323,1118,35848,17801,13653,37191,45336,9114,21299,32426,1624,29247,49899,43475,28273,5145,53468,57124,26252,45230,14621,25528,49662,13340,26630,58273,48758,2197,31365,28048,30624,44361,9837,44216,27432,2834,43957,27383,7658,25518,13854,53394,13744,30301,10255,45596,19568,10428,11872,12030,54766,15856,51045,58802,26378,5632,5223,1844,44090,59169,31794,8016,47852,44201,16196,31655,35123,2224 5 | -------------------------------------------------------------------------------- /noniid/temp/MNIST/MNIST_label_noniid_users3_data500_unbalance0.6.csv: -------------------------------------------------------------------------------- 1 | 
,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499 2 | 
0,23378,5884,23113,30585,48412,1904,15142,15564,56709,41126,59043,34639,43827,49354,34985,17838,5244,16921,54411,10283,55115,14559,49027,58396,41138,15148,20263,40373,23404,23134,7349,20643,52916,33433,42731,57266,20409,44369,49796,27410,29518,56326,26104,49569,45858,12178,21552,38683,35831,32937,13039,57928,55723,47254,50474,23194,26340,58585,56664,122,30522,41074,16294,10078,53837,39918,43345,36450,5397,54305,21441,35902,17927,31852,54095,9829,17442,34906,35522,8227,51543,8794,55665,55580,45135,8568,41513,37478,8528,15872,32960,34684,14080,49081,41391,13161,19280,21775,55966,51890,42822,12788,41680,56276,8477,21131,25162,3838,6258,58712,11879,1814,21968,23175,51678,21916,30927,7611,8825,40867,45681,52681,49072,37961,22456,49957,21275,49132,244,36739,56363,53507,17598,5423,26552,18829,17819,10386,12269,43733,57982,37830,36407,53050,2478,30553,58471,41016,19279,14435,5686,51386,28997,55820,35140,8076,2925,40475,18593,47824,15638,52908,11487,43940,311,51438,34658,50594,49436,6178,22107,29507,48328,15386,8057,56722,57357,11076,29452,22167,44150,45114,26851,26021,20090,17282,51996,59323,2492,48914,24239,48165,33521,2102,53962,370,3782,58061,40795,24254,46135,33533,57118,57501,24937,44299,7436,16870,29604,48693,36541,22389,19877,6808,39109,34575,26289,14830,40626,34883,41200,10881,52401,44196,58993,4378,16973,18415,6295,59776,14950,44769,17936,3265,13435,38999,35355,14816,54517,59131,57159,59857,8902,22080,32771,55835,26316,15312,59236,3169,40929,9509,23472,16327,23319,18210,44797,39510,19653,30826,6055,25272,55972,7776,59093,9891,54682,13249,9748,8403,58000,19210,21598,22067,21665,50986,11243,5436,12718,24513,11364,53522,58476,51702,53056,40967,16473,46143,32135,34444,50695,19149,28135,25609,12860,33290,45375,26357,4182,54033,6211,31616,28844,57455,41264,10308,37942,42308,52403,51735,2641,27093,22861,39079,41335,36685,54286,48320,46849,56301,45964,46217,12156,42918,58670,36775,15153,19084,35202,24089,22188,31884,1301,10949,24152,50897,33274,10535,38649,47971,15712,38107,43314,19028,23942,14118,58720,38785,57981,20862,5952,8002,39470,14267,30762,55100,33438,4371,50977,31492,171,10279,34484,55950,40718,36728,52153,59677,2693,50153,6492,37526,3582,22769,55203,23496,59923,20693,51795,11922,18364,30216,9450,48572,28259,43766,19775,16424,53973,11549,2968,56982,27228,4625,18248,38624,18347,45716,28579,25198,46612,29112,7249,31316,30399,34932,6936,21588,50881,59301,403,44481,59064,44099,50507,6305,3219,19752,41970,12689,28360,12569,51973,18417,28845,6680,17338,5188,15975,699,6646,25711,24660,24504,58814,59731,39689,51736,19959,18769,47,16219,37558,53433,47528,20885,12341,20092,50764,23312,38266,28949,37699,52902,11397,14534,23209,50939,11477,3289,55903,36405,39725,32996,4921,19306,50533,39881,50366,54883,9033,51972,39516,14867,55305,44869,13946,25577,407,16574,1721,31863,26856,10894,1817,51296,22382,15631,58456,49671,58706,11980,39214,16120,49794,35029,34888,47769,49140,58957 3 | 
1,44844,50723,58489,36476,58457,54783,24777,23454,50480,47466,44243,5929,24480,44325,10859,48780,52804,27931,43020,31509,46748,57384,34514,28272,6987,30068,22828,7764,56903,22926,21019,13630,33667,48989,55232,29901,33943,8625,25876,16278,54094,47721,1734,15435,42881,58691,3197,31238,46512,39695,18040,57500,43630,52142,40542,26006,53373,34450,51569,33707,39688,22611,38667,15573,58104,33160,42278,16123,3166,2402,2189,646,45133,4241,39216,12044,50834,10676,35574,45436,53619,10349,44519,17993,41343,32539,45959,2788,10450,3033,11012,12998,35643,57306,54867,6651,32786,55726,31564,22767,27040,12589,10812,31885,56705,29992,33177,21107,3039,10269,14469,38052,47996,2809,52855,22434,17390,50161,12969,57499,40267,40334,59623,23474,4619,59960,24773,7480,57240,4752,57013,42046,16406,36244,9531,39671,52540,59216,29037,5598,44501,26771,38883,28254,32815,38907,27656,33808,46180,26236,35026,3524,58352,1985,50623,50220,1204,51734,33916,54572,59442,5837,55626,5554,36698,27149,28756,29380,7549,44732,35497,16894,23766,50146,27119,31967,2095,26967,38210,54274,21518,2145,23164,31396,18392,21412,57905,44626,45804,30310,26894,51636,56160,47453,26097,42798,9676,38026,5178,5251,33664,42635,11028,39238,5360,12944,1974,15236,32636,3501,34075,47730,49831,13682,35847,44391,2150,48560,51487,28643,59280,30985,8465,38956,21578,16481,51843,49464,36211,16602,4936,43028,344,26108,38087,13891,58263,44592,34330,53250,53324,16853,49466,20763,3827,58641,5803,35118,18885,22127,41646,30352,48371,37059,42925,31445,24429,56766,51520,17476,6603,29579,40266,38693,11514,50492,14693,11605,12421,40582,15825,49737,50344,51758,44660,16987,38155,37723,20088,45276,48200,26775,33921,38062,14393,17755,14877,35999,34798,15442,18650,26881,33230,44656,42487,54514,3461,8116,50643,27247,31769,1927,56989,38103,13751,42488,50539,21314,48419,5844,40821,8682,36842,31243,29560,38963,32785,46535,25065,21256,34553,43884,2729,26999,57349,48895,6737,50990,51006,12011,4607,59762,56949,30790,48167,29862,59972,20609,18645,43243,19969,53189,16217,1028,56801,13135,18893,42664,41683,19439,38929,5192,59203,7281,12321,14229,9965,8309,42433,7243,21736,17648,56444,30565,32221,20731,57642,43195,19818,53404,40895,34283,25366,49197,58110,53429,5697,55890,54980,25159,41357,53460,56707,8533,58617,2526,52921,47815,43468,14518,14481,51623,20955,36524,50637,52179,57882,27411,29732,16817,55822,15753,54822,36848,2192,24346,56023,6320,24828,42848,20238,13807,32531,31716,23988,17124,48715,19156,9083,37902,55785,59154,30275,23297,8838,56489,50654,1268,23154,20662,41806,29623,9750,53657,12662,10564,41490,2914,50769,44060,39740,5268,42499,26712,48554,39347,14712,57326,24676,4265,16244,49304,206,47069,53255,54833,51286,48421,47473,7708,3691,16905,49518,16323,7691,29067,58229,30748,1798,1797,5563,45997,52047,17223,11060,8982,16363,7391,16192,24856,49835,22202,2557,9197,54089,54808,5674,13203,34016,49208,3309,35516,17379,52228,34538,44513,28888,12168,6681,429 4 | 
2,54313,15383,58376,39279,47087,25629,13196,33844,15480,29471,31695,45434,943,1626,8250,33136,11661,11526,11267,192,20351,27328,1368,26608,11361,34911,31633,47621,42440,26096,17425,52741,4807,58377,59658,15999,20963,52058,22004,32474,33516,48949,35396,46850,33596,55567,56728,36682,43261,1654,49411,50669,39408,20007,2261,45885,48912,15650,27052,17046,39284,45041,2104,2847,30091,18423,34747,6445,47956,4278,8972,27458,56394,22726,9658,7666,5354,41943,49294,26014,12344,31140,15762,1603,26358,12828,24899,14383,11570,8816,28204,53671,37035,17333,23860,8230,24099,20326,9706,59356,59647,5613,4839,20653,43614,51868,52935,56564,1271,13104,54911,10899,50599,33106,56827,17744,54588,53925,59727,29929,5863,1992,36938,54234,53974,54781,2527,35743,9323,43856,12856,7964,17780,11882,31554,13532,29505,53152,50895,39972,59654,32023,54928,48260,31584,23546,11410,37181,26493,36599,9024,8091,3820,4090,7224,7008,31981,3451,6635,8638,59273,13635,34539,38376,24569,40271,23186,57514,53625,6150,58226,25340,56845,11466,13261,53699,52321,4452,5161,34501,32812,34395,11349,6770,1154,10050,4618,22683,46042,2348,19949,17772,58528,52173,51974,23645,9774,57556,44818,33356,3476,5557,52496,26061,54120,41347,25486,39266,29280,31206,17512,47401,13341,18406,17784,5029,51878,50996,51612,37771,8659,11176,42112,8746,16539,31120,31161,1555,51032,3095,35503,8059,1984,20159,55563,15253,2331,31351,38775,4122,48347,59604,56238,49844,49793,1882,46885,22655,32021,20389,20501,12863,32190,50126,46888,27837,10548,25458,47232,26589,20558,35101,42606,40559,47206,48250,49738,52997,9603,15570,54678,50732,24742,39252,25026,19023,12601,37922,57097,26076,8123,16986,44219,57372,59264,50668,8604,14348,39729,3895,26579,29051,10533,7694,44658,24131,28399,25200,43104,27976,14145,36873,13056,21043,59830,17177,5407,10893,41698,35512,49933,13329,5422,51279,59843,13123,30018,3116,55802,23551,39223,39275,31485,11020,10302,45871,56354,35549,54734,562,2173,49444,58977,14231,13756,17703,41054,40397,31731,8954,13255,5564,57386,33867,29210,59469,30017,39784,1022,55244,1944,20579,898,57828,17017,35860,51198,12348,41161,40077,47170,45469,8758,43390,12676,27984,20296,44117,24583,52528,59792,4734,59646,28158,4432,36647,58634,30875,10683,36342,46795,26544,6546,7090,42858,42684,39298,16719,43137,35362,289,2169,54309,52015,3437,48522,354,42333,26494,43078,34570,15686,39128,49373,37846,18636,1667,2290,26242,31728,7751,6220,28363,50531,53031,52417,17474,2069,13252,36286,34744,40529,9026,55423,5331,6621,47589,54807,28669,2686,21935,52193,43608,50405,30830,37419,55187,3222,2276,21664,37884,1537,58478,37144,10908,59780,52517,7596,36706,29792,23956,29558,52308,45821,4781,18054,41531,14175,9709,882,55788,3092,56031,4802,9704,36962,58867,40449,10964,54650,31580,13363,33127,43858,40842,35425,4318,2204,28049,53144,56184,48972,32683,38375,8579,55282,51684,1253,25739,48286,5038,26599,33195,9626,25114,41774,34035,51549,15092,48936 5 | -------------------------------------------------------------------------------- /FL_models/FedAvg_ray.py: -------------------------------------------------------------------------------- 1 | import os 2 | # import torch.distributed.rpc as rpc 3 | import torch 4 | import torch.nn.functional as F 5 | from torch.distributions import Categorical 6 | import numpy as np 7 | import init.init_cnn as cnn_module 8 | import init.init_mobilenet as mobilenet_module 9 | import init.init_resnet18 as resnet18_module 10 | import init.init_lstm as lstm_module 11 | from collections import OrderedDict 12 | import ray 13 | 14 | 15 | @ray.remote 16 | class Server(object): 
17 |     def __init__(self, args):
18 |         self.device, _, self.test_loader, self.model, _, self.optimizer, _, self.test_num = init_fuc(args)
19 |         # self.server_rref = rpc.RRef(self)
20 |         self.worker_rrefs = []
21 |         self.world_size = args.world_size
22 |         # print(f"{rpc.get_worker_info().name} has received the {self.test_num} data successfully!")
23 | 
24 |     def run_episode(self, epoch_s, args, workers):
25 |         print(f'Round: {epoch_s + 1}')
26 |         update_paras, data_num = [], []
27 |         para = self.model.state_dict()
28 | 
29 |         for i in range(len(workers)):
30 |             data_num.append(ray.get(workers[i].get_data_num.remote()))
31 |             update_paras.append(ray.get(workers[i].run_episode.remote(para, args)))
32 |         # # futs: run_episode ; weight_futs: get_data_num
33 |         # futs, update_paras = [], []
34 |         # weight_futs, data_num = [], []
35 | 
36 |         # para = self.model.state_dict()
37 |         # for worker_rref in self.worker_rrefs:
38 |         #     futs.append(rpc.rpc_async(worker_rref.owner(), _call_method, args=(Worker.run_episode, \
39 |         #         worker_rref, para, args), timeout=0))
40 |         #     weight_futs.append(rpc.rpc_async(worker_rref.owner(), _call_method, args=(Worker.get_data_num, \
41 |         #         worker_rref), timeout=0))
42 |         # update_paras.extend(fut.wait() for fut in futs)
43 |         # data_num.extend(weight_fut.wait() for weight_fut in weight_futs)
44 |         print(data_num)
45 |         self.model_average(*update_paras, data_num=data_num)
46 |         self.evaluate(args, epoch_s)
47 | 
48 |     def model_average(self, *local_weights, data_num):  # FedAvg aggregation
49 |         global_weight = OrderedDict()
50 |         server_data_sum = sum(data_num)
51 |         for index, local_update in enumerate(local_weights):
52 |             weight = data_num[index] / server_data_sum  # client's share of the total training data
53 |             for key in self.model.state_dict().keys():
54 |                 if index == 0:
55 |                     global_weight[key] = weight * local_update[key]
56 |                 else:
57 |                     global_weight[key] += weight * local_update[key]
58 |         self.model.set_weights(global_weight)
59 | 
60 |     def evaluate(self, args, epoch_s):
61 |         with open("./acc/" + "acc_" + args.model + "_" + args.data + ".txt", "a") as f:  # "a" so every round's accuracy is kept
62 |             print("Waiting Test!")
63 |             self.model.eval()
64 |             with torch.no_grad():
65 |                 # for model: CNN / MobileNet / ResNet18
66 |                 if args.model != 'lstm':
67 |                     correct = 0
68 |                     total = 0
69 |                     for data in self.test_loader:
70 |                         self.model.eval()
71 |                         images, labels = data
72 |                         images, labels = images.to(self.device), labels.to(self.device)
73 |                         outputs = self.model(images)
74 |                         # take the class with the highest score (the index along dim 1 of outputs.data)
75 |                         _, predicted = torch.max(outputs.data, 1)
76 |                         total += labels.size(0)
77 |                         correct += (predicted == labels).sum()
78 |                     print('The Global Test Accuracy is: %.3f%%' % (100. * correct / total))
79 |                     acc = 100. * correct / total
@ray.remote
class Worker(object):
    def __init__(self, args):
        self.device, self.train_loader, _, self.model, self.criterion, self.optimizer, self.train_num, _ = init_fuc(args)
        self.idx = args.idx_user + 1
        # print(f"{rpc.get_worker_info().name} has received the {self.train_num} data successfully!")

    def run_episode(self, para, args):
        # for model: CNN / MobileNet / ResNet-18 / LSTM
        print(f'-----------------Worker {self.idx} is running!-----------------')
        if args.model != "lstm":
            self.model.load_state_dict(para)
            self.model.zero_grad()
            pre_epoch = 0
            for epoch in range(pre_epoch, args.epoch_worker):
                print('\nEpoch: %d' % (epoch + 1))
                self.model.train()
                sum_loss = 0.0
                correct = 0.0
                total = 0.0
                length = len(self.train_loader)
                for i, data in enumerate(self.train_loader, 0):
                    inputs, labels = data
                    inputs, labels = inputs.to(self.device), labels.to(self.device)
                    self.optimizer.zero_grad()

                    # forward + backward
                    outputs = self.model(inputs)
                    loss = self.criterion(outputs, labels)
                    loss.backward()
                    self.optimizer.step()

                    # print loss and accuracy after every training batch;
                    # predicted holds the index of the max value in each row
                    sum_loss += loss.item()
                    _, predicted = torch.max(outputs.data, 1)
                    total += labels.size(0)
                    correct += predicted.eq(labels.data).cpu().sum()
                    print('[epoch:%d, iter:%d] Loss: %.03f | Acc: %.3f%% ----Rank%d'
                          % (epoch + 1, (i + 1 + epoch * length), sum_loss / (i + 1), 100. * correct / total, self.idx))
            local_para = self.model.state_dict()
            return local_para

        if args.model == "lstm":
            self.model.load_state_dict(para)
            self.model.zero_grad()
            pre_epoch = 0
            iter_count = 0
            for epoch in range(pre_epoch, args.epoch_worker):
                print('\nEpoch: %d' % (epoch + 1))

                data_ptr = np.random.randint(100)
                n = 0
                sum_loss = 0
                correct = 0.0
                total = 0.0
                hidden_state = None

                while True:
                    input_seq = self.train_loader[data_ptr: data_ptr + args.batchsize]
                    target_seq = self.train_loader[data_ptr + 1: data_ptr + args.batchsize + 1]
                    input_seq, target_seq = input_seq.to(self.device), target_seq.to(self.device)
                    self.optimizer.zero_grad()

                    # forward + backward
                    output, hidden_state = self.model(input_seq, hidden_state)
                    loss = self.criterion(torch.squeeze(output), torch.squeeze(target_seq))
                    loss.backward()
                    self.optimizer.step()

                    # loss + acc
                    sum_loss += loss.item()
                    _, predicted = torch.max(torch.squeeze(output).data, 1)
                    total += torch.squeeze(target_seq).size(0)
                    correct += predicted.eq(torch.squeeze(target_seq).data).cpu().sum()
                    print('[epoch:%d, iter:%d] Loss: %.03f | Acc: %.3f%% '
                          % (epoch + 1, iter_count + 1, sum_loss / (n + 1), 100. * correct / total))

                    data_ptr += args.batchsize
                    n += 1
                    iter_count += 1

                    if data_ptr + args.batchsize + 1 > self.train_loader.size(0):
                        break
            local_para = self.model.state_dict()
            return local_para

    def get_data_num(self):
        return self.train_num
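# Illustration (hypothetical encoded text, batchsize of 4): in the LSTM branch
# above, target_seq is input_seq shifted forward by one position, so the model
# is trained to predict the next character at every step.
import torch

chars = torch.tensor([[10], [11], [12], [13], [14], [15]])  # encoded characters
batchsize, data_ptr = 4, 0
input_seq = chars[data_ptr: data_ptr + batchsize]           # characters 10..13
target_seq = chars[data_ptr + 1: data_ptr + batchsize + 1]  # characters 11..14
print(input_seq.squeeze().tolist(), target_seq.squeeze().tolist())
# [10, 11, 12, 13] [11, 12, 13, 14]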
# get init information according to args
def init_fuc(args):
    if args.model == "cnn":
        device, trainloader, testloader, net, criterion, optimizer, train_num, test_num = cnn_module.init(args)
    if args.model == "mobilenet":
        device, trainloader, testloader, net, criterion, optimizer, train_num, test_num = mobilenet_module.init(args)
    if args.model == "resnet18":
        device, trainloader, testloader, net, criterion, optimizer, train_num, test_num = resnet18_module.init(args)
    if args.model == "lstm":
        # note: lstm_module.init returns the net before the data loaders
        device, net, trainloader, testloader, criterion, optimizer, train_num, test_num = lstm_module.init(args)
    return device, trainloader, testloader, net, criterion, optimizer, train_num, test_num


def _call_method(method, rref, *args, **kwargs):
    return method(rref.local_value(), *args, **kwargs)


def run_worker(args):
    ray.init()
    server = Server.remote(args)
    workers = []
    for work_rank in range(1, args.world_size):
        args.idx_user = work_rank - 1
        workers.append(Worker.remote(args))
    print(f"{len(workers)} workers have been created successfully!")
    for i in range(args.EPOCH):
        ray.get(server.run_episode.remote(i, args, workers))
    ray.shutdown()
--------------------------------------------------------------------------------
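run_worker above relies on Ray's actor model: @ray.remote turns Server and Worker into actor classes, every .remote(...) call returns a future immediately, and ray.get blocks until the result arrives. A self-contained toy script showing just that pattern (the Counter actor is hypothetical and assumes only that ray is installed):

import ray

@ray.remote
class Counter(object):
    def __init__(self):
        self.n = 0

    def add(self, k):
        self.n += k
        return self.n

ray.init()
counters = [Counter.remote() for _ in range(3)]              # three independent actors
futures = [c.add.remote(i) for i, c in enumerate(counters)]  # returns immediately
print(ray.get(futures))                                      # blocks; prints [0, 1, 2]
ray.shutdown()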
/FL_models/FedAvg.py:
--------------------------------------------------------------------------------
import os
import torch.distributed.rpc as rpc
import torch
import torch.nn.functional as F
from torch.distributions import Categorical
import numpy as np
import init.init_cnn as cnn_module
import init.init_mobilenet as mobilenet_module
import init.init_resnet18 as resnet18_module
import init.init_lstm as lstm_module
from collections import OrderedDict


class Server(object):
    def __init__(self, args):
        self.device, _, self.test_loader, self.model, _, self.optimizer, _, self.test_num = init_fuc(args)
        self.server_rref = rpc.RRef(self)
        self.worker_rrefs = []
        self.world_size = args.world_size
        print(f"{rpc.get_worker_info().name} has received the {self.test_num} data successfully!")

    def run_episode(self, epoch_s, args):
        print(f'Round: {epoch_s + 1}')

        # futs: run_episode ; weight_futs: get_data_num
        futs, update_paras = [], []
        weight_futs, data_num = [], []

        # launch every worker asynchronously first, then wait on the futures,
        # so the workers train in parallel
        para = self.model.state_dict()
        for worker_rref in self.worker_rrefs:
            futs.append(rpc.rpc_async(worker_rref.owner(), _call_method,
                                      args=(Worker.run_episode, worker_rref, para, args), timeout=0))
            weight_futs.append(rpc.rpc_async(worker_rref.owner(), _call_method,
                                             args=(Worker.get_data_num, worker_rref), timeout=0))
        update_paras.extend(fut.wait() for fut in futs)
        data_num.extend(weight_fut.wait() for weight_fut in weight_futs)
        self.model_average(*update_paras, data_num=data_num)
        self.evaluate(args, epoch_s)

    def model_average(self, *local_weights, data_num):
        # FedAvg: weight each local update by its worker's share of the data
        global_weight = OrderedDict()
        server_data_sum = sum(data_num)
        for index, local_update in enumerate(local_weights):
            weight = data_num[index] / server_data_sum
            for key in self.model.state_dict().keys():
                if index == 0:
                    global_weight[key] = weight * local_update[key]
                else:
                    global_weight[key] += weight * local_update[key]
        self.model.set_weights(global_weight)

    def evaluate(self, args, epoch_s):
        # open in append mode so each round's accuracy line is kept
        # (see the files under acc/)
        with open("./acc/" + "acc_" + args.model + "_" + args.data + ".txt", "a") as f:
            print("Waiting Test!")
            self.model.eval()
            with torch.no_grad():
                # for model: CNN / MobileNet / ResNet18
                if args.model != 'lstm':
                    correct = 0
                    total = 0
                    for data in self.test_loader:
                        images, labels = data
                        images, labels = images.to(self.device), labels.to(self.device)
                        outputs = self.model(images)
                        # take the class with the highest score
                        # (its index in outputs.data)
                        _, predicted = torch.max(outputs.data, 1)
                        total += labels.size(0)
                        correct += (predicted == labels).sum()
                    print('The Global Test Accuracy is: %.3f%%' % (100. * correct / total))
                    acc = 100. * correct / total
                    f.write("EPOCH=%03d,Accuracy= %.3f%%" % (epoch_s + 1, acc))
                    f.write('\n')
                    f.flush()

                # for model: LSTM
                if args.model == 'lstm':
                    data_ptr = 0
                    hidden_state = None
                    sum_correct = 0
                    sum_test = 0

                    # random character from data to begin
                    rand_index = np.random.randint(100)

                    while True:
                        input_seq = self.test_loader[rand_index + data_ptr: rand_index + data_ptr + 1]
                        target_seq = self.test_loader[rand_index + data_ptr + 1: rand_index + data_ptr + 2]
                        output, hidden_state = self.model(input_seq, hidden_state)

                        output = F.softmax(torch.squeeze(output), dim=0)
                        dist = Categorical(output)
                        index = dist.sample()

                        if index.item() == target_seq[0][0]:
                            sum_correct += 1
                        sum_test += 1
                        data_ptr += 1

                        if data_ptr > self.test_loader.size(0) - rand_index - 2:
                            break
                    print('The Global Test Accuracy is: %.3f%%' % (100. * sum_correct / sum_test))
                    acc = 100. * sum_correct / sum_test
                    f.write("EPOCH=%03d,Accuracy= %.3f%%" % (epoch_s + 1, acc))
                    f.write('\n')
                    f.flush()
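# Illustration (hypothetical two-worker updates, values invented): the
# model_average step above computes the FedAvg update
#     w_global = sum_k (n_k / n) * w_k,  with n = sum_k n_k,
# i.e. each local state_dict is weighted by its worker's share of the data.
import torch
from collections import OrderedDict

w1 = OrderedDict(fc=torch.tensor([1.0, 2.0]))  # update from a worker with 100 samples
w2 = OrderedDict(fc=torch.tensor([3.0, 6.0]))  # update from a worker with 300 samples
counts = [100, 300]

avg = OrderedDict()
for idx, local_w in enumerate([w1, w2]):
    share = counts[idx] / sum(counts)
    for key in local_w:
        avg[key] = share * local_w[key] if idx == 0 else avg[key] + share * local_w[key]

print(avg["fc"])  # tensor([2.5000, 5.0000]) = 0.25*[1, 2] + 0.75*[3, 6]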
class Worker(object):
    def __init__(self, args):
        self.device, self.train_loader, _, self.model, self.criterion, self.optimizer, self.train_num, _ = init_fuc(args)
        self.idx = args.idx_user + 1
        print(f"{rpc.get_worker_info().name} has received the {self.train_num} data successfully!")

    def run_episode(self, para, args):
        # for model: CNN / MobileNet / ResNet-18 / LSTM
        print(f'-----------------Worker {self.idx} is running!-----------------')
        if args.model != "lstm":
            self.model.load_state_dict(para)
            self.model.zero_grad()
            pre_epoch = 0
            for epoch in range(pre_epoch, args.epoch_worker):
                print('\nEpoch: %d' % (epoch + 1))
                self.model.train()
                sum_loss = 0.0
                correct = 0.0
                total = 0.0
                length = len(self.train_loader)
                for i, data in enumerate(self.train_loader, 0):
                    inputs, labels = data
                    inputs, labels = inputs.to(self.device), labels.to(self.device)
                    self.optimizer.zero_grad()

                    # forward + backward
                    outputs = self.model(inputs)
                    loss = self.criterion(outputs, labels)
                    loss.backward()
                    self.optimizer.step()

                    # print loss and accuracy after every training batch;
                    # predicted holds the index of the max value in each row
                    sum_loss += loss.item()
                    _, predicted = torch.max(outputs.data, 1)
                    total += labels.size(0)
                    correct += predicted.eq(labels.data).cpu().sum()
                    print('[epoch:%d, iter:%d] Loss: %.03f | Acc: %.3f%% ----Rank%d'
                          % (epoch + 1, (i + 1 + epoch * length), sum_loss / (i + 1), 100. * correct / total, self.idx))
            local_para = self.model.state_dict()
            return local_para

        if args.model == "lstm":
            self.model.load_state_dict(para)
            self.model.zero_grad()
            pre_epoch = 0
            iter_count = 0
            for epoch in range(pre_epoch, args.epoch_worker):
                print('\nEpoch: %d' % (epoch + 1))

                data_ptr = np.random.randint(100)
                n = 0
                sum_loss = 0
                correct = 0.0
                total = 0.0
                hidden_state = None

                while True:
                    input_seq = self.train_loader[data_ptr: data_ptr + args.batchsize]
                    target_seq = self.train_loader[data_ptr + 1: data_ptr + args.batchsize + 1]
                    input_seq, target_seq = input_seq.to(self.device), target_seq.to(self.device)
                    self.optimizer.zero_grad()

                    # forward + backward
                    output, hidden_state = self.model(input_seq, hidden_state)
                    loss = self.criterion(torch.squeeze(output), torch.squeeze(target_seq))
                    loss.backward()
                    self.optimizer.step()

                    # loss + acc
                    sum_loss += loss.item()
                    _, predicted = torch.max(torch.squeeze(output).data, 1)
                    total += torch.squeeze(target_seq).size(0)
                    correct += predicted.eq(torch.squeeze(target_seq).data).cpu().sum()
                    print('[epoch:%d, iter:%d] Loss: %.03f | Acc: %.3f%% '
                          % (epoch + 1, iter_count + 1, sum_loss / (n + 1), 100. * correct / total))

                    data_ptr += args.batchsize
                    n += 1
                    iter_count += 1

                    if data_ptr + args.batchsize + 1 > self.train_loader.size(0):
                        break
            local_para = self.model.state_dict()
            return local_para

    def get_data_num(self):
        return self.train_num
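# Illustration (hypothetical logits, batch of 4 samples over 3 classes):
# Worker.run_episode above tracks running accuracy with torch.max over the
# logits and predicted.eq(labels).
import torch

outputs = torch.tensor([[2.0, 0.1, 0.3],
                        [0.2, 1.5, 0.1],
                        [0.1, 0.2, 3.0],
                        [1.0, 0.9, 0.8]])
labels = torch.tensor([0, 1, 2, 1])

_, predicted = torch.max(outputs.data, 1)        # index of the max value per row
correct = predicted.eq(labels.data).cpu().sum()  # 3 of 4 predictions match
print(predicted.tolist(), 100. * correct.item() / labels.size(0))
# [0, 1, 2, 0] 75.0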
# get init information according to args
def init_fuc(args):
    if args.model == "cnn":
        device, trainloader, testloader, net, criterion, optimizer, train_num, test_num = cnn_module.init(args)
    if args.model == "mobilenet":
        device, trainloader, testloader, net, criterion, optimizer, train_num, test_num = mobilenet_module.init(args)
    if args.model == "resnet18":
        device, trainloader, testloader, net, criterion, optimizer, train_num, test_num = resnet18_module.init(args)
    if args.model == "lstm":
        # note: lstm_module.init returns the net before the data loaders
        device, net, trainloader, testloader, criterion, optimizer, train_num, test_num = lstm_module.init(args)
    return device, trainloader, testloader, net, criterion, optimizer, train_num, test_num


def _call_method(method, rref, *args, **kwargs):
    return method(rref.local_value(), *args, **kwargs)


def run_worker(args):
    os.environ['MASTER_ADDR'] = args.addr
    os.environ['MASTER_PORT'] = args.port
    print("waiting for connecting......")

    if args.rank == 0:
        # network interface of the server machine; adjust to your environment
        os.environ["GLOO_SOCKET_IFNAME"] = "wlp4s0"
        rpc.init_rpc(name='server', rank=args.rank, world_size=args.world_size)
        print(f"{rpc.get_worker_info().name} has been initialized successfully")
        server = Server(args)
        for work_rank in range(1, args.world_size):
            args.idx_user = work_rank - 1
            work_info = rpc.get_worker_info(f'worker{work_rank}')
            server.worker_rrefs.append(rpc.remote(work_info, Worker, args=(args,)))
        print("RRef map has been created successfully!")
        print(f"The length of RRef is {len(server.worker_rrefs)}")
        for i in range(args.EPOCH):
            server.run_episode(i, args)

    else:
        # network interface of the worker machines; adjust to your environment
        os.environ["GLOO_SOCKET_IFNAME"] = "eth0"
        rpc.init_rpc(name=f'worker{args.rank}', rank=args.rank, world_size=args.world_size)
        print(f"{rpc.get_worker_info().name} has been initialized successfully")

    rpc.shutdown()
--------------------------------------------------------------------------------
/noniid/temp/Cifar/Cifar_quantity_noniid_users3_data500.csv:
--------------------------------------------------------------------------------
,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669 2 | 
0,872,38635,24600,44830,33229,36634,33764,4683,48119,17103,10617,7263,19317,15827,22190,37619,18849,31756,11772,43363,18861,13668,13223,26376,32683,42231,28216,1130,23157,4746,189,25236,19503,17136,32410,14311,44026,28896,26362,11966,28552,2769,49566,42805,10236,29088,41984,27607,35651,13149,2023,15357,41296,9318,24579,23837,1694,21199,43214,2037,29066,29117,29120,17479,10910,21422,8025,36268,43019,24857,13362,4526,7929,20285,2230,38905,47584,28327,1532,7644,25041,46467,45238,4168,2567,9991,35420,335,48988,46784,45748,27580,9114,3440,26398,4647,10628,45494,42947,29154,41903,4827,21510,11938,28786,30789,14536,26907,41012,47439,1658,7004,32936,14344,31499,10320,29638,47982,34199,23980,12500,44635,19049,23498,44926,7615,21991,44562,15341,27351,43720,43052,1866,27003,38134,28349,4561,49895,37438,23655,19952,12896,37805,22385,16461,43835,43746,19520,1937,48425,39849,48521,27180,27888,4057,28600,48896,17623,19315,26417,23474,43563,33004,17608,12134,32751,45963,8742,6874,39923,41743,42686,9444,15713,28130,6140,20837,26330,28995,27459,12480,47134,17354,12687,15862,22158,41318,9537,8223,35049,46973,37750,49938,42642,33578,8586,24914,31901,10260,28663,41203,39639,31660,33247,9245,7073,33738,16389,49041,48771,26476,13687,13261,10165,1766,27177,5878,9170,28651,20766,48650,15489,11432,10563,48525,39326,37759,28816,39874,13921,40627,6696,2554,35426,40866,27782,40251,14944,26490,10538,8580,26901,21225,47058,20016,1908,49939,42659,43435,19247,25043,2358,2103,31914,2481,33475,17256,15058,46754,34241,46583,16032,24596,3450,23535,43982,4595,24697,8229,43191,18379,9567,38850,3642,6862,43675,3115,20822,47202,25029,17442,43725,31168,2441,36636,45659,38467,22827,38726,36295,49090,47658,41570,12087,6541,2934,2599,37490,16591,27190,1201,1241,37730,9419,6657,28783,3088,40774,15209,16911,28141,5772,27588,5395,39993,37226,25586,39065,14220,41631,9218,27112,49931,4489,48682,43604,6705,3228,5611,1235,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 3 | 
1,35317,49641,47104,34766,15509,47711,32830,15500,11989,23667,12165,38782,12499,42804,32218,48054,8437,35934,5010,1470,11169,10024,23924,1424,26361,44599,12854,32971,2066,24834,21732,8355,40197,30114,19347,13979,9905,1211,23644,47215,49868,23684,48687,38238,3224,14069,7792,42332,4281,48520,46330,17017,10022,10133,49585,5722,5629,3886,38262,37506,31158,15891,14519,3404,39524,36542,20616,40505,40056,14676,5590,13327,42860,36546,36641,41686,22217,26899,12622,39402,41241,39820,26158,35966,12946,17984,4050,10403,22516,45268,38452,17621,49554,35344,5635,25105,19490,36021,47001,14501,39520,29779,43856,3301,16906,25145,44491,10567,9802,803,28849,5294,47632,1146,16207,20112,8438,43607,3119,26096,45606,23041,45647,19044,9249,15837,5707,1180,14114,13814,30893,37745,12871,1981,6484,14248,30511,14459,23653,37848,14918,27599,41104,36355,13413,14595,43478,29517,46642,1620,46437,46424,11055,258,26224,33031,44707,34806,37752,28839,39071,39865,29478,13686,20252,16656,35832,38048,36828,35246,1856,33721,33098,10441,3188,12190,10186,43744,41154,23430,15269,44550,36014,14924,8330,35772,39898,5065,43295,47014,5502,36543,12016,22611,20624,48262,49142,18216,8846,32128,5486,19452,33515,33826,20826,27723,19758,45688,20103,34255,13903,9779,47257,14429,31689,42977,47883,36968,8361,38376,7822,7024,153,39676,34819,15395,4305,37172,46339,5029,44007,33816,48211,30734,20785,25779,46845,17632,45132,14385,14152,37665,6780,3684,33037,22213,48351,36523,15734,42629,39790,22879,2167,28754,5547,5098,10175,32764,37471,4838,11988,7157,35112,46925,31129,24196,44847,4532,4074,46329,5832,7246,2711,46903,41501,38954,15186,33313,13865,2374,5517,4627,30054,18230,32641,22289,28386,11142,36123,4529,3082,18064,27283,25240,15989,3604,28736,42038,39456,16047,24870,43482,31071,21146,5025,18615,22085,24673,12379,6824,45155,39697,30920,42527,21872,3200,10220,22389,6888,12887,286,47276,22686,22,31672,44012,7754,5302,15726,35506,38156.0,1690.0,7967.0,48974.0,35087.0,33799.0,44612.0,8442.0,28169.0,47347.0,6686.0,30323.0,17554.0,38470.0,13335.0,42444.0,48888.0,27926.0,42780.0,36922.0,32469.0,16229.0,40730.0,17202.0,7285.0,12733.0,1997.0,40694.0,10623.0,1890.0,5325.0,4522.0,34446.0,21343.0,26973.0,16149.0,8817.0,847.0,5812.0,6538.0,4933.0,39408.0,30421.0,1900.0,24559.0,3138.0,23852.0,1218.0,36608.0,39077.0,26046.0,47170.0,1165.0,26910.0,38737.0,25868.0,9451.0,49912.0,43305.0,23132.0,14955.0,17627.0,41985.0,25937.0,48578.0,26317.0,37118.0,30825.0,84.0,21329.0,15237.0,31340.0,30366.0,25042.0,8542.0,30472.0,13981.0,48943.0,13982.0,5454.0,7306.0,36876.0,35693.0,26188.0,28642.0,33111.0,16607.0,10442.0,39011.0,14613.0,21361.0,16792.0,5795.0,13386.0,40500.0,8594.0,35210.0,26944.0,35315.0,15727.0,2077.0,48500.0,12128.0,15128.0,31563.0,17753.0,40415.0,12090.0,27924.0,33227.0,13851.0,16930.0,16479.0,21226.0,16042.0,49953.0,1323.0,27932.0,21483.0,29014.0,7453.0,30771.0,20600.0,33200.0,34957.0,31920.0,34535.0,17687.0,41977.0,8441.0,11755.0,26596.0,25946.0,37090.0,48226.0,37678.0,27654.0,36456.0,16396.0,29851.0,756.0,22543.0,28908.0,2991.0,890.0,22150.0,9608.0,41614.0,31008.0,49194.0,31385.0,47238.0,18778.0,47187.0,31252.0,43596.0,40970.0,16293.0,17280.0,26585.0,15857.0,4538.0,15150.0,42981.0,25375.0,17846.0,42213.0,1622.0,26610.0,12715.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4 | 
2,38003,5457,6867,23805,22939,3347,26704,44031,19878,14708,7731,42026,17311,38340,10340,26076,39104,7991,44618,12083,47328,46782,7643,34917,32662,18073,15900,30904,49006,2287,22084,9533,38179,17222,38783,32931,12916,11440,10343,10548,405,43288,31164,31481,21404,18386,40275,4929,13467,8272,33926,34037,23952,14754,46831,48516,47617,6719,24257,23822,26783,24695,19004,20319,25808,33857,25270,15988,3131,23936,21437,3505,42031,43371,42065,6915,22070,12766,16765,5149,44971,23648,39745,25881,44309,10824,47226,33126,16286,10479,11367,2249,17609,5102,10446,33206,10100,29261,31783,11767,29763,37382,26115,40046,833,5885,21604,27627,34906,29330,25625,27405,11421,9658,36237,26794,46643,35011,238,24539,7048,15883,34809,21448,42879,9095,13234,13462,20222,34588,12114,9889,4607,23921,2004,38150,15297,19440,28497,21115,31519,9659,22441,44238,42848,47353,30878,47903,2833,25626,48404,42687,18855,34181,48873,33575,21849,10959,17702,37485,21500,4183,13936,39478,39309,10389,7745,43763,44542,34553,29415,11991,41458,29943,22702,14252,18503,31272,1307,38573,15182,21859,16804,39096,5087,38365,11270,4378,31171,47593,2044,36682,30765,31496,21135,35841,19504,32943,28301,12584,7380,46596,25713,31094,2944,3997,48834,1121,15778,23197,29439,13201,4308,30983,46729,3302,8880,33101,35464,22695,44693,23761,17451,18497,42541,33955,27698,32264,678,24801,10565,32320,12332,20918,4310,34496,416,3813,8349,26573,4428,46855,27013,45949,5354,36043,34397,33159,23376,1109,47848,34623,43598,35130,18570,34197,19895,2565,22467,6332,19496,24262,7681,49306,1963,17046,3772,11476,24920,34751,82,16948,28875,10707,33066,13959,7043,45786,28170,39003,33365,10950,44525,41093,37719,30885,39292,5212,25458,25142,12213,12224,20823,38802,10970,8765,33370,19721,47399,1212,40591,44324,13815,33590,41665,42662,33167,33692,43553,47613,11845,34808,35760,30547,14377,13834,49475,14147,44629,49318,17193,28313,27491,8911,37963,7382,40146,26897,48360,25776,16492.0,18507.0,660.0,14612.0,4527.0,42846.0,19434.0,18845.0,11328.0,33818.0,25875.0,30016.0,4153.0,20899.0,4862.0,380.0,19182.0,16079.0,47949.0,12015.0,21258.0,32782.0,25418.0,33682.0,32863.0,32946.0,35842.0,23206.0,33765.0,37720.0,15556.0,44118.0,19007.0,5399.0,48704.0,21767.0,37498.0,32559.0,32427.0,39660.0,48077.0,22912.0,4997.0,49964.0,767.0,47899.0,41638.0,2089.0,26755.0,28090.0,44596.0,36952.0,16280.0,10760.0,8328.0,25390.0,2604.0,3853.0,45636.0,17971.0,5068.0,41566.0,6575.0,2081.0,31559.0,27666.0,29521.0,16098.0,20542.0,35053.0,28247.0,38839.0,36861.0,11191.0,9161.0,23469.0,35745.0,20036.0,48619.0,24176.0,39086.0,49874.0,16285.0,44329.0,27403.0,5728.0,25373.0,38397.0,42167.0,6804.0,40952.0,19970.0,2203.0,15902.0,37542.0,49135.0,41632.0,22572.0,17231.0,9784.0,48804.0,12597.0,12369.0,39197.0,10733.0,12374.0,38583.0,14212.0,44346.0,42671.0,49655.0,11397.0,49708.0,41528.0,32953.0,29292.0,245.0,12000.0,49929.0,29994.0,46772.0,15294.0,17776.0,4926.0,47546.0,6537.0,37846.0,20802.0,35742.0,25511.0,23180.0,29973.0,23516.0,13140.0,48057.0,34076.0,37231.0,17684.0,552.0,22566.0,39053.0,9821.0,10502.0,4903.0,2502.0,744.0,26420.0,46816.0,38203.0,17549.0,10687.0,14577.0,3208.0,49809.0,49529.0,21082.0,43748.0,44471.0,3914.0,771.0,31482.0,18323.0,17640.0,22927.0,46283.0,29872.0,2677.0,15942.0,30987.0,20028.0,19393.0,18258.0,31723.0,27600.0,44982.0,41185.0,31316.0,8980.0,36394.0,6051.0,1729.0,41795.0,5241.0,11502.0,15283.0,32282.0,29652.0,15547.0,41655.0,17532.0,20062.0,28528.0,20992.0,46455.0,31215.0,38962.0,10259.0,43902.0,23292.0,8907.0,21344.0,21340.0,24181.0,44105.0,36417.0,15970.0,11327.0,9627.0,11898.0,29928.0,18718.0
,42024.0,47193.0,23472.0,39298.0,35678.0,29328.0,16650.0,7437.0,1337.0,30035.0,4207.0,16239.0,34098.0,26608.0,30533.0,16509.0,7886.0,20762.0,38264.0,28239.0,16457.0,38607.0,23617.0,22023.0,37483.0,11863.0,36405.0,892.0,34718.0,39895.0,41429.0,11159.0,6598.0,13322.0,15239.0,21431.0,971.0,41008.0,12520.0,28599.0,18848.0,1245.0,19132.0,12544.0,47723.0,17073.0,41403.0,41225.0,40070.0,16697.0,47974.0,29708.0,11058.0,39612.0,38734.0,38371.0,3978.0,37722.0,1431.0,14934.0,38237.0,13720.0,48017.0,44810.0,10723.0,22140.0,8658.0,30901.0,34355.0,46171.0,39890.0,13889.0,7069.0,36794.0,49031.0,49461.0,19122.0,48276.0,17590.0,49580.0,39063.0,16142.0,39585.0,19831.0,32925.0,26674.0,34435.0,26655.0,39174.0,46744.0,38426.0,20944.0,24880.0,24718.0,27011.0,23023.0,40222.0,24987.0,23028.0,20676.0,40450.0,4457.0,6906.0,48503.0,17171.0,10287.0,27979.0,41173.0,1247.0,29604.0,5783.0,23749.0,16966.0,1028.0,49601.0,37242.0,10123.0,30702.0,2676.0,46986.0,39685.0,9705.0,38132.0,13379.0,553.0,10312.0,47121.0,35073.0,4416.0 5 | -------------------------------------------------------------------------------- /noniid/temp/FMNIST/FMNIST_quantity_noniid_users3_data500.csv: -------------------------------------------------------------------------------- 1 | ,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,60
5,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669 2 | 0,16482,18543,16462,13879,58970,34038,45831,8729,26288,59787,11683,5743,17560,47651,18243,12937,8924,48178,8743,23454,38849,31916,32164,35214,27341,43673,50126,39296,2698,48197,11228,34198,17722,33901,55343,49269,19434,30672,10634,52923,54622,55542,26388,48444,22005,18645,14385,54810,49397,43838,43445,46387,39681,40536,36639,55271,9619,685,51843,3252,26700,59006,29267,47093,26526,27820,7656,20500,8416,23851,27809,48521,16493,49516,58267,49713,6725,41577,31601,56581,44039,5215,8828,19703,13306,6225,36354,57292,13892,47015,29718,44883,51790,14596,59797,34711,26888,40838,30150,23445,20951,54540,40553,27948,19920,27667,10674,58700,29848,24045,9951,14364,21106,38327,20727,20210,46369,35946,25082,49416,2865,56590,9763,4025,14698,52032,57018,44439,46673,33507,8421,47111,34245,43387,28878,672,6359,28515,8993,23411,28850,29654,42800,22414,29938,34295,9718,47933,34636,17909,24641,39892,5557,48033,48159,36819,10881,2715,3624,33833,46340,49842,57178,45377,32394,46321,19525,32725,230,43717,36196,57342,35781,9574,20777,57286,50424,7114,37569,49261,26405,9921,51767,53784,29681,39678,6819,17110,18834,46920,44059,36055,48660,55094,42467,6463,9944,11006,57971,50685,12636,53585,6603,256,598,39761,40085,26497,5054,1142,34668,36022,29853,49963,22720,22388,44943,16423,1562,7366,35028,52539,48631,16623,50923,30833,44953,33725,37872,39329,52237,43301,34641,5744,46086,58454,18905,28787,52054,10830,18136,35896,49044,23145,46659,13720,17296,22682,30072,39061,52374,19416,34890,54645,23706,39413,44236,52659,16825,29495,29059,42332,48094,22145,41861,39962,57444,33918,49856,52236,2656,41547,12704,41339,35048,38757,33074,31088,14846,49499,52173,11237,49041,27593,44864,54594,37529,34774,27585,8082,43160,31316,9114,36246,37530,15821,58564,663,37538,52720,14322,11048,5083,28192,2546,8067,13434,646,20618,38208,54555,4437,39560,59313,33313,5634,25272,20953,24279,45704,17391,38630,15824,11235,48053,58982,16289,20596,59088,14118,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 3 | 
1,21175,45462,4704,49804,22624,51728,1228,28461,55076,48523,15099,25227,44310,21672,49099,57628,49315,20253,12830,27167,20962,10299,41294,48309,54221,9003,5465,16278,33277,15268,16927,1254,41297,3548,33943,715,42087,30920,59998,5137,5114,2639,27920,20030,9706,4,32668,712,9733,50810,52195,19508,36076,2989,6691,30050,36286,50983,22157,25054,58429,1365,24573,46242,18294,12721,7084,33001,2344,48969,9473,30765,56800,30014,31714,8103,40390,959,57437,53925,18388,27436,15020,826,15803,27252,44432,58849,10410,58847,30235,25904,5173,59321,9977,2892,11189,58523,34682,15597,17342,33432,13085,46448,48102,18791,53323,4147,29276,42996,3912,34303,19264,58781,1137,36775,22090,3313,25875,37329,22861,28218,52381,1188,49513,52067,49247,23100,52005,21961,46642,20115,39008,42146,17561,43197,57969,3914,13006,57348,40474,42435,59347,7119,9489,14943,44693,27243,4421,28443,47583,13656,22461,31583,28009,16161,54691,47076,277,17139,30609,44739,39321,3378,51882,7689,38234,31676,6240,37808,2275,42403,55135,56407,11454,56837,39098,46820,46493,44402,43631,17075,43290,36229,43978,24594,25370,34467,54529,57716,42424,35357,21685,50215,36105,45133,15673,51279,1400,23367,22949,26016,17186,15766,59675,51329,42618,4465,4587,7664,34624,12911,48740,30298,44869,2787,57310,24244,23436,34688,45814,32095,41645,36697,22209,53568,26366,30352,58777,25576,47782,32228,27233,57232,20370,29237,55507,58064,14180,39111,22460,54032,3588,54554,33634,52056,35382,31783,96,52765,21710,50659,10879,41424,21438,53881,21150,1903,28519,20427,43051,51691,59972,33867,52626,16686,40310,7625,15599,19861,927,21469,51071,32851,40183,16518,5711,36945,2323,54820,13481,25317,48539,51684,6517,17676,27172,24776,37996,46870,32435,435,7107,17625,22657,55212,50125,14291,30944,18473,56115,56071,8041,31450,45487,38521,34421,47212,52656,33268,36589,32591,45710,36526,31338,53657,25833,58129,23773,7397,58046,57634,53727,56808,21017,18117,23573,4662,23322,56219,19701.0,49746.0,5572.0,29549.0,48632.0,4404.0,7556.0,39180.0,48260.0,36731.0,24031.0,27895.0,54065.0,43325.0,53682.0,2934.0,16066.0,41701.0,41315.0,19357.0,10044.0,42345.0,1501.0,49040.0,17601.0,1960.0,50250.0,12805.0,10989.0,5091.0,28794.0,30146.0,14381.0,52918.0,36900.0,54560.0,57545.0,57612.0,40245.0,16944.0,20142.0,59694.0,4476.0,50132.0,53189.0,42873.0,8466.0,19468.0,51368.0,35761.0,26149.0,37024.0,37813.0,57274.0,24140.0,33078.0,37060.0,44284.0,28144.0,23362.0,55378.0,14250.0,57601.0,17538.0,26866.0,20503.0,10532.0,26647.0,58465.0,31921.0,36374.0,3141.0,17328.0,10052.0,26842.0,10649.0,15014.0,53817.0,16604.0,29849.0,4614.0,30303.0,58002.0,57294.0,39923.0,12689.0,8232.0,59159.0,38634.0,4229.0,21062.0,37136.0,53352.0,30687.0,40969.0,21492.0,44569.0,51864.0,48335.0,38491.0,37659.0,43740.0,14279.0,45527.0,5603.0,1909.0,15297.0,14926.0,14736.0,10287.0,17322.0,56608.0,57704.0,25662.0,48359.0,10003.0,39749.0,23730.0,32045.0,25148.0,17869.0,9398.0,6857.0,25458.0,3153.0,2907.0,55742.0,28093.0,12031.0,1531.0,1705.0,44316.0,18795.0,38709.0,23814.0,45357.0,4794.0,19410.0,44818.0,57324.0,18982.0,12862.0,8250.0,36191.0,30226.0,34569.0,37451.0,53553.0,38500.0,54777.0,15853.0,31892.0,54095.0,3752.0,59093.0,47805.0,49724.0,41225.0,24359.0,27124.0,45567.0,37756.0,52001.0,7178.0,17305.0,53630.0,11173.0,45384.0,5466.0,21202.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4 | 
2,45452,36537,49620,20379,19532,4585,8822,47233,47725,30291,22329,27327,46195,14186,40366,13176,43089,8427,36336,3674,8485,36991,41335,41397,40282,43703,33321,13674,30290,11131,16740,25885,18635,55911,15786,15085,25362,49043,43986,15114,12809,2994,46564,28889,57892,32083,38738,16594,12754,13191,6251,42599,54375,4210,47678,40693,33152,51220,53767,20555,51474,8428,11866,19268,51317,31945,20151,28363,29425,7044,19914,26865,29107,31550,18273,40052,22698,9655,8994,16941,39856,11607,38920,10154,52154,8712,39344,8038,14372,41374,7438,35118,30747,57946,58435,40027,1097,34405,12100,21420,58132,43231,43380,16926,35726,25221,52331,43698,54209,21954,37617,452,23710,39898,5822,9854,25821,13715,59528,59276,45529,12885,15974,40616,49691,52403,57510,50162,58426,6387,36208,8408,9901,6606,33913,28980,40569,46543,53985,49988,40136,41075,50955,6315,6149,47978,44698,2170,25902,8168,52266,3880,37826,50234,22704,54201,34836,20569,1044,43945,36020,15903,35181,16376,22834,22813,38724,10997,1834,49165,19834,59511,34232,45040,39579,55854,39089,16172,49074,10320,36530,33105,13954,49607,33499,59092,19816,14588,55947,55402,48052,1210,35561,33777,16095,17652,4720,40176,12493,2572,11133,34845,49384,54638,8509,53224,49076,47040,34234,1352,32572,52817,13539,7054,40430,9378,22530,34041,37858,14177,20021,39077,23852,15433,50491,25401,43707,16392,25483,31279,44252,19474,42968,34573,7691,39040,51022,42902,44691,7557,13717,21479,33310,29922,59209,39445,7562,48633,24482,24289,42563,27000,55789,25056,36241,9507,26557,23048,8784,17915,26632,38219,5395,34771,17635,41510,28628,42236,44248,57021,18066,27822,20934,33790,52691,39139,5664,47904,20917,28319,35568,5496,31938,16206,21903,15022,4545,731,3224,26970,44788,33029,5355,54216,24773,10435,4770,8518,42053,21379,35379,45659,51012,8526,15425,47083,10815,51577,44874,36130,9318,4091,24921,30325,42116,37392,33429,55643,41346,33917,55826,15781,194,22773,6630,43479,26372,50780,29636,50436,26536.0,46165.0,24327.0,45203.0,53902.0,6707.0,23323.0,33139.0,33553.0,22107.0,43862.0,41171.0,21374.0,21923.0,3115.0,37436.0,43480.0,33251.0,22919.0,20069.0,36171.0,23081.0,29899.0,50486.0,19519.0,43259.0,53215.0,50579.0,2486.0,18663.0,50904.0,39689.0,44307.0,4931.0,14337.0,40774.0,18649.0,1180.0,9755.0,31025.0,36271.0,43435.0,18191.0,12800.0,4812.0,47688.0,52788.0,53124.0,42869.0,36128.0,46511.0,39574.0,16021.0,9728.0,32463.0,56449.0,30768.0,22722.0,29748.0,21908.0,24674.0,49676.0,50290.0,15744.0,2481.0,41376.0,25669.0,25819.0,58268.0,44876.0,47023.0,58242.0,12193.0,19024.0,23985.0,8933.0,56177.0,4028.0,30730.0,2352.0,6291.0,24574.0,22804.0,54506.0,59384.0,16807.0,37425.0,24141.0,23102.0,36256.0,56.0,6760.0,22986.0,5425.0,50600.0,46181.0,43588.0,58251.0,12792.0,19299.0,30911.0,18792.0,39728.0,8612.0,56397.0,361.0,38176.0,58101.0,5347.0,51783.0,40133.0,58930.0,40438.0,53885.0,5705.0,30524.0,5779.0,57063.0,50865.0,737.0,2862.0,8948.0,36378.0,12035.0,51517.0,3875.0,8191.0,32327.0,13026.0,51382.0,44986.0,26587.0,2468.0,1415.0,53966.0,21488.0,9835.0,30229.0,1438.0,33730.0,632.0,54922.0,26872.0,25372.0,42911.0,2869.0,29130.0,23816.0,26326.0,20190.0,45679.0,26203.0,5756.0,33818.0,46829.0,30236.0,52386.0,16974.0,43801.0,49803.0,38697.0,46819.0,29986.0,54005.0,4379.0,4606.0,37609.0,59571.0,21329.0,40172.0,48502.0,4977.0,27754.0,6292.0,2022.0,14082.0,46810.0,7269.0,13997.0,40040.0,31754.0,19951.0,45148.0,32416.0,38118.0,25892.0,32039.0,21857.0,52464.0,51863.0,8740.0,19733.0,3971.0,2587.0,56972.0,29099.0,53280.0,8341.0,6627.0,19550.0,3582.0,34729.0,42593.0,24151.0,47049.0,27309.0,22547.0,52836.0,54071.0,43956.0,39
492.0,24700.0,2277.0,50568.0,50866.0,4472.0,25307.0,11178.0,25164.0,1201.0,23425.0,26677.0,7947.0,26363.0,1618.0,3601.0,56085.0,671.0,12021.0,37138.0,23024.0,9642.0,26805.0,23266.0,59994.0,22528.0,25813.0,42158.0,12804.0,55219.0,52911.0,55803.0,46468.0,55816.0,32214.0,769.0,7685.0,21383.0,30357.0,32161.0,38520.0,30699.0,25433.0,51434.0,25570.0,50922.0,48967.0,2763.0,21773.0,17286.0,50937.0,24768.0,14447.0,313.0,41987.0,8670.0,49141.0,11316.0,39169.0,6827.0,51515.0,3065.0,43314.0,32403.0,51546.0,57025.0,46045.0,18899.0,6991.0,50684.0,15868.0,26748.0,9600.0,44830.0,53361.0,56438.0,49882.0,34657.0,35064.0,49104.0,56555.0,4542.0,9726.0,32884.0,35469.0,17092.0,59563.0,43124.0,46414.0,26111.0,55726.0,3232.0,21542.0,30082.0,54348.0,22959.0,25717.0,22092.0,59059.0,49420.0,27037.0,11872.0,8961.0,21314.0,1163.0,55449.0,35603.0,57970.0,7287.0,10865.0,47014.0,48055.0,27715.0,15894.0,4802.0,8001.0,56715.0,4986.0,24881.0,722.0,35428.0,52651.0,50516.0,51204.0,10840.0,41728.0,30608.0,6807.0,37294.0,17928.0 5 | -------------------------------------------------------------------------------- /noniid/temp/MNIST/MNIST_quantity_noniid_users3_data500.csv: -------------------------------------------------------------------------------- 1 | ,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606
,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669 2 | 0,26040,54690,17169,50534,14229,54411,30929,18665,58943,50949,15530,9032,17223,51753,6630,14445,24371,54502,9965,8515,23557,43685,17828,7372,12666,41374,53360,6676,26408,50444,44045,37901,54232,2134,16267,36098,43583,47022,4708,48851,5864,18541,32859,17618,57101,52906,21818,35900,28906,8491,34566,36273,24970,36288,32054,22343,9277,40769,42769,56865,57436,34585,25549,52715,50572,11895,44639,43590,3054,23364,3307,53017,5247,30076,989,34495,43314,21668,58705,6289,35134,40812,16198,2877,30802,2332,15093,36694,33698,40617,57876,56752,50211,37588,37083,22026,9218,29662,56222,40752,9253,17832,23221,58758,38991,57173,56754,48700,32421,15701,98,25094,43853,11039,10608,34140,38549,52404,24870,51675,25208,35165,51116,15080,32691,19601,28391,38755,21853,55190,14447,52917,3261,22472,8261,5590,23641,59210,34808,10396,33679,914,11495,38093,39954,41580,4346,47744,40402,24295,29372,42710,34354,36897,42181,58938,44167,23549,46460,58417,10937,38654,926,48961,26704,26868,17159,19638,53732,13749,7475,4908,10244,12940,29029,55838,48586,1263,31495,39629,32217,38762,18090,51392,7081,51086,49107,12962,18808,49783,34365,4622,23736,15996,56327,16553,35727,57132,22936,1781,30063,32265,48169,41500,7991,5365,39792,51993,22277,48262,28756,49344,52076,12541,5121,11959,22709,31110,42450,14610,5990,9049,19105,38299,26126,18302,32230,39508,57675,46750,51676,23248,12408,7959,5473,5959,15690,13566,15106,3915,25011,1075,52633,45570,55802,51203,51204,51460,49666,44911,36157,8137,13540,26080,52824,24040,41405,50162,16169,16369,2018,26500,38783,13479,10866,44196,50512,48192,50283,11120,16631,55122,54056,44308,33719,31600,50740,7606,45351,32573,18901,53737,50272,37106,42453,56557,13327,352,25545,53286,21589,58334,15267,5389,43745,1760,47675,9817,20519,13996,47870,29234,1112,48225,44361,13270,56156,50144,34786,1940,55569,45449,47909,39096,25900,27909,9191,53317,4989,24313,54050,49489,26845,6542,54843,48713,2728,21297,32970,7006,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 3 | 
1,11162,32162,19180,5773,37323,59105,8249,38543,33464,59547,26389,15318,53897,46582,15883,42277,56984,14574,26853,41810,36937,49952,52126,21526,13853,2935,16429,30473,14282,23579,39494,48719,13077,29736,12869,33111,47078,59784,16412,12414,32746,10974,40108,49389,16543,23277,28105,35067,28627,36797,57268,29518,19674,56768,20143,11404,38840,30471,33924,51390,18790,593,28736,59849,52624,11043,29173,19305,25800,36864,20279,58727,6108,56779,1003,34467,46012,41193,43704,33394,47817,16742,54876,4685,51035,19804,27091,14998,52675,36071,32543,57969,36145,3230,22632,77,24397,41685,52042,22850,35279,9721,33938,33296,32825,52370,49349,14932,47862,14515,21630,15403,16089,38127,49856,6478,24786,36292,31665,38010,15123,44490,2879,55808,11006,10207,5345,4843,45906,42092,11095,18930,44424,28937,49282,30336,46951,21024,15688,12559,53862,54457,14465,54169,20160,49454,59009,10714,35785,33691,18805,35876,22307,53815,42434,27362,36421,17745,49467,47628,39595,24231,1836,29823,46588,37021,58086,59691,11012,44,17227,21726,16657,4579,57402,14454,37928,31745,21795,878,45801,15109,22002,22767,30747,59961,21643,4223,29313,58406,46187,43711,540,29528,50982,44426,15815,54020,24850,9670,11930,48103,6484,37736,13241,10303,50014,25490,14637,48953,30773,23272,18490,34570,59933,13419,8943,19059,19033,54347,17492,16851,52128,16051,51225,21131,26738,4205,10967,828,11255,25635,11956,21738,14616,30745,19378,3686,51723,35078,22694,38402,42198,50486,8298,8944,39117,51312,44379,9634,16737,20799,9340,48046,21677,22205,6529,35274,41693,25973,48777,47060,10884,9397,10111,30262,51416,37249,2758,27317,2282,42983,19429,145,2080,15024,25654,46366,12232,56838,19529,12210,41926,27127,21548,33603,1509,35207,38678,49820,39539,33288,19422,2182,56032,30697,2698,4515,18123,48561,30577,15441,43804,28830,41262,34748,28386,56811,13389,28572,47288,13696,49255,34271,7847,22435,8590,4214,56746,24438,19532,41129,58506,22282,10709,5026,39036,51410,25611,36176,23790.0,40351.0,51681.0,49019.0,56678.0,19272.0,42490.0,50004.0,21349.0,58835.0,53969.0,45523.0,22559.0,22240.0,34960.0,45986.0,47404.0,48328.0,26697.0,7504.0,53227.0,37994.0,54536.0,51935.0,51452.0,56822.0,32571.0,19703.0,48593.0,21123.0,36134.0,24477.0,19050.0,59925.0,263.0,24518.0,18757.0,3804.0,30973.0,57078.0,23802.0,15256.0,12979.0,4130.0,19511.0,25133.0,6878.0,44971.0,4571.0,44818.0,12428.0,41872.0,28236.0,24364.0,43774.0,54561.0,2115.0,22167.0,17718.0,17373.0,38220.0,39399.0,27722.0,45776.0,19800.0,3515.0,27057.0,14832.0,19262.0,9676.0,20025.0,57984.0,44115.0,47728.0,48162.0,45016.0,3028.0,25849.0,31781.0,9364.0,39478.0,40296.0,28023.0,18884.0,37324.0,24889.0,4370.0,1804.0,34455.0,17290.0,7872.0,42944.0,31816.0,7617.0,51645.0,38849.0,14697.0,15707.0,18523.0,42661.0,3619.0,26569.0,26085.0,9925.0,36192.0,9970.0,11998.0,32968.0,54136.0,22965.0,50048.0,54909.0,58107.0,9861.0,46368.0,27508.0,37956.0,43754.0,6093.0,5248.0,6838.0,50041.0,601.0,10305.0,4831.0,41502.0,5572.0,36497.0,24300.0,53682.0,53976.0,29426.0,34903.0,28889.0,51032.0,39481.0,51597.0,17002.0,22223.0,47863.0,34332.0,18797.0,1305.0,54548.0,21072.0,27779.0,22334.0,24305.0,58519.0,55970.0,42728.0,7620.0,38493.0,54508.0,8356.0,27605.0,52232.0,27848.0,20450.0,55978.0,6234.0,51581.0,33961.0,19393.0,15746.0,6577.0,41903.0,110.0,43502.0,9950.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4 | 
2,33465,54942,25304,4776,28108,57769,36549,29869,59605,9476,21933,29405,119,42559,39093,18892,51892,2817,19004,50617,52318,55541,28456,32207,18301,5317,17095,2084,21735,42664,50036,56347,57027,27735,59450,23988,59165,4203,21576,24698,13054,16285,14384,3698,45035,14897,45021,59203,53404,9869,41156,54941,8309,37092,37738,31180,38057,38126,54812,9857,33496,23679,20174,40340,9406,39498,14402,40314,12918,31377,10468,11205,50682,34156,12071,39057,26406,10907,14133,33417,49612,27489,24431,19461,51051,1405,50062,59052,9906,36353,22478,21060,51544,29033,47993,57094,15555,53273,47118,15105,38522,55846,28387,40878,49840,48645,59033,2552,50150,2123,50158,26174,15536,39555,10301,43044,12013,32406,32440,59112,23295,276,53627,31871,30512,12632,52136,46984,41360,51876,54293,51493,40779,14900,3077,56957,32148,10429,10200,12366,48332,6773,7259,5307,37630,53026,49642,19508,33414,27232,57694,21566,109,54513,34056,16408,45094,45543,18139,40155,26690,4476,11298,15712,50962,8390,47529,3112,36100,22990,49643,28388,30054,39810,46056,39153,4414,17973,53322,375,53709,7780,24484,22070,57163,37504,9850,40513,50221,18632,57976,47842,26630,48279,14978,59476,13762,33184,52801,13159,8211,7980,5434,53283,43552,52455,10902,1924,23287,9420,59375,54622,54296,17699,55301,21468,32114,53514,50141,4898,7613,30090,20553,56515,54704,36684,11201,25820,17988,22492,28157,45108,10238,38495,59308,35346,51909,48702,34259,58729,27988,38752,58878,56817,1244,57399,22126,40636,32194,16676,2796,17010,22989,58249,51998,21035,54499,45473,37729,7367,47367,37174,47653,49234,28006,25601,4512,11021,57779,40721,22161,22747,1692,5792,54814,47732,12543,33106,4423,14257,20069,13861,17862,5482,16551,55983,49733,4896,43732,52855,34406,25313,58843,24514,8747,59951,11184,49897,29747,14351,15682,21935,58504,37272,58482,24019,49552,42895,50149,14397,1030,22518,30851,25170,36025,43137,17576,31984,39396,19138,1411,35830,50777,59077,42519,14449,412,56620,20665,49754,19599.0,48785.0,5278.0,54650.0,46877.0,48850.0,50111.0,38365.0,52388.0,11864.0,51551.0,21481.0,10401.0,49062.0,748.0,27099.0,55108.0,44390.0,27154.0,43638.0,53695.0,51385.0,57311.0,480.0,43199.0,33733.0,23146.0,55212.0,3275.0,32815.0,33575.0,8260.0,37006.0,32547.0,48826.0,50570.0,11345.0,50166.0,46733.0,30928.0,49360.0,57106.0,39625.0,36787.0,35495.0,57662.0,25726.0,18298.0,42387.0,21873.0,59906.0,48637.0,6018.0,3695.0,53617.0,49726.0,9945.0,22541.0,33404.0,46108.0,32686.0,48950.0,15685.0,37750.0,9159.0,14106.0,27259.0,5211.0,35609.0,3085.0,37820.0,16358.0,623.0,38032.0,57802.0,7224.0,43744.0,51402.0,34124.0,15431.0,26418.0,22298.0,15234.0,8636.0,55382.0,20978.0,16273.0,24774.0,50938.0,11490.0,7544.0,27369.0,22068.0,9000.0,58766.0,1493.0,24474.0,6705.0,56845.0,16065.0,10600.0,48276.0,16493.0,34413.0,7711.0,40775.0,218.0,31736.0,37277.0,22009.0,48116.0,31175.0,40085.0,2680.0,28708.0,2947.0,22806.0,44204.0,8312.0,841.0,15043.0,14405.0,47807.0,21407.0,3485.0,6693.0,26393.0,2487.0,44809.0,59503.0,34284.0,37831.0,3229.0,38936.0,977.0,8357.0,55327.0,33916.0,48918.0,26933.0,46265.0,7099.0,12769.0,38575.0,12597.0,46688.0,33307.0,14280.0,45652.0,27220.0,9908.0,39314.0,20385.0,51198.0,730.0,18346.0,33681.0,223.0,39026.0,44239.0,35553.0,5897.0,30307.0,7994.0,59435.0,52779.0,5448.0,56287.0,41161.0,43238.0,48271.0,49755.0,25998.0,4784.0,30946.0,17630.0,41725.0,14448.0,50553.0,18876.0,29373.0,54256.0,43346.0,38836.0,3209.0,53213.0,18333.0,1422.0,39524.0,48166.0,1636.0,18855.0,48709.0,10392.0,12031.0,13053.0,49507.0,17963.0,36873.0,40114.0,258.0,15168.0,8949.0,46031.0,9052.0,6986.0,16964.0,42104.0,16322.0,13354.0,3
4669.0,10726.0,55153.0,20614.0,55146.0,54040.0,35617.0,23161.0,26743.0,18685.0,59622.0,25975.0,45859.0,34257.0,29722.0,51969.0,45109.0,21549.0,42079.0,12616.0,25276.0,17654.0,35253.0,44221.0,29989.0,12335.0,58776.0,59763.0,58178.0,10444.0,3658.0,50532.0,18378.0,14189.0,23833.0,3501.0,47857.0,28136.0,15664.0,28959.0,3816.0,16000.0,51899.0,53520.0,14664.0,59967.0,34313.0,43749.0,55015.0,18130.0,3158.0,14406.0,6957.0,54423.0,50505.0,4521.0,34533.0,19557.0,31060.0,1013.0,27927.0,49295.0,45891.0,22684.0,17570.0,5803.0,16776.0,13049.0,37268.0,50816.0,52270.0,58187.0,40126.0,3055.0,54571.0,18277.0,41603.0,25248.0,25532.0,45527.0,46655.0,45352.0,34301.0,48708.0,4329.0,18700.0,35273.0,46206.0,5614.0,30491.0,53735.0,49983.0,6535.0,11186.0,4824.0,39247.0,30395.0,36102.0,10810.0,36973.0,13742.0,54860.0,33061.0,54076.0,38020.0,29861.0,31312.0,8558.0,38747.0,52406.0,38395.0,41279.0,19766.0,50245.0,1949.0,33336.0,10848.0,43862.0,46008.0,31693.0,18401.0,36771.0,8238.0,35506.0,4830.0,3389.0,26839.0,5964.0,41503.0,31225.0 5 | --------------------------------------------------------------------------------
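Each row i of a *_noniid_users3_data*.csv file above lists the raw training-set indices assigned to worker i; pandas pads the shorter rows with empty cells, which is why later entries round-trip as floats such as 38156.0. A minimal sketch of reading one row back into a PyTorch Subset (the helper and the CIFAR-10 usage are hypothetical; the repository's own loading logic lives in noniid/setting.py and noniid/file_flow.py, which are not shown here):

import pandas as pd
from torch.utils.data import Subset, DataLoader
from torchvision import datasets, transforms

def load_user_indices(csv_path, user_id):
    """Read one worker's sample indices from a noniid split CSV."""
    table = pd.read_csv(csv_path, index_col=0)
    row = table.loc[user_id].dropna()   # drop the padding cells
    return [int(i) for i in row]        # floats like 38156.0 -> 38156

# hypothetical usage for worker 0 on CIFAR-10
indices = load_user_indices(
    "noniid/temp/Cifar/Cifar_quantity_noniid_users3_data500.csv", user_id=0)
train_set = datasets.CIFAR10("./data", train=True, download=True,
                             transform=transforms.ToTensor())
loader = DataLoader(Subset(train_set, indices), batch_size=32, shuffle=True)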