├── .DS_Store ├── DRLAgent └── README.md ├── ICDCS-TC ├── DNC.py ├── DNC_PPO.py ├── PPO.py ├── Readme.txt ├── continuousEnv.py └── main.py ├── INFOCOM2020 └── Readme ├── IPDPS2020 ├── .DS_Store ├── DNC.py ├── DNC_PPO.py ├── Dataset │ ├── report_bicycle_0001.log │ ├── report_bicycle_0002.log │ ├── report_bus_0001.log │ ├── report_bus_0002.log │ ├── report_bus_0003.log │ ├── report_bus_0004.log │ ├── report_bus_0005.log │ ├── report_bus_0006.log │ ├── report_bus_0007.log │ ├── report_bus_0008.log │ ├── report_bus_0009.log │ ├── report_bus_0010.log │ ├── report_bus_0011.log │ ├── report_car_0001.log │ ├── report_car_0002.log │ ├── report_car_0003.log │ ├── report_car_0004.log │ ├── report_car_0005.log │ ├── report_car_0006.log │ ├── report_car_0007.log │ ├── report_car_0008.log │ ├── report_foot_0001.log │ ├── report_foot_0002.log │ ├── report_foot_0003.log │ ├── report_foot_0004.log │ ├── report_foot_0005.log │ ├── report_foot_0006.log │ ├── report_foot_0007.log │ ├── report_foot_0008.log │ ├── report_train_0001.log │ ├── report_train_0002.log │ ├── report_train_0003.log │ ├── report_tram_0001.log │ ├── report_tram_0002.log │ ├── report_tram_0003.log │ ├── report_tram_0004.log │ ├── report_tram_0005.log │ ├── report_tram_0006.log │ ├── report_tram_0007.log │ └── report_tram_0008.log ├── PPO.py ├── README.md ├── continuousEnv.py ├── main.py ├── preprocess.ipynb ├── process_data.ipynb ├── reward.png ├── test.py ├── train.ipynb ├── train.py └── train200.py └── README.md /.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitzj2015/DRL-Networking/f42184639c4e54951e8919a04a4d617d1cdd21ea/.DS_Store -------------------------------------------------------------------------------- /DRLAgent/README.md: -------------------------------------------------------------------------------- 1 | ***Ongoing*** 2 | 3 | This file will include popular deep reinforcement learning algorithms for future usage. 
4 | -------------------------------------------------------------------------------- /ICDCS-TC/DNC.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | import numpy as np 3 | import os 4 | 5 | 6 | class DNC: 7 | def __init__(self, input_size, output_size, seq_len, num_words=256, word_size=64, num_heads=4): 8 | # define data 9 | # input data - [[1 0] [0 1] [0 0] [0 0]] 10 | self.input_size = input_size # X 11 | # output data [[0 0] [0 0] [1 0] [0 1]] 12 | self.output_size = output_size # Y 13 | 14 | # define read + write vector size 15 | # 10 16 | self.num_words = num_words # N 17 | # 4 characters 18 | self.word_size = word_size # W 19 | 20 | # define number of read+write heads 21 | # we could have multiple, but just 1 for simplicity 22 | self.num_heads = num_heads # R 23 | 24 | # size of output vector from controller that defines interactions with memory matrix 25 | self.interface_size = num_heads * word_size + 3 * word_size + 5 * num_heads + 3 26 | 27 | # the actual size of the neural network input after flatenning and 28 | # concatenating the input vector with the previously read vctors from memory 29 | self.controller_input_size = num_heads * word_size + input_size 30 | 31 | # size of output 32 | self.controller_output_size = output_size + self.interface_size 33 | 34 | # gaussian normal distribution for both outputs 35 | self.controller_out = tf.truncated_normal( 36 | [1, self.output_size], stddev=0.1) 37 | self.interface_vec = tf.truncated_normal( 38 | [1, self.interface_size], stddev=0.1) 39 | 40 | # Create memory matrix 41 | self.mem_mat = tf.zeros([num_words, word_size]) # N*W 42 | 43 | # other variables 44 | # The usage vector records which locations have been used so far, 45 | self.usage_vec = tf.fill([num_words, 1], 1e-6) # N*1 46 | # a temporal link matrix records the order in which locations were written; 47 | self.link_mat = tf.zeros([num_words, num_words]) # N*N 48 | # represents degrees to which last location was written to 49 | self.precedence_weight = tf.zeros([num_words, 1]) # N*1 50 | 51 | # Read and write head variables 52 | self.read_weights = tf.fill([num_words, num_heads], 1e-6) # N*R 53 | self.write_weights = tf.fill([num_words, 1], 1e-6) # N*1 54 | self.read_vecs = tf.fill([num_heads, word_size], 1e-6) # R*W 55 | 56 | # NETWORK VARIABLES 57 | # gateways into the computation graph for input output pairs 58 | # self.i_data = tf.placeholder( 59 | # tf.float32, [seq_len * 2, self.input_size], name='input_node') 60 | # self.o_data = tf.placeholder( 61 | # tf.float32, [seq_len * 2, self.output_size], name='output_node') 62 | 63 | # 2 layer feedforwarded network 64 | self.W1 = tf.Variable(tf.truncated_normal( 65 | [self.controller_input_size, 64], stddev=0.1), name='layer1_weights', dtype=tf.float32) 66 | self.b1 = tf.Variable( 67 | tf.zeros([64]), name='layer1_bias', dtype=tf.float32) 68 | self.W2 = tf.Variable(tf.truncated_normal( 69 | [64, self.controller_output_size], stddev=0.1), name='layer2_weights', dtype=tf.float32) 70 | self.b2 = tf.Variable( 71 | tf.zeros([self.controller_output_size]), name='layer2_bias', dtype=tf.float32) 72 | 73 | # DNC OUTPUT WEIGHTS 74 | self.controller_out_weights = tf.Variable(tf.truncated_normal( 75 | [self.controller_output_size, self.output_size], stddev=0.1), name='net_output_weights') 76 | self.interface_weights = tf.Variable(tf.truncated_normal( 77 | [self.controller_output_size, self.interface_size], stddev=0.1), name='interface_weights') 78 | 79 | 
self.read_vecs_out_weight = tf.Variable(tf.truncated_normal( 80 | [self.num_heads * self.word_size, self.output_size], stddev=0.1), name='read_vector_weights') 81 | 82 | # 3 attention mechanisms for read/writes to memory 83 | 84 | # 1 85 | # a key vector emitted by the controller is compared to the 86 | # content of each location in memory according to a similarity measure 87 | # The similarity scores determine a weighting that can be used by the read heads 88 | # for associative recall1 or by the write head to modify an existing vector in memory. 89 | def content_lookup(self, key, str): 90 | # The l2 norm of a vector is the square root of the sum of the 91 | # absolute values squared 92 | norm_mem = tf.nn.l2_normalize(self.mem_mat, 1) # N*W 93 | norm_key = tf.nn.l2_normalize(key, 0) # 1*W for write or R*W for read 94 | # get similarity measure between both vectors, transpose before multiplicaiton 95 | # (N*W,W*1)->N*1 for write 96 | #(N*W,W*R)->N*R for read 97 | sim = tf.matmul(norm_mem, norm_key, transpose_b=True) 98 | #str is 1*1 or 1*R 99 | # returns similarity measure 100 | return tf.nn.softmax(sim * str, 0) # N*1 or N*R 101 | 102 | # 2 103 | # retreives the writing allocation weighting based on the usage free list 104 | # The ‘usage’ of each location is represented as a number between 0 and 1, 105 | # and a weighting that picks out unused locations is delivered to the write head. 106 | 107 | # independent of the size and contents of the memory, meaning that 108 | # DNCs can be trained to solve a task using one size of memory and later 109 | # upgraded to a larger memory without retraining 110 | def allocation_weighting(self): 111 | # sorted usage - the usage vector sorted ascndingly 112 | # the original indices of the sorted usage vector 113 | sorted_usage_vec, free_list = tf.nn.top_k( 114 | -1 * self.usage_vec, k=self.num_words) 115 | sorted_usage_vec *= -1 116 | cumprod = tf.cumprod(sorted_usage_vec, axis=0, exclusive=True) 117 | unorder = (1 - sorted_usage_vec) * cumprod 118 | 119 | alloc_weights = tf.zeros([self.num_words]) 120 | I = tf.constant(np.identity(self.num_words, dtype=np.float32)) 121 | 122 | # for each usage vec 123 | for pos, idx in enumerate(tf.unstack(free_list[0])): 124 | # flatten 125 | m = tf.squeeze(tf.slice(I, [idx, 0], [1, -1])) 126 | # add to weight matrix 127 | alloc_weights += m * unorder[0, pos] 128 | # the allocation weighting for each row in memory 129 | return tf.reshape(alloc_weights, [self.num_words, 1]) 130 | 131 | # at every time step the controller receives input vector from dataset and emits output vector. 132 | # it also recieves a set of read vectors from the memory matrix at the previous time step via 133 | # the read heads. 
then it emits an interface vector that defines its interactions with the memory 134 | # at the current time step 135 | def step_m(self, x): 136 | 137 | # reshape input 138 | input = tf.concat( 139 | [x, tf.reshape(self.read_vecs, [1, self.num_heads * self.word_size])], 1) 140 | 141 | # forward propagation 142 | l1_out = tf.matmul(input, self.W1) + self.b1 143 | l1_act = tf.nn.tanh(l1_out) 144 | l2_out = tf.matmul(l1_act, self.W2) + self.b2 145 | l2_act = tf.nn.tanh(l2_out) 146 | 147 | # output vector 148 | # (1*eta+Y, eta+Y*Y)->(1*Y) 149 | self.controller_out = tf.matmul(l2_act, self.controller_out_weights) 150 | # interaction vector - how to interact with memory 151 | # (1*eta+Y, eta+Y*eta)->(1*eta) 152 | self.interface_vec = tf.matmul(l2_act, self.interface_weights) 153 | 154 | partition = tf.constant([[0] * (self.num_heads * self.word_size) + [1] * (self.num_heads) + [2] * (self.word_size) + [3] + 155 | [4] * (self.word_size) + [5] * (self.word_size) + 156 | [6] * (self.num_heads) + [7] + [8] + [9] * (self.num_heads * 3)], dtype=tf.int32) 157 | 158 | # convert interface vector into a set of read write vectors 159 | # using tf.dynamic_partitions(Partitions interface_vec into 10 tensors using indices from partition) 160 | (read_keys, read_str, write_key, write_str, 161 | erase_vec, write_vec, free_gates, alloc_gate, write_gate, read_modes) = \ 162 | tf.dynamic_partition(self.interface_vec, partition, 10) 163 | 164 | # read vectors 165 | read_keys = tf.reshape( 166 | read_keys, [self.num_heads, self.word_size]) # R*W 167 | read_str = 1 + tf.nn.softplus(tf.expand_dims(read_str, 0)) # 1*R 168 | 169 | # write vectors 170 | write_key = tf.expand_dims(write_key, 0) # 1*W 171 | # help init our write weights 172 | write_str = 1 + tf.nn.softplus(tf.expand_dims(write_str, 0)) # 1*1 173 | erase_vec = tf.nn.sigmoid(tf.expand_dims(erase_vec, 0)) # 1*W 174 | write_vec = tf.expand_dims(write_vec, 0) # 1*W 175 | 176 | # the degree to which locations at read heads will be freed 177 | free_gates = tf.nn.sigmoid(tf.expand_dims(free_gates, 0)) # 1*R 178 | # the fraction of writing that is being allocated in a new location 179 | alloc_gate = tf.nn.sigmoid(alloc_gate) # 1 180 | # the amount of information to be written to memory 181 | write_gate = tf.nn.sigmoid(write_gate) # 1 182 | # the softmax distribution between the three read modes (backward, forward, lookup) 183 | # The read heads can use gates called read modes to switch between content lookup 184 | # using a read key and reading out locations either forwards or backwards 185 | # in the order they were written. 186 | read_modes = tf.nn.softmax(tf.reshape( 187 | read_modes, [3, self.num_heads])) # 3*R 188 | 189 | # used to calculate usage vector, what's available to write to? 190 | retention_vec = tf.reduce_prod( 191 | 1 - free_gates * self.read_weights, reduction_indices=1) 192 | # used to dynamically allocate memory 193 | self.usage_vec = (self.usage_vec + self.write_weights - 194 | self.usage_vec * self.write_weights) * retention_vec 195 | 196 | # retreives the writing allocation weighting 197 | alloc_weights = self.allocation_weighting() # N*1 198 | # where to write to?? 199 | write_lookup_weights = self.content_lookup(write_key, write_str) # N*1 200 | # define our write weights now that we know how much space to allocate for them and where to write to 201 | self.write_weights = write_gate * \ 202 | (alloc_gate * alloc_weights + (1 - alloc_gate) * write_lookup_weights) 203 | 204 | # write erase, then write to memory! 
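# The update on the next line is the DNC memory write (erase, then add):
#   M_t = M_{t-1} ∘ (E - w_t^w e_t^T) + w_t^w v_t^T
# where w_t^w is the write weighting (N*1), e_t the erase vector (1*W), v_t the
# write vector (1*W), E the all-ones N*W matrix, and ∘ the element-wise product.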
205 | self.mem_mat = self.mem_mat * (1 - tf.matmul(self.write_weights, erase_vec)) + \ 206 | tf.matmul(self.write_weights, write_vec) 207 | 208 | # As well as writing, the controller can read from multiple locations in memory. 209 | # Memory can be searched based on the content of each location, or the associative 210 | # temporal links can be followed forward and backward to recall information written 211 | # in sequence or in reverse. (3rd attention mechanism) 212 | 213 | # updates and returns the temporal link matrix for the latest write 214 | # given the precedence vector and the link matrix from previous step 215 | nnweight_vec = tf.matmul( 216 | self.write_weights, tf.ones([1, self.num_words])) # N*N 217 | self.link_mat = (1 - nnweight_vec - tf.transpose(nnweight_vec)) * self.link_mat + \ 218 | tf.matmul(self.write_weights, 219 | self.precedence_weight, transpose_b=True) 220 | self.link_mat *= tf.ones([self.num_words, self.num_words]) - \ 221 | tf.constant(np.identity(self.num_words, dtype=np.float32)) 222 | 223 | self.precedence_weight = (1 - tf.reduce_sum(self.write_weights, reduction_indices=0)) * \ 224 | self.precedence_weight + self.write_weights 225 | # 3 modes - forward, backward, content lookup 226 | # (N*N,N*R)->N*R 227 | forw_w = read_modes[2] * tf.matmul(self.link_mat, self.read_weights) 228 | look_w = read_modes[1] * \ 229 | self.content_lookup(read_keys, read_str) # N*R 230 | back_w = read_modes[0] * tf.matmul(self.link_mat, 231 | self.read_weights, transpose_a=True) # N*R 232 | 233 | # use them to intiialize read weights 234 | self.read_weights = back_w + look_w + forw_w # N*R 235 | # create read vectors by applying read weights to memory matrix 236 | self.read_vecs = tf.transpose(tf.matmul( 237 | self.mem_mat, self.read_weights, transpose_a=True)) # (W*N,N*R)^T->R*W 238 | 239 | # multiply them together 240 | read_vec_mut = tf.matmul(tf.reshape(self.read_vecs, [1, self.num_heads * self.word_size]), 241 | self.read_vecs_out_weight) # (1*RW, RW*Y)-> (1*Y) 242 | 243 | # return output + read vecs product 244 | return self.controller_out + read_vec_mut 245 | 246 | # output list of numbers (one hot encoded) by running the step function 247 | def run(self, input_data): 248 | big_out = [] 249 | if np.shape(input_data)[0] > 0: 250 | for t, seq in enumerate(tf.unstack(input_data, axis=0)): 251 | seq = tf.expand_dims(seq, 0) 252 | y = self.step_m(seq) 253 | big_out.append(y) 254 | else: 255 | seq = input_data[0] 256 | seq = tf.expand_dims(seq, 0) 257 | y = self.step_m(seq) 258 | print("kkk", np.shape(y)) 259 | big_out.append(y) 260 | return tf.stack(big_out, axis=0) 261 | -------------------------------------------------------------------------------- /ICDCS-TC/DNC_PPO.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | import tensorflow.contrib.rnn as rnn 3 | import numpy as np 4 | from collections import deque 5 | import random 6 | from DNC import DNC 7 | 8 | BELTA = 0.0003 9 | METHOD = [ 10 | dict(name='kl_pen', kl_target=0.01, lam=0.5), # KL penalty 11 | # Clipped surrogate objective, find this is better 12 | dict(name='clip', epsilon=0.1), 13 | dict(name='a2c', epsilon=0) 14 | ][1] # choose the method for optimization 15 | 16 | 17 | class PPO(object): 18 | replay_memory = deque() 19 | memory_size = 100 20 | 21 | def __init__(self, S_DIM, A_DIM, BATCH, A_UPDATE_STEPS, C_UPDATE_STEPS, HAVE_TRAIN, num): # num是什么意思 22 | self.sess = tf.Session() 23 | self.tfs = tf.placeholder(tf.float32, [None, S_DIM], 'state') 24 | 
self.S_DIM = S_DIM 25 | self.A_DIM = A_DIM 26 | self.BATCH = BATCH 27 | self.A_UPDATE_STEPS = A_UPDATE_STEPS 28 | self.C_UPDATE_STEPS = C_UPDATE_STEPS 29 | self.decay = tf.placeholder(tf.float32, (), 'decay') 30 | self.a_lr = tf.placeholder(tf.float32, (), 'a_lr') 31 | self.c_lr = tf.placeholder(tf.float32, (), 'c_lr') 32 | self.num = num 33 | 34 | # critic 35 | with tf.variable_scope('critic'): 36 | w1 = tf.Variable(tf.truncated_normal( 37 | [self.S_DIM, 200], stddev=0.01), name='w1') 38 | bias1 = tf.Variable(tf.constant( 39 | 0.0, shape=[200], dtype=tf.float32), name='b1') 40 | l1 = tf.nn.relu(tf.matmul(self.tfs, w1) + bias1) 41 | 42 | w2 = tf.Variable(tf.truncated_normal( 43 | [200, 50], stddev=0.01), name='w2') 44 | bias2 = tf.Variable(tf.constant( 45 | 0.0, shape=[50], dtype=tf.float32), name='b2') 46 | l2 = tf.nn.relu(tf.matmul(l1, w2) + bias2) 47 | dnc = DNC(input_size=50, output_size=1, 48 | seq_len=0, num_words=10, word_size=32, num_heads=1) 49 | self.v = tf.reshape(dnc.run(l2), [-1, np.shape(dnc.run(l2))[-1]]) 50 | 51 | # w3 = tf.Variable(tf.truncated_normal( 52 | # [50, 1], stddev=0.01), name='w3') 53 | # bias3 = tf.Variable(tf.constant( 54 | # 0.0, shape=[1], dtype=tf.float32), name='b3') 55 | # self.v = tf.nn.relu(tf.matmul(l2, w3) + bias3) 56 | 57 | self.tfdc_r = tf.placeholder(tf.float32, [None, 1], 'discounted_r') 58 | self.advantage = self.tfdc_r - self.v 59 | self.closs = tf.reduce_mean(tf.square(self.advantage)) + \ 60 | BELTA # * (tf.nn.l2_loss(w1) + tf.nn.l2_loss(w3)) 61 | optimizer = tf.train.AdamOptimizer(learning_rate=self.c_lr) 62 | vars_ = tf.trainable_variables() 63 | grads, _ = tf.clip_by_global_norm( 64 | tf.gradients(self.closs, vars_), 5.0) 65 | self.ctrain_op = optimizer.apply_gradients(zip(grads, vars_)) 66 | 67 | # actor 68 | pi, pi_params, l2_loss_a = self._build_anet('pi', trainable=True) 69 | oldpi, oldpi_params, _ = self._build_anet('oldpi', trainable=False) 70 | with tf.variable_scope('sample_action'): 71 | # choosing action squeeze:reduce the first dimension 72 | self.sample_op = tf.squeeze(pi.sample(1), axis=0) 73 | with tf.variable_scope('update_oldpi'): 74 | self.update_oldpi_op = [oldp.assign( 75 | p) for p, oldp in zip(pi_params, oldpi_params)] 76 | 77 | self.tfa = tf.placeholder(tf.float32, [None, self.A_DIM], 'action') 78 | self.tfadv = tf.placeholder(tf.float32, [None, 1], 'advantage') 79 | with tf.variable_scope('loss'): 80 | with tf.variable_scope('surrogate'): 81 | # ratio = tf.exp(pi.log_prob(self.tfa) - oldpi.log_prob(self.tfa)) 82 | ratio = pi.prob(self.tfa) / oldpi.prob(self.tfa) 83 | surr = ratio * self.tfadv 84 | if METHOD['name'] == 'kl_pen': 85 | self.tflam = tf.placeholder(tf.float32, None, 'lambda') 86 | kl = tf.distributions.kl_divergence(oldpi, pi) 87 | self.kl_mean = tf.reduce_mean(kl) 88 | self.aloss = -(tf.reduce_mean(surr - self.tflam * kl)) 89 | elif METHOD['name'] == 'ddpg': 90 | self.aloss = -(tf.reduce_mean(pi.prob(self.tfa) * self.tfadv)) 91 | else: # clipping method, find this is better 92 | self.aloss = -tf.reduce_mean(tf.minimum( 93 | surr, 94 | tf.clip_by_value(ratio, 1. - METHOD['epsilon'], 1. 
+ METHOD['epsilon']) * self.tfadv)) + \ 95 | BELTA * l2_loss_a 96 | 97 | with tf.variable_scope('atrain'): 98 | # self.atrain_op = tf.train.AdamOptimizer(A_LR).minimize(self.aloss) 99 | optimizer = tf.train.AdamOptimizer(learning_rate=self.a_lr) 100 | vars_ = tf.trainable_variables() 101 | grads, _ = tf.clip_by_global_norm( 102 | tf.gradients(self.aloss, vars_), 5.0) 103 | self.atrain_op = optimizer.apply_gradients(zip(grads, vars_)) 104 | 105 | tf.summary.FileWriter("log/", self.sess.graph) 106 | init = tf.global_variables_initializer() 107 | self.saver = tf.train.Saver() 108 | for var in tf.trainable_variables(): 109 | tf.summary.histogram(var.op.name, var) 110 | summary_op = tf.summary.merge_all() 111 | summary_writer = tf.summary.FileWriter('tmp/vintf/', self.sess.graph) 112 | self.sess.run(init) 113 | if HAVE_TRAIN == True: 114 | model_file = tf.train.latest_checkpoint( 115 | 'ckpt/' + str(self.num) + "/") 116 | self.saver.restore(self.sess, model_file) 117 | 118 | def update(self, s, a, r, dec, alr, clr, epoch): 119 | self.sess.run(self.update_oldpi_op) 120 | adv = self.sess.run( 121 | self.advantage, {self.tfs: s, self.tfdc_r: r, self.decay: dec}) 122 | # adv = (adv - adv.mean())/(adv.std()+1e-6) # sometimes helpful 123 | 124 | # update actor 125 | if METHOD['name'] == 'kl_pen': 126 | for _ in range(self.A_UPDATE_STEPS): 127 | _, kl = self.sess.run( 128 | [self.atrain_op, self.kl_mean], 129 | {self.tfs: s, self.tfa: a, self.tfadv: adv, self.tflam: METHOD['lam']}) 130 | if kl > 4 * METHOD['kl_target']: # this in in google's paper 131 | break 132 | # adaptive lambda, this is in OpenAI's paper 133 | if kl < METHOD['kl_target'] / 1.5: 134 | METHOD['lam'] /= 2 135 | elif kl > METHOD['kl_target'] * 1.5: 136 | METHOD['lam'] *= 2 137 | # sometimes explode, this clipping is my solution 138 | METHOD['lam'] = np.clip(METHOD['lam'], 1e-4, 10) 139 | else: # clipping method, find this is better (OpenAI's paper) 140 | for i in range(self.A_UPDATE_STEPS): 141 | aloss, _ = self.sess.run([self.aloss, self.atrain_op], 142 | {self.tfs: s, self.tfa: a, self.tfadv: adv, self.decay: dec, self.a_lr: alr, self.c_lr: clr}) 143 | 144 | # update critic 145 | for i in range(self.C_UPDATE_STEPS): 146 | closs, _ = self.sess.run([self.closs, self.ctrain_op], { 147 | self.tfs: s, self.tfdc_r: r, self.decay: dec, self.a_lr: alr, self.c_lr: clr}) 148 | # self.saver.save(self.sess, "ckpt/" + str(self.num) + "/", global_step=epoch) 149 | return closs, aloss 150 | 151 | def _build_anet(self, name, trainable): 152 | with tf.variable_scope(name): 153 | w4 = tf.Variable(tf.truncated_normal( 154 | [self.S_DIM, 200], stddev=0.01), name='w4') 155 | bias4 = tf.Variable(tf.constant( 156 | 0.0, shape=[200], dtype=tf.float32), name='b4') 157 | l3 = tf.nn.sigmoid(tf.matmul(self.tfs, w4) + bias4) 158 | # dnc = DNC(input_size=200, output_size=50, 159 | # seq_len=0, num_words=10, word_size=4, num_heads=1) 160 | # l4 = tf.reshape(dnc.run(l3), [-1, np.shape(dnc.run(l3))[-1]]) 161 | # print(np.shape(l4)) 162 | 163 | w5 = tf.Variable(tf.truncated_normal( 164 | [200, 50], stddev=0.01), name='w5') 165 | bias5 = tf.Variable(tf.constant( 166 | 0.0, shape=[50], dtype=tf.float32), name='b5') 167 | l4 = tf.nn.sigmoid(tf.matmul(l3, w5) + bias5) 168 | 169 | w6 = tf.Variable(tf.truncated_normal( 170 | [50, self.A_DIM], stddev=0.01), name='w6') 171 | bias6 = tf.Variable(tf.constant( 172 | 0.0, shape=[self.A_DIM], dtype=tf.float32), name='b6') 173 | 174 | mu = 1 * tf.nn.sigmoid(tf.matmul(l4, w6) + bias6) 175 | # mu = 5 * tf.nn.sigmoid(tf.matmul(l4, 
w6) + bias6) + 0.0001 176 | # print('mu:', np.shape(mu)) 177 | 178 | w7 = tf.Variable(tf.truncated_normal( 179 | [50, self.A_DIM], stddev=0.01), name='w7') 180 | bias7 = tf.Variable(tf.constant( 181 | 0.0, shape=[self.A_DIM], dtype=tf.float32), name='b7') 182 | sigma = self.decay * \ 183 | tf.nn.sigmoid(tf.matmul(l4, w7) + bias7) + 0.00001 184 | # print('sigma:',np.shape(sigma)) 185 | 186 | # mu = tf.layers.dense(l2, A_DIM, tf.nn.sigmoid, trainable=trainable) 187 | # sigma = tf.layers.dense(l2, A_DIM, tf.nn.sigmoid, trainable=trainable) + 0.0001 188 | norm_dist = tf.distributions.Normal( 189 | loc=mu, scale=sigma) # loc:mean scale:sigma 190 | params = tf.get_collection( 191 | tf.GraphKeys.GLOBAL_VARIABLES, scope=name) 192 | # tf.nn.l2_loss(w4) + tf.nn.l2_loss(w5) + tf.nn.l2_loss(w6) + tf.nn.l2_loss(w7) 193 | l2_loss_a = 0 194 | return norm_dist, params, l2_loss_a 195 | 196 | def choose_action(self, s, dec): 197 | s = s[np.newaxis, :] 198 | a = self.sess.run(self.sample_op, feed_dict={ 199 | self.tfs: s, self.decay: dec}) 200 | # a, sigma, mu = self.sess.run([self.sample_op, self.sigma, self.mu], feed_dict={self.tfs: s, self.decay: dec}) 201 | 202 | return np.clip(a[0], 0.0001, 1) # clip the output 203 | 204 | def get_v(self, s): 205 | if s.ndim < 2: 206 | s = s[np.newaxis, :] 207 | return self.sess.run(self.v, {self.tfs: s})[0, 0] 208 | -------------------------------------------------------------------------------- /ICDCS-TC/PPO.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | import tensorflow.contrib.rnn as rnn 3 | import numpy as np 4 | from collections import deque 5 | import random 6 | 7 | BELTA = 0.0003 8 | METHOD = [ 9 | dict(name='kl_pen', kl_target=0.01, lam=0.5), # KL penalty 10 | # Clipped surrogate objective, find this is better 11 | dict(name='clip', epsilon=0.1), 12 | ][1] # choose the method for optimization 13 | 14 | 15 | class PPO(object): 16 | replay_memory = deque() 17 | memory_size = 100 18 | 19 | def __init__(self, S_DIM, A_DIM, BATCH, A_UPDATE_STEPS, C_UPDATE_STEPS, HAVE_TRAIN, num): # num是什么意思 20 | self.sess = tf.Session() 21 | self.tfs = tf.placeholder(tf.float32, [None, S_DIM], 'state') 22 | self.S_DIM = S_DIM 23 | self.A_DIM = A_DIM 24 | self.BATCH = BATCH 25 | self.A_UPDATE_STEPS = A_UPDATE_STEPS 26 | self.C_UPDATE_STEPS = C_UPDATE_STEPS 27 | self.decay = tf.placeholder(tf.float32, (), 'decay') 28 | self.a_lr = tf.placeholder(tf.float32, (), 'a_lr') 29 | self.c_lr = tf.placeholder(tf.float32, (), 'c_lr') 30 | self.num = num 31 | 32 | # critic 33 | with tf.variable_scope('critic'): 34 | w1 = tf.Variable(tf.truncated_normal( 35 | [self.S_DIM, 200], stddev=0.01), name='w1') 36 | bias1 = tf.Variable(tf.constant( 37 | 0.0, shape=[200], dtype=tf.float32), name='b1') 38 | l1 = tf.nn.relu(tf.matmul(self.tfs, w1) + bias1) 39 | # l1 = tf.reshape(l1, shape=(-1, 200, 1)) 40 | # lstm_cell = rnn.BasicLSTMCell(num_units=128) 41 | # # init_state = lstm_cell.zero_state( 42 | # # batch_size=self.BATCH, dtype=tf.float32) 43 | # outputs, states = tf.nn.dynamic_rnn( 44 | # cell=lstm_cell, inputs=l1, dtype=tf.float32) # , initial_state=init_state, dtype=tf.float32) 45 | # l2 = outputs[:, -1, :] 46 | # print(np.shape(l2)) 47 | 48 | w2 = tf.Variable(tf.truncated_normal( 49 | [200, 50], stddev=0.01), name='w2') 50 | bias2 = tf.Variable(tf.constant( 51 | 0.0, shape=[50], dtype=tf.float32), name='b2') 52 | l2 = tf.nn.relu(tf.matmul(l1, w2) + bias2) 53 | 54 | w3 = tf.Variable(tf.truncated_normal( 55 | [50, 1], 
stddev=0.01), name='w3') 56 | bias3 = tf.Variable(tf.constant( 57 | 0.0, shape=[1], dtype=tf.float32), name='b3') 58 | self.v = tf.nn.relu(tf.matmul(l2, w3) + bias3) 59 | 60 | self.tfdc_r = tf.placeholder(tf.float32, [None, 1], 'discounted_r') 61 | self.advantage = self.tfdc_r - self.v 62 | self.closs = tf.reduce_mean(tf.square(self.advantage)) + \ 63 | BELTA * (tf.nn.l2_loss(w1) + tf.nn.l2_loss(w3)) 64 | optimizer = tf.train.AdamOptimizer(learning_rate=self.c_lr) 65 | vars_ = tf.trainable_variables() 66 | grads, _ = tf.clip_by_global_norm( 67 | tf.gradients(self.closs, vars_), 5.0) 68 | self.ctrain_op = optimizer.apply_gradients(zip(grads, vars_)) 69 | 70 | # actor 71 | pi, pi_params, l2_loss_a = self._build_anet('pi', trainable=True) 72 | oldpi, oldpi_params, _ = self._build_anet('oldpi', trainable=False) 73 | with tf.variable_scope('sample_action'): 74 | # choosing action squeeze:去掉第一维(=1) 75 | self.sample_op = tf.squeeze(pi.sample(1), axis=0) 76 | with tf.variable_scope('update_oldpi'): 77 | self.update_oldpi_op = [oldp.assign( 78 | p) for p, oldp in zip(pi_params, oldpi_params)] 79 | 80 | self.tfa = tf.placeholder(tf.float32, [None, self.A_DIM], 'action') 81 | self.tfadv = tf.placeholder(tf.float32, [None, 1], 'advantage') 82 | with tf.variable_scope('loss'): 83 | with tf.variable_scope('surrogate'): 84 | # ratio = tf.exp(pi.log_prob(self.tfa) - oldpi.log_prob(self.tfa)) 85 | ratio = pi.prob(self.tfa) #/oldpi.prob(self.tfa) 86 | surr = ratio * self.tfadv 87 | if METHOD['name'] == 'kl_pen': 88 | self.tflam = tf.placeholder(tf.float32, None, 'lambda') 89 | kl = tf.distributions.kl_divergence(oldpi, pi) 90 | self.kl_mean = tf.reduce_mean(kl) 91 | self.aloss = -(tf.reduce_mean(surr - self.tflam * kl)) 92 | else: # clipping method, find this is better 93 | self.aloss = -tf.reduce_mean(tf.minimum( 94 | surr, 95 | tf.clip_by_value(ratio, 1. - METHOD['epsilon'], 1. 
+ METHOD['epsilon']) * self.tfadv)) + \ 96 | BELTA * l2_loss_a 97 | 98 | with tf.variable_scope('atrain'): 99 | # self.atrain_op = tf.train.AdamOptimizer(A_LR).minimize(self.aloss) 100 | optimizer = tf.train.AdamOptimizer(learning_rate=self.a_lr) 101 | vars_ = tf.trainable_variables() 102 | grads, _ = tf.clip_by_global_norm( 103 | tf.gradients(self.aloss, vars_), 5.0) 104 | self.atrain_op = optimizer.apply_gradients(zip(grads, vars_)) 105 | 106 | tf.summary.FileWriter("log/", self.sess.graph) 107 | init = tf.global_variables_initializer() 108 | self.saver = tf.train.Saver() 109 | for var in tf.trainable_variables(): 110 | tf.summary.histogram(var.op.name, var) 111 | summary_op = tf.summary.merge_all() 112 | summary_writer = tf.summary.FileWriter('tmp/vintf/', self.sess.graph) 113 | self.sess.run(init) 114 | if HAVE_TRAIN == True: 115 | model_file = tf.train.latest_checkpoint( 116 | 'ckpt/' + str(self.num) + "/") 117 | self.saver.restore(self.sess, model_file) 118 | 119 | def update(self, s, a, r, dec, alr, clr, epoch): 120 | self.sess.run(self.update_oldpi_op) 121 | adv = self.sess.run( 122 | self.advantage, {self.tfs: s, self.tfdc_r: r, self.decay: dec}) 123 | # adv = (adv - adv.mean())/(adv.std()+1e-6) # sometimes helpful 124 | 125 | # update actor 126 | if METHOD['name'] == 'kl_pen': 127 | for _ in range(self.A_UPDATE_STEPS): 128 | _, kl = self.sess.run( 129 | [self.atrain_op, self.kl_mean], 130 | {self.tfs: s, self.tfa: a, self.tfadv: adv, self.tflam: METHOD['lam']}) 131 | if kl > 4 * METHOD['kl_target']: # this in in google's paper 132 | break 133 | # adaptive lambda, this is in OpenAI's paper 134 | if kl < METHOD['kl_target'] / 1.5: 135 | METHOD['lam'] /= 2 136 | elif kl > METHOD['kl_target'] * 1.5: 137 | METHOD['lam'] *= 2 138 | # sometimes explode, this clipping is my solution 139 | METHOD['lam'] = np.clip(METHOD['lam'], 1e-4, 10) 140 | else: # clipping method, find this is better (OpenAI's paper) 141 | [self.sess.run(self.atrain_op, 142 | {self.tfs: s, self.tfa: a, self.tfadv: adv, self.decay: dec, self.a_lr: alr, self.c_lr: clr}) 143 | for _ in range(self.A_UPDATE_STEPS)] 144 | 145 | # update critic 146 | [self.sess.run(self.ctrain_op, {self.tfs: s, self.tfdc_r: r, self.decay: dec, self.a_lr: alr, self.c_lr: clr}) 147 | for _ in range(self.C_UPDATE_STEPS)] 148 | # self.saver.save(self.sess, "ckpt/" + str(self.num) + "/", global_step=epoch) 149 | 150 | def _build_anet(self, name, trainable): 151 | with tf.variable_scope(name): 152 | w4 = tf.Variable(tf.truncated_normal( 153 | [self.S_DIM, 200], stddev=0.01), name='w4') 154 | bias4 = tf.Variable(tf.constant( 155 | 0.0, shape=[200], dtype=tf.float32), name='b4') 156 | l3 = tf.nn.sigmoid(tf.matmul(self.tfs, w4) + bias4) 157 | 158 | w5 = tf.Variable(tf.truncated_normal( 159 | [200, 50], stddev=0.01), name='w5') 160 | bias5 = tf.Variable(tf.constant( 161 | 0.0, shape=[50], dtype=tf.float32), name='b5') 162 | l4 = tf.nn.sigmoid(tf.matmul(l3, w5) + bias5) 163 | 164 | w6 = tf.Variable(tf.truncated_normal( 165 | [50, self.A_DIM], stddev=0.01), name='w6') 166 | bias6 = tf.Variable(tf.constant( 167 | 0.0, shape=[self.A_DIM], dtype=tf.float32), name='b6') 168 | 169 | mu = 1 * tf.nn.sigmoid(tf.matmul(l4, w6) + bias6) 170 | # mu = 5 * tf.nn.sigmoid(tf.matmul(l4, w6) + bias6) + 0.0001 171 | # print('mu:', np.shape(mu)) 172 | 173 | w7 = tf.Variable(tf.truncated_normal( 174 | [50, self.A_DIM], stddev=0.01), name='w7') 175 | bias7 = tf.Variable(tf.constant( 176 | 0.0, shape=[self.A_DIM], dtype=tf.float32), name='b7') 177 | sigma = self.decay * \ 
178 |                 tf.nn.sigmoid(tf.matmul(l4, w7) + bias7) + 0.00001
179 |             # print('sigma:',np.shape(sigma))
180 | 
181 |             # mu = tf.layers.dense(l2, A_DIM, tf.nn.sigmoid, trainable=trainable)
182 |             # sigma = tf.layers.dense(l2, A_DIM, tf.nn.sigmoid, trainable=trainable) + 0.0001
183 |             norm_dist = tf.distributions.Normal(
184 |                 loc=mu, scale=sigma)  # loc: mean, scale: standard deviation
185 |             params = tf.get_collection(
186 |                 tf.GraphKeys.GLOBAL_VARIABLES, scope=name)  # returns the list of variables under scope `name`
187 |             l2_loss_a = tf.nn.l2_loss(
188 |                 w4) + tf.nn.l2_loss(w5) + tf.nn.l2_loss(w6) + tf.nn.l2_loss(w7)
189 |             return norm_dist, params, l2_loss_a
190 | 
191 |     def choose_action(self, s, dec):
192 |         s = s[np.newaxis, :]
193 |         a = self.sess.run(self.sample_op, feed_dict={
194 |                           self.tfs: s, self.decay: dec})
195 |         # a, sigma, mu = self.sess.run([self.sample_op, self.sigma, self.mu], feed_dict={self.tfs: s, self.decay: dec})
196 | 
197 |         return np.clip(a[0], 0.0001, 1)  # clip the output to [0.0001, 1]
198 | 
199 |     def get_v(self, s):
200 |         if s.ndim < 2:
201 |             s = s[np.newaxis, :]
202 |         return self.sess.run(self.v, {self.tfs: s})[0, 0]
203 | 
204 |     def preceive(self, state, action, reward, dec, alr, clr):
205 |         self.replay_memory.append((state, action, reward))
206 |         if len(self.replay_memory) > self.memory_size:
207 |             self.replay_memory.popleft()
208 |         else:
209 |             self.train_network(dec, alr, clr)
210 | 
211 |     def train_network(self, dec, alr, clr):
212 |         mini_batch = random.sample(self.replay_memory, self.BATCH)
213 |         state_batch = [data[0] for data in mini_batch]
214 |         action_batch = [data[1] for data in mini_batch]
215 |         reward_batch = [data[2] for data in mini_batch]
216 | 
217 |         self.update(state_batch, action_batch, reward_batch, dec, alr, clr)
218 | 
--------------------------------------------------------------------------------
/ICDCS-TC/Readme.txt:
--------------------------------------------------------------------------------
1 | continuousEnv.py: definition of the game environment, i.e., how mobile users and the platform interact with each other.
2 | 
3 | DNC.py: a simple version of DNC, based on https://github.com/llSourcell/differentiable_neural_computer_LIVE.
4 | 
5 | PPO.py: an implementation of the proximal policy optimization algorithm, based on https://github.com/MorvanZhou/Reinforcement-learning-with-tensorflow.
6 | 
7 | DNC_PPO.py: an implementation of proximal policy optimization in which the value function contains a DNC module for handling the partially observable Markov decision process.
8 | 
9 | main.py: the main entry point for this project.
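
For orientation, here is a minimal usage sketch of how these pieces fit together, condensed from main.py (shown in full below). The sizes and the exploration decay dec are the values main.py uses; the full script additionally buffers transitions, computes discounted returns, and calls update() on each agent.

    import numpy as np
    from continuousEnv import ContinuousEnv
    from DNC_PPO import PPO

    user_num, his_len = 10, 5
    env = ContinuousEnv(user_num, his_len)
    # one PPO agent per mobile user; state dim = user_num * his_len, action dim = 1
    agents = [PPO(user_num * his_len, 1, 20, 5, 5, False, i) for i in range(user_num)]

    state = env.reset()                      # shape: (user_num, user_num * his_len)
    action = np.zeros(user_num)
    for t in range(400):                     # one episode of EP_LEN steps
        for i in range(user_num):
            action[i] = agents[i].choose_action(state[i], dec=0.5)
        state, reward = env.step(action)     # reward[i] = instant utility of user i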
10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /ICDCS-TC/continuousEnv.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from collections import defaultdict 3 | 4 | 5 | class ContinuousEnv(object): 6 | state_map = None # state pool 7 | his_len = 5 8 | 9 | def __init__(self, user_num, his_len): 10 | self.user_num = user_num 11 | self.his_len = his_len 12 | self.state_map = np.zeros( 13 | (self.user_num, self.user_num * self.his_len), 'float32') 14 | self.S = np.zeros((self.user_num, self.user_num)) # user's state 15 | 16 | def reset(self): 17 | # transmission power of user i 18 | self.p = np.ones(self.user_num, dtype=np.float32) 19 | # Data for detection of user i 20 | self.C = np.ones(self.user_num, dtype=np.float32) 21 | action = np.random.random(self.user_num - 1) # random number 0~1 22 | # self.b = [1.0 / 6, 1.0 / 4] # transmission bandwidth of user i 23 | # transmission bandwidth of user i 24 | # self.b = [1.0 / 11.5, 1.0 / 25 | # 10.5, 1.0 / 9.5, 1.0 / 8.5] 26 | self.b = [1 / 8.5, 1 / 8.5, 1 / 8.5, 1 / 27 | 8.5, 1 / 8.5, 1 / 8.5, 1 / 8.5, 1 / 8.5, 1 / 8.5, 1 / 8.5] 28 | # self.b = [7 / 77, 7 / 75, 7 / 73, 7 / 29 | # 71, 7 / 69, 7 / 67, 7 / 65, 7 / 63] 30 | # self.b = [9 / 99, 9 / 97, 9 / 95, 9 / 93, 9 / 31 | # 91, 9 / 89, 9 / 87, 9 / 85, 9 / 83, 9 / 81] 32 | # init state 33 | # self.S[0] = [action, self.b[0]] 34 | # action = np.random.random() # random number 0~1 35 | # self.S[1] = [action, self.b[1]] 36 | for i in range(self.user_num): 37 | flag = 0 38 | action = np.random.random(self.user_num - 1) 39 | for j in range(self.user_num): 40 | if j != i: 41 | self.S[i, flag] = action[flag] 42 | flag += 1 43 | if flag != self.user_num - 1: 44 | print("Index error!") 45 | else: 46 | self.S[i, flag] = self.b[i] 47 | 48 | # instant utility of user i 49 | self.user_profit = np.zeros(self.user_num) 50 | return self.state_map 51 | 52 | def step(self, x): # continuous action 53 | sum = 0.0 54 | R = 24.0 55 | for i in range(self.user_num): 56 | sum += x[i] * self.C[i] 57 | for i in range(self.user_num): 58 | self.user_profit[i] = x[i] * self.C[i] * R / \ 59 | sum - self.p[i] * x[i] * self.C[i] * \ 60 | (1 / self.b[i]) # - 0.25 + 0.5 * np.random.random()) 61 | 62 | # update S and S_map based on the last action 63 | for i in range(self.user_num): 64 | flag = 0 65 | for j in range(self.user_num): 66 | if j != i: 67 | self.S[i, flag] = x[j] 68 | flag += 1 69 | if flag != self.user_num - 1: 70 | print("Index error!") 71 | else: 72 | self.S[i, flag] = self.b[i] 73 | 74 | # self.S[0] = [x[1], self.b[0]] 75 | # self.S[1] = [x[0], self.b[1]] 76 | self.state_map[:, :-self.user_num] = self.state_map[:, self.user_num:] 77 | self.state_map[:, -self.user_num:] = self.S 78 | 79 | self.user_profit = np.clip(self.user_profit, 0, R) 80 | return self.state_map, self.user_profit 81 | -------------------------------------------------------------------------------- /ICDCS-TC/main.py: -------------------------------------------------------------------------------- 1 | import os 2 | os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' 3 | from continuousEnv import ContinuousEnv 4 | import numpy as np 5 | import random 6 | import matplotlib.pyplot as plt 7 | from DNC_PPO import PPO 8 | import csv 9 | 10 | 11 | def main(): 12 | train_ppo() 13 | 14 | 15 | def train_ppo(): 16 | user_num = 10 17 | his_len = 5 18 | A_DIM, S_DIM = 1, user_num * his_len 19 | BATCH = 20 20 | A_UPDATE_STEPS = 5 21 | C_UPDATE_STEPS = 5 22 | A_LR 
= 0.00003 23 | C_LR = 0.00003 24 | v_s = np.zeros(user_num) 25 | ppo = [] 26 | env = ContinuousEnv(user_num, his_len) 27 | GAMMA = 0.95 28 | EP_MAX = 1000 29 | EP_LEN = 400 30 | dec = 0.5 31 | action = np.zeros(user_num) 32 | Algs = "dnc" 33 | 34 | max_r = np.zeros(user_num) 35 | max_a = np.random.random(user_num) 36 | 37 | for i in range(user_num): 38 | ppo.append(PPO(S_DIM, 1, BATCH, A_UPDATE_STEPS, 39 | C_UPDATE_STEPS, False, i)) 40 | csvFile1 = open("./0_Rewards/static_result_" + Algs + 41 | "_" + str(user_num) + ".csv", 'w', newline='') 42 | writer1 = csv.writer(csvFile1) 43 | csvFile2 = open("./1_Actions/static_result_" + Algs + 44 | "_" + str(user_num) + ".csv", 'w', newline='') 45 | writer2 = csv.writer(csvFile2) 46 | csvFile3 = open("./3_loss_pi/static_result_" + Algs + 47 | "_" + str(user_num) + ".csv", 'w', newline='') 48 | writer3 = csv.writer(csvFile3) 49 | csvFile4 = open("./4_loss_v/static_result_" + Algs + 50 | "_" + str(user_num) + ".csv", 'w', newline='') 51 | writer4 = csv.writer(csvFile4) 52 | 53 | rewards = [] 54 | actions = [] 55 | closs = [] 56 | aloss = [] 57 | cur_state = env.reset() 58 | for ep in range(EP_MAX): 59 | if ep % 50 == 0: 60 | dec = dec * 1 61 | A_LR = A_LR * 0.8 62 | C_LR = C_LR * 0.8 63 | buffer_s = [[] for _ in range(user_num)] 64 | buffer_a = [[] for _ in range(user_num)] 65 | buffer_r = [[] for _ in range(user_num)] 66 | sum_reward = np.zeros(user_num) 67 | sum_action = np.zeros(user_num) 68 | sum_closs = np.zeros(user_num) 69 | sum_aloss = np.zeros(user_num) 70 | for t in range(EP_LEN): 71 | for i in range(user_num): 72 | action[i] = ppo[i].choose_action(cur_state[i], dec) 73 | # Greedy algorithm 74 | # if np.random.random() < 0.1: 75 | # action[i] = np.random.random() 76 | # else: 77 | # action[i] = max_a[i] 78 | # action[i] = np.random.random() 79 | 80 | next_state, reward = env.step(action) 81 | sum_reward += reward 82 | sum_action += action 83 | 84 | # Greedy algorithm 85 | # for i in range(user_num): 86 | # if reward[i] > max_r[i]: 87 | # max_r[i] = reward[i] 88 | # max_a[i] = action[i] 89 | # if max_a[i] == action[i]: 90 | # max_r[i] = reward[i] 91 | 92 | for i in range(user_num): 93 | v_s[i] = ppo[i].get_v(next_state[i]) 94 | 95 | for i in range(user_num): 96 | buffer_a[i].append(action[i]) 97 | buffer_s[i].append(cur_state[i]) 98 | buffer_r[i].append(reward[i]) 99 | 100 | cur_state = next_state 101 | # update ppo 102 | if (t + 1) % BATCH == 0: 103 | for i in range(user_num): 104 | discounted_r = np.zeros(len(buffer_r[i]), 'float32') 105 | v_s[i] = ppo[i].get_v(next_state[i]) 106 | running_add = v_s[i] 107 | 108 | for rd in reversed(range(len(buffer_r[i]))): 109 | running_add = running_add * GAMMA + buffer_r[i][rd] 110 | discounted_r[rd] = running_add 111 | 112 | discounted_r = discounted_r[np.newaxis, :] 113 | discounted_r = np.transpose(discounted_r) 114 | ppo[i].update(np.vstack(buffer_s[i]), np.vstack( 115 | buffer_a[i]), discounted_r, dec, A_LR, C_LR, ep) 116 | 117 | if ep % 10 == 0: 118 | print('ep:', ep) 119 | print("reward:", reward) 120 | print("action:", action) 121 | rewards.append(sum_reward / EP_LEN) 122 | actions.append(sum_action / EP_LEN) 123 | closs.append(sum_closs / EP_LEN) 124 | aloss.append(sum_aloss / EP_LEN) 125 | print("average reward:", sum_reward / EP_LEN) 126 | print("average action:", sum_action / EP_LEN) 127 | print("average closs:", sum_closs / EP_LEN) 128 | print("average aloss:", sum_aloss / EP_LEN) 129 | 130 | for i in range(user_num): 131 | usr_reward = [data[i] for data in rewards] 132 | usr_action = 
[data[i] for data in actions] 133 | usr_closs = [data[i] for data in closs] 134 | usr_aloss = [data[i] for data in aloss] 135 | plt.plot(usr_reward) 136 | writer1.writerow(usr_reward) 137 | writer2.writerow(usr_action) 138 | writer3.writerow(usr_closs) 139 | writer4.writerow(usr_aloss) 140 | plt.show() 141 | csvFile1.close() 142 | csvFile2.close() 143 | csvFile3.close() 144 | csvFile4.close() 145 | 146 | 147 | if __name__ == '__main__': 148 | main() 149 | -------------------------------------------------------------------------------- /INFOCOM2020/Readme: -------------------------------------------------------------------------------- 1 | To be uploaded 2 | -------------------------------------------------------------------------------- /IPDPS2020/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitzj2015/DRL-Networking/f42184639c4e54951e8919a04a4d617d1cdd21ea/IPDPS2020/.DS_Store -------------------------------------------------------------------------------- /IPDPS2020/DNC.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | import numpy as np 3 | import os 4 | 5 | 6 | class DNC: 7 | def __init__(self, input_size, output_size, seq_len, num_words=256, word_size=64, num_heads=4): 8 | # define data 9 | # input data - [[1 0] [0 1] [0 0] [0 0]] 10 | self.input_size = input_size # X 11 | # output data [[0 0] [0 0] [1 0] [0 1]] 12 | self.output_size = output_size # Y 13 | 14 | # define read + write vector size 15 | # 10 16 | self.num_words = num_words # N 17 | # 4 characters 18 | self.word_size = word_size # W 19 | 20 | # define number of read+write heads 21 | # we could have multiple, but just 1 for simplicity 22 | self.num_heads = num_heads # R 23 | 24 | # size of output vector from controller that defines interactions with memory matrix 25 | self.interface_size = num_heads * word_size + 3 * word_size + 5 * num_heads + 3 26 | 27 | # the actual size of the neural network input after flatenning and 28 | # concatenating the input vector with the previously read vctors from memory 29 | self.controller_input_size = num_heads * word_size + input_size 30 | 31 | # size of output 32 | self.controller_output_size = output_size + self.interface_size 33 | 34 | # gaussian normal distribution for both outputs 35 | self.controller_out = tf.truncated_normal( 36 | [1, self.output_size], stddev=0.1) 37 | self.interface_vec = tf.truncated_normal( 38 | [1, self.interface_size], stddev=0.1) 39 | 40 | # Create memory matrix 41 | self.mem_mat = tf.zeros([num_words, word_size]) # N*W 42 | 43 | # other variables 44 | # The usage vector records which locations have been used so far, 45 | self.usage_vec = tf.fill([num_words, 1], 1e-6) # N*1 46 | # a temporal link matrix records the order in which locations were written; 47 | self.link_mat = tf.zeros([num_words, num_words]) # N*N 48 | # represents degrees to which last location was written to 49 | self.precedence_weight = tf.zeros([num_words, 1]) # N*1 50 | 51 | # Read and write head variables 52 | self.read_weights = tf.fill([num_words, num_heads], 1e-6) # N*R 53 | self.write_weights = tf.fill([num_words, 1], 1e-6) # N*1 54 | self.read_vecs = tf.fill([num_heads, word_size], 1e-6) # R*W 55 | 56 | # NETWORK VARIABLES 57 | # gateways into the computation graph for input output pairs 58 | # self.i_data = tf.placeholder( 59 | # tf.float32, [seq_len * 2, self.input_size], name='input_node') 60 | # self.o_data = tf.placeholder( 61 | # 
tf.float32, [seq_len * 2, self.output_size], name='output_node') 62 | 63 | # 2 layer feedforwarded network 64 | self.W1 = tf.Variable(tf.truncated_normal( 65 | [self.controller_input_size, 64], stddev=0.1), name='layer1_weights', dtype=tf.float32) 66 | self.b1 = tf.Variable( 67 | tf.zeros([64]), name='layer1_bias', dtype=tf.float32) 68 | self.W2 = tf.Variable(tf.truncated_normal( 69 | [64, self.controller_output_size], stddev=0.1), name='layer2_weights', dtype=tf.float32) 70 | self.b2 = tf.Variable( 71 | tf.zeros([self.controller_output_size]), name='layer2_bias', dtype=tf.float32) 72 | 73 | # DNC OUTPUT WEIGHTS 74 | self.controller_out_weights = tf.Variable(tf.truncated_normal( 75 | [self.controller_output_size, self.output_size], stddev=0.1), name='net_output_weights') 76 | self.interface_weights = tf.Variable(tf.truncated_normal( 77 | [self.controller_output_size, self.interface_size], stddev=0.1), name='interface_weights') 78 | 79 | self.read_vecs_out_weight = tf.Variable(tf.truncated_normal( 80 | [self.num_heads * self.word_size, self.output_size], stddev=0.1), name='read_vector_weights') 81 | 82 | # 3 attention mechanisms for read/writes to memory 83 | 84 | # 1 85 | # a key vector emitted by the controller is compared to the 86 | # content of each location in memory according to a similarity measure 87 | # The similarity scores determine a weighting that can be used by the read heads 88 | # for associative recall1 or by the write head to modify an existing vector in memory. 89 | def content_lookup(self, key, str): 90 | # The l2 norm of a vector is the square root of the sum of the 91 | # absolute values squared 92 | norm_mem = tf.nn.l2_normalize(self.mem_mat, 1) # N*W 93 | norm_key = tf.nn.l2_normalize(key, 0) # 1*W for write or R*W for read 94 | # get similarity measure between both vectors, transpose before multiplicaiton 95 | # (N*W,W*1)->N*1 for write 96 | #(N*W,W*R)->N*R for read 97 | sim = tf.matmul(norm_mem, norm_key, transpose_b=True) 98 | #str is 1*1 or 1*R 99 | # returns similarity measure 100 | return tf.nn.softmax(sim * str, 0) # N*1 or N*R 101 | 102 | # 2 103 | # retreives the writing allocation weighting based on the usage free list 104 | # The ‘usage’ of each location is represented as a number between 0 and 1, 105 | # and a weighting that picks out unused locations is delivered to the write head. 106 | 107 | # independent of the size and contents of the memory, meaning that 108 | # DNCs can be trained to solve a task using one size of memory and later 109 | # upgraded to a larger memory without retraining 110 | def allocation_weighting(self): 111 | # sorted usage - the usage vector sorted ascndingly 112 | # the original indices of the sorted usage vector 113 | sorted_usage_vec, free_list = tf.nn.top_k( 114 | -1 * self.usage_vec, k=self.num_words) 115 | sorted_usage_vec *= -1 116 | cumprod = tf.cumprod(sorted_usage_vec, axis=0, exclusive=True) 117 | unorder = (1 - sorted_usage_vec) * cumprod 118 | 119 | alloc_weights = tf.zeros([self.num_words]) 120 | I = tf.constant(np.identity(self.num_words, dtype=np.float32)) 121 | 122 | # for each usage vec 123 | for pos, idx in enumerate(tf.unstack(free_list[0])): 124 | # flatten 125 | m = tf.squeeze(tf.slice(I, [idx, 0], [1, -1])) 126 | # add to weight matrix 127 | alloc_weights += m * unorder[0, pos] 128 | # the allocation weighting for each row in memory 129 | return tf.reshape(alloc_weights, [self.num_words, 1]) 130 | 131 | # at every time step the controller receives input vector from dataset and emits output vector. 
132 | # it also recieves a set of read vectors from the memory matrix at the previous time step via 133 | # the read heads. then it emits an interface vector that defines its interactions with the memory 134 | # at the current time step 135 | def step_m(self, x): 136 | 137 | # reshape input 138 | input = tf.concat( 139 | [x, tf.reshape(self.read_vecs, [1, self.num_heads * self.word_size])], 1) 140 | 141 | # forward propagation 142 | l1_out = tf.matmul(input, self.W1) + self.b1 143 | l1_act = tf.nn.tanh(l1_out) 144 | l2_out = tf.matmul(l1_act, self.W2) + self.b2 145 | l2_act = tf.nn.tanh(l2_out) 146 | 147 | # output vector 148 | # (1*eta+Y, eta+Y*Y)->(1*Y) 149 | self.controller_out = tf.matmul(l2_act, self.controller_out_weights) 150 | # interaction vector - how to interact with memory 151 | # (1*eta+Y, eta+Y*eta)->(1*eta) 152 | self.interface_vec = tf.matmul(l2_act, self.interface_weights) 153 | 154 | partition = tf.constant([[0] * (self.num_heads * self.word_size) + [1] * (self.num_heads) + [2] * (self.word_size) + [3] + 155 | [4] * (self.word_size) + [5] * (self.word_size) + 156 | [6] * (self.num_heads) + [7] + [8] + [9] * (self.num_heads * 3)], dtype=tf.int32) 157 | 158 | # convert interface vector into a set of read write vectors 159 | # using tf.dynamic_partitions(Partitions interface_vec into 10 tensors using indices from partition) 160 | (read_keys, read_str, write_key, write_str, 161 | erase_vec, write_vec, free_gates, alloc_gate, write_gate, read_modes) = \ 162 | tf.dynamic_partition(self.interface_vec, partition, 10) 163 | 164 | # read vectors 165 | read_keys = tf.reshape( 166 | read_keys, [self.num_heads, self.word_size]) # R*W 167 | read_str = 1 + tf.nn.softplus(tf.expand_dims(read_str, 0)) # 1*R 168 | 169 | # write vectors 170 | write_key = tf.expand_dims(write_key, 0) # 1*W 171 | # help init our write weights 172 | write_str = 1 + tf.nn.softplus(tf.expand_dims(write_str, 0)) # 1*1 173 | erase_vec = tf.nn.sigmoid(tf.expand_dims(erase_vec, 0)) # 1*W 174 | write_vec = tf.expand_dims(write_vec, 0) # 1*W 175 | 176 | # the degree to which locations at read heads will be freed 177 | free_gates = tf.nn.sigmoid(tf.expand_dims(free_gates, 0)) # 1*R 178 | # the fraction of writing that is being allocated in a new location 179 | alloc_gate = tf.nn.sigmoid(alloc_gate) # 1 180 | # the amount of information to be written to memory 181 | write_gate = tf.nn.sigmoid(write_gate) # 1 182 | # the softmax distribution between the three read modes (backward, forward, lookup) 183 | # The read heads can use gates called read modes to switch between content lookup 184 | # using a read key and reading out locations either forwards or backwards 185 | # in the order they were written. 186 | read_modes = tf.nn.softmax(tf.reshape( 187 | read_modes, [3, self.num_heads])) # 3*R 188 | 189 | # used to calculate usage vector, what's available to write to? 190 | retention_vec = tf.reduce_prod( 191 | 1 - free_gates * self.read_weights, reduction_indices=1) 192 | # used to dynamically allocate memory 193 | self.usage_vec = (self.usage_vec + self.write_weights - 194 | self.usage_vec * self.write_weights) * retention_vec 195 | 196 | # retreives the writing allocation weighting 197 | alloc_weights = self.allocation_weighting() # N*1 198 | # where to write to?? 
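# The next few lines form the write weighting by gating a blend of allocation-
# based and content-based addressing:
#   w_t^w = g_t^w * [ g_t^a * a_t + (1 - g_t^a) * c_t^w ]
# with write gate g_t^w, allocation gate g_t^a, allocation weighting a_t, and
# content-lookup weighting c_t^w (computed by content_lookup on the write key).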
199 | write_lookup_weights = self.content_lookup(write_key, write_str) # N*1 200 | # define our write weights now that we know how much space to allocate for them and where to write to 201 | self.write_weights = write_gate * \ 202 | (alloc_gate * alloc_weights + (1 - alloc_gate) * write_lookup_weights) 203 | 204 | # write erase, then write to memory! 205 | self.mem_mat = self.mem_mat * (1 - tf.matmul(self.write_weights, erase_vec)) + \ 206 | tf.matmul(self.write_weights, write_vec) 207 | 208 | # As well as writing, the controller can read from multiple locations in memory. 209 | # Memory can be searched based on the content of each location, or the associative 210 | # temporal links can be followed forward and backward to recall information written 211 | # in sequence or in reverse. (3rd attention mechanism) 212 | 213 | # updates and returns the temporal link matrix for the latest write 214 | # given the precedence vector and the link matrix from previous step 215 | nnweight_vec = tf.matmul( 216 | self.write_weights, tf.ones([1, self.num_words])) # N*N 217 | self.link_mat = (1 - nnweight_vec - tf.transpose(nnweight_vec)) * self.link_mat + \ 218 | tf.matmul(self.write_weights, 219 | self.precedence_weight, transpose_b=True) 220 | self.link_mat *= tf.ones([self.num_words, self.num_words]) - \ 221 | tf.constant(np.identity(self.num_words, dtype=np.float32)) 222 | 223 | self.precedence_weight = (1 - tf.reduce_sum(self.write_weights, reduction_indices=0)) * \ 224 | self.precedence_weight + self.write_weights 225 | # 3 modes - forward, backward, content lookup 226 | # (N*N,N*R)->N*R 227 | forw_w = read_modes[2] * tf.matmul(self.link_mat, self.read_weights) 228 | look_w = read_modes[1] * \ 229 | self.content_lookup(read_keys, read_str) # N*R 230 | back_w = read_modes[0] * tf.matmul(self.link_mat, 231 | self.read_weights, transpose_a=True) # N*R 232 | 233 | # use them to intiialize read weights 234 | self.read_weights = back_w + look_w + forw_w # N*R 235 | # create read vectors by applying read weights to memory matrix 236 | self.read_vecs = tf.transpose(tf.matmul( 237 | self.mem_mat, self.read_weights, transpose_a=True)) # (W*N,N*R)^T->R*W 238 | 239 | # multiply them together 240 | read_vec_mut = tf.matmul(tf.reshape(self.read_vecs, [1, self.num_heads * self.word_size]), 241 | self.read_vecs_out_weight) # (1*RW, RW*Y)-> (1*Y) 242 | 243 | # return output + read vecs product 244 | return self.controller_out + read_vec_mut 245 | 246 | # output list of numbers (one hot encoded) by running the step function 247 | def run(self, input_data): 248 | big_out = [] 249 | if np.shape(input_data)[0] > 0: 250 | for t, seq in enumerate(tf.unstack(input_data, axis=0)): 251 | seq = tf.expand_dims(seq, 0) 252 | y = self.step_m(seq) 253 | big_out.append(y) 254 | else: 255 | seq = input_data[0] 256 | seq = tf.expand_dims(seq, 0) 257 | y = self.step_m(seq) 258 | print("kkk", np.shape(y)) 259 | big_out.append(y) 260 | return tf.stack(big_out, axis=0) 261 | -------------------------------------------------------------------------------- /IPDPS2020/DNC_PPO.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | import tensorflow.contrib.rnn as rnn 3 | import numpy as np 4 | from collections import deque 5 | import random 6 | from DNC import DNC 7 | 8 | BELTA = 0.0003 9 | METHOD = [ 10 | dict(name='kl_pen', kl_target=0.01, lam=0.5), # KL penalty 11 | # Clipped surrogate objective, find this is better 12 | dict(name='clip', epsilon=0.1), 13 | dict(name='a2c', 
epsilon=0) 14 | ][1] # choose the method for optimization 15 | 16 | 17 | class PPO(object): 18 | replay_memory = deque() 19 | memory_size = 100 20 | 21 | def __init__(self, S_DIM, A_DIM, BATCH, A_UPDATE_STEPS, C_UPDATE_STEPS, HAVE_TRAIN, num): # num是什么意思 22 | self.sess = tf.Session() 23 | self.tfs = tf.placeholder(tf.float32, [None, S_DIM], 'state') 24 | self.S_DIM = S_DIM 25 | self.A_DIM = A_DIM 26 | self.BATCH = BATCH 27 | self.A_UPDATE_STEPS = A_UPDATE_STEPS 28 | self.C_UPDATE_STEPS = C_UPDATE_STEPS 29 | self.decay = tf.placeholder(tf.float32, (), 'decay') 30 | self.a_lr = tf.placeholder(tf.float32, (), 'a_lr') 31 | self.c_lr = tf.placeholder(tf.float32, (), 'c_lr') 32 | self.num = num 33 | 34 | # critic 35 | with tf.variable_scope('critic'): 36 | # w1 = tf.Variable(tf.truncated_normal(shape=(3, 3, 1, 6), mean = 0, stddev = 0.01)) 37 | # w2 = tf.Variable(tf.truncated_normal(shape=(3, 3, 6, 16), mean = 0, stddev = 0.01)) 38 | # b1 = tf.Variable(tf.zeros(6)) 39 | # b2 = tf.Variable(tf.zeros(16)) 40 | # conv1 = tf.nn.conv2d(x, w1, strides=[1, 1, 1, 1], padding='SAME') 41 | # conv1 = tf.nn.bias_add(conv1, b1) 42 | # conv1 = tf.nn.relu(conv1) 43 | # conv1 = tf.nn.avg_pool(conv1, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='VALID') 44 | w1 = tf.Variable(tf.truncated_normal( 45 | [self.S_DIM, self.S_DIM * 5], stddev=0.01), name='w1') 46 | bias1 = tf.Variable(tf.constant( 47 | 0.0, shape=[self.S_DIM * 5], dtype=tf.float32), name='b1') 48 | l1 = tf.nn.relu(tf.matmul(self.tfs, w1) + bias1) 49 | 50 | w2 = tf.Variable(tf.truncated_normal( 51 | [self.S_DIM * 5, 50], stddev=0.01), name='w2') 52 | bias2 = tf.Variable(tf.constant( 53 | 0.0, shape=[50], dtype=tf.float32), name='b2') 54 | l2 = tf.nn.relu(tf.matmul(l1, w2) + bias2) 55 | # dnc = DNC(input_size=50, output_size=1, 56 | # seq_len=0, num_words=10, word_size=32, num_heads=1) 57 | # self.v = tf.reshape(dnc.run(l2), [-1, np.shape(dnc.run(l2))[-1]]) 58 | 59 | w3 = tf.Variable(tf.truncated_normal( 60 | [50, 1], stddev=0.01), name='w3') 61 | bias3 = tf.Variable(tf.constant( 62 | 0.0, shape=[1], dtype=tf.float32), name='b3') 63 | self.v = tf.matmul(l2, w3) + bias3 64 | 65 | self.tfdc_r = tf.placeholder(tf.float32, [None, 1], 'discounted_r') 66 | self.advantage = self.tfdc_r - self.v 67 | self.closs = tf.reduce_mean(tf.square(self.advantage)) + \ 68 | BELTA # * (tf.nn.l2_loss(w1) + tf.nn.l2_loss(w3)) 69 | optimizer = tf.train.AdamOptimizer(learning_rate=self.c_lr) 70 | vars_ = tf.trainable_variables() 71 | grads, _ = tf.clip_by_global_norm( 72 | tf.gradients(self.closs, vars_), 5.0) 73 | self.ctrain_op = optimizer.apply_gradients(zip(grads, vars_)) 74 | 75 | # actor 76 | pi, pi_params, l2_loss_a = self._build_anet('pi', trainable=True) 77 | oldpi, oldpi_params, _ = self._build_anet('oldpi', trainable=False) 78 | with tf.variable_scope('sample_action'): 79 | # choosing action squeeze:reduce the first dimension 80 | self.sample_op = tf.squeeze(pi.sample(1), axis=0) 81 | with tf.variable_scope('update_oldpi'): 82 | self.update_oldpi_op = [oldp.assign( 83 | p) for p, oldp in zip(pi_params, oldpi_params)] 84 | 85 | self.tfa = tf.placeholder(tf.float32, [None, self.A_DIM], 'action') 86 | self.tfadv = tf.placeholder(tf.float32, [None, 1], 'advantage') 87 | with tf.variable_scope('loss'): 88 | with tf.variable_scope('surrogate'): 89 | # ratio = tf.exp(pi.log_prob(self.tfa) - oldpi.log_prob(self.tfa)) 90 | ratio = pi.prob(self.tfa) / oldpi.prob(self.tfa) 91 | surr = ratio * self.tfadv 92 | if METHOD['name'] == 'kl_pen': 93 | self.tflam = 
tf.placeholder(tf.float32, None, 'lambda') 94 | kl = tf.distributions.kl_divergence(oldpi, pi) 95 | self.kl_mean = tf.reduce_mean(kl) 96 | self.aloss = -(tf.reduce_mean(surr - self.tflam * kl)) 97 | elif METHOD['name'] == 'ddpg': 98 | self.aloss = -(tf.reduce_mean(pi.prob(self.tfa) * self.tfadv)) 99 | else: # clipping method; we find this works better 100 | self.aloss = -tf.reduce_mean(tf.minimum( 101 | surr, 102 | tf.clip_by_value(ratio, 1. - METHOD['epsilon'], 1. + METHOD['epsilon']) * self.tfadv)) + \ 103 | BELTA * l2_loss_a 104 | 105 | with tf.variable_scope('atrain'): 106 | # self.atrain_op = tf.train.AdamOptimizer(A_LR).minimize(self.aloss) 107 | optimizer = tf.train.AdamOptimizer(learning_rate=self.a_lr) 108 | vars_ = tf.trainable_variables() 109 | grads, _ = tf.clip_by_global_norm( 110 | tf.gradients(self.aloss, vars_), 5.0) 111 | self.atrain_op = optimizer.apply_gradients(zip(grads, vars_)) 112 | 113 | tf.summary.FileWriter("log/", self.sess.graph) 114 | init = tf.global_variables_initializer() 115 | 116 | self.saver = tf.train.Saver() 117 | for var in tf.trainable_variables(): 118 | tf.summary.histogram(var.op.name, var) 119 | summary_op = tf.summary.merge_all() 120 | summary_writer = tf.summary.FileWriter('tmp/vintf/', self.sess.graph) 121 | self.sess.run(init) 122 | if HAVE_TRAIN: 123 | model_file = tf.train.latest_checkpoint( 124 | 'ckpt/' + str(self.num) + "/") 125 | self.saver.restore(self.sess, model_file) 126 | 127 | def update(self, s, a, r, dec, alr, clr, epoch): 128 | self.sess.run(self.update_oldpi_op) 129 | adv = self.sess.run( 130 | self.advantage, {self.tfs: s, self.tfdc_r: r, self.decay: dec}) 131 | # adv = (adv - adv.mean())/(adv.std()+1e-6) # sometimes helpful 132 | 133 | # update actor 134 | if METHOD['name'] == 'kl_pen': 135 | for _ in range(self.A_UPDATE_STEPS): 136 | _, kl = self.sess.run( 137 | [self.atrain_op, self.kl_mean], 138 | {self.tfs: s, self.tfa: a, self.tfadv: adv, self.tflam: METHOD['lam']}) 139 | if kl > 4 * METHOD['kl_target']: # this is in Google's paper 140 | break 141 | # adaptive lambda, this is in OpenAI's paper 142 | if kl < METHOD['kl_target'] / 1.5: 143 | METHOD['lam'] /= 2 144 | elif kl > METHOD['kl_target'] * 1.5: 145 | METHOD['lam'] *= 2 146 | # lambda can sometimes explode; clipping keeps it bounded 147 | METHOD['lam'] = np.clip(METHOD['lam'], 1e-4, 10) 148 | else: # clipping method; we find this works better (OpenAI's paper) 149 | for i in range(self.A_UPDATE_STEPS): 150 | aloss, _ = self.sess.run([self.aloss, self.atrain_op], 151 | {self.tfs: s, self.tfa: a, self.tfadv: adv, self.decay: dec, self.a_lr: alr, self.c_lr: clr}) 152 | 153 | # update critic 154 | for i in range(self.C_UPDATE_STEPS): 155 | closs, _ = self.sess.run([self.closs, self.ctrain_op], { 156 | self.tfs: s, self.tfdc_r: r, self.decay: dec, self.a_lr: alr, self.c_lr: clr}) 157 | if epoch % 5 == 0: 158 | tf.reset_default_graph() 159 | self.saver.save(self.sess, "ckpt/" + str(self.num) + "/", global_step=epoch) 160 | return closs, aloss 161 | 162 | def _build_anet(self, name, trainable): 163 | with tf.variable_scope(name): 164 | w4 = tf.Variable(tf.truncated_normal( 165 | [self.S_DIM, self.S_DIM * 5], stddev=0.01), name='w4') 166 | bias4 = tf.Variable(tf.constant( 167 | 0.0, shape=[self.S_DIM * 5], dtype=tf.float32), name='b4') 168 | l3 = tf.nn.sigmoid(tf.matmul(self.tfs, w4) + bias4) 169 | 170 | # print(np.shape(l4)) 171 | 172 | w5 = tf.Variable(tf.truncated_normal( 173 | [self.S_DIM * 5, 50], stddev=0.01), name='w5') 174 | bias5 = tf.Variable(tf.constant( 175 | 0.0, shape=[50],
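# Annotation (not in the original source): _build_anet ends just below with a diagonal
# Gaussian policy head on top of these two sigmoid layers, roughly
#   mu    = sigmoid(l4 @ w6 + b6)                 # bounded to (0, 1)
#   sigma = decay * sigmoid(l4 @ w7 + b7) + 1e-5
#   pi(a|s) = Normal(loc=mu, scale=sigma)
# so the `decay` placeholder directly scales the exploration noise: smaller values give
# an almost deterministic policy, and sampled actions are clipped to [0.0001, 1] in
# choose_action().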
dtype=tf.float32), name='b5') 176 | l4 = tf.nn.sigmoid(tf.matmul(l3, w5) + bias5) 177 | 178 | # dnc = DNC(input_size=50, output_size=50, 179 | # seq_len=0, num_words=10, word_size=4, num_heads=1) 180 | # l5 = tf.reshape(dnc.run(l4), [-1, np.shape(dnc.run(l4))[-1]]) 181 | # # print(np.shape(l4)) 182 | 183 | w6 = tf.Variable(tf.truncated_normal( 184 | [50, self.A_DIM], stddev=0.01), name='w6') 185 | bias6 = tf.Variable(tf.constant( 186 | 0.0, shape=[self.A_DIM], dtype=tf.float32), name='b6') 187 | 188 | mu = 1 * tf.nn.sigmoid(tf.matmul(l4, w6) + bias6) 189 | # mu = 5 * tf.nn.sigmoid(tf.matmul(l4, w6) + bias6) + 0.0001 190 | # print('mu:', np.shape(mu)) 191 | 192 | w7 = tf.Variable(tf.truncated_normal( 193 | [50, self.A_DIM], stddev=0.01), name='w7') 194 | bias7 = tf.Variable(tf.constant( 195 | 0.0, shape=[self.A_DIM], dtype=tf.float32), name='b7') 196 | sigma = self.decay * \ 197 | tf.nn.sigmoid(tf.matmul(l4, w7) + bias7) + 0.00001 198 | # print('sigma:',np.shape(sigma)) 199 | 200 | # mu = tf.layers.dense(l2, A_DIM, tf.nn.sigmoid, trainable=trainable) 201 | # sigma = tf.layers.dense(l2, A_DIM, tf.nn.sigmoid, trainable=trainable) + 0.0001 202 | norm_dist = tf.distributions.Normal( 203 | loc=mu, scale=sigma) # loc:mean scale:sigma 204 | params = tf.get_collection( 205 | tf.GraphKeys.GLOBAL_VARIABLES, scope=name) 206 | # tf.nn.l2_loss(w4) + tf.nn.l2_loss(w5) + tf.nn.l2_loss(w6) + tf.nn.l2_loss(w7) 207 | l2_loss_a = 0 208 | return norm_dist, params, l2_loss_a 209 | 210 | def choose_action(self, s, dec): 211 | if s.ndim < 2: 212 | s = s[np.newaxis, :] 213 | a = self.sess.run(self.sample_op, feed_dict={ 214 | self.tfs: s, self.decay: dec}) 215 | # a, sigma, mu = self.sess.run([self.sample_op, self.sigma, self.mu], feed_dict={self.tfs: s, self.decay: dec}) 216 | 217 | return np.clip(a[0], 0.0001, 1) # clip the output 218 | 219 | def get_v(self, s): 220 | if s.ndim < 2: 221 | s = s[np.newaxis, :] 222 | return self.sess.run(self.v, {self.tfs: s})[0, 0] 223 | -------------------------------------------------------------------------------- /IPDPS2020/Dataset/report_bus_0004.log: -------------------------------------------------------------------------------- 1 | 1453107448798 79 51.0325625521879 3.7237956936691 0 79 2 | 1453107449798 1079 51.0325625521879 3.7237956936691 1424136 1000 3 | 1453107450798 2079 51.032558254626 3.72392371929715 3657124 1000 4 | 1453107451798 3079 51.0326454931218 3.72361662023614 4572416 1000 5 | 1453107452798 4079 51.0325959206683 3.72359565978997 5329992 1000 6 | 1453107453799 5080 51.0326180103173 3.72365310657542 3663864 1001 7 | 1453107454799 6080 51.0326570070828 3.72372172139942 6301900 1000 8 | 1453107455798 7079 51.0326688841881 3.72377293499155 4030520 999 9 | 1453107456799 8080 51.0326688841881 3.72377293499155 5321156 1001 10 | 1453107457798 9079 51.0326944676468 3.72384646634974 6223920 999 11 | 1453107458798 10079 51.0327609322476 3.72395957450815 6282224 1000 12 | 1453107459798 11079 51.0328310369242 3.72403222219113 6742696 1000 13 | 1453107460798 12079 51.0328708268154 3.72407007346913 6283028 1000 14 | 1453107461799 13080 51.0329051278317 3.72411015783086 4668124 1001 15 | 1453107462798 14079 51.0329358023907 3.72415351311999 6811444 999 16 | 1453107463798 15079 51.032968958404 3.7241977619163 7427156 1000 17 | 1453107464798 16079 51.033000195267 3.72424457789595 7694336 1000 18 | 1453107465798 17079 51.033000195267 3.72424457789595 3911680 1000 19 | 1453107466799 18080 51.0330303203995 3.72428328910753 3584000 1001 20 | 1453107467798 19079 51.0330303203995 
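A note on the Dataset/*.log traces: each whitespace-separated row appears to hold a Unix timestamp in milliseconds, the elapsed time since the start of the trace in milliseconds, a GPS latitude and longitude, the number of bytes received during the preceding interval, and the length of that interval in milliseconds. These column names are inferred from the values themselves rather than documented here. A minimal parsing sketch under that assumption (load_trace is a hypothetical helper, not part of the repository):

import numpy as np

def load_trace(path):
    # assumed columns: ts_ms, elapsed_ms, latitude, longitude, bytes_in_interval, interval_ms
    cols = np.loadtxt(path)
    ts_ms, elapsed_ms, lat, lon, nbytes, interval_ms = cols.T
    # bytes received over a ~1000 ms interval -> megabits per second
    throughput_mbps = nbytes * 8.0 / (interval_ms * 1000.0)
    return elapsed_ms / 1000.0, throughput_mbps

# e.g. t_sec, mbps = load_trace("Dataset/report_bus_0004.log")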
3.72428328910753 7907328 999 21 | 1453107468798 20079 51.0331133245574 3.72436652451327 7999488 1000 22 | 1453107469798 21079 51.033139646858 3.72439516556227 7909376 1000 23 | 1453107470799 22080 51.0331673659814 3.72442500238983 5009696 1001 24 | 1453107471798 23079 51.0331936351695 3.72445523707668 3481600 999 25 | 1453107472798 24079 51.0332219261873 3.72448493949159 8278016 1000 26 | 1453107473798 25079 51.0332512327342 3.7245166808396 7397376 1000 27 | 1453107474798 26079 51.03327737875 3.72454523522796 7686144 1000 28 | 1453107475798 27079 51.03327737875 3.72454523522796 4298752 1000 29 | 1453107476798 28079 51.0333019245168 3.72457550383737 4124672 1000 30 | 1453107477798 29079 51.0333019245168 3.72457550383737 7278592 1000 31 | 1453107478798 30079 51.0333794657052 3.72467108008744 7565312 1000 32 | 1453107479799 31080 51.0334065808924 3.72470563364132 7645184 1001 33 | 1453107480798 32079 51.0334440257162 3.72475619820288 4175872 999 34 | 1453107481799 33080 51.0334785194097 3.72479793972168 3059712 1001 35 | 1453107482798 34079 51.0335183397765 3.7248471375783 7524352 999 36 | 1453107483798 35079 51.0335585395726 3.72490183841514 7634944 1000 37 | 1453107484798 36079 51.0335936052331 3.72494680914246 7645184 1000 38 | 1453107485799 37080 51.0336247629863 3.72498539843458 4759552 1001 39 | 1453107486798 38079 51.0336580292704 3.72503002266016 3880960 999 40 | 1453107487799 39080 51.033692640772 3.72507797500064 6801408 1001 41 | 1453107488798 40079 51.0337274156921 3.7251245286632 7632896 999 42 | 1453107489798 41079 51.0337623541981 3.72516521765012 7896920 1000 43 | 1453107490798 42079 51.0337989255985 3.72520480595628 3638740 1000 44 | 1453107491799 43080 51.0338364260711 3.72524298739117 4091904 1001 45 | 1453107492798 44079 51.0338713578278 3.72528323349616 7475200 999 46 | 1453107493798 45079 51.0339045732778 3.72532080572004 4971436 1000 47 | 1453107494799 46080 51.0339403915732 3.72535673340535 4542992 1001 48 | 1453107495798 47079 51.0339745831318 3.72538979574403 3631732 999 49 | 1453107496798 48079 51.0340048823541 3.72541543132205 5199872 1000 50 | 1453107497799 49080 51.0340313120032 3.72543374839197 6048156 1001 51 | 1453107498798 50079 51.0340550246685 3.72544715863641 3482724 999 52 | 1453107499798 51079 51.0340791573035 3.72546621638255 4507088 1000 53 | 1453107500798 52079 51.0341036682345 3.72548796994229 2327080 1000 54 | 1453107501798 53079 51.0341334716471 3.72551053230299 4607628 1000 55 | 1453107502798 54079 51.0342044881148 3.72554923959659 3346488 1000 56 | 1453107503798 55079 51.0342044881148 3.72554923959659 4169960 1000 57 | 1453107504798 56079 51.0342463160894 3.72556347769332 4649668 1000 58 | 1453107505798 57079 51.0342894674853 3.72558071367567 4164164 1000 59 | 1453107506798 58079 51.0343328397115 3.72559829699098 4632576 1000 60 | 1453107507799 59080 51.0343328397115 3.72559829699098 7460012 1001 61 | 1453107508798 60079 51.0344111755832 3.72562546275694 7680776 999 62 | 1453107509798 61079 51.0344325810202 3.7256333795421 7060208 1000 63 | 1453107510798 62079 51.0344458807728 3.72564086729248 4702208 1000 64 | 1453107511798 63079 51.0344556206524 3.72564965569273 3518464 1000 65 | 1453107512798 64079 51.0344556206524 3.72564965569273 7417856 1000 66 | 1453107513798 65079 51.0344639625293 3.72565725207594 7694336 1000 67 | 1453107514798 66079 51.0344639625293 3.72565725207594 7739392 1000 68 | 1453107515798 67079 51.0344639625293 3.72565725207594 6680576 1000 69 | 1453107516798 68079 51.0344639625293 3.72565725207594 3715072 1000 70 | 
1453107517799 69080 51.0344639625293 3.72565725207594 7927808 1001 71 | 1453107518799 70080 51.0344639625293 3.72565725207594 7796736 1000 72 | 1453107519798 71079 51.0344639625293 3.72565725207594 7610368 999 73 | 1453107520798 72079 51.0344639625293 3.72565725207594 5398528 1000 74 | 1453107521799 73080 51.0344639625293 3.72565725207594 4739072 1001 75 | 1453107522798 74079 51.0344639625293 3.72565725207594 4978688 999 76 | 1453107523798 75079 51.0344639625293 3.72565725207594 7854080 1000 77 | 1453107524799 76080 51.0344639625293 3.72565725207594 7522304 1001 78 | 1453107525798 77079 51.0344639625293 3.72565725207594 6107136 999 79 | 1453107526798 78079 51.0344639625293 3.72565725207594 3678208 1000 80 | 1453107527798 79079 51.0344639625293 3.72565725207594 6909952 1000 81 | 1453107528798 80079 51.0344639625293 3.72565725207594 7809024 1000 82 | 1453107529798 81079 51.0344639625293 3.72565725207594 8151040 1000 83 | 1453107530799 82080 51.0344639625293 3.72565725207594 5911964 1001 84 | 1453107531799 83080 51.0344639625293 3.72565725207594 3205480 1000 85 | 1453107532799 84080 51.0344639625293 3.72565725207594 4137212 1000 86 | 1453107533799 85080 51.0344734464858 3.72566154113243 3555688 1000 87 | 1453107534798 86079 51.0344925390173 3.72566834667877 3751068 999 88 | 1453107535798 87079 51.034516012231 3.72567375869569 3745988 1000 89 | 1453107536798 88079 51.0345478270306 3.72568274991511 3809448 1000 90 | 1453107537798 89079 51.034594236209 3.72569472470614 4000864 1000 91 | 1453107538799 90080 51.0346485073985 3.72571532124215 4067668 1001 92 | 1453107539798 91079 51.034712277163 3.7257435410138 4579104 999 93 | 1453107540798 92079 51.0347914574087 3.72578136184606 5243020 1000 94 | 1453107541798 93079 51.03487992001 3.72582437299387 5791008 1000 95 | 1453107542798 94079 51.0349733248061 3.72587211440613 4443008 1000 96 | 1453107543798 95079 51.0350654741187 3.7259176968043 5820664 1000 97 | 1453107544799 96080 51.03515394815 3.7259632977241 7574412 1001 98 | 1453107545798 97079 51.0352349940986 3.72599733025225 7625636 999 99 | 1453107546799 98080 51.0353111024418 3.72603391639375 8985356 1001 100 | 1453107547798 99079 51.0353831735714 3.72606986643933 6020580 999 101 | 1453107548799 100080 51.0354521572686 3.72610056510209 7934432 1001 102 | 1453107549799 101080 51.0355180943764 3.72612225020069 8105420 1000 103 | 1453107550798 102079 51.0355834833951 3.72613701905553 5081960 999 104 | 1453107551799 103080 51.0356448365478 3.72615786222216 6993424 1001 105 | 1453107552799 104080 51.0356448365478 3.72615786222216 5094092 1000 106 | 1453107553798 105079 51.03574758628 3.72618425039729 6595764 999 107 | 1453107554798 106079 51.0357894890545 3.72620323149864 8189100 1000 108 | 1453107555799 107080 51.0358317936868 3.72622254833121 7835924 1001 109 | 1453107556798 108079 51.0358800716422 3.72623416863585 6873488 999 110 | 1453107557798 109079 51.035933894362 3.72624165983923 7254900 1000 111 | 1453107558799 110080 51.0359903996846 3.7262516574202 10908016 1001 112 | 1453107559798 111079 51.0360503227072 3.72626215509392 10820060 999 113 | 1453107560798 112079 51.0361129569354 3.72627574291014 9474080 1000 114 | 1453107561799 113080 51.036175770736 3.72629137981714 10391584 1001 115 | 1453107562798 114079 51.0362407508357 3.72630855840313 10475696 999 116 | 1453107563799 115080 51.0363061715086 3.72632931850452 8695708 1001 117 | 1453107564798 116079 51.0363061715086 3.72632931850452 8303680 999 118 | 1453107565798 117079 51.0364960378946 3.72637263607726 8036776 1000 119 | 
1453107566799 118080 51.0364960378946 3.72637263607726 3500756 1001 120 | 1453107567798 119079 51.0365514311276 3.72638263400006 7248948 999 121 | 1453107568799 120080 51.0366011045808 3.72639599223284 4798128 1001 122 | 1453107569799 121080 51.0366467093691 3.72640873322745 5154560 1000 123 | 1453107570798 122079 51.0366467093691 3.72640873322745 5106416 999 124 | 1453107571798 123079 51.0367299536278 3.72644481514698 5360348 1000 125 | 1453107572799 124080 51.0367717751107 3.72646175863229 5296940 1001 126 | 1453107573798 125079 51.0368112089327 3.72647503117144 5048960 999 127 | 1453107574798 126079 51.0368464684367 3.72648924624047 5235632 1000 128 | 1453107575799 127080 51.0368755321167 3.72650194994762 4888496 1001 129 | 1453107576798 128079 51.0368953997622 3.72650799661995 5209920 999 130 | 1453107577798 129079 51.0368953997622 3.72650799661995 5739884 1000 131 | 1453107578798 130079 51.0368953997622 3.72650799661995 5365040 1000 132 | 1453107579798 131079 51.0368953997622 3.72650799661995 5222956 1000 133 | 1453107580799 132080 51.0368953997622 3.72650799661995 4986796 1001 134 | 1453107581798 133079 51.0368953997622 3.72650799661995 4608812 999 135 | 1453107582798 134079 51.0368953997622 3.72650799661995 3968512 1000 136 | 1453107583799 135080 51.0368953997622 3.72650799661995 5224356 1001 137 | 1453107584798 136079 51.0368953997622 3.72650799661995 3869252 999 138 | 1453107585799 137080 51.0368953997622 3.72650799661995 3895720 1001 139 | 1453107586799 138080 51.0368953997622 3.72650799661995 5380568 1000 140 | 1453107587799 139080 51.0368953997622 3.72650799661995 5499140 1000 141 | 1453107588798 140079 51.0368953997622 3.72650799661995 5985820 999 142 | 1453107589798 141079 51.0369179299054 3.72651695734154 5490352 1000 143 | 1453107590799 142080 51.0369472136595 3.72652873358553 5285120 1001 144 | 1453107591798 143079 51.0369839929901 3.72653587286565 4843752 999 145 | 1453107592798 144079 51.037029259798 3.72654564485745 3727220 1000 146 | 1453107593798 145079 51.0370781608149 3.7265601287983 3375392 1000 147 | 1453107594798 146079 51.0371253404827 3.72657359628896 3622076 1000 148 | 1453107595798 147079 51.0371795601437 3.72658166565723 3897068 1000 149 | 1453107596799 148080 51.0372430905412 3.72658493224598 3789228 1001 150 | 1453107597799 149080 51.0373097615294 3.72657198724415 3338996 1000 151 | 1453107598798 150079 51.0373820941714 3.72654808983427 3334952 999 152 | 1453107599798 151079 51.037455513834 3.72651568248934 3340344 1000 153 | 1453107600798 152079 51.0375278596479 3.72648529108317 4971424 1000 154 | 1453107601798 153079 51.0375935478984 3.72646473877255 5103528 1000 155 | 1453107602798 154079 51.0376518682993 3.72644600959245 5413568 1000 156 | 1453107603798 155079 51.0377029959058 3.72642672131085 7116508 1000 157 | 1453107604799 156080 51.0377497179675 3.72640794503501 8624944 1001 158 | 1453107605798 157079 51.0377911594254 3.72639384015731 9573288 999 159 | 1453107606798 158079 51.037826538342 3.72637826051608 11841152 1000 160 | 1453107607798 159079 51.0378571225059 3.72636475753914 10434548 1000 161 | 1453107608798 160079 51.0378571225059 3.72636475753914 11464092 1000 162 | 1453107609798 161079 51.0379024647486 3.72634899377111 11569472 1000 163 | 1453107610798 162079 51.0379024647486 3.72634899377111 13060484 1000 164 | 1453107611798 163079 51.0379024647486 3.72634899377111 13054032 1000 165 | 1453107612798 164079 51.0379024647486 3.72634899377111 12837004 1000 166 | 1453107613798 165079 51.0379024647486 3.72634899377111 12442040 1000 167 | 
1453107614799 166080 51.0379024647486 3.72634899377111 12712988 1001 168 | 1453107615798 167079 51.0379024647486 3.72634899377111 12451580 999 169 | 1453107616798 168079 51.0379024647486 3.72634899377111 13117984 1000 170 | 1453107617798 169079 51.0379024647486 3.72634899377111 13085088 1000 171 | 1453107618799 170080 51.0379024647486 3.72634899377111 12791668 1001 172 | 1453107619798 171079 51.0379024647486 3.72634899377111 13127624 999 173 | 1453107620799 172080 51.0379024647486 3.72634899377111 13068108 1001 174 | 1453107621798 173079 51.0379024647486 3.72634899377111 13079644 999 175 | 1453107622799 174080 51.0379024647486 3.72634899377111 13035160 1001 176 | 1453107623799 175080 51.0379024647486 3.72634899377111 12106144 1000 177 | 1453107624799 176080 51.0379024647486 3.72634899377111 12104740 1000 178 | 1453107625798 177079 51.0379024647486 3.72634899377111 11989112 999 179 | 1453107626798 178079 51.0379024647486 3.72634899377111 11976624 1000 180 | 1453107627798 179079 51.0379024647486 3.72634899377111 11940864 1000 181 | 1453107628798 180079 51.0379024647486 3.72634899377111 12071080 1000 182 | 1453107629799 181080 51.0379024647486 3.72634899377111 12006588 1001 183 | 1453107630798 182079 51.0379024647486 3.72634899377111 11997532 999 184 | 1453107631798 183079 51.0379024647486 3.72634899377111 12012028 1000 185 | 1453107632798 184079 51.0379024647486 3.72634899377111 11758220 1000 186 | 1453107633798 185079 51.0379024647486 3.72634899377111 12150072 1000 187 | 1453107634799 186080 51.0379024647486 3.72634899377111 12087352 1001 188 | 1453107635799 187080 51.0379024647486 3.72634899377111 11859704 1000 189 | 1453107636799 188080 51.0379024647486 3.72634899377111 12141436 1000 190 | 1453107637799 189080 51.0379024647486 3.72634899377111 11908136 1000 191 | 1453107638798 190079 51.0379024647486 3.72634899377111 11980820 999 192 | 1453107639798 191079 51.0379024647486 3.72634899377111 12052768 1000 193 | 1453107640798 192079 51.0379024647486 3.72634899377111 11967544 1000 194 | 1453107641798 193079 51.0379024647486 3.72634899377111 11924076 1000 195 | 1453107642798 194079 51.0379024647486 3.72634899377111 12149856 1000 196 | 1453107643798 195079 51.0379024647486 3.72634899377111 11844364 1000 197 | 1453107644798 196079 51.0379024647486 3.72634899377111 11973284 1000 198 | 1453107645798 197079 51.0379024647486 3.72634899377111 11821996 1000 199 | 1453107646798 198079 51.0379024647486 3.72634899377111 10488512 1000 200 | 1453107647798 199079 51.0379024647486 3.72634899377111 13871324 1000 201 | 1453107648798 200079 51.0379024647486 3.72634899377111 11997252 1000 202 | 1453107649798 201079 51.0379024647486 3.72634899377111 12003292 1000 203 | 1453107650799 202080 51.0379024647486 3.72634899377111 11998444 1001 204 | 1453107651798 203079 51.0379024647486 3.72634899377111 11983720 999 205 | 1453107652798 204079 51.0379024647486 3.72634899377111 11557752 1000 206 | 1453107653798 205079 51.0379024647486 3.72634899377111 11422304 1000 207 | 1453107654798 206079 51.0379024647486 3.72634899377111 11894700 1000 208 | 1453107655798 207079 51.0379024647486 3.72634899377111 12992024 1000 209 | 1453107656798 208079 51.0379024647486 3.72634899377111 12103648 1000 210 | 1453107657798 209079 51.0379024647486 3.72634899377111 11974328 1000 211 | 1453107658798 210079 51.0379024647486 3.72634899377111 12057212 1000 212 | 1453107659798 211079 51.0379024647486 3.72634899377111 11989528 1000 213 | 1453107660798 212079 51.0379024647486 3.72634899377111 12000284 1000 214 | 1453107661798 213079 
51.0379024647486 3.72634899377111 11986908 1000 215 | 1453107662798 214079 51.0379024647486 3.72634899377111 12014724 1000 216 | 1453107663798 215079 51.0379024647486 3.72634899377111 11948672 1000 217 | 1453107664798 216079 51.0379024647486 3.72634899377111 12040596 1000 218 | 1453107665799 217080 51.0379024647486 3.72634899377111 12011068 1001 219 | 1453107666798 218079 51.0379024647486 3.72634899377111 11987896 999 220 | 1453107667799 219080 51.0379024647486 3.72634899377111 11898332 1001 221 | 1453107668798 220079 51.0379024647486 3.72634899377111 11461676 999 222 | 1453107669798 221079 51.0379024647486 3.72634899377111 12609612 1000 223 | 1453107670798 222079 51.0379024647486 3.72634899377111 12031984 1000 224 | 1453107671798 223079 51.0379024647486 3.72634899377111 11999740 1000 225 | 1453107672799 224080 51.0379024647486 3.72634899377111 11985680 1001 226 | 1453107673799 225080 51.0379024647486 3.72634899377111 12022200 1000 227 | 1453107674799 226080 51.0379024647486 3.72634899377111 11997200 1000 228 | 1453107675798 227079 51.0379024647486 3.72634899377111 11997252 999 229 | 1453107676798 228079 51.0379024647486 3.72634899377111 11494136 1000 230 | 1453107677798 229079 51.0379024647486 3.72634899377111 12422028 1000 231 | 1453107678798 230079 51.0379024647486 3.72634899377111 12018668 1000 232 | 1453107679798 231079 51.0379024647486 3.72634899377111 11187800 1000 233 | 1453107680798 232079 51.0379024647486 3.72634899377111 12482480 1000 234 | 1453107681798 233079 51.0379024647486 3.72634899377111 12383428 1000 235 | 1453107682798 234079 51.0379024647486 3.72634899377111 12005392 1000 236 | 1453107683798 235079 51.0379194854382 3.72634493007332 11995048 1000 237 | 1453107684798 236079 51.0379393313738 3.72633799796251 12010136 1000 238 | 1453107685799 237080 51.0379623100279 3.72632796102784 11994660 1001 239 | 1453107686799 238080 51.0379868332096 3.72632023731719 11380440 1000 240 | 1453107687799 239080 51.0380140884221 3.72631446406945 11442392 1000 241 | 1453107688799 240080 51.0380463330914 3.72630402759378 12067896 1000 242 | 1453107689798 241079 51.0380801877125 3.72629044406036 10660688 999 243 | 1453107690799 242080 51.0381527244655 3.72626387511894 7643852 1001 244 | 1453107691798 243079 51.0381905689373 3.72625571311806 5227544 999 245 | 1453107692798 244079 51.0382292335903 3.72625748216389 4705868 1000 246 | 1453107693798 245079 51.0382665669411 3.72626773566651 4498276 1000 247 | 1453107694798 246079 51.0383037752507 3.72627389948519 4749108 1000 248 | 1453107695799 247080 51.038346303116 3.72627973630476 5177564 1001 249 | 1453107696798 248079 51.0383966018884 3.7262866995807 5251136 999 250 | 1453107697798 249079 51.0384448401076 3.72628840388858 3811468 1000 251 | 1453107698798 250079 51.0384867967472 3.72628336060798 3642296 1000 252 | 1453107699798 251079 51.03852186473 3.72627494066444 4713956 1000 253 | 1453107700798 252079 51.0385518639805 3.72626319228306 4008952 1000 254 | 1453107701799 253080 51.0385806645602 3.72624918846792 4321688 1001 255 | 1453107702799 254080 51.0385806645602 3.72624918846792 2445272 1000 256 | 1453107703799 255080 51.0386368619727 3.72621086202426 4785400 1000 257 | 1453107704798 256079 51.0386659360153 3.7261937554022 4213848 999 258 | 1453107705798 257079 51.0386955274676 3.72618505856428 4053436 1000 259 | 1453107706798 258079 51.038721160787 3.72617291067585 3055916 1000 260 | 1453107707798 259079 51.0387414336397 3.7261583567535 3766312 1000 261 | 1453107708798 260079 51.0387414336397 3.7261583567535 2842932 1000 262 | 
1453107709798 261079 51.0387542273521 3.72614853930134 3982772 1000 263 | 1453107710798 262079 51.0387542273521 3.72614853930134 3422492 1000 264 | 1453107711798 263079 51.0387542273521 3.72614853930134 3503452 1000 265 | 1453107712798 264079 51.0387542273521 3.72614853930134 3677344 1000 266 | 1453107713798 265079 51.0387542273521 3.72614853930134 3665212 1000 267 | 1453107714799 266080 51.0387542273521 3.72614853930134 3673456 1001 268 | 1453107715799 267080 51.0387542273521 3.72614853930134 3553172 1000 269 | 1453107716799 268080 51.0387542273521 3.72614853930134 3524424 1000 270 | 1453107717798 269079 51.0387542273521 3.72614853930134 3605900 999 271 | 1453107718798 270079 51.0387542273521 3.72614853930134 3453472 1000 272 | 1453107719798 271079 51.0387542273521 3.72614853930134 3768308 1000 273 | 1453107720798 272079 51.0387542273521 3.72614853930134 4228676 1000 274 | 1453107721798 273079 51.0387542273521 3.72614853930134 4420092 1000 275 | 1453107722799 274080 51.0387542273521 3.72614853930134 3942900 1001 276 | 1453107723799 275080 51.0387704325703 3.72614107348688 5032084 1000 277 | 1453107724798 276079 51.0387870235558 3.72613201269261 5245068 999 278 | 1453107725799 277080 51.0388111734434 3.72612077810202 5382384 1001 279 | 1453107726798 278079 51.0388423673102 3.72610685834297 5165716 999 280 | 1453107727798 279079 51.038883255946 3.72609418248759 5743828 1000 281 | 1453107728799 280080 51.0389344685965 3.72608023223276 6103100 1001 282 | -------------------------------------------------------------------------------- /IPDPS2020/Dataset/report_bus_0011.log: -------------------------------------------------------------------------------- 1 | 1453115382313 5 51.0502389197823 3.73007557215122 0 5 2 | 1453115383313 1005 51.0502532330974 3.7300256930561 2617116 1000 3 | 1453115384313 2005 51.050257186612 3.72999828913751 5262592 1000 4 | 1453115385314 3006 51.0502947677451 3.72991962331269 5951420 1001 5 | 1453115386313 4005 51.0503381183441 3.72986583687563 6071392 999 6 | 1453115387313 5005 51.0504067970177 3.72980068188586 6897716 1000 7 | 1453115388313 6005 51.0505557116083 3.72964736209746 5997952 1000 8 | 1453115389314 7006 51.0506596825695 3.7295364342492 5030840 1001 9 | 1453115390314 8006 51.0507450545291 3.72945182392614 5028584 1000 10 | 1453115391313 9005 51.0508054468031 3.72936783224871 5060004 999 11 | 1453115392313 10005 51.0508704602969 3.72929905024963 5008104 1000 12 | 1453115393314 11006 51.0509361465546 3.72923743958709 5063192 1001 13 | 1453115394313 12005 51.0509926327997 3.72917213682592 5013108 999 14 | 1453115395314 13006 51.0510374536463 3.72910693496364 5068480 1001 15 | 1453115396313 14005 51.0510752933397 3.7290440482296 4598028 999 16 | 1453115397313 15005 51.0511076531906 3.7289800455453 4852800 1000 17 | 1453115398314 16006 51.0511408619697 3.72891529806908 5121256 1001 18 | 1453115399313 17005 51.0511737721743 3.7288482450863 5548164 999 19 | 1453115400313 18005 51.0512053980135 3.72878375346453 5075220 1000 20 | 1453115401314 19006 51.0512377551293 3.72871468207571 5001080 1001 21 | 1453115402313 20005 51.0512733202809 3.72865810166166 5053704 999 22 | 1453115403313 21005 51.0513143516969 3.72861855748251 5034884 1000 23 | 1453115404314 22006 51.051355581086 3.72862464399817 5047508 1001 24 | 1453115405313 23005 51.0513945397272 3.72863006998337 5017852 999 25 | 1453115406313 24005 51.0514323194938 3.72859156228406 5055960 1000 26 | 1453115407314 25006 51.0514601461758 3.7285310201878 5035040 1001 27 | 1453115408313 26005 51.0514898621723 
3.72847723106131 3385960 999 28 | 1453115409313 27005 51.0515326947143 3.72844944708941 5005312 1000 29 | 1453115410313 28005 51.05157462638 3.728422065137 6720404 1000 30 | 1453115411314 29006 51.0516132104947 3.72839454254428 5012956 1001 31 | 1453115412313 30005 51.0516451923644 3.72836937747068 5053804 999 32 | 1453115413313 31005 51.0516823068387 3.72834877941138 3980216 1000 33 | 1453115414313 32005 51.0517165395545 3.72833047540633 4845568 1000 34 | 1453115415313 33005 51.0517582505633 3.72831891458386 6243456 1000 35 | 1453115416313 34005 51.0517758134955 3.72831471933074 5066380 1000 36 | 1453115417314 35006 51.0517758134955 3.72831471933074 5041572 1001 37 | 1453115418314 36006 51.0517758134955 3.72831471933074 3797816 1000 38 | 1453115419314 37006 51.0517758134955 3.72831471933074 5367808 1000 39 | 1453115420314 38006 51.0517741903145 3.72832960302472 5962332 1000 40 | 1453115421313 39005 51.0517741903145 3.72832960302472 5038124 999 41 | 1453115422313 40005 51.0517741903145 3.72832960302472 3912492 1000 42 | 1453115423313 41005 51.0517741903145 3.72832960302472 2232320 1000 43 | 1453115424314 42006 51.0517741903145 3.72832960302472 5103616 1001 44 | 1453115425313 43005 51.0517784134427 3.72830569422472 7264256 999 45 | 1453115426313 44005 51.0517573714886 3.72827138945776 6657820 1000 46 | 1453115427313 45005 51.0517446128495 3.72823356596168 5063036 1000 47 | 1453115428313 46005 51.0517446128495 3.72823356596168 2543680 1000 48 | 1453115429314 47006 51.0517446128495 3.72823356596168 983040 1001 49 | 1453115430313 48005 51.0517451317312 3.72820234909399 5046272 999 50 | 1453115431313 49005 51.0518270473265 3.728103644904 7297024 1000 51 | 1453115432315 50007 51.0518802175432 3.7280620153958 7360716 1002 52 | 1453115433313 51005 51.0519378488078 3.72801538396597 4899980 998 53 | 1453115434314 52006 51.0519872232027 3.72796082383053 3391568 1001 54 | 1453115435313 53005 51.0520398849197 3.72788360208939 3382184 999 55 | 1453115436314 54006 51.0520987090131 3.72780431793352 3260760 1001 56 | 1453115437313 55005 51.0521553098306 3.72773134578316 3822928 999 57 | 1453115438313 56005 51.0522111224969 3.72766177283472 2873936 1000 58 | 1453115439313 57005 51.0522567961554 3.72761019867405 2664996 1000 59 | 1453115440314 58006 51.0523678904308 3.72749641161013 2621860 1001 60 | 1453115441313 59005 51.0524025398774 3.72742438402739 1992344 999 61 | 1453115442313 60005 51.0524323278118 3.72735476666982 1647256 1000 62 | 1453115443313 61005 51.0524647318736 3.72728702676251 1527284 1000 63 | 1453115444313 62005 51.0525033671966 3.72722225011616 1407312 1000 64 | 1453115445313 63005 51.052528488447 3.72714814301978 1113448 1000 65 | 1453115446313 64005 51.0525432971517 3.72707107329137 885636 1000 66 | 1453115447313 65005 51.0525599949984 3.72700443493567 603904 1000 67 | 1453115448313 66005 51.052576356111 3.72694644745706 25612 1000 68 | 1453115449313 67005 51.052576356111 3.72694644745706 0 1000 69 | 1453115450313 68005 51.0525829361347 3.72685835498596 0 1000 70 | 1453115451314 69006 51.0525829361347 3.72685835498596 0 1001 71 | 1453115452314 70006 51.0525794736188 3.72682190108182 4695136 1000 72 | 1453115453313 71005 51.0525813173956 3.726780283625 5097100 999 73 | 1453115454313 72005 51.0526030998299 3.72672085107757 4921784 1000 74 | 1453115455313 73005 51.0526235783099 3.72666757110651 5040220 1000 75 | 1453115456313 74005 51.0526629000228 3.72658676472113 4959604 1000 76 | 1453115457313 75005 51.0526963495401 3.72651471786833 5094120 1000 77 | 1453115458313 76005 51.0527529357721 
3.72642193758817 5092072 1000 78 | 1453115459313 77005 51.0527984287764 3.72635026223845 4453424 1000 79 | 1453115460313 78005 51.0528397936773 3.72627986991656 5644076 1000 80 | 1453115461313 79005 51.0529042111194 3.72621509000116 4898192 1000 81 | 1453115462313 80005 51.0529555282108 3.72614409816699 5113900 1000 82 | 1453115463314 81006 51.0530060007549 3.72607810626472 5040324 1001 83 | 1453115464313 82005 51.0530540402178 3.72599339363049 5096716 999 84 | 1453115465313 83005 51.0531049569107 3.72589434254266 3432304 1000 85 | 1453115466313 84005 51.0531690775393 3.72577928173607 5259264 1000 86 | 1453115467313 85005 51.0532232803408 3.7256680467296 6396828 1000 87 | 1453115468313 86005 51.0532693549321 3.72555614211548 4977384 1000 88 | 1453115469314 87006 51.0533144270144 3.72542998774108 3816808 1001 89 | 1453115470313 88005 51.0533531299525 3.72531025682616 4670412 999 90 | 1453115471313 89005 51.0533959661706 3.72518928597653 4847616 1000 91 | 1453115472314 90006 51.0534482210691 3.72502278740012 6885992 1001 92 | 1453115473313 91005 51.0534927376329 3.72487504579202 4973368 999 93 | 1453115474313 92005 51.0535370971412 3.72474026479661 5043412 1000 94 | 1453115475313 93005 51.0535737349387 3.72462368863521 4442724 1000 95 | 1453115476313 94005 51.0536412216116 3.72449876076779 3518464 1000 96 | 1453115477313 95005 51.0536544728254 3.7243855811883 5138676 1000 97 | 1453115478313 96005 51.0536616681716 3.72427402957131 2729700 1000 98 | 1453115479313 97005 51.0536712203463 3.72415664916703 4177920 1000 99 | 1453115480313 98005 51.0536860423626 3.72403749683058 6376920 1000 100 | 1453115481313 99005 51.0536954994129 3.72392040268075 4685648 1000 101 | 1453115482314 100006 51.0537109280363 3.72381381241076 4817752 1001 102 | 1453115483313 101005 51.053728906528 3.72370888070367 4902080 999 103 | 1453115484313 102005 51.0537441303716 3.7236066571133 2404616 1000 104 | 1453115485313 103005 51.0537563413841 3.72350694145245 7127688 1000 105 | 1453115486314 104006 51.0537711826557 3.72341276024798 4254988 1001 106 | 1453115487314 105006 51.0537858283307 3.72331785931781 3083016 1000 107 | 1453115488313 106005 51.0538000356239 3.72322290735841 4827048 999 108 | 1453115489313 107005 51.053814993302 3.72313209401136 2941904 1000 109 | 1453115490313 108005 51.0538257932631 3.72304893222344 3666640 1000 110 | 1453115491314 109006 51.0538336042169 3.72298515914777 3148928 1001 111 | 1453115492313 110005 51.0538399241272 3.72293448040796 2859808 999 112 | 1453115493313 111005 51.053850604496 3.72290411022051 2454008 1000 113 | 1453115494313 112005 51.0538514163303 3.7228886636986 2756660 1000 114 | 1453115495314 113006 51.0538417160102 3.7228937807204 2954816 1001 115 | 1453115496313 114005 51.0538300914859 3.72290789368262 3085444 999 116 | 1453115497313 115005 51.0538196627246 3.72292351444884 3092440 1000 117 | 1453115498314 116006 51.0538122240004 3.7229343281427 2879328 1001 118 | 1453115499313 117005 51.0538122240004 3.7229343281427 2983124 999 119 | 1453115500313 118005 51.0538122240004 3.7229343281427 2964408 1000 120 | 1453115501313 119005 51.0538033056631 3.72292590356681 2920960 1000 121 | 1453115502313 120005 51.0538033056631 3.72292590356681 2941336 1000 122 | 1453115503314 121006 51.0538033056631 3.72292590356681 2764180 1001 123 | 1453115504314 122006 51.0538033056631 3.72292590356681 2640260 1000 124 | 1453115505313 123005 51.0538033056631 3.72292590356681 3205236 999 125 | 1453115506313 124005 51.0538033056631 3.72292590356681 3049176 1000 126 | 1453115507313 125005 
51.0538033056631 3.72292590356681 3417180 1000 127 | 1453115508313 126005 51.0538033056631 3.72292590356681 3173764 1000 128 | 1453115509313 127005 51.0538033056631 3.72292590356681 2895832 1000 129 | 1453115510313 128005 51.0538033056631 3.72292590356681 3723624 1000 130 | 1453115511313 129005 51.0538033056631 3.72292590356681 3429468 1000 131 | 1453115512313 130005 51.0538033056631 3.72292590356681 2255048 1000 132 | 1453115513313 131005 51.0538033056631 3.72292590356681 3498060 1000 133 | 1453115514313 132005 51.0538033056631 3.72292590356681 3125248 1000 134 | 1453115515313 133005 51.0538033056631 3.72292590356681 3989496 1000 135 | 1453115516313 134005 51.0538033056631 3.72292590356681 2951784 1000 136 | 1453115517314 135006 51.0538033056631 3.72292590356681 2785824 1001 137 | 1453115518313 136005 51.0538033056631 3.72292590356681 2802388 999 138 | 1453115519313 137005 51.0538033056631 3.72292590356681 2914116 1000 139 | 1453115520314 138006 51.0538033056631 3.72292590356681 2980272 1001 140 | 1453115521313 139005 51.0538033056631 3.72292590356681 2988024 999 141 | 1453115522313 140005 51.0538033056631 3.72292590356681 3108980 1000 142 | 1453115523313 141005 51.0538033056631 3.72292590356681 3047828 1000 143 | 1453115524314 142006 51.0538033056631 3.72292590356681 2590908 1001 144 | 1453115525314 143006 51.0538033056631 3.72292590356681 3831016 1000 145 | 1453115526313 144005 51.0538033056631 3.72292590356681 3490128 999 146 | 1453115527314 145006 51.0538033056631 3.72292590356681 3433148 1001 147 | 1453115528313 146005 51.0538033056631 3.72292590356681 3248680 999 148 | 1453115529313 147005 51.0538033056631 3.72292590356681 3928248 1000 149 | 1453115530313 148005 51.0538033056631 3.72292590356681 4406904 1000 150 | 1453115531313 149005 51.0538033056631 3.72292590356681 4011984 1000 151 | 1453115532313 150005 51.0538033056631 3.72292590356681 3874360 1000 152 | 1453115533313 151005 51.0538033056631 3.72292590356681 4370552 1000 153 | 1453115534314 152006 51.0538033056631 3.72292590356681 4141044 1001 154 | 1453115535313 153005 51.0538033056631 3.72292590356681 4816896 999 155 | 1453115536313 154005 51.0538033056631 3.72292590356681 5151576 1000 156 | 1453115537313 155005 51.0538033056631 3.72292590356681 5035480 1000 157 | 1453115538313 156005 51.0538033056631 3.72292590356681 3566024 1000 158 | 1453115539313 157005 51.0538033056631 3.72292590356681 2049052 1000 159 | 1453115540314 158006 51.0538033056631 3.72292590356681 1339392 1001 160 | 1453115541314 159006 51.0538033056631 3.72292590356681 5433344 1000 161 | 1453115542314 160006 51.0538033056631 3.72292590356681 7884800 1000 162 | 1453115543313 161005 51.0538033056631 3.72292590356681 4455128 999 163 | 1453115544313 162005 51.0538033056631 3.72292590356681 3528084 1000 164 | 1453115545314 163006 51.0538033056631 3.72292590356681 3928064 1001 165 | 1453115546313 164005 51.0538033056631 3.72292590356681 4258844 999 166 | 1453115547313 165005 51.0538033056631 3.72292590356681 3650780 1000 167 | 1453115548313 166005 51.0538033056631 3.72292590356681 3100400 1000 168 | 1453115549314 167006 51.0538033056631 3.72292590356681 3124348 1001 169 | 1453115550313 168005 51.0537925993571 3.72291116432018 3413452 999 170 | 1453115551313 169005 51.0537896947361 3.72289300404103 2430704 1000 171 | 1453115552314 170006 51.0537921646752 3.72286947932315 2434876 1001 172 | 1453115553314 171006 51.0537956694695 3.72283807646857 1667036 1000 173 | 1453115554314 172006 51.0537956694695 3.72283807646857 4116196 1000 174 | 1453115555314 173006 
51.0538145549276 3.72280442609797 4473700 1000 175 | 1453115556313 174005 51.0538324137703 3.72272417784381 4100720 999 176 | 1453115557313 175005 51.0538372589252 3.722696681771 3956276 1000 177 | 1453115558313 176005 51.0538455629033 3.72266413338749 3988732 1000 178 | 1453115559313 177005 51.0538765888901 3.72261211039656 4111856 1000 179 | 1453115560314 178006 51.0538765888901 3.72261211039656 3659776 1001 180 | 1453115561313 179005 51.0539466096429 3.72248105615818 5363280 999 181 | 1453115562313 180005 51.053986985597 3.72240245612136 6149084 1000 182 | 1453115563313 181005 51.0540353592878 3.72232611555616 7189972 1000 183 | 1453115564314 182006 51.0540353592878 3.72232611555616 5946704 1001 184 | 1453115565314 183006 51.0540621878583 3.72224068083009 3106816 1000 185 | 1453115566314 184006 51.0541045579949 3.722041863018 7002112 1000 186 | 1453115567313 185005 51.0541103548515 3.72192961030346 6112980 999 187 | 1453115568313 186005 51.0541102720985 3.72181045494275 4753024 1000 188 | 1453115569313 187005 51.0541085452772 3.72168804785376 3863848 1000 189 | 1453115570314 188006 51.0541085452772 3.72168804785376 3225308 1001 190 | 1453115571314 189006 51.0540677919909 3.72142056564362 2256516 1000 191 | 1453115572313 190005 51.0540580243187 3.72128740360489 5798588 999 192 | 1453115573314 191006 51.0540421049992 3.72116060957703 5028584 1001 193 | 1453115574313 192005 51.0540421049992 3.72116060957703 4972416 999 194 | 1453115575314 193006 51.0540457013536 3.72105148699532 4538368 1001 195 | 1453115576313 194005 51.054106590681 3.72088556163779 5608384 999 196 | 1453115577313 195005 51.0540843421567 3.72077839697958 5038824 1000 197 | 1453115578323 196015 51.054054046151 3.72067195090223 5051760 1010 198 | 1453115579314 197006 51.0540155540832 3.72055731683305 4678444 991 199 | 1453115580313 198005 51.0539768156725 3.72043962416665 3061760 999 200 | 1453115581314 199006 51.0539400714808 3.72031927507875 7363980 1001 201 | 1453115582314 200006 51.0539155406917 3.72020811135438 5038824 1000 202 | 1453115583314 201006 51.0539021709815 3.72010071011168 5040172 1000 203 | 1453115584313 202005 51.0538768476286 3.71998553839611 4711940 999 204 | 1453115585313 203005 51.0538533744003 3.71987683174126 4009984 1000 205 | 1453115586314 204006 51.0538403298669 3.71977591315882 5715968 1001 206 | 1453115587313 205005 51.0538389561105 3.7196813059953 5730884 999 207 | 1453115588314 206006 51.0538392945252 3.71959342251167 5023996 1001 208 | 1453115589313 207005 51.0538490798764 3.71951259993715 5038824 999 209 | 1453115590314 208006 51.0538581808954 3.7194337368153 3370184 1001 210 | 1453115591313 209005 51.0538558430348 3.71934854706869 6706116 999 211 | 1453115592313 210005 51.0538477979075 3.71925944554438 5021116 1000 212 | 1453115593313 211005 51.0538489357058 3.7191744542798 5059228 1000 213 | 1453115594314 212006 51.0538678907611 3.71911298776762 5036128 1001 214 | 1453115595313 213005 51.0538670043165 3.71903423256621 3182592 999 215 | 1453115596313 214005 51.0538041324949 3.7189433628745 6219776 1000 216 | 1453115597313 215005 51.0537911828008 3.71887287884915 5710708 1000 217 | 1453115598314 216006 51.0537767731403 3.71880971871423 5036932 1001 218 | 1453115599313 217005 51.0537554085303 3.71874630151992 4912656 999 219 | 1453115600313 218005 51.053717948322 3.71866748582159 3315256 1000 220 | 1453115601313 219005 51.0537030651162 3.71859874712311 4401344 1000 221 | 1453115602313 220005 51.0536832728007 3.71853034311656 2957512 1000 222 | 1453115603314 221006 51.053712342959 
3.71839524109311 971908 1001 223 | 1453115604314 222006 51.053712342959 3.71839524109311 4697780 1000 224 | 1453115605313 223005 51.0537486687028 3.71831811169303 3731876 999 225 | 1453115606313 224005 51.0537924970958 3.71823922713733 4342048 1000 226 | 1453115607314 225006 51.0538390395961 3.71816067735771 4291280 1001 227 | 1453115608313 226005 51.0538819264597 3.71808466887436 2645424 999 228 | 1453115609313 227005 51.0539204206617 3.71801015319456 990780 1000 229 | 1453115610313 228005 51.0539685524146 3.71794197266562 861372 1000 230 | 1453115611314 229006 51.0540069879486 3.71787200760756 2053704 1001 231 | 1453115612313 230005 51.0540475316686 3.71780253989203 1633776 999 232 | 1453115613313 231005 51.0541251410012 3.71766171741248 788580 1000 233 | -------------------------------------------------------------------------------- /IPDPS2020/Dataset/report_car_0007.log: -------------------------------------------------------------------------------- 1 | 1454607996013 378 51.054446507346 3.73935390080985 11432 378 2 | 1454607997013 1378 51.054446507346 3.73935390080985 4243504 1000 3 | 1454607998014 2379 51.0544374047244 3.73935423936446 6060608 1001 4 | 1454607999013 3378 51.0544374047244 3.73935423936446 6049824 999 5 | 1454608000013 4378 51.0544374047244 3.73935423936446 6157664 1000 6 | 1454608001014 5379 51.0544374047244 3.73935423936446 6155280 1001 7 | 1454608002013 6378 51.0544374047244 3.73935423936446 5926144 999 8 | 1454608003013 7378 51.0544006062063 3.73934583214857 6044432 1000 9 | 1454608004013 8378 51.0543523351024 3.73934974335527 5991860 1000 10 | 1454608005014 9379 51.0542959438414 3.73936993776723 5087380 1001 11 | 1454608006013 10378 51.0542386502411 3.73940151554864 3923712 999 12 | 1454608007014 11379 51.054173401066 3.73944017300242 3397632 1001 13 | 1454608008013 12378 51.0540954030544 3.73948235890021 4369444 999 14 | 1454608009014 13379 51.0540103389534 3.73952591093688 3225584 1001 15 | 1454608010013 14378 51.0539242219322 3.7395632967063 2982344 999 16 | 1454608011013 15378 51.0538385491862 3.73960065902559 2585464 1000 17 | 1454608012013 16378 51.0537477754515 3.73964179437922 2802492 1000 18 | 1454608013013 17378 51.0536534621599 3.73968175970843 3793272 1000 19 | 1454608014014 18379 51.0535602369478 3.73971940510839 2652864 1001 20 | 1454608015013 19378 51.0534659614249 3.73976119828071 2588160 999 21 | 1454608016013 20378 51.0533651313847 3.73980497537188 2487060 1000 22 | 1454608017013 21378 51.0532640610236 3.73984955288886 2910024 1000 23 | 1454608018013 22378 51.0531681166994 3.7398854239244 3787152 1000 24 | 1454608019013 23378 51.0530726823832 3.7399299322039 3339436 1000 25 | 1454608020013 24378 51.0529728159051 3.73998231448601 3726680 1000 26 | 1454608021013 25378 51.0528727547906 3.74003270377461 3412900 1000 27 | 1454608022013 26378 51.0527720416389 3.74008381100412 3308200 1000 28 | 1454608023013 27378 51.0526702363058 3.74013977120223 2897368 1000 29 | 1454608024013 28378 51.0525684552611 3.74019464739046 2418908 1000 30 | 1454608025013 29378 51.0524659271486 3.7402460412691 2237060 1000 31 | 1454608026013 30378 51.0523692093677 3.74027919212404 2372400 1000 32 | 1454608027014 31379 51.0522682034177 3.74032803533957 1499676 1001 33 | 1454608028014 32379 51.0521620109692 3.74039506746414 3316576 1000 34 | 1454608029014 33379 51.0520517917296 3.74049496270402 3225840 1000 35 | 1454608030013 34378 51.0519486300504 3.74058226228777 4224216 999 36 | 1454608031013 35378 51.0518546342234 3.74068054770408 3401444 1000 37 | 1454608032013 36378 
51.0517666198874 3.74078946649297 3375392 1000 38 | 1454608033013 37378 51.0516774483079 3.74088484131995 2832588 1000 39 | 1454608034013 38378 51.0515919127537 3.74100993621628 3225324 1000 40 | 1454608035013 39378 51.0515040970813 3.74113552383867 2834044 1000 41 | 1454608036013 40378 51.0514120908248 3.74125084108021 3064156 1000 42 | 1454608037013 41378 51.0513264447027 3.74135154325137 3931520 1000 43 | 1454608038013 42378 51.0512464423745 3.7414468340169 3240188 1000 44 | 1454608039013 43378 51.0511614173009 3.74154540291699 4201296 1000 45 | 1454608040013 44378 51.0510743370639 3.74164604194596 4526840 1000 46 | 1454608041014 45379 51.0509828800836 3.74175564710952 4096176 1001 47 | 1454608042013 46378 51.0508921486038 3.74185822883369 4478248 999 48 | 1454608043013 47378 51.0507973923095 3.74196154021124 4810752 1000 49 | 1454608044013 48378 51.0507058207864 3.74206324782865 4648960 1000 50 | 1454608045013 49378 51.0506139306143 3.74216872477605 5226112 1000 51 | 1454608046013 50378 51.0505221764731 3.74227749965019 4150372 1000 52 | 1454608047014 51379 51.0503477209221 3.74247560599946 4690028 1001 53 | 1454608048013 52378 51.0503477209221 3.74247560599946 2640472 999 54 | 1454608049014 53379 51.050268973554 3.74257065890437 4573764 1001 55 | 1454608050013 54378 51.0501896135372 3.74266024358981 5594876 999 56 | 1454608051013 55378 51.0501022300884 3.74275296709677 4653328 1000 57 | 1454608052013 56378 51.0500124659161 3.74285532426659 4804860 1000 58 | 1454608053013 57378 51.0499267962465 3.74295339012984 3965816 1000 59 | 1454608054013 58378 51.0498497195763 3.7430339673557 4747656 1000 60 | 1454608055014 59379 51.0497831036887 3.74310193478059 4801576 1001 61 | 1454608056013 60378 51.0497224058708 3.74316979540439 4499624 999 62 | 1454608057013 61378 51.0496695400053 3.74323212796814 4211676 1000 63 | 1454608058013 62378 51.049627107725 3.74327835247093 3897864 1000 64 | 1454608059013 63378 51.049627107725 3.74327835247093 4347716 1000 65 | 1454608060014 64379 51.0495673380095 3.74334767065496 4929200 1001 66 | 1454608061014 65379 51.0495424833211 3.74337523593412 4123228 1000 67 | 1454608062014 66379 51.0495180383947 3.74340755669658 3263552 1000 68 | 1454608063013 67378 51.0494915445215 3.74344002403327 5890048 999 69 | 1454608064013 68378 51.0494555276267 3.74348614918816 3883008 1000 70 | 1454608065013 69378 51.0494119275341 3.74354800001656 6393856 1000 71 | 1454608066014 70379 51.0493598666479 3.74361908846474 5656756 1001 72 | 1454608067014 71379 51.0492990637804 3.74369349425135 6695352 1000 73 | 1454608068013 72378 51.049236073153 3.74377025903827 5491548 999 74 | 1454608069013 73378 51.0491739990293 3.74386111687273 4265984 1000 75 | 1454608070013 74378 51.0491097274651 3.74395500550242 4945920 1000 76 | 1454608071013 75378 51.049041051027 3.74404490762693 6465536 1000 77 | 1454608072013 76378 51.0489679021244 3.74414135893925 6727680 1000 78 | 1454608073014 77379 51.0488896498682 3.74423802679991 5906432 1001 79 | 1454608074013 78378 51.0488103754686 3.74433467089226 1802240 999 80 | 1454608075014 79379 51.0487303385808 3.74443421965835 3387392 1001 81 | 1454608076013 80378 51.0486499714509 3.74453259601684 6287360 999 82 | 1454608077013 81378 51.0485646866966 3.74463257426041 6668288 1000 83 | 1454608078013 82378 51.0484729673638 3.74472356670663 6094848 1000 84 | 1454608079014 83379 51.0483895609764 3.74481517712121 3878912 1001 85 | 1454608080013 84378 51.0483083771917 3.74490597590831 5447680 999 86 | 1454608081013 85378 51.0482167290041 3.74500444203828 6172672 
1000 87 | 1454608082013 86378 51.0481385056906 3.74510003510659 6545408 1000 88 | 1454608083013 87378 51.0480536843245 3.74519743289467 6240256 1000 89 | 1454608084013 88378 51.0479678948281 3.74529758987283 3891200 1000 90 | 1454608085013 89378 51.0478825511726 3.74539946660234 5023744 1000 91 | 1454608086013 90378 51.0477972271991 3.74550553801204 6258688 1000 92 | 1454608087013 91378 51.047712119575 3.74560871554794 6322176 1000 93 | 1454608088013 92378 51.0476289625413 3.74570173685029 5824512 1000 94 | 1454608089013 93378 51.0475441501156 3.74578359676267 4050944 1000 95 | 1454608090013 94378 51.04745502188 3.74585434470511 5253120 1000 96 | 1454608091013 95378 51.0473569040072 3.74589830047689 6428672 1000 97 | 1454608092013 96378 51.0472498616324 3.74592209716122 6676480 1000 98 | 1454608093013 97378 51.0471476606212 3.74592701774283 6131712 1000 99 | 1454608094013 98378 51.0470473682333 3.74590708093942 4919296 1000 100 | 1454608095014 99379 51.0469430916005 3.7458547612052 4505600 1001 101 | 1454608096013 100378 51.0468396583612 3.74577734145743 6500352 999 102 | 1454608097013 101378 51.0467422445906 3.74567431881288 6561792 1000 103 | 1454608098013 102378 51.0466444308066 3.74558102019432 6928384 1000 104 | 1454608099013 103378 51.04655420748 3.74547606712978 4653056 1000 105 | 1454608100013 104378 51.0464694214526 3.74536372783036 3788800 1000 106 | 1454608101014 105379 51.0463842602656 3.74525092093246 5885952 1001 107 | 1454608102013 106378 51.0463000441321 3.74513696815751 6516736 999 108 | 1454608103014 107379 51.0462178563156 3.74502446819507 6475776 1001 109 | 1454608104014 108379 51.0461344761569 3.74492484704123 4892672 1000 110 | 1454608105014 109379 51.0460523828781 3.74481592758374 4268032 1000 111 | 1454608106013 110378 51.0459681661655 3.74470347229314 6283264 999 112 | 1454608107014 111379 51.0458844865681 3.7445941323258 6658048 1001 113 | 1454608108013 112378 51.0457986572935 3.74448278181261 6298628 999 114 | 1454608109013 113378 51.0457118203102 3.74437160482288 3416064 1000 115 | 1454608110013 114378 51.0456227325913 3.74425845568347 4624384 1000 116 | 1454608111013 115378 51.0455350923314 3.74413922551332 6520832 1000 117 | 1454608112013 116378 51.0454474951706 3.74401580607347 6430720 1000 118 | 1454608113014 117379 51.0453597475419 3.74389053027196 6135808 1001 119 | 1454608114013 118378 51.0452693935989 3.74376733240397 4321280 999 120 | 1454608115013 119378 51.0451799767307 3.74364216075516 4216832 1000 121 | 1454608116013 120378 51.0450960562391 3.74350631828942 6823936 1000 122 | 1454608117013 121378 51.045011586128 3.74336973301335 6481920 1000 123 | 1454608118014 122379 51.0449251704659 3.74323062943673 6467584 1001 124 | 1454608119013 123378 51.0448361038707 3.74309443061043 4943872 999 125 | 1454608120013 124378 51.0447460828965 3.74295596169429 3936256 1000 126 | 1454608121013 125378 51.0446565044394 3.74282195992709 6328320 1000 127 | 1454608122013 126378 51.0445681270198 3.74269102156091 6537216 1000 128 | 1454608123013 127378 51.0444799684158 3.74255842399194 6793216 1000 129 | 1454608124014 128379 51.0443865943742 3.74243053107037 3479552 1001 130 | 1454608125013 129378 51.0442921872144 3.74230211721367 4845568 999 131 | 1454608126013 130378 51.0441959152453 3.74217529397503 6418432 1000 132 | 1454608127013 131378 51.0440987010827 3.74205071852138 6473728 1000 133 | 1454608128013 132378 51.0440008467545 3.74192449162792 6846464 1000 134 | 1454608129013 133378 51.043903753603 3.7417950767884 4091904 1000 135 | 1454608130013 134378 
51.0438075886297 3.74166632970119 4337664 1000 136 | 1454608131014 135379 51.0437109804687 3.74154004354522 6477824 1001 137 | 1454608132013 136378 51.0436110430122 3.74141547393609 6400000 999 138 | 1454608133014 137379 51.0435098953089 3.7412928568688 6821888 1001 139 | 1454608134013 138378 51.0434093417112 3.74117500959285 4519936 999 140 | 1454608135013 139378 51.0433053606761 3.74106850454237 4177920 1000 141 | 1454608136013 140378 51.0432048433597 3.74095415453361 6135808 1000 142 | 1454608137013 141378 51.0431043921454 3.74084397410713 6426624 1000 143 | 1454608138013 142378 51.0430023292387 3.74074009709513 6451200 1000 144 | 1454608139014 143379 51.0428966851228 3.74063738332358 4833280 1001 145 | 1454608140013 144378 51.0427891216169 3.74053944845379 4022272 999 146 | 1454608141013 145378 51.0425745963781 3.74034942386179 6289408 1000 147 | 1454608142013 146378 51.0425745963781 3.74034942386179 2674340 1000 148 | 1454608143013 147378 51.0424690125703 3.74024531327205 4309088 1000 149 | 1454608144013 148378 51.0423641935588 3.74013542099205 4847616 1000 150 | 1454608145013 149378 51.0423641935588 3.74013542099205 4390912 1000 151 | 1454608146013 150378 51.042153478053 3.73992654765826 5986488 1000 152 | 1454608147013 151378 51.0420490050483 3.73982351165726 3419072 1000 153 | 1454608148013 152378 51.0419457192791 3.73971838343607 4061776 1000 154 | 1454608149014 153379 51.041843172692 3.739615273211 4190736 1001 155 | 1454608150013 154378 51.0417353023957 3.73951072782566 4501504 999 156 | 1454608151013 155378 51.0416250057255 3.73940382245208 6119424 1000 157 | 1454608152013 156378 51.0415114517308 3.73929187100584 6639616 1000 158 | 1454608153013 157378 51.0413910144346 3.73917082768657 6815744 1000 159 | 1454608154013 158378 51.0412678528664 3.73905351051165 4923392 1000 160 | 1454608155013 159378 51.0411463339845 3.73894071879314 4595712 1000 161 | 1454608156014 160379 51.0410205786165 3.73883253102148 5515264 1001 162 | 1454608157013 161378 51.0408883553691 3.73873050963805 6496256 999 163 | 1454608158013 162378 51.0407511383033 3.73864011077698 6219776 1000 164 | 1454608159013 163378 51.0406126430458 3.73856209475587 5253120 1000 165 | 1454608160013 164378 51.0404773619489 3.73849294377227 4239360 1000 166 | 1454608161013 165378 51.0403479530209 3.73843790581475 5267456 1000 167 | 1454608162014 166379 51.0402229236098 3.73839213728936 6582272 1001 168 | 1454608163013 167378 51.0401010551228 3.73834909151727 6623232 999 169 | 1454608164013 168378 51.0401010551228 3.73834909151727 5660672 1000 170 | 1454608165014 169379 51.0399880184855 3.73829034750422 6119424 1001 171 | 1454608166014 170379 51.0397578238379 3.73819612185675 4765696 1000 172 | 1454608167013 171378 51.0396434825561 3.73814576733912 6019072 999 173 | 1454608168013 172378 51.0395303753925 3.7380932286942 6434816 1000 174 | 1454608169013 173378 51.0394206448552 3.73803885327583 5591040 1000 175 | 1454608170013 174378 51.0394206448552 3.73803885327583 4366336 1000 176 | 1454608171013 175378 51.0392102043974 3.73791645309476 4972544 1000 177 | 1454608172014 176379 51.0391160085264 3.73783961423685 6410240 1001 178 | 1454608173013 177378 51.0390327663577 3.73774237318493 6176768 999 179 | 1454608174013 178378 51.0389590336098 3.73762421368114 5603328 1000 180 | 1454608175013 179378 51.0388953734936 3.73749365023758 4241408 1000 181 | 1454608176013 180378 51.0388375546227 3.73736094231915 6328320 1000 182 | 1454608177013 181378 51.0387468768984 3.7370838849388 5985440 1000 183 | 1454608178013 182378 51.0387468768984 
3.7370838849388 3287960 1000 184 | 1454608179013 183378 51.0387121838693 3.73694062733336 2465972 1000 185 | 1454608180013 184378 51.0386705729716 3.73662406843368 3444964 1000 186 | 1454608181013 185378 51.0386538196906 3.73647140793263 397948 1000 187 | 1454608182014 186379 51.0386301941943 3.73631645275809 4644964 1001 188 | 1454608183013 187378 51.0385983139335 3.73617046188828 2154104 999 189 | 1454608184014 188379 51.038601005651 3.73604778166685 1213200 1001 190 | 1454608185013 189378 51.0385922390544 3.7359123307853 1848108 999 191 | 1454608186014 190379 51.0385880770485 3.73575029419451 1622992 1001 192 | 1454608187013 191378 51.0385880770485 3.73575029419451 974604 999 193 | -------------------------------------------------------------------------------- /IPDPS2020/Dataset/report_car_0008.log: -------------------------------------------------------------------------------- 1 | 1454609144452 432 51.0387484313652 3.73009018052231 104444 432 2 | 1454609145452 1432 51.0387484313652 3.73009018052231 0 1000 3 | 1454609146451 2431 51.0386965303075 3.73012922972194 707700 999 4 | 1454609147451 3431 51.0386556151989 3.73016033740438 669956 1000 5 | 1454609148452 4432 51.0386264560501 3.73017208127455 404400 1001 6 | 1454609149452 5432 51.0385989709468 3.73017690901821 435404 1000 7 | 1454609150451 6431 51.0385766505193 3.7301812385264 1062224 999 8 | 1454609151451 7431 51.0385577801183 3.73017331450112 1490888 1000 9 | 1454609152451 8431 51.0385390708764 3.73016284734146 1517848 1000 10 | 1454609153451 9431 51.0385262803112 3.7301351130774 2045616 1000 11 | 1454609154451 10431 51.0385209875771 3.73011632208845 2613772 1000 12 | 1454609155451 11431 51.0385209875771 3.73011632208845 2646124 1000 13 | 1454609156451 12431 51.0385209875771 3.73011632208845 2814624 1000 14 | 1454609157451 13431 51.0385209875771 3.73011632208845 3225764 1000 15 | 1454609158451 14431 51.0385209875771 3.73011632208845 2885472 1000 16 | 1454609159452 15432 51.0385209875771 3.73011632208845 3021464 1001 17 | 1454609160451 16431 51.0385209875771 3.73011632208845 3127360 999 18 | 1454609161451 17431 51.0385209875771 3.73011632208845 3161060 1000 19 | 1454609162452 18432 51.0385209875771 3.73011632208845 3146232 1001 20 | 1454609163451 19431 51.0385209875771 3.73011632208845 2869892 999 21 | 1454609164451 20431 51.0385209875771 3.73011632208845 2898200 1000 22 | 1454609165452 21432 51.0385209875771 3.73011632208845 3229808 1001 23 | 1454609166451 22431 51.0385209875771 3.73011632208845 3031652 999 24 | 1454609167451 23431 51.0385209875771 3.73011632208845 3041088 1000 25 | 1454609168452 24432 51.0385209875771 3.73011632208845 2986520 1001 26 | 1454609169451 25431 51.0385209875771 3.73011632208845 3178196 999 27 | 1454609170452 26432 51.0385209875771 3.73011632208845 1194784 1001 28 | 1454609171452 27432 51.0385209875771 3.73011632208845 2948656 1000 29 | 1454609172451 28431 51.0385209875771 3.73011632208845 4231372 999 30 | 1454609173451 29431 51.0385209875771 3.73011632208845 2694652 1000 31 | 1454609174452 30432 51.0385209875771 3.73011632208845 2599256 1001 32 | 1454609175451 31431 51.0385209875771 3.73011632208845 2461136 999 33 | 1454609176451 32431 51.0385209875771 3.73011632208845 2431792 1000 34 | 1454609177452 33432 51.0385209875771 3.73011632208845 1469320 1001 35 | 1454609178452 34432 51.0385106549293 3.73011163134294 1461232 1000 36 | 1454609179451 35431 51.038492718257 3.73009135281256 1955948 999 37 | 1454609180452 36432 51.0384737174139 3.73005527228332 1237464 1001 38 | 1454609181451 37431 
51.0384681903515 3.72999446289274 1241508 999 39 | 1454609182452 38432 51.0384734898353 3.72991300095186 2397444 1001 40 | 1454609183451 39431 51.038479137179 3.72980805244142 1065568 999 41 | 1454609184451 40431 51.0384831914069 3.72967607807785 919576 1000 42 | 1454609185451 41431 51.0384873215862 3.72951513952333 2270440 1000 43 | 1454609186452 42432 51.0384896410179 3.72934164537102 1306708 1001 44 | 1454609187451 43431 51.0384905353831 3.72916606038341 1040160 999 45 | 1454609188451 44431 51.0384968102293 3.7289847038608 0 1000 46 | 1454609189451 45431 51.0385100310693 3.72879193064673 0 1000 47 | 1454609190451 46431 51.0385178039009 3.72859018276753 1455764 1000 48 | 1454609191451 47431 51.0385223881588 3.72838285628256 2144744 1000 49 | 1454609192451 48431 51.0385223881588 3.72838285628256 1866908 1000 50 | 1454609193452 49432 51.0385258400846 3.7281827106521 2588232 1001 51 | 1454609194451 50431 51.0385253770644 3.72799107537788 3322820 999 52 | 1454609195451 51431 51.0385134353761 3.72764806427271 2578724 1000 53 | 1454609196452 52432 51.0385134353761 3.72764806427271 2217460 1001 54 | 1454609197451 53431 51.0385038063395 3.72748994077875 2312008 999 55 | 1454609198452 54432 51.0384999535849 3.72722892992154 2662112 1001 56 | 1454609199451 55431 51.0384994048127 3.72712693860163 2092956 999 57 | 1454609200451 56431 51.0384994048127 3.72712693860163 2736644 1000 58 | 1454609201452 57432 51.038499236939 3.72704647203785 3165804 1001 59 | 1454609202452 58432 51.0384989426075 3.72699113121755 1794236 1000 60 | 1454609203452 59432 51.0384989426075 3.72699113121755 3751936 1000 61 | 1454609204452 60432 51.0384948608477 3.72692524020093 5160732 1000 62 | 1454609205451 61431 51.0384916243899 3.72690001040056 4761676 999 63 | 1454609206452 62432 51.0384873992418 3.72687876713296 5545884 1001 64 | 1454609207451 63431 51.0384840906 3.72686131272571 3223552 999 65 | 1454609208451 64431 51.0384816464831 3.72684081073263 7333888 1000 66 | 1454609209452 65432 51.0384804952359 3.7268115700377 5655540 1001 67 | 1454609210451 66431 51.0384804436003 3.72678323687365 5113916 999 68 | 1454609211451 67431 51.0384820708202 3.72676772119076 6038700 1000 69 | 1454609212451 68431 51.0384820708202 3.72676772119076 4099736 1000 70 | 1454609213451 69431 51.0384820708202 3.72676772119076 5369856 1000 71 | 1454609214451 70431 51.0384820708202 3.72676772119076 4783456 1000 72 | 1454609215451 71431 51.0384820708202 3.72676772119076 4313548 1000 73 | 1454609216451 72431 51.0384820708202 3.72676772119076 4521296 1000 74 | 1454609217452 73432 51.0384820708202 3.72676772119076 3150856 1001 75 | 1454609218451 74431 51.0384820708202 3.72676772119076 4714620 999 76 | 1454609219451 75431 51.0384820708202 3.72676772119076 3766312 1000 77 | 1454609220451 76431 51.0384820708202 3.72676772119076 4178800 1000 78 | 1454609221451 77431 51.0384820708202 3.72676772119076 4195236 1000 79 | 1454609222451 78431 51.0384820708202 3.72676772119076 4157724 1000 80 | 1454609223452 79432 51.0384820708202 3.72676772119076 5003672 1001 81 | 1454609224451 80431 51.0384820708202 3.72676772119076 4070864 999 82 | 1454609225451 81431 51.0384820708202 3.72676772119076 4173504 1000 83 | 1454609226451 82431 51.0384820708202 3.72676772119076 3159012 1000 84 | 1454609227451 83431 51.0384820708202 3.72676772119076 3070504 1000 85 | 1454609228451 84431 51.0384820708202 3.72676772119076 5359940 1000 86 | 1454609229451 85431 51.0384820708202 3.72676772119076 4432872 1000 87 | 1454609230451 86431 51.0384820708202 3.72676772119076 3539904 1000 88 | 
1454609231451 87431 51.0384820708202 3.72676772119076 4296824 1000 89 | 1454609232451 88431 51.0384820708202 3.72676772119076 2552956 1000 90 | 1454609233452 89432 51.0384820708202 3.72676772119076 3874152 1001 91 | 1454609234451 90431 51.0384829755603 3.72671737642223 3382132 999 92 | 1454609235452 91432 51.0384856407155 3.72668028537969 4133720 1001 93 | 1454609236451 92431 51.0384922106754 3.72665298673291 2831396 999 94 | 1454609237451 93431 51.0384976891617 3.72663844881864 2525768 1000 95 | 1454609238451 94431 51.0384992548665 3.72662190195054 2945764 1000 96 | 1454609239451 95431 51.0384996496168 3.72659128299964 2860016 1000 97 | 1454609240452 96432 51.0385020310248 3.72654404978366 2462564 1001 98 | 1454609241452 97432 51.0385063114331 3.72649048073326 3276312 1000 99 | 1454609242452 98432 51.0385036347373 3.72643879681272 2780924 1000 100 | 1454609243452 99432 51.0384861426454 3.7263821748757 2344172 1000 101 | 1454609244451 100431 51.0384861426454 3.7263821748757 1515152 999 102 | 1454609245451 101431 51.0384559312385 3.72633153624268 4243504 1000 103 | 1454609246451 102431 51.0384224709879 3.72628164457624 5188452 1000 104 | 1454609247452 103432 51.0384090744107 3.72627699129936 3954836 1001 105 | 1454609248452 104432 51.0384090744107 3.72627699129936 5386676 1000 106 | 1454609249451 105431 51.0383959421748 3.72627888263217 4898296 999 107 | 1454609250452 106432 51.038381377404 3.72627746230508 4888440 1001 108 | 1454609251451 107431 51.0383603865491 3.72627253944438 4397412 999 109 | 1454609252452 108432 51.0383284788357 3.7262635324701 4363384 1001 110 | 1454609253451 109431 51.0383284788357 3.7262635324701 3426304 999 111 | 1454609254452 110432 51.0382873909634 3.7262609796977 6840320 1001 112 | 1454609255451 111431 51.0382435592185 3.72626473948496 7688192 999 113 | 1454609256452 112432 51.0381920619936 3.72627401908362 7770112 1001 114 | 1454609257453 113433 51.038130642559 3.72629175179369 6729728 1001 115 | 1454609258451 114431 51.0380598158876 3.72630930516134 3330048 998 116 | 1454609259451 115431 51.0379782558735 3.72632626670209 5341184 1000 117 | 1454609260452 116432 51.0378919713793 3.72634645049783 6438912 1001 118 | 1454609261451 117431 51.037805280482 3.72636318660114 7495680 999 119 | 1454609262453 118433 51.0376403435341 3.72638540579277 6211584 1002 120 | 1454609263451 119431 51.0375640225416 3.72639697071161 5232640 998 121 | 1454609264451 120431 51.0374903722337 3.72640422422094 4818944 1000 122 | 1454609265451 121431 51.0374903722337 3.72640422422094 4783460 1000 123 | 1454609266451 122431 51.0374903722337 3.72640422422094 2989164 1000 124 | 1454609267451 123431 51.0373408206315 3.72642361007352 4045348 1000 125 | 1454609268451 124431 51.0372752554216 3.7264354854777 3149296 1000 126 | 1454609269451 125431 51.0371379115715 3.72641959957826 2678784 1000 127 | 1454609270451 126431 51.0371379115715 3.72641959957826 6414336 1000 128 | 1454609271452 127432 51.0370682827323 3.72640342189569 5793824 1001 129 | 1454609272452 128432 51.0369978597707 3.72638839615881 4879144 1000 130 | 1454609273452 129432 51.0369243707203 3.72636810712411 4170828 1000 131 | 1454609274452 130432 51.0368496875995 3.726344142974 5099520 1000 132 | 1454609275451 131431 51.0367742002889 3.72632480675111 7368704 999 133 | 1454609276451 132431 51.0367096141603 3.72630201086852 6488800 1000 134 | 1454609277451 133431 51.036664631127 3.72628222728028 7248616 1000 135 | 1454609278452 134432 51.0366416292665 3.72625781161435 3788800 1001 136 | 1454609279451 135431 51.0366301243385 
3.72624760609366 4020224 999 137 | 1454609280452 136432 51.0366210258377 3.72625226269298 7385088 1001 138 | 1454609281451 137431 51.0366210258377 3.72625226269298 7933952 999 139 | 1454609282451 138431 51.0366067962146 3.7262613964852 7380992 1000 140 | 1454609283451 139431 51.0365811045286 3.72626439556281 5079040 1000 141 | 1454609284451 140431 51.0365811045286 3.72626439556281 3567616 1000 142 | 1454609285451 141431 51.0364619606942 3.72628349864523 6918144 1000 143 | 1454609286451 142431 51.0363786228971 3.72628151703242 7876608 1000 144 | 1454609287451 143431 51.0362906280863 3.72626221587071 7962624 1000 145 | 1454609288452 144432 51.0362043246351 3.72622966009772 4816896 1001 146 | 1454609289451 145431 51.0361162121113 3.72618847257172 4911104 999 147 | 1454609290451 146431 51.0360272191699 3.72614527475543 6664192 1000 148 | 1454609291452 147432 51.0359355403203 3.72610480279075 7725056 1001 149 | 1454609292451 148431 51.0358486361149 3.72606843871822 7821312 999 150 | 1454609293451 149431 51.0357680312965 3.72603457722635 5099520 1000 151 | 1454609294451 150431 51.035693075584 3.72600741221261 4149248 1000 152 | 1454609295451 151431 51.0356212891395 3.72599033398722 6578176 1000 153 | 1454609296451 152431 51.035556330503 3.72597123232227 7821312 1000 154 | 1454609297451 153431 51.0355000174912 3.72595867960695 7446528 1000 155 | 1454609298452 154432 51.0355000174912 3.72595867960695 5566464 1001 156 | 1454609299452 155432 51.0354193555249 3.72593454578371 4675584 1000 157 | 1454609300451 156431 51.0353991676508 3.7259198298918 7036928 999 158 | 1454609301451 157431 51.0353923681307 3.72589888659465 7397376 1000 159 | 1454609302451 158431 51.0353912450174 3.72585796915947 7585792 1000 160 | 1454609303451 159431 51.0353824255921 3.72582604499448 5718528 1000 161 | 1454609304452 160432 51.0353705499198 3.72580647338243 3567616 1001 162 | 1454609305451 161431 51.0353597078814 3.72579956049189 5693440 999 163 | 1454609306451 162431 51.0353597078814 3.72579956049189 6923260 1000 164 | 1454609307452 163432 51.0353597078814 3.72579956049189 5575192 1001 165 | 1454609308452 164432 51.0353597078814 3.72579956049189 4656916 1000 166 | 1454609309452 165432 51.0353597078814 3.72579956049189 4501504 1000 167 | 1454609310452 166432 51.0353597078814 3.72579956049189 6590464 1000 168 | 1454609311452 167432 51.0353597078814 3.72579956049189 6135452 1000 169 | 1454609312451 168431 51.0353597078814 3.72579956049189 5996720 999 170 | 1454609313451 169431 51.0353597078814 3.72579956049189 4824596 1000 171 | -------------------------------------------------------------------------------- /IPDPS2020/Dataset/report_foot_0004.log: -------------------------------------------------------------------------------- 1 | 1453198122300 36 51.0377635496993 3.71870048721394 0 36 2 | 1453198123300 1036 51.0377545804772 3.71869296982367 1673516 1000 3 | 1453198124300 2036 51.0377322232968 3.71868100492141 2330692 1000 4 | 1453198125300 3036 51.0377322232968 3.71868100492141 1626472 1000 5 | 1453198126300 4036 51.037713802198 3.71867994545715 2388080 1000 6 | 1453198127299 5035 51.0377010206341 3.71867906984274 1897284 999 7 | 1453198128299 6035 51.0376863814199 3.71867997558797 2428888 1000 8 | 1453198129299 7035 51.0376699509062 3.71869038756583 2042920 1000 9 | 1453198130300 8036 51.0376529383317 3.71869895117907 2079472 1001 10 | 1453198131300 9036 51.0376328389559 3.71870603539639 1966524 1000 11 | 1453198132299 10035 51.0376226800822 3.71870633134285 2065136 999 12 | 1453198133300 11036 51.0376226800822 
3.71870633134285 3011432 1001 13 | 1453198134299 12035 51.0376095226443 3.71871178216205 2593552 999 14 | 1453198135299 13035 51.0376005867974 3.71872140535235 2224200 1000 15 | 1453198136299 14035 51.0376005867974 3.71872140535235 2302384 1000 16 | 1453198137299 15035 51.0375853863409 3.71872422935396 2223552 1000 17 | 1453198138299 16035 51.0375729781178 3.71871388954466 1925592 1000 18 | 1453198139300 17036 51.0375662581261 3.71870354452572 1920900 1001 19 | 1453198140299 18035 51.0375602488806 3.71868531341958 2213416 999 20 | 1453198141299 19035 51.0375514295724 3.71866270429063 2670388 1000 21 | 1453198142299 20035 51.0375439525914 3.7186422049929 2399440 1000 22 | 1453198143300 21036 51.037536864478 3.71861988375903 2538284 1001 23 | 1453198144299 22035 51.0375276925785 3.718608018535 2044916 999 24 | 1453198145300 23036 51.0375215865308 3.71859301203356 3024912 1001 25 | 1453198146299 24035 51.0375172822095 3.71857607234704 2741832 999 26 | 1453198147299 25035 51.0375086671322 3.71855721271663 2326648 1000 27 | 1453198148300 26036 51.0375001027732 3.71854213928537 2530896 1001 28 | 1453198149300 27036 51.0374921947605 3.71852581899327 2643376 1000 29 | 1453198150299 28035 51.0374844366382 3.71851102985102 1871024 999 30 | 1453198151299 29035 51.0374787428504 3.71849425295744 2427748 1000 31 | 1453198152300 30036 51.0374675525862 3.71847410541052 2197240 1001 32 | 1453198153300 31036 51.0374600211279 3.7184517733145 2853716 1000 33 | 1453198154300 32036 51.0374515265596 3.71842891522569 1250944 1000 34 | 1453198155300 33036 51.0374449617441 3.71840943478095 3254072 1000 35 | 1453198156299 34035 51.0374358871603 3.71839215990292 3080180 999 36 | 1453198157300 35036 51.037426254052 3.71837094394079 2065136 1001 37 | 1453198158299 36035 51.0374226548993 3.71834832068942 2398092 999 38 | 1453198159299 37035 51.0374183339755 3.71832465774233 2185108 1000 39 | 1453198160299 38035 51.0374128642916 3.71830740119101 1899332 1000 40 | 1453198161299 39035 51.0374111597395 3.71829304670314 1671520 1000 41 | 1453198162299 40035 51.037407283356 3.71827465797274 2047612 1000 42 | 1453198163299 41035 51.0374041524391 3.71825575500155 1728136 1000 43 | 1453198164300 42036 51.0374030346767 3.71823285373218 2287556 1001 44 | 1453198165300 43036 51.0373980958921 3.71821112959911 3138144 1000 45 | 1453198166299 44035 51.0373916467833 3.71818968595714 2835544 999 46 | 1453198167299 45035 51.0373814695625 3.7181717353644 3366604 1000 47 | 1453198168300 46036 51.0373755378319 3.71814896127961 2702740 1001 48 | 1453198169299 47035 51.0373700120266 3.7181279098919 2942684 999 49 | 1453198170299 48035 51.0373654506151 3.71811302087986 2115012 1000 50 | 1453198171299 49035 51.0373641485597 3.71809723964828 2477624 1000 51 | 1453198172299 50035 51.037361301579 3.71808169659057 2784320 1000 52 | 1453198173300 51036 51.0373596899607 3.71806016582464 3661816 1001 53 | 1453198174299 52035 51.0373589265169 3.71803794818408 2421064 999 54 | 1453198175300 53036 51.0373592043758 3.71801566567626 3163700 1001 55 | 1453198176299 54035 51.0373554492255 3.71799011894335 3166452 999 56 | 1453198177299 55035 51.0373500197083 3.71796288627923 2131188 1000 57 | 1453198178299 56035 51.0373432981082 3.71794197637813 2453360 1000 58 | 1453198179299 57035 51.0373366144157 3.71792047679737 2484364 1000 59 | 1453198180299 58035 51.0373258006347 3.71789816557307 2245768 1000 60 | 1453198181299 59035 51.0373189231347 3.71787571290993 2303732 1000 61 | 1453198182299 60035 51.0373118287621 3.71785073063087 2210720 1000 62 | 
1453198183299 61035 51.0373053146989 3.71782865273327 300604 1000 63 | 1453198184299 62035 51.0372989842254 3.71780461513794 4153188 1000 64 | 1453198185299 63035 51.0372940236241 3.71778202771912 3957728 1000 65 | 1453198186300 64036 51.037289395458 3.71776156333363 3135448 1001 66 | 1453198187300 65036 51.0372812940839 3.71774184224301 2599748 1000 67 | 1453198188299 66035 51.0372722929104 3.71772448595387 3483776 999 68 | 1453198189299 67035 51.0372646087446 3.71770546554466 3150276 1000 69 | 1453198190299 68035 51.0372564504998 3.71768557884414 3212284 1000 70 | 1453198191299 69035 51.0372491541686 3.71766853403448 3651732 1000 71 | 1453198192299 70035 51.0372425801937 3.71765133437301 2771488 1000 72 | 1453198193299 71035 51.0372359543284 3.71763305794451 3615336 1000 73 | 1453198194299 72035 51.0372282818464 3.71761949275894 2516716 1000 74 | 1453198195300 73036 51.0372179441542 3.71759978854213 3314732 1001 75 | 1453198196300 74036 51.0372084095138 3.71758419281012 2829452 1000 76 | 1453198197300 75036 51.0372012249663 3.71756333496643 3446344 1000 77 | 1453198198299 76035 51.0371953632047 3.71754596911304 3149420 999 78 | 1453198199299 77035 51.0371885999265 3.71752723048753 3042436 1000 79 | 1453198200299 78035 51.03718152739 3.71751086010209 2508628 1000 80 | 1453198201300 79036 51.0371731581214 3.71749094482209 3065352 1001 81 | 1453198202299 80035 51.0371654275191 3.71747087332687 3082876 999 82 | 1453198203300 81036 51.0371572124785 3.71745286602483 2733744 1001 83 | 1453198204299 82035 51.0371493279365 3.71743744634368 2127144 999 84 | 1453198205300 83036 51.037143179604 3.71741803187294 2195892 1001 85 | 1453198206300 84036 51.0371343761646 3.71740054611453 2020652 1000 86 | 1453198207300 85036 51.0371251169033 3.7173847882816 2104876 1000 87 | 1453198208299 86035 51.0371199016965 3.71736765025818 2769492 999 88 | 1453198209300 87036 51.0371144230893 3.717355226136 3079480 1001 89 | 1453198210299 88035 51.0371083254674 3.7173404471978 2403484 999 90 | 1453198211299 89035 51.0371083254674 3.7173404471978 2624556 1000 91 | 1453198212299 90035 51.037095852476 3.71732038158119 2990876 1000 92 | 1453198213299 91035 51.0370861015231 3.71731259588744 2843268 1000 93 | 1453198214299 92035 51.0370766921377 3.7173058647974 2499192 1000 94 | 1453198215299 93035 51.0370688577342 3.71729767803318 1993692 1000 95 | 1453198216300 94036 51.0370599908963 3.71728841134358 1841368 1001 96 | 1453198217299 95035 51.0370521825916 3.71727737114084 2061092 999 97 | 1453198218300 96036 51.0370428710227 3.71726496212159 2159496 1001 98 | 1453198219299 97035 51.0370327834446 3.71725174186524 2441228 999 99 | 1453198220300 98036 51.0370209784561 3.71723221993345 1897984 1001 100 | 1453198221299 99035 51.0370080185254 3.71721455339473 1863144 999 101 | 1453198222299 100035 51.0369997074889 3.71719896870587 1744024 1000 102 | 1453198223299 101035 51.0369880918079 3.71718814627227 1846244 1000 103 | 1453198224300 102036 51.0369730644558 3.71718434968608 2131784 1001 104 | 1453198225299 103035 51.0369639146287 3.71717757983594 2016608 999 105 | 1453198226299 104035 51.0369576470274 3.71716606198781 1954600 1000 106 | 1453198227300 105036 51.0369518938127 3.71715216314082 1988300 1001 107 | 1453198228299 106035 51.0369465709421 3.71713882360216 1721396 999 108 | 1453198229299 107035 51.0369426186721 3.71712517614396 1689044 1000 109 | 1453198230299 108035 51.0369426186721 3.71712517614396 2144668 1000 110 | 1453198231299 109035 51.0369350456882 3.71710556077185 2501888 1000 111 | 1453198232300 110036 
51.0369281636771 3.71709519534079 1989648 1001 112 | 1453198233300 111036 51.0369186249213 3.71708197151032 2035480 1000 113 | 1453198234299 112035 51.0369082082955 3.7170665205223 1950556 999 114 | 1453198235299 113035 51.0368940989241 3.71706131873872 2062440 1000 115 | 1453198236299 114035 51.0368756012352 3.71706411716425 1976168 1000 116 | 1453198237299 115035 51.0368545284925 3.71707321414713 1560984 1000 117 | 1453198238299 116035 51.036837340238 3.71708662477875 2629300 1000 118 | 1453198239299 117035 51.0368190491315 3.71708236763238 1006256 1000 119 | 1453198240299 118035 51.0368029837465 3.71707711069247 3959128 1000 120 | 1453198241300 119036 51.0367898212355 3.71707801469232 2025448 1001 121 | 1453198242299 120035 51.0367749418233 3.71707944411698 2211344 999 122 | 1453198243300 121036 51.0367597306048 3.71708892038484 2410144 1001 123 | 1453198244299 122035 51.0367435108847 3.71708286217182 2293752 999 124 | 1453198245300 123036 51.0367271045458 3.71707584443738 2088596 1001 125 | 1453198246299 124035 51.0367121565859 3.71707350459161 1648604 999 126 | 1453198247299 125035 51.036694536996 3.7170711967073 1745896 1000 127 | 1453198248299 126035 51.036676678616 3.71706834464923 2539396 1000 128 | 1453198249299 127035 51.0366537113337 3.71706969160652 2109572 1000 129 | 1453198250299 128035 51.0366149343411 3.71709018274494 1924292 1000 130 | 1453198251299 129035 51.036573117769 3.71711713818241 1422140 1000 131 | 1453198252299 130035 51.0365423317857 3.71712603451887 1338564 1000 132 | 1453198253299 131035 51.0365176943136 3.71712778209953 1763184 1000 133 | 1453198254299 132035 51.0364977703838 3.7171284820816 2552464 1000 134 | 1453198255299 133035 51.036476863302 3.71712600093468 2675080 1000 135 | 1453198256299 134035 51.0364575204289 3.71711873239043 2650168 1000 136 | 1453198257299 135035 51.0364344727511 3.7171154362485 2396124 1000 137 | 1453198258299 136035 51.0364121378893 3.71711526656344 2240996 1000 138 | 1453198259299 137035 51.0363838872612 3.71712394080331 1288688 1000 139 | 1453198260299 138035 51.0363589447228 3.71712703107323 384180 1000 140 | 1453198261300 139036 51.036335532542 3.71714255371295 452928 1001 141 | 1453198262299 140035 51.0363101382677 3.71716543164167 427316 999 142 | 1453198263299 141035 51.0362921442836 3.71718130824565 563464 1000 143 | 1453198264299 142035 51.0362752776841 3.71719301745958 444840 1000 144 | 1453198265300 143036 51.036265892234 3.71720048251083 621428 1001 145 | 1453198266299 144035 51.036265892234 3.71720048251083 601208 999 146 | 1453198267300 145036 51.0362488153694 3.71721264583235 761620 1001 147 | 1453198268299 146035 51.036237142764 3.71721795867963 1210504 999 148 | 1453198269299 147035 51.0362261260226 3.71722424084146 1513804 1000 149 | 1453198270299 148035 51.0362135381612 3.7172407994535 1601424 1000 150 | 1453198271299 149035 51.0361980701529 3.71725806416186 1703872 1000 151 | 1453198272299 150035 51.0361837555619 3.71727188348954 1556112 1000 152 | 1453198273299 151035 51.0361704436386 3.71728295912426 1401400 1000 153 | 1453198274299 152035 51.0361578161878 3.71729542114264 1424836 1000 154 | 1453198275299 153035 51.0361450490041 3.71730832184916 1171412 1000 155 | 1453198276299 154035 51.0361290175712 3.7173227061684 1319692 1000 156 | 1453198277299 155035 51.0361112541992 3.71733903014685 423272 1000 157 | 1453198278299 156035 51.0360968396683 3.71735564109029 2793808 1000 158 | 1453198279299 157035 51.0360829423638 3.71737393964667 1531224 1000 159 | 1453198280299 158035 51.0360688149469 3.7173921711724 
2130248 1000 160 | 1453198281299 159035 51.0360493063101 3.71741897030246 2616924 1000 161 | 1453198282300 160036 51.0360280323973 3.71744842127014 1716308 1001 162 | 1453198283299 161035 51.0360104943932 3.71747103627506 2214944 999 163 | 1453198284301 162037 51.0359922918022 3.71749445396075 1922948 1002 164 | 1453198285299 163035 51.0359791904207 3.71751396127272 1555592 998 165 | 1453198286299 164035 51.0359679477742 3.71753214298251 2083308 1000 166 | 1453198287299 165035 51.0359589758808 3.71754254995519 1680416 1000 167 | 1453198288300 166036 51.0359513354565 3.71755617820549 1689584 1001 168 | 1453198289299 167035 51.0359398478562 3.71757741500578 1893940 999 169 | 1453198290299 168035 51.0359297838447 3.71759891106633 2204940 1000 170 | 1453198291299 169035 51.035923091637 3.7176139139752 2294684 1000 171 | 1453198292299 170035 51.0359177908267 3.71762705111766 1973472 1000 172 | 1453198293300 171036 51.0359091189121 3.7176512434434 2024696 1001 173 | 1453198294300 172036 51.0359076376879 3.71767887284907 2030088 1000 174 | 1453198295299 173035 51.0359059115638 3.71770376968599 2605140 999 175 | 1453198296300 174036 51.0359010340796 3.71772691209763 2505052 1001 176 | 1453198297300 175036 51.0358929597892 3.71774792118911 2333780 1000 177 | 1453198298299 176035 51.0358887305941 3.71776809146409 2326332 999 178 | 1453198299299 177035 51.0358787287078 3.71778766983951 2103580 1000 179 | 1453198300299 178035 51.0358727896063 3.71780564031246 2518712 1000 180 | 1453198301299 179035 51.03586509106 3.71782028882108 2547720 1000 181 | 1453198302299 180035 51.0358493108453 3.71783391887105 1978844 1000 182 | 1453198303299 181035 51.0358373557102 3.71784516860028 1904744 1000 183 | 1453198304299 182035 51.0358288631117 3.71785489896554 2618516 1000 184 | 1453198305299 183035 51.0358135768046 3.71786212504704 2193844 1000 185 | 1453198306300 184036 51.0358011553928 3.71786652500196 1442360 1001 186 | 1453198307299 185035 51.0358011553928 3.71786652500196 368640 999 187 | 1453198308299 186035 51.0357850266803 3.71788120767752 3951700 1000 188 | 1453198309299 187035 51.0357805906643 3.71789626316357 1729436 1000 189 | 1453198310299 188035 51.0357769269367 3.7179112454054 2931948 1000 190 | 1453198311299 189035 51.0357735279485 3.71792737101767 2191848 1000 191 | 1453198312299 190035 51.0357724379437 3.71794462800937 2193196 1000 192 | 1453198313299 191035 51.0357724379437 3.71794462800937 3240592 1000 193 | 1453198314300 192036 51.03575761004 3.71798388475511 2673084 1001 194 | 1453198315300 193036 51.0357500325986 3.71800698705754 2814116 1000 195 | 1453198316299 194035 51.0357416607489 3.71802445835138 2191760 999 196 | 1453198317299 195035 51.0357332424996 3.7180492383087 2047048 1000 197 | 1453198318299 196035 51.0357332424996 3.7180492383087 2878000 1000 198 | 1453198319299 197035 51.0357141906066 3.71808855878909 1814408 1000 199 | 1453198320299 198035 51.0357042085632 3.71810959293533 2062440 1000 200 | 1453198321299 199035 51.0357039272356 3.71812783710567 1753748 1000 201 | 1453198322300 200036 51.0357062765186 3.71814883876495 2841584 1001 202 | 1453198323299 201035 51.0357062765186 3.71814883876495 2663648 999 203 | 1453198324299 202035 51.0357165226454 3.71817121799527 2582768 1000 204 | 1453198325299 203035 51.0357165226454 3.71817121799527 2253856 1000 205 | 1453198326300 204036 51.035713885463 3.71819211762857 2272860 1001 206 | 1453198327299 205035 51.035713885463 3.71819211762857 3006324 999 207 | 1453198328300 206036 51.0357142214275 3.71821468140438 2531180 1001 208 | 
1453198329300 207036 51.0357142214275 3.71821468140438 2295204 1000 209 | 1453198330299 208035 51.0357146328647 3.71823710836761 2433476 999 210 | 1453198331299 209035 51.0357109895366 3.71825880371305 2433140 1000 211 | 1453198332299 210035 51.0357085619005 3.71827679012946 2112088 1000 212 | 1453198333299 211035 51.0357085619005 3.71827679012946 1739148 1000 213 | 1453198334299 212035 51.0357074503812 3.7183155510081 1891244 1000 214 | 1453198335300 213036 51.035706165859 3.71833344493091 1880460 1001 215 | 1453198336299 214035 51.0357050629471 3.71835047119834 1902132 999 216 | 1453198337299 215035 51.0357029893259 3.71836868902382 3029656 1000 217 | 1453198338299 216035 51.0356992585608 3.71838645781361 2417508 1000 218 | 1453198339299 217035 51.0356951995812 3.71840570273823 3134256 1000 219 | 1453198340299 218035 51.0356947037505 3.71842871433346 2134592 1000 220 | 1453198341299 219035 51.0356927804451 3.71845086445131 3272444 1000 221 | 1453198342299 220035 51.0356940582225 3.71847082866072 2163492 1000 222 | 1453198343300 221036 51.0356916766779 3.71848756819712 1397560 1001 223 | 1453198344300 222036 51.0356916766779 3.71848756819712 2799796 1000 224 | -------------------------------------------------------------------------------- /IPDPS2020/Dataset/report_foot_0005.log: -------------------------------------------------------------------------------- 1 | 1453198490299 635 51.034927683969 3.72081103677241 1358784 635 2 | 1453198491299 1635 51.0349337419703 3.72082788489576 4281248 1000 3 | 1453198492300 2636 51.0349547463701 3.72083851960649 5022648 1001 4 | 1453198493300 3636 51.0349696981382 3.72085727178281 4949208 1000 5 | 1453198494299 4635 51.0349835052426 3.72087747713054 4573064 999 6 | 1453198495299 5635 51.0349922303857 3.72090284125249 4116792 1000 7 | 1453198496299 6635 51.0350000970417 3.72092502266955 3824276 1000 8 | 1453198497299 7635 51.0350160426453 3.72094308638783 5095440 1000 9 | 1453198498299 8635 51.0350346985836 3.72096126071641 4983556 1000 10 | 1453198499299 9635 51.0350463097101 3.7209824575755 3893024 1000 11 | 1453198500299 10635 51.035055195383 3.72100538374704 4184192 1000 12 | 1453198501299 11635 51.0350642625614 3.72102971095261 6572848 1000 13 | 1453198502299 12635 51.0350600415177 3.72107512130328 4115444 1000 14 | 1453198503299 13635 51.0350625013879 3.7211118035917 4933680 1000 15 | 1453198504300 14636 51.0350660919572 3.72114040335915 5009324 1001 16 | 1453198505300 15636 51.0350657283651 3.72116516020761 4951048 1000 17 | 1453198506300 16636 51.0350629170188 3.72118856822282 5146016 1000 18 | 1453198507299 17635 51.0350681392224 3.72121207443699 4520492 999 19 | 1453198508299 18635 51.035077949784 3.72122732570287 4691688 1000 20 | 1453198509299 19635 51.0350888565382 3.72123759819155 4329388 1000 21 | 1453198510299 20635 51.035097520897 3.72125049539751 4257736 1000 22 | 1453198511299 21635 51.0351054229089 3.72126110602633 4744596 1000 23 | 1453198512299 22635 51.0351159855403 3.72127082957805 5011864 1000 24 | 1453198513299 23635 51.0351238903458 3.72128202775668 4937724 1000 25 | 1453198514300 24636 51.0351311006468 3.72129488953937 4208456 1001 26 | 1453198515300 25636 51.035134301872 3.72130849597117 4511108 1000 27 | 1453198516299 26635 51.0351371402084 3.72132239790863 3395560 999 28 | 1453198517299 27635 51.0351413172902 3.72133608356789 3721828 1000 29 | 1453198518299 28635 51.0351413172902 3.72133608356789 4081900 1000 30 | 1453198519300 29636 51.0351568087277 3.72135662023719 3925868 1001 31 | 1453198520299 30635 51.0351669097346 
3.72136476919901 2313168 999 32 | 1453198521299 31635 51.0351745839903 3.72137581449577 4854408 1000 33 | 1453198522299 32635 51.0351798873253 3.72139374518621 4734564 1000 34 | 1453198523299 33635 51.035185081299 3.72141058633188 4269116 1000 35 | 1453198524299 34635 51.0351923840065 3.72142727468599 4590356 1000 36 | 1453198525299 35635 51.0351923840065 3.72142727468599 4301156 1000 37 | 1453198526299 36635 51.035200250361 3.7214592096574 4396220 1000 38 | 1453198527300 37636 51.0352037068996 3.72147594121536 2277572 1001 39 | 1453198528300 38636 51.0352063026758 3.72149340110018 1763184 1000 40 | 1453198529299 39635 51.0352128907422 3.72151073182568 1818452 999 41 | 1453198530299 40635 51.0352205123173 3.72152715236544 2391352 1000 42 | 1453198531300 41636 51.0352276146485 3.72154487681352 3026260 1001 43 | 1453198532300 42636 51.035235689466 3.72156280353293 3800012 1000 44 | 1453198533299 43635 51.0352433530919 3.72157757796171 3016084 999 45 | 1453198534299 44635 51.035252554145 3.7215906362572 3110576 1000 46 | 1453198535299 45635 51.035252554145 3.7215906362572 2299688 1000 47 | 1453198536300 46636 51.0352778717953 3.72162053269259 2594900 1001 48 | 1453198537299 47635 51.0352945437049 3.72163840798614 2470884 999 49 | 1453198538299 48635 51.0353166179416 3.72165180801244 2656908 1000 50 | 1453198539299 49635 51.0353353985859 3.72166467903748 3374744 1000 51 | 1453198540299 50635 51.0353557410731 3.72167679503856 2790360 1000 52 | 1453198541299 51635 51.0353697677362 3.72169349648038 2644880 1000 53 | 1453198542299 52635 51.0353837001561 3.72171587844855 2417508 1000 54 | 1453198543299 53635 51.0353965716682 3.72173016621562 3731264 1000 55 | 1453198544299 54635 51.035408872361 3.72173639892606 3257468 1000 56 | 1453198545299 55635 51.0354201233711 3.72174686496644 3378736 1000 57 | 1453198546300 56636 51.0354306562067 3.72176326060918 3107140 1001 58 | 1453198547299 57635 51.0354412060253 3.72177924479269 3259464 999 59 | 1453198548299 58635 51.0354506030242 3.72179282392154 3964468 1000 60 | 1453198549299 59635 51.0354622252323 3.72180413025151 3370000 1000 61 | 1453198550299 60635 51.0354622252323 3.72180413025151 2782272 1000 62 | 1453198551299 61635 51.0354734043744 3.72183332894865 2972340 1000 63 | 1453198552299 62635 51.0354782596614 3.72184654392654 3620728 1000 64 | 1453198553299 63635 51.0354863745036 3.72185578010561 3398308 1000 65 | 1453198554299 64635 51.0354951425013 3.72186317004536 4242156 1000 66 | 1453198555300 65636 51.0354951425013 3.72186317004536 1771272 1001 67 | 1453198556300 66636 51.0355143217221 3.7218873736226 5325948 1000 68 | 1453198557299 67635 51.0355232432774 3.72190148848889 3303948 999 69 | 1453198558299 68635 51.0355379126951 3.72191161249363 4474012 1000 70 | 1453198559299 69635 51.0355488355488 3.72192202100697 2948076 1000 71 | 1453198560299 70635 51.0355608761789 3.72193197074339 1798232 1000 72 | 1453198561299 71635 51.035574143479 3.72193989536841 1560984 1000 73 | 1453198562299 72635 51.0355889509066 3.72195058692081 1484148 1000 74 | 1453198563299 73635 51.0356103492578 3.72196102282174 1698480 1000 75 | 1453198564299 74635 51.0356281739599 3.72197475801823 2186456 1000 76 | 1453198565299 75635 51.0356411643689 3.72198871589132 3359216 1000 77 | 1453198566299 76635 51.0356564210048 3.72200184722118 48140 1000 78 | 1453198567300 77636 51.035670940434 3.72201823032892 4378692 1001 79 | 1453198568299 78635 51.0356849051885 3.72203498884623 5123748 999 80 | 1453198569299 79635 51.0356999164978 3.72204641998038 4192280 1000 81 | 
1453198570299 80635 51.0357158075334 3.72205506990331 4277204 1000 82 | 1453198571299 81635 51.035731675255 3.72206748773964 3498916 1000 83 | 1453198572299 82635 51.0357435576906 3.72207961697758 3222212 1000 84 | 1453198573299 83635 51.0357578190265 3.72208969392007 3229808 1000 85 | 1453198574299 84635 51.0357698630132 3.7221028869015 3870108 1000 86 | 1453198575299 85635 51.0357809103901 3.72211207306241 4459184 1000 87 | 1453198576299 86635 51.0357914582461 3.7221215714225 4451176 1000 88 | 1453198577300 87636 51.0358020281965 3.72214022082299 3830936 1001 89 | 1453198578299 88635 51.0358175620764 3.7221542117079 4079256 999 90 | 1453198579300 89636 51.0358293977305 3.72216968536598 4475204 1001 91 | 1453198580299 90635 51.0358434435101 3.72218594894213 4350800 999 92 | 1453198581300 91636 51.0358551763053 3.72219877871948 4911256 1001 93 | 1453198582299 92635 51.0358659041321 3.72220895872607 3969860 999 94 | 1453198583299 93635 51.0358754948244 3.72221615188286 3448184 1000 95 | 1453198584299 94635 51.035886117495 3.72221645578845 4265072 1000 96 | 1453198585299 95635 51.035886117495 3.72221645578845 4084440 1000 97 | 1453198586300 96636 51.0359040600195 3.72221851818851 4507712 1001 98 | 1453198587300 97636 51.035913206617 3.72221997350217 3693520 1000 99 | 1453198588300 98636 51.035913206617 3.72221997350217 4228676 1000 100 | 1453198589299 99635 51.0359285978039 3.72222007494446 3759572 999 101 | 1453198590299 100635 51.0359285978039 3.72222007494446 4034884 1000 102 | 1453198591300 101636 51.0359285978039 3.72222007494446 4216872 1001 103 | 1453198592299 102635 51.0359355568701 3.72222936659925 3758224 999 104 | 1453198593299 103635 51.0359470263932 3.72223345510409 3883588 1000 105 | 1453198594299 104635 51.0359470263932 3.72223345510409 4201768 1000 106 | 1453198595299 105635 51.0359745511691 3.72224019744125 4391240 1000 107 | 1453198596299 106635 51.0359890661753 3.7222461931811 4662524 1000 108 | 1453198597299 107635 51.0360094723824 3.72225305685655 3708348 1000 109 | 1453198598300 108636 51.0360267464694 3.72226370932328 3790576 1001 110 | 1453198599299 109635 51.0360459091624 3.72227289081558 3360772 999 111 | 1453198600299 110635 51.0360619052976 3.72228116616168 2910124 1000 112 | 1453198601299 111635 51.0360824441757 3.72229276502647 2824328 1000 113 | 1453198602299 112635 51.0361039158352 3.72230884293932 3608044 1000 114 | 1453198603300 113636 51.0361166309888 3.72232912699302 3105376 1001 115 | 1453198604300 114636 51.0361315393714 3.72233493859358 3196912 1000 116 | 1453198605300 115636 51.0361488671801 3.72234178010368 3054412 1000 117 | 1453198606299 116635 51.0361625080424 3.72234201495707 3845844 999 118 | 1453198607299 117635 51.0361773980685 3.72233518633714 3456272 1000 119 | 1453198608299 118635 51.0361950086423 3.72232784424799 2829452 1000 120 | 1453198609299 119635 51.0362105187597 3.72232205353356 3347084 1000 121 | 1453198610299 120635 51.0362236822305 3.72231239750309 2698308 1000 122 | 1453198611299 121635 51.0362388145894 3.72230644199894 3440484 1000 123 | 1453198612299 122635 51.0362521067435 3.72229511785586 3200956 1000 124 | 1453198613299 123635 51.036269414277 3.72228460230478 3723720 1000 125 | 1453198614299 124635 51.0362849298849 3.72227736759727 3919984 1000 126 | 1453198615300 125636 51.0362995227187 3.7222663674871 2805188 1001 127 | 1453198616300 126636 51.0363103037753 3.72226054286424 3549284 1000 128 | 1453198617299 127635 51.0363204147891 3.72225481885228 3744744 999 129 | 1453198618299 128635 51.0363300944254 3.72224663493549 
3818288 1000 130 | 1453198619299 129635 51.0363395945336 3.72224397474801 3867308 1000 131 | 1453198620300 130636 51.0363514660242 3.72224637458222 3831016 1001 132 | 1453198621300 131636 51.0363606828038 3.72224267296004 4035912 1000 133 | 1453198622299 132635 51.0363697371924 3.72223386873019 3831984 999 134 | 1453198623299 133635 51.0363754665811 3.72221813359442 4327460 1000 135 | 1453198624299 134635 51.0363764910423 3.7221969741596 4854148 1000 136 | 1453198625299 135635 51.0363824536844 3.72218054393243 4078036 1000 137 | 1453198626299 136635 51.0363907732864 3.72216481931841 4101628 1000 138 | 1453198627299 137635 51.0363991149946 3.72214844437457 3692872 1000 139 | 1453198628308 138644 51.036408174572 3.72213290485433 3603852 1009 140 | 1453198629299 139635 51.0364146171202 3.72211296465971 2264640 991 141 | 1453198630299 140635 51.0364261057436 3.72209998607399 4073472 1000 142 | 1453198631299 141635 51.0364306872303 3.72207711648276 900888 1000 143 | 1453198632299 142635 51.0364318268179 3.7220504467634 5765156 1000 144 | 1453198633299 143635 51.0364315062558 3.72202219565724 3856656 1000 145 | 1453198634300 144636 51.0364290503392 3.72199249586062 4135404 1001 146 | 1453198635299 145635 51.0364293269081 3.72196262117313 4027536 999 147 | 1453198636299 146635 51.036431623109 3.72193727183005 4155056 1000 148 | 1453198637300 147636 51.0364292642144 3.72191014703929 4302816 1001 149 | 1453198638300 148636 51.0364249981632 3.72188672895186 4060336 1000 150 | 1453198639299 149635 51.0364216432556 3.72186312910738 4132808 999 151 | 1453198640300 150636 51.0364222491066 3.72183446114611 3751344 1001 152 | 1453198641299 151635 51.0364197593259 3.72180826838355 4448256 999 153 | 1453198642299 152635 51.0364197349359 3.72178427324803 4373948 1000 154 | 1453198643299 153635 51.0364184439017 3.72176181208851 3258712 1000 155 | 1453198644299 154635 51.0364214646092 3.72173251676836 3037044 1000 156 | 1453198645300 155636 51.0364214646092 3.72173251676836 4341172 1001 157 | 1453198646299 156635 51.0364201988507 3.72170641788916 4086524 999 158 | 1453198647299 157635 51.0364266055648 3.72165869739778 3539848 1000 159 | 1453198648299 158635 51.0364296035683 3.72163568765287 3791276 1000 160 | 1453198649300 159636 51.0364319331067 3.72160739872304 4003824 1001 161 | 1453198650299 160635 51.0364319331067 3.72160739872304 3711144 999 162 | 1453198651299 161635 51.0364360455251 3.72157774227179 1019904 1000 163 | 1453198652300 162636 51.0364359859816 3.72151631176146 4464640 1001 164 | 1453198653299 163635 51.0364330363822 3.72149045139467 6227164 999 165 | 1453198654299 164635 51.0364333220301 3.72146749728587 3291816 1000 166 | 1453198655300 165636 51.0364333220301 3.72146749728587 4571676 1001 167 | 1453198656299 166635 51.0364375466127 3.72143841332959 3846144 999 168 | 1453198657299 167635 51.0364468626638 3.72138759793761 4697228 1000 169 | 1453198658299 168635 51.0364563453708 3.72135815176636 3673936 1000 170 | 1453198659299 169635 51.0364686083039 3.7213242608763 3449240 1000 171 | 1453198660299 170635 51.0364686083039 3.7213242608763 3949668 1000 172 | 1453198661299 171635 51.036481205379 3.72128990947979 3586048 1000 173 | 1453198662300 172636 51.0364890754799 3.72123830504007 6977536 1001 174 | 1453198663299 173635 51.0364938539587 3.72122216302959 4116756 999 175 | 1453198664299 174635 51.0364938539587 3.72122216302959 3633496 1000 176 | 1453198665299 175635 51.0364996558336 3.72120265230549 3374264 1000 177 | 
-------------------------------------------------------------------------------- /IPDPS2020/Dataset/report_tram_0007.log: -------------------------------------------------------------------------------- 1 | 1453205523380 837 51.0506973750722 3.71183730126282 713740 837 2 | 1453205524381 1838 51.0507043157784 3.71192786488257 2504584 1001 3 | 1453205525380 2837 51.0507088101884 3.71203513605697 2724960 999 4 | 1453205526380 3837 51.0507181285771 3.71214648633118 2725656 1000 5 | 1453205527381 4838 51.0507226623585 3.7122553050407 3456272 1001 6 | 1453205528381 5838 51.0507272220081 3.71236018447545 3282380 1000 7 | 1453205529380 6837 51.0507307941367 3.71245135327832 2562548 999 8 | 1453205530381 7838 51.0507319822753 3.71253560109218 2446880 1001 9 | 1453205531381 8838 51.0507332550457 3.71261552701856 2641640 1000 10 | 1453205532380 9837 51.0507323875783 3.71269204398515 3389344 999 11 | 1453205533381 10838 51.0507357096916 3.71276873629045 4307968 1001 12 | 1453205534380 11837 51.0507394441961 3.71285200785636 5472312 999 13 | 1453205535380 12837 51.0507416638813 3.71295174353128 5598780 1000 14 | 1453205536381 13838 51.0507529273566 3.71315005039703 5106412 1001 15 | 1453205537380 14837 51.0507529273566 3.71315005039703 3351608 999 16 | 1453205538380 15837 51.0507655915865 3.71334179072478 4749664 1000 17 | 1453205539380 16837 51.0507723782052 3.71342878532982 3492668 1000 18 | 1453205540380 17837 51.0507731349266 3.71351896806034 3113336 1000 19 | 1453205541381 18838 51.0507541242212 3.71361273389114 3795484 1001 20 | 1453205542381 19838 51.0507317515391 3.71370658121884 3523564 1000 21 | 1453205543381 20838 51.0507183569701 3.7138018211946 2937196 1000 22 | 1453205544380 21837 51.0507023911451 3.71389586780778 2883256 999 23 | 1453205545380 22837 51.0506847605252 3.7139841574047 2341476 1000 24 | 1453205546380 23837 51.0506675706341 3.71406999863166 2137928 1000 25 | 1453205547381 24838 51.050637949557 3.71416454351756 2686564 1001 26 | 1453205548381 25838 51.0506158865777 3.71426047740646 2545024 1000 27 | 1453205549380 26837 51.0506158865777 3.71426047740646 18872 999 28 | 1453205550381 27838 51.0505985851411 3.71436067245541 0 1001 29 | 1453205551380 28837 51.0505796632244 3.71445912639067 4721080 999 30 | 1453205552381 29838 51.050549820414 3.71456448353079 3844424 1001 31 | 1453205553380 30837 51.050549820414 3.71456448353079 3038080 999 32 | 1453205554380 31837 51.0504979468039 3.71477420335809 2523456 1000 33 | 1453205555380 32837 51.0504729408865 3.71488335248673 3113908 1000 34 | 1453205556380 33837 51.0504503253179 3.71499025485539 2887388 1000 35 | 1453205557380 34837 51.0504291884342 3.71509769679972 2582396 1000 36 | 1453205558380 35837 51.050402502648 3.71521321658744 3111116 1000 37 | 1453205559381 36838 51.0503789890009 3.71532453632639 2972236 1001 38 | 1453205560380 37837 51.0503393669472 3.71553966115998 2727548 999 39 | 1453205561380 38837 51.0503267859762 3.71563246219663 2266688 1000 40 | 1453205562380 39837 51.0503267859762 3.71563246219663 1881756 1000 41 | 1453205563381 40838 51.0502946659894 3.71582992461144 671304 1001 42 | 1453205564380 41837 51.0502731746761 3.71593770972911 808800 999 43 | 1453205565380 42837 51.0502470823279 3.71604516111599 1733528 1000 44 | 1453205566380 43837 51.0502202474267 3.71615279085285 1658040 1000 45 | 1453205567380 44837 51.0501994760648 3.71625841433722 1415400 1000 46 | 1453205568380 45837 51.0501797292256 3.71635990260244 2034132 1000 47 | 1453205569380 46837 51.0501606123508 3.71646080856067 1742964 1000 48 | 
1453205570380 47837 51.050141013549 3.71655893122124 1716004 1000 49 | 1453205571380 48837 51.050141013549 3.71655893122124 2265988 1000 50 | 1453205572380 49837 51.0501210183918 3.71665085532354 2316464 1000 51 | 1453205573381 50838 51.0501210183918 3.71665085532354 2743928 1001 52 | 1453205574380 51837 51.0500681156905 3.71685192179649 2950772 999 53 | 1453205575380 52837 51.0500625028474 3.7168965966641 2914040 1000 54 | 1453205576380 53837 51.0500625028474 3.7168965966641 3093504 1000 55 | 1453205577380 54837 51.0500619317383 3.71694445257331 3087432 1000 56 | 1453205578380 55837 51.0500763114679 3.71705085660088 3446816 1000 57 | 1453205579381 56838 51.0500763114679 3.71705085660088 2710828 1001 58 | 1453205580380 57837 51.0501105267704 3.71715441686837 2758164 999 59 | 1453205581380 58837 51.0501105267704 3.71715441686837 2344016 1000 60 | 1453205582380 59837 51.0501492167029 3.71720773877941 1417304 1000 61 | 1453205583380 60837 51.0502100407685 3.71730188943986 2710272 1000 62 | 1453205584380 61837 51.0502391433747 3.71734087405844 1628384 1000 63 | 1453205585380 62837 51.0502391433747 3.71734087405844 2410208 1000 64 | 1453205586380 63837 51.0502541179159 3.71736783044615 6988172 1000 65 | 1453205587380 64837 51.0502619868691 3.71738973365019 5046272 1000 66 | 1453205588381 65838 51.0502715461257 3.71741406886068 2854912 1001 67 | 1453205589381 66838 51.0502794819487 3.71743089442569 4186112 1000 68 | 1453205590380 67837 51.0502881895021 3.71744812754217 5898240 999 69 | 1453205591380 68837 51.0502881895021 3.71744812754217 7217152 1000 70 | 1453205592380 69837 51.0502881895021 3.71744812754217 6299648 1000 71 | 1453205593380 70837 51.0502881895021 3.71744812754217 1724416 1000 72 | 1453205594380 71837 51.0502881895021 3.71744812754217 4026368 1000 73 | 1453205595380 72837 51.0502881895021 3.71744812754217 6539264 1000 74 | 1453205596380 73837 51.0502881895021 3.71744812754217 7284736 1000 75 | 1453205597380 74837 51.0502881895021 3.71744812754217 7319552 1000 76 | 1453205598380 75837 51.0502881895021 3.71744812754217 4886528 1000 77 | 1453205599381 76838 51.0502990202178 3.71744342486278 4825088 1001 78 | 1453205600380 77837 51.0502990202178 3.71744342486278 6836224 999 79 | 1453205601380 78837 51.0502990202178 3.71744342486278 6930432 1000 80 | 1453205602380 79837 51.0503171279864 3.71745944277241 7753728 1000 81 | 1453205603380 80837 51.0503264549091 3.71748378902763 4630528 1000 82 | 1453205604380 81837 51.0503415389653 3.7175148294969 4028416 1000 83 | 1453205605381 82838 51.0503587620488 3.71755573763585 7116800 1001 84 | 1453205606380 83837 51.0503740688518 3.71760650112678 7102464 999 85 | 1453205607380 84837 51.0503950763756 3.71766574528338 7127040 1000 86 | 1453205608380 85837 51.0504249719076 3.7177469657332 4507648 1000 87 | 1453205609380 86837 51.0504686863456 3.71781484030707 3690496 1000 88 | 1453205610380 87837 51.0505048343307 3.71788019320127 7081984 1000 89 | 1453205611380 88837 51.050540470546 3.71795405282088 7090176 1000 90 | 1453205612380 89837 51.0505706761081 3.71801950637704 5054332 1000 91 | 1453205613380 90837 51.0506156211078 3.71806598132022 3502488 1000 92 | 1453205614380 91837 51.0506627474303 3.7181001788738 4791768 1000 93 | 1453205615380 92837 51.050703406877 3.71813513159493 5349664 1000 94 | 1453205616380 93837 51.0507120181957 3.7181998049637 4879292 1000 95 | 1453205617380 94837 51.0507181697715 3.71827126571773 4787732 1000 96 | 1453205618380 95837 51.050731400849 3.71838221460473 3793120 1000 97 | 1453205619380 96837 51.0507589410631 
3.71849902908188 5056500 1000 98 | 1453205620380 97837 51.0507754080601 3.71860031056331 5734348 1000 99 | 1453205621380 98837 51.0507917711466 3.71870348164873 6700700 1000 100 | 1453205622380 99837 51.0508054609457 3.71879688012561 6619136 1000 101 | 1453205623381 100838 51.0508205074122 3.71888671168384 5185536 1001 102 | 1453205624380 101837 51.0508239767137 3.71896896408727 4102144 999 103 | 1453205625380 102837 51.0508226540231 3.71904600500636 5873664 1000 104 | 1453205626380 103837 51.0508192861316 3.71912157590877 7407616 1000 105 | 1453205627381 104838 51.0508128761933 3.71919609884449 7135232 1001 106 | 1453205628380 105837 51.0508021056323 3.71927652545071 5525504 999 107 | 1453205629381 106838 51.0507783067118 3.71936723873921 3651584 1001 108 | 1453205630380 107837 51.0507489547133 3.71945735363394 6430720 999 109 | 1453205631381 108838 51.0507259634 3.71954224736134 6944768 1001 110 | 1453205632380 109837 51.0507039850264 3.71961856829692 6971392 999 111 | 1453205633380 110837 51.0506852523054 3.71969482896739 5718016 1000 112 | 1453205634380 111837 51.0506675424942 3.71977468271497 4325376 1000 113 | 1453205635380 112837 51.0506448160354 3.71985991206635 6494208 1000 114 | 1453205636381 113838 51.0506290322399 3.7199414819629 6877184 1001 115 | 1453205637381 114838 51.050617486691 3.72001715239725 7294976 1000 116 | 1453205638380 115837 51.0506098469705 3.72008560245793 5236736 999 117 | 1453205639380 116837 51.0506098469705 3.72008560245793 4057088 1000 118 | 1453205640380 117837 51.0505882028198 3.72022675017253 6240256 1000 119 | 1453205641380 118837 51.0505782455746 3.72029546964823 6428672 1000 120 | 1453205642381 119838 51.050573542462 3.72035851496122 7024640 1001 121 | 1453205643380 120837 51.0505636649306 3.72042403621156 6017024 999 122 | 1453205644380 121837 51.0505636649306 3.72042403621156 3416064 1000 123 | 1453205645380 122837 51.0505375246064 3.72056766919694 5451776 1000 124 | 1453205646380 123837 51.0505284297032 3.72064355995397 7077888 1000 125 | 1453205647380 124837 51.0505186683184 3.72070857035408 6739968 1000 126 | 1453205648380 125837 51.0505016910019 3.72077029025246 4214784 1000 127 | 1453205649381 126838 51.0505016910019 3.72077029025246 2768896 1001 128 | 1453205650380 127837 51.0504260021784 3.72095186708297 4968448 999 129 | 1453205651380 128837 51.0504260021784 3.72095186708297 5895980 1000 130 | 1453205652380 129837 51.0504153560369 3.72101194706406 5850892 1000 131 | 1453205653380 130837 51.0504141199462 3.72108268924122 4775936 1000 132 | 1453205654381 131838 51.0504141199462 3.72108268924122 2732032 1001 133 | 1453205655380 132837 51.0504090407999 3.72124010137276 4403200 999 134 | 1453205656380 133837 51.0504023297502 3.72132496358721 7282688 1000 135 | 1453205657381 134838 51.0504006378884 3.72141418174029 7188480 1001 136 | 1453205658381 135838 51.0504012331325 3.72150270279282 6414336 1000 137 | 1453205659380 136837 51.0504012331325 3.72150270279282 4747264 999 138 | 1453205660380 137837 51.0503892632485 3.7216716884967 4177920 1000 139 | 1453205661380 138837 51.0503794343117 3.7217524682083 6483968 1000 140 | 1453205662381 139838 51.0503773509594 3.72181975103887 7536640 1001 141 | 1453205663380 140837 51.0503750678474 3.72187248890277 5318656 999 142 | 1453205664380 141837 51.0503710364266 3.72191110998486 917504 1000 143 | 1453205665380 142837 51.0503699469682 3.72193649241988 3457024 1000 144 | 1453205666380 143837 51.0503699469682 3.72193649241988 6684672 1000 145 | 1453205667380 144837 51.0503699469682 3.72193649241988 6934528 
1000 146 | 1453205668380 145837 51.0503686212661 3.72195189837531 6258688 1000 147 | 1453205669380 146837 51.0503686212661 3.72195189837531 4755456 1000 148 | 1453205670380 147837 51.0503686212661 3.72195189837531 5365760 1000 149 | 1453205671380 148837 51.0503686212661 3.72195189837531 7163904 1000 150 | 1453205672380 149837 51.0503686212661 3.72195189837531 6979584 1000 151 | 1453205673381 150838 51.0503686212661 3.72195189837531 5888000 1001 152 | 1453205674381 151838 51.0503686212661 3.72195189837531 3866624 1000 153 | 1453205675380 152837 51.0503686212661 3.72195189837531 5234688 999 154 | 1453205676380 153837 51.0503686212661 3.72195189837531 7000064 1000 155 | 1453205677381 154838 51.0503686212661 3.72195189837531 7057408 1001 156 | 1453205678381 155838 51.0503686212661 3.72195189837531 6025216 1000 157 | 1453205679381 156838 51.0503686212661 3.72195189837531 4458496 1000 158 | 1453205680381 157838 51.0503686212661 3.72195189837531 4708352 1000 159 | 1453205681380 158837 51.0503686212661 3.72195189837531 6393856 999 160 | 1453205682380 159837 51.0503686212661 3.72195189837531 7131136 1000 161 | 1453205683380 160837 51.0503686212661 3.72195189837531 5861376 1000 162 | 1453205684380 161837 51.0503667883152 3.72197015545462 4112384 1000 163 | 1453205685381 162838 51.0503667883152 3.72197015545462 5251072 1001 164 | 1453205686382 163839 51.0503667883152 3.72197015545462 6916096 1001 165 | 1453205687381 164838 51.0503667883152 3.72197015545462 7081984 999 166 | 1453205688380 165837 51.0503667883152 3.72197015545462 6498304 999 167 | -------------------------------------------------------------------------------- /IPDPS2020/Dataset/report_tram_0008.log: -------------------------------------------------------------------------------- 1 | 1453206412140 982 0 0 2385260 982 2 | 1453206413139 1981 0 0 3092732 999 3 | 1453206414139 2981 0 0 3337228 1000 4 | 1453206415139 3981 0 0 3330908 1000 5 | 1453206416139 4981 0 0 3535804 1000 6 | 1453206417139 5981 0 0 3422572 1000 7 | 1453206418139 6981 0 0 3640948 1000 8 | 1453206419140 7982 0 0 3822928 1001 9 | 1453206420140 8982 51.0453911688341 3.71342607466163 4100616 1000 10 | 1453206421139 9981 51.0454272331162 3.71343544272669 4387852 999 11 | 1453206422140 10982 51.0452933565172 3.71341687243763 5408064 1001 12 | 1453206423139 11981 51.0453883086161 3.71351734028377 6185612 999 13 | 1453206424139 12981 51.0453033154388 3.71349928012889 7352384 1000 14 | 1453206425139 13981 51.0453033154388 3.71349928012889 5580688 1000 15 | 1453206426139 14981 51.0453033154388 3.71349928012889 4954084 1000 16 | 1453206427139 15981 51.0453033154388 3.71349928012889 4633364 1000 17 | 1453206428140 16982 51.0453033154388 3.71349928012889 2609216 1001 18 | 1453206429139 17981 51.0453033154388 3.71349928012889 6634248 999 19 | 1453206430139 18981 51.0453033154388 3.71349928012889 5557756 1000 20 | 1453206431139 19981 51.0453033154388 3.71349928012889 4989788 1000 21 | 1453206432139 20981 51.0453033154388 3.71349928012889 4661176 1000 22 | 1453206433139 21981 51.0453033154388 3.71349928012889 2848768 1000 23 | 1453206434139 22981 51.0453033154388 3.71349928012889 7170048 1000 24 | 1453206435139 23981 51.0453033154388 3.71349928012889 5022856 1000 25 | 1453206436139 24981 51.0453033154388 3.71349928012889 4987236 1000 26 | 1453206437139 25981 51.0453033154388 3.71349928012889 4411472 1000 27 | 1453206438139 26981 51.0452862046087 3.71349678187669 2875252 1000 28 | 1453206439139 27981 51.0452757264371 3.71348471399398 6976624 1000 29 | 1453206440140 28982 
51.0452636423486 3.7134650295085 5468832 1001 30 | 1453206441139 29981 51.0452313557757 3.71344260074006 4977468 999 31 | 1453206442139 30981 51.0452035539343 3.71340985908114 4873992 1000 32 | 1453206443139 31981 51.0451714512957 3.71336837449375 1192600 1000 33 | 1453206444139 32981 51.0451207933375 3.71331820417593 3391488 1000 34 | 1453206445139 33981 51.0450595940125 3.71326220988435 7671808 1000 35 | 1453206446139 34981 51.0449999801218 3.71320289531912 6789900 1000 36 | 1453206447139 35981 51.0449372121626 3.71313790151083 5321688 1000 37 | 1453206448139 36981 51.044867153879 3.71306930975307 3551232 1000 38 | 1453206449139 37981 51.0447948373155 3.7129941271615 6320128 1000 39 | 1453206450139 38981 51.044716201812 3.71291614705857 5115456 1000 40 | 1453206451139 39981 51.0446265412203 3.71282319337 4995248 1000 41 | 1453206452139 40981 51.0445395471825 3.71272952044385 5038644 1000 42 | 1453206453139 41981 51.0444539819779 3.712636013951 3718720 1000 43 | 1453206454140 42982 51.0443799379357 3.71255301406881 6340608 1001 44 | 1453206455140 43982 51.0442873987089 3.71247684135689 4866196 1000 45 | 1453206456140 44982 51.0442070314323 3.71242179462543 4956568 1000 46 | 1453206457140 45982 51.0441399924008 3.71237326079299 4852200 1000 47 | 1453206458139 46981 51.0440624726388 3.7123179394334 3174916 999 48 | 1453206459140 47982 51.0439795490291 3.71224689196907 7042120 1001 49 | 1453206460139 48981 51.0439000773694 3.7121707146455 4658688 999 50 | 1453206461140 49982 51.0438127988204 3.71209181436735 3068048 1001 51 | 1453206462139 50981 51.0437173261925 3.71200607614916 5077320 999 52 | 1453206463140 51982 51.0436206550938 3.71191762267762 2248704 1001 53 | 1453206464140 52982 51.0435588304998 3.71182732251457 3274752 1000 54 | 1453206465139 53981 51.0435588304998 3.71182732251457 6886932 999 55 | 1453206466139 54981 51.0434367939335 3.71166411015044 3531216 1000 56 | 1453206467139 55981 51.0433742615939 3.7115903117293 4160368 1000 57 | 1453206468139 56981 51.0432978384254 3.71149541074601 4441660 1000 58 | 1453206469139 57981 51.04321675786 3.71139146085547 3264332 1000 59 | 1453206470139 58981 51.0431392359521 3.71129350278491 6429164 1000 60 | 1453206471139 59981 51.0430675046329 3.71120834263448 4808936 1000 61 | 1453206472139 60981 51.0430078098324 3.71113760029172 3787104 1000 62 | 1453206473139 61981 51.042959307117 3.71107982131628 3056952 1000 63 | 1453206474139 62981 51.0429181643259 3.71103808792831 2497584 1000 64 | 1453206475139 63981 51.042893273349 3.71100693908198 3651732 1000 65 | 1453206476139 64981 51.0428911191137 3.71099179468048 3144884 1000 66 | 1453206477139 65981 51.0429052870468 3.71098836356698 2783620 1000 67 | 1453206478140 66982 51.0429256490454 3.71098935498257 3174496 1001 68 | 1453206479139 67981 51.0429465354777 3.71099345795253 2793100 999 69 | 1453206480139 68981 51.042967507118 3.71099377828744 3629672 1000 70 | 1453206481140 69982 51.0429825898375 3.71099120272542 3796616 1001 71 | 1453206482139 70981 51.0429825898375 3.71099120272542 3497860 999 72 | 1453206483139 71981 51.0429825898375 3.71099120272542 3461164 1000 73 | 1453206484139 72981 51.0429825898375 3.71099120272542 2023076 1000 74 | 1453206485140 73982 51.0429965075944 3.71098485394224 2684684 1001 75 | 1453206486140 74982 51.0429965075944 3.71098485394224 3349780 1000 76 | 1453206487140 75982 51.0429965075944 3.71098485394224 2925160 1000 77 | 1453206488139 76981 51.0429965075944 3.71098485394224 2551764 999 78 | 1453206489139 77981 51.0429965075944 3.71098485394224 2539736 1000 79 | 
1453206490140 78982 51.0429965075944 3.71098485394224 3080076 1001 80 | 1453206491140 79982 51.0429965075944 3.71098485394224 3025276 1000 81 | 1453206492140 80982 51.0429965075944 3.71098485394224 2636324 1000 82 | 1453206493140 81982 51.0429965075944 3.71098485394224 2011216 1000 83 | 1453206494139 82981 51.0429965075944 3.71098485394224 3077484 999 84 | 1453206495139 83981 51.0429965075944 3.71098485394224 2973648 1000 85 | 1453206496139 84981 51.0429965075944 3.71098485394224 3414524 1000 86 | 1453206497139 85981 51.0429965075944 3.71098485394224 2735092 1000 87 | 1453206498139 86981 51.0429563039635 3.71095480706239 2770972 1000 88 | 1453206499139 87981 51.0429287369257 3.71093063624998 3133720 1000 89 | 1453206500139 88981 51.0428908649402 3.71089916598631 3188216 1000 90 | 1453206501139 89981 51.0428438668183 3.71085819410207 2995256 1000 91 | 1453206502139 90981 51.0427844941366 3.71080960998718 2124448 1000 92 | 1453206503139 91981 51.0427220671159 3.71075367086354 1707916 1000 93 | 1453206504139 92981 51.0427220671159 3.71075367086354 2331604 1000 94 | 1453206505139 93981 51.0425781751919 3.7106343329828 2225984 1000 95 | 1453206506139 94981 51.0425781751919 3.7106343329828 1214548 1000 96 | 1453206507139 95981 51.0425032542652 3.71057502003677 3165712 1000 97 | 1453206508140 96982 51.0424239758745 3.71051754821882 3012336 1001 98 | 1453206509139 97981 51.0423481032777 3.71046370016203 3980956 999 99 | 1453206510139 98981 51.0422747217503 3.71041455725859 3167976 1000 100 | 1453206511139 99981 51.042116502399 3.71032240226791 2560756 1000 101 | 1453206512140 100982 51.0420290145351 3.71028373490642 1790328 1001 102 | 1453206513139 101981 51.0419375688082 3.71025285749985 1617856 999 103 | 1453206514140 102982 51.0418469554162 3.71022602093166 917988 1001 104 | 1453206515139 103981 51.041754751634 3.71020153747343 885636 999 105 | 1453206516140 104982 51.0416640724945 3.71018104753457 1131672 1001 106 | 1453206517139 105981 51.0416640724945 3.71018104753457 822928 999 107 | 1453206518139 106981 51.041563987048 3.71016011770724 1511108 1000 108 | 1453206519139 107981 51.0414527903223 3.71013918738565 2832148 1000 109 | 1453206520140 108982 51.041339334201 3.71011766452262 2287556 1001 110 | 1453206521139 109981 51.0412364030667 3.71009835852995 1906072 999 111 | 1453206522139 110981 51.041132467849 3.71008448946536 2829660 1000 112 | 1453206523139 111981 51.0410365664452 3.71007755626473 2937784 1000 113 | 1453206524139 112981 51.0409474324896 3.7100746556099 3223716 1000 114 | 1453206525140 113982 51.0408707432496 3.71007086976683 2780692 1001 115 | 1453206526139 114981 51.0408071217997 3.71006445526655 2979312 999 116 | 1453206527139 115981 51.040740713074 3.71006393054978 2038176 1000 117 | 1453206528139 116981 51.0406065968332 3.7100588432197 2052408 1000 118 | 1453206529139 117981 51.0405242424285 3.71006424377159 1633024 1000 119 | 1453206530140 118982 51.040424896612 3.71007359128471 1783404 1001 120 | 1453206531140 119982 51.040313792354 3.71008311055315 1178152 1000 121 | 1453206532139 120981 51.0402012623018 3.71009348708056 2069768 999 122 | 1453206533139 121981 51.0400915548249 3.71010168576385 1789556 1000 123 | 1453206534139 122981 51.039979688284 3.7101101302604 1962688 1000 124 | 1453206535139 123981 51.0398705290911 3.7101187982501 1709264 1000 125 | 1453206536139 124981 51.0398705290911 3.7101187982501 211636 1000 126 | 1453206537139 125981 51.0397644341453 3.71013028226879 1500056 1000 127 | 1453206538140 126982 51.0395519089456 3.7101739209341 2155720 1001 128 | 
1453206539139 127981 51.0394421563529 3.71018899440044 1315648 999 129 | 1453206540139 128981 51.0394421563529 3.71018899440044 1143104 1000 130 | 1453206541140 129982 51.0393356085952 3.71020777904952 1956648 1001 131 | 1453206542140 130982 51.0391567794 3.71025215250754 2476380 1000 132 | 1453206543139 131981 51.0391567794 3.71025215250754 2218004 999 133 | 1453206544139 132981 51.0390914016886 3.71026990116145 2797100 1000 134 | 1453206545139 133981 51.0390405170942 3.71028151283701 3304648 1000 135 | 1453206546139 134981 51.0390002405594 3.71028427762609 3296044 1000 136 | 1453206547140 135982 51.0389664333168 3.71028797469107 3994588 1001 137 | 1453206548139 136981 51.0389307348883 3.71029618600486 4510772 999 138 | 1453206549139 137981 51.0388781851666 3.71030096658046 3307700 1000 139 | 1453206550139 138981 51.0388198067905 3.71030838826188 3768912 1000 140 | 1453206551139 139981 51.0387180830707 3.71034511572344 3485952 1000 141 | 1453206552139 140981 51.0386721244657 3.71036250787214 2414320 1000 142 | 1453206553139 141981 51.0386721244657 3.71036250787214 2361644 1000 143 | 1453206554140 142982 51.0385867514501 3.71039804857197 2525868 1001 144 | 1453206555139 143981 51.0385613934266 3.71041456796375 1651584 999 145 | 1453206556139 144981 51.0385472035132 3.71042900048887 2190500 1000 146 | 1453206557139 145981 51.0385472035132 3.71042900048887 2225548 1000 147 | 1453206558139 146981 51.0385472035132 3.71042900048887 1875068 1000 148 | 1453206559139 147981 51.0385590909886 3.71043892973589 1953252 1000 149 | 1453206560140 148982 51.0385684448213 3.71044131406687 1978864 1001 150 | 1453206561140 149982 51.0385684448213 3.71044131406687 2477624 1000 151 | 1453206562140 150982 51.0385684448213 3.71044131406687 1742964 1000 152 | 1453206563139 151981 51.0385684448213 3.71044131406687 1951904 999 153 | 1453206564139 152981 51.0385684448213 3.71044131406687 595816 1000 154 | 1453206565139 153981 51.0385684448213 3.71044131406687 4101964 1000 155 | 1453206566139 154981 51.0385684448213 3.71044131406687 1983812 1000 156 | 1453206567139 155981 51.0385684448213 3.71044131406687 3760120 1000 157 | 1453206568140 156982 51.0385684448213 3.71044131406687 2843476 1001 158 | 1453206569139 157981 51.0385684448213 3.71044131406687 2997700 999 159 | 1453206570139 158981 51.0385649263675 3.71045804401569 2974096 1000 160 | 1453206571139 159981 51.0385514465739 3.7104717244008 2191692 1000 161 | 1453206572139 160981 51.038531330889 3.71048358659971 1833280 1000 162 | 1453206573140 161982 51.038505850951 3.71049601478683 1234768 1001 163 | 1453206574139 162981 51.0384754907577 3.71051345439813 1200420 999 164 | 1453206575139 163981 51.0384754907577 3.71051345439813 680040 1000 165 | 1453206576139 164981 51.0384323542131 3.71053719287561 2123100 1000 166 | 1453206577139 165981 51.0383167524645 3.7105865696617 16176 1000 167 | 1453206578140 166982 51.0383167524645 3.7105865696617 178124 1001 168 | 1453206579139 167981 51.0382504256338 3.71061826102311 3665372 999 169 | 1453206580139 168981 51.0381777535503 3.71065634189517 3788880 1000 170 | 1453206581140 169982 51.0381028607561 3.71069910139182 2414848 1001 171 | 1453206582139 170981 51.0380275661405 3.71074131698646 3424688 999 172 | 1453206583139 171981 51.0379550059577 3.71078419214728 1425536 1000 173 | 1453206584140 172982 51.0378847989074 3.71082561577844 4426832 1001 174 | 1453206585139 173981 51.0378156485824 3.71086711250892 3492384 999 175 | 1453206586139 174981 51.0377469662927 3.71091100612625 2286056 1000 176 | 1453206587139 175981 
51.0376871494553 3.71095532450983 3898956 1000 177 | 1453206588139 176981 51.0376295815261 3.71100057766575 2278664 1000 178 | 1453206589139 177981 51.0375797065906 3.71104074974792 3296360 1000 179 | 1453206590139 178981 51.0375279784001 3.7110816518977 2892308 1000 180 | 1453206591139 179981 51.0374816750309 3.71112163598891 2741832 1000 181 | 1453206592139 180981 51.037443805705 3.71115417744603 2022000 1000 182 | 1453206593139 181981 51.0374058160306 3.71117440175637 2163592 1000 183 | 1453206594139 182981 51.0373743599493 3.71118048343328 795244 1000 184 | 1453206595140 183982 51.0373473121941 3.71117585450181 2942008 1001 185 | 1453206596139 184981 51.0373197941775 3.71116792101176 1889896 999 186 | 1453206597140 185982 51.0372939676954 3.71115153924698 2913028 1001 187 | 1453206598139 186981 51.0372691849882 3.71113328503086 2801248 999 188 | 1453206599139 187981 51.0372526669093 3.71110233029773 3608492 1000 189 | 1453206600139 188981 51.0372448183557 3.71106462613502 2983048 1000 190 | 1453206601139 189981 51.0372406035498 3.71101910215645 2600448 1000 191 | 1453206602139 190981 51.0372338677261 3.71097268900458 2480008 1000 192 | 1453206603139 191981 51.0372335266481 3.71092560244733 2632540 1000 193 | 1453206604139 192981 51.0372271080864 3.71090181030078 2153092 1000 194 | 1453206605139 193981 51.0372313272897 3.71087587429853 2073432 1000 195 | 1453206606139 194981 51.0372313272897 3.71087587429853 2179508 1000 196 | 1453206607140 195982 51.0372337729024 3.71086128122729 1524588 1001 197 | 1453206608139 196981 51.0372383755963 3.71084043939654 1622992 999 198 | 1453206609140 197982 51.037239001605 3.71081385815198 727920 1001 199 | 1453206610139 198981 51.0372340058798 3.71078055833767 2511324 999 200 | 1453206611139 199981 51.0372271943448 3.71075161451434 2000432 1000 201 | 1453206612139 200981 51.0372145082096 3.71072198084064 1819800 1000 202 | 1453206613139 201981 51.0372019130016 3.71069069871944 2567940 1000 203 | 1453206614140 202982 51.0371898732522 3.7106546932408 2770996 1001 204 | 1453206615139 203981 51.0371746513553 3.71062290233707 2041700 999 205 | 1453206616139 204981 51.037157458551 3.71059396914789 4958308 1000 206 | 1453206617140 205982 51.0371367620132 3.71057265517028 2030736 1001 207 | 1453206618139 206981 51.0370823138593 3.71053083501093 1736224 999 208 | 1453206619139 207981 51.0370421722744 3.71051454369046 1230724 1000 209 | 1453206620140 208982 51.0370421722744 3.71051454369046 955732 1001 210 | 1453206621139 209981 51.0370010928259 3.71051030086358 791276 999 211 | 1453206622140 210982 51.0369584613898 3.71051213106313 2594900 1001 212 | 1453206623140 211982 51.0369091476549 3.71052846164248 1404124 1000 213 | 1453206624140 212982 51.0368570343322 3.71054952813434 2496988 1000 214 | 1453206625139 213981 51.0368081500746 3.71057047531562 2685916 999 215 | 1453206626140 214982 51.0367660620053 3.71059207065436 2044216 1001 216 | 1453206627139 215981 51.0367303576451 3.71060681391886 1993692 999 217 | 1453206628139 216981 51.0367082163754 3.7106120049149 1973576 1000 218 | 1453206629140 217982 51.0366966387484 3.71060971109023 1856092 1001 219 | 1453206630139 218981 51.0366966387484 3.71060971109023 1962688 999 220 | 1453206631139 219981 51.0366966387484 3.71060971109023 2174324 1000 221 | 1453206632139 220981 51.0366966387484 3.71060971109023 1620296 1000 222 | 1453206633140 221982 51.0366966387484 3.71060971109023 1455892 1001 223 | 1453206634139 222981 51.0366966387484 3.71060971109023 1786800 999 224 | 1453206635139 223981 51.0366966387484 
3.71060971109023 2043516 1000 225 | 1453206636140 224982 51.0366966387484 3.71060971109023 1718000 1001 226 | 1453206637139 225981 51.0366966387484 3.71060971109023 110536 999 227 | 1453206638139 226981 51.0366966387484 3.71060971109023 2410328 1000 228 | 1453206639139 227981 51.0366966387484 3.71060971109023 2633292 1000 229 | 1453206640139 228981 51.0366966387484 3.71060971109023 3007984 1000 230 | 1453206641140 229982 51.0366966387484 3.71060971109023 2585464 1001 231 | 1453206642140 230982 51.0366966387484 3.71060971109023 2607784 1000 232 | 1453206643139 231981 51.0366966387484 3.71060971109023 3502536 999 233 | 1453206644139 232981 51.0366966387484 3.71060971109023 2507444 1000 234 | 1453206645140 233982 51.0366966387484 3.71060971109023 1267120 1001 235 | 1453206646139 234981 51.0366966387484 3.71060971109023 1316996 999 236 | 1453206647140 235982 51.0366966387484 3.71060971109023 1198372 1001 237 | 1453206648139 236981 51.0366966387484 3.71060971109023 1201068 999 238 | 1453206649139 237981 51.0366966387484 3.71060971109023 2305080 1000 239 | 1453206650140 238982 51.0366966387484 3.71060971109023 3104548 1001 240 | 1453206651140 239982 51.0366966387484 3.71060971109023 2807892 1000 241 | 1453206652139 240981 51.0366966387484 3.71060971109023 1912700 999 242 | 1453206653139 241981 51.0366966387484 3.71060971109023 1089884 1000 243 | 1453206654139 242981 51.0366966387484 3.71060971109023 1448400 1000 244 | 1453206655139 243981 51.0366966387484 3.71060971109023 1838388 1000 245 | 1453206656139 244981 51.0366966387484 3.71060971109023 918972 1000 246 | 1453206657139 245981 51.0366966387484 3.71060971109023 2970292 1000 247 | 1453206658139 246981 51.0366966387484 3.71060971109023 1896636 1000 248 | 1453206659139 247981 51.0366966387484 3.71060971109023 1078400 1000 249 | 1453206660139 248981 51.0366966387484 3.71060971109023 0 1000 250 | 1453206661139 249981 51.0366966387484 3.71060971109023 4639816 1000 251 | 1453206662139 250981 51.0366966387484 3.71060971109023 2584116 1000 252 | 1453206663139 251981 51.0366966387484 3.71060971109023 2099724 1000 253 | 1453206664139 252981 51.0366966387484 3.71060971109023 1623452 1000 254 | 1453206665139 253981 51.0366966387484 3.71060971109023 1422140 1000 255 | 1453206666139 254981 51.0366760769932 3.71062286042415 1333872 1000 256 | 1453206667139 255981 51.036652274535 3.71064305851288 1890544 1000 257 | 1453206668139 256981 51.0366249278633 3.71067864335231 1602904 1000 258 | 1453206669139 257981 51.0366249278633 3.71067864335231 1365392 1000 259 | 1453206670140 258982 51.0365684420162 3.71079887566694 2574680 1001 260 | 1453206671140 259982 51.0365452530417 3.710876974465 2132536 1000 261 | 1453206672140 260982 51.0365225123409 3.71095667834544 2505932 1000 262 | 1453206673139 261981 51.036498914638 3.71103551406907 2155452 999 263 | 1453206674140 262982 51.0364753177043 3.71110524731717 1977516 1001 264 | 1453206675139 263981 51.0364545189676 3.71115506672402 2042220 999 265 | 1453206676139 264981 51.0364394780947 3.71118651289891 1871024 1000 266 | 1453206677139 265981 51.0364294584156 3.71120186545147 2039524 1000 267 | 1453206678139 266981 51.0364294584156 3.71120186545147 1753748 1000 268 | 1453206679139 267981 51.0364294584156 3.71120186545147 1466624 1000 269 | 1453206680139 268981 51.0364294584156 3.71120186545147 963820 1000 270 | 1453206681139 269981 51.0364294584156 3.71120186545147 765664 1000 271 | 1453206682139 270981 51.0364190964211 3.71121961530244 1296776 1000 272 | 1453206683139 271981 51.0364116927911 3.71124226113731 792080 
1000 273 | 1453206684139 272981 51.0364116927911 3.71124226113731 475040 1000 274 | 1453206685140 273982 51.0364127115917 3.71130013446548 235900 1001 275 | 1453206686139 274981 51.0364122271887 3.71131797054378 1622992 999 276 | 1453206687139 275981 51.0364122271887 3.71131797054378 444840 1000 277 | 1453206688139 276981 51.0364122271887 3.71131797054378 64704 1000 278 | 1453206689139 277981 51.0364122271887 3.71131797054378 0 1000 279 | 1453206690140 278982 51.0364085539575 3.71135650207824 0 1001 280 | 1453206691140 279982 51.0364112285306 3.71139758580122 0 1000 281 | 1453206692139 280981 51.0364225674833 3.71144585023071 0 999 282 | 1453206693139 281981 51.0364225674833 3.71144585023071 0 1000 283 | 1453206694139 282981 51.0364422136164 3.71150467884998 907204 1000 284 | 1453206695139 283981 51.0364934069474 3.71163456765928 1872372 1000 285 | 1453206696139 284981 51.0365183184826 3.71169809825653 775100 1000 286 | 1453206697139 285981 51.0365183184826 3.71169809825653 0 1000 287 | 1453206698139 286981 51.0365648345168 3.71180891969809 2654212 1000 288 | 1453206699139 287981 51.0365648345168 3.71180891969809 1105360 1000 289 | 1453206700140 288982 51.0365868660039 3.7118602373236 1006956 1001 290 | 1453206701139 289981 51.0366289091263 3.71198780807731 633560 999 291 | 1453206702139 290981 51.0366512012708 3.71206908380778 882940 1000 292 | 1453206703139 291981 51.036672959543 3.71214918769756 1305564 1000 293 | 1453206704139 292981 51.036672959543 3.71214918769756 1053436 1000 294 | 1453206705139 293981 51.0366960654126 3.71222211326342 466408 1000 295 | 1453206706139 294981 51.0367384953894 3.71235476946749 1993692 1000 296 | 1453206707139 295981 51.0367583126378 3.71241938680157 1140460 1000 297 | 1453206708140 296982 51.0367830574277 3.71248915279187 1217192 1001 298 | 1453206709139 297981 51.0367830574277 3.71248915279187 986736 999 299 | -------------------------------------------------------------------------------- /IPDPS2020/PPO.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | import tensorflow.contrib.rnn as rnn 3 | import numpy as np 4 | from collections import deque 5 | import random 6 | 7 | BELTA = 0.0003 8 | METHOD = [ 9 | dict(name='kl_pen', kl_target=0.01, lam=0.5), # KL penalty 10 | # Clipped surrogate objective, find this is better 11 | dict(name='clip', epsilon=0.1), 12 | ][1] # choose the method for optimization 13 | 14 | 15 | class PPO(object): 16 | replay_memory = deque() 17 | memory_size = 100 18 | 19 | def __init__(self, S_DIM, A_DIM, BATCH, A_UPDATE_STEPS, C_UPDATE_STEPS, HAVE_TRAIN, num): # num是什么意思 20 | self.sess = tf.Session() 21 | self.tfs = tf.placeholder(tf.float32, [None, S_DIM], 'state') 22 | self.S_DIM = S_DIM 23 | self.A_DIM = A_DIM 24 | self.BATCH = BATCH 25 | self.A_UPDATE_STEPS = A_UPDATE_STEPS 26 | self.C_UPDATE_STEPS = C_UPDATE_STEPS 27 | self.decay = tf.placeholder(tf.float32, (), 'decay') 28 | self.a_lr = tf.placeholder(tf.float32, (), 'a_lr') 29 | self.c_lr = tf.placeholder(tf.float32, (), 'c_lr') 30 | self.num = num 31 | 32 | # critic 33 | with tf.variable_scope('critic'): 34 | w1 = tf.Variable(tf.truncated_normal( 35 | [self.S_DIM, 200], stddev=0.01), name='w1') 36 | bias1 = tf.Variable(tf.constant( 37 | 0.0, shape=[200], dtype=tf.float32), name='b1') 38 | l1 = tf.nn.relu(tf.matmul(self.tfs, w1) + bias1) 39 | # l1 = tf.reshape(l1, shape=(-1, 200, 1)) 40 | # lstm_cell = rnn.BasicLSTMCell(num_units=128) 41 | # # init_state = lstm_cell.zero_state( 42 | # # 
batch_size=self.BATCH, dtype=tf.float32) 43 | # outputs, states = tf.nn.dynamic_rnn( 44 | # cell=lstm_cell, inputs=l1, dtype=tf.float32) # , initial_state=init_state, dtype=tf.float32) 45 | # l2 = outputs[:, -1, :] 46 | # print(np.shape(l2)) 47 | 48 | w2 = tf.Variable(tf.truncated_normal( 49 | [200, 50], stddev=0.01), name='w2') 50 | bias2 = tf.Variable(tf.constant( 51 | 0.0, shape=[50], dtype=tf.float32), name='b2') 52 | l2 = tf.nn.relu(tf.matmul(l1, w2) + bias2) 53 | 54 | w3 = tf.Variable(tf.truncated_normal( 55 | [50, 1], stddev=0.01), name='w3') 56 | bias3 = tf.Variable(tf.constant( 57 | 0.0, shape=[1], dtype=tf.float32), name='b3') 58 | self.v = tf.nn.relu(tf.matmul(l2, w3) + bias3) 59 | 60 | self.tfdc_r = tf.placeholder(tf.float32, [None, 1], 'discounted_r') 61 | self.advantage = self.tfdc_r - self.v 62 | self.closs = tf.reduce_mean(tf.square(self.advantage)) + \ 63 | BELTA * (tf.nn.l2_loss(w1) + tf.nn.l2_loss(w3)) 64 | optimizer = tf.train.AdamOptimizer(learning_rate=self.c_lr) 65 | vars_ = tf.trainable_variables() 66 | grads, _ = tf.clip_by_global_norm( 67 | tf.gradients(self.closs, vars_), 5.0) 68 | self.ctrain_op = optimizer.apply_gradients(zip(grads, vars_)) 69 | 70 | # actor 71 | pi, pi_params, l2_loss_a = self._build_anet('pi', trainable=True) 72 | oldpi, oldpi_params, _ = self._build_anet('oldpi', trainable=False) 73 | with tf.variable_scope('sample_action'): 74 | # choose an action; squeeze drops the leading dimension (= 1) 75 | self.sample_op = tf.squeeze(pi.sample(1), axis=0) 76 | with tf.variable_scope('update_oldpi'): 77 | self.update_oldpi_op = [oldp.assign( 78 | p) for p, oldp in zip(pi_params, oldpi_params)] 79 | 80 | self.tfa = tf.placeholder(tf.float32, [None, self.A_DIM], 'action') 81 | self.tfadv = tf.placeholder(tf.float32, [None, 1], 'advantage') 82 | with tf.variable_scope('loss'): 83 | with tf.variable_scope('surrogate'): 84 | # ratio = tf.exp(pi.log_prob(self.tfa) - oldpi.log_prob(self.tfa)) 85 | ratio = pi.prob(self.tfa) #/oldpi.prob(self.tfa) 86 | surr = ratio * self.tfadv 87 | if METHOD['name'] == 'kl_pen': 88 | self.tflam = tf.placeholder(tf.float32, None, 'lambda') 89 | kl = tf.distributions.kl_divergence(oldpi, pi) 90 | self.kl_mean = tf.reduce_mean(kl) 91 | self.aloss = -(tf.reduce_mean(surr - self.tflam * kl)) 92 | else: # clipping method, which we find works better 93 | self.aloss = -tf.reduce_mean(tf.minimum( 94 | surr, 95 | tf.clip_by_value(ratio, 1. - METHOD['epsilon'], 1.
+ METHOD['epsilon']) * self.tfadv)) + \ 96 | BELTA * l2_loss_a 97 | 98 | with tf.variable_scope('atrain'): 99 | # self.atrain_op = tf.train.AdamOptimizer(A_LR).minimize(self.aloss) 100 | optimizer = tf.train.AdamOptimizer(learning_rate=self.a_lr) 101 | vars_ = tf.trainable_variables() 102 | grads, _ = tf.clip_by_global_norm( 103 | tf.gradients(self.aloss, vars_), 5.0) 104 | self.atrain_op = optimizer.apply_gradients(zip(grads, vars_)) 105 | 106 | tf.summary.FileWriter("log/", self.sess.graph) 107 | init = tf.global_variables_initializer() 108 | self.saver = tf.train.Saver() 109 | for var in tf.trainable_variables(): 110 | tf.summary.histogram(var.op.name, var) 111 | summary_op = tf.summary.merge_all() 112 | summary_writer = tf.summary.FileWriter('tmp/vintf/', self.sess.graph) 113 | self.sess.run(init) 114 | if HAVE_TRAIN == True: 115 | model_file = tf.train.latest_checkpoint( 116 | 'ckpt/' + str(self.num) + "/") 117 | self.saver.restore(self.sess, model_file) 118 | 119 | def update(self, s, a, r, dec, alr, clr, epoch): 120 | self.sess.run(self.update_oldpi_op) 121 | adv = self.sess.run( 122 | self.advantage, {self.tfs: s, self.tfdc_r: r, self.decay: dec}) 123 | # adv = (adv - adv.mean())/(adv.std()+1e-6) # sometimes helpful 124 | 125 | # update actor 126 | if METHOD['name'] == 'kl_pen': 127 | for _ in range(self.A_UPDATE_STEPS): 128 | _, kl = self.sess.run( 129 | [self.atrain_op, self.kl_mean], 130 | {self.tfs: s, self.tfa: a, self.tfadv: adv, self.tflam: METHOD['lam']}) 131 | if kl > 4 * METHOD['kl_target']: # this in in google's paper 132 | break 133 | # adaptive lambda, this is in OpenAI's paper 134 | if kl < METHOD['kl_target'] / 1.5: 135 | METHOD['lam'] /= 2 136 | elif kl > METHOD['kl_target'] * 1.5: 137 | METHOD['lam'] *= 2 138 | # sometimes explode, this clipping is my solution 139 | METHOD['lam'] = np.clip(METHOD['lam'], 1e-4, 10) 140 | else: # clipping method, find this is better (OpenAI's paper) 141 | [self.sess.run(self.atrain_op, 142 | {self.tfs: s, self.tfa: a, self.tfadv: adv, self.decay: dec, self.a_lr: alr, self.c_lr: clr}) 143 | for _ in range(self.A_UPDATE_STEPS)] 144 | 145 | # update critic 146 | [self.sess.run(self.ctrain_op, {self.tfs: s, self.tfdc_r: r, self.decay: dec, self.a_lr: alr, self.c_lr: clr}) 147 | for _ in range(self.C_UPDATE_STEPS)] 148 | # self.saver.save(self.sess, "ckpt/" + str(self.num) + "/", global_step=epoch) 149 | 150 | def _build_anet(self, name, trainable): 151 | with tf.variable_scope(name): 152 | w4 = tf.Variable(tf.truncated_normal( 153 | [self.S_DIM, 200], stddev=0.01), name='w4') 154 | bias4 = tf.Variable(tf.constant( 155 | 0.0, shape=[200], dtype=tf.float32), name='b4') 156 | l3 = tf.nn.sigmoid(tf.matmul(self.tfs, w4) + bias4) 157 | 158 | w5 = tf.Variable(tf.truncated_normal( 159 | [200, 50], stddev=0.01), name='w5') 160 | bias5 = tf.Variable(tf.constant( 161 | 0.0, shape=[50], dtype=tf.float32), name='b5') 162 | l4 = tf.nn.sigmoid(tf.matmul(l3, w5) + bias5) 163 | 164 | w6 = tf.Variable(tf.truncated_normal( 165 | [50, self.A_DIM], stddev=0.01), name='w6') 166 | bias6 = tf.Variable(tf.constant( 167 | 0.0, shape=[self.A_DIM], dtype=tf.float32), name='b6') 168 | 169 | mu = 1 * tf.nn.sigmoid(tf.matmul(l4, w6) + bias6) 170 | # mu = 5 * tf.nn.sigmoid(tf.matmul(l4, w6) + bias6) + 0.0001 171 | # print('mu:', np.shape(mu)) 172 | 173 | w7 = tf.Variable(tf.truncated_normal( 174 | [50, self.A_DIM], stddev=0.01), name='w7') 175 | bias7 = tf.Variable(tf.constant( 176 | 0.0, shape=[self.A_DIM], dtype=tf.float32), name='b7') 177 | sigma = self.decay * \ 
178 | tf.nn.sigmoid(tf.matmul(l4, w7) + bias7) + 0.00001 179 | # print('sigma:',np.shape(sigma)) 180 | 181 | # mu = tf.layers.dense(l2, A_DIM, tf.nn.sigmoid, trainable=trainable) 182 | # sigma = tf.layers.dense(l2, A_DIM, tf.nn.sigmoid, trainable=trainable) + 0.0001 183 | norm_dist = tf.distributions.Normal( 184 | loc=mu, scale=sigma) # loc: mean, scale: standard deviation 185 | params = tf.get_collection( 186 | tf.GraphKeys.GLOBAL_VARIABLES, scope=name) # returns the list of variables created under the given scope name 187 | l2_loss_a = tf.nn.l2_loss( 188 | w4) + tf.nn.l2_loss(w5) + tf.nn.l2_loss(w6) + tf.nn.l2_loss(w7) 189 | return norm_dist, params, l2_loss_a 190 | 191 | def choose_action(self, s, dec): 192 | s = s[np.newaxis, :] 193 | a = self.sess.run(self.sample_op, feed_dict={ 194 | self.tfs: s, self.decay: dec}) 195 | # a, sigma, mu = self.sess.run([self.sample_op, self.sigma, self.mu], feed_dict={self.tfs: s, self.decay: dec}) 196 | 197 | return np.clip(a[0], 0.0001, 1) # clip the sampled action into [0.0001, 1] 198 | 199 | def get_v(self, s): 200 | if s.ndim < 2: 201 | s = s[np.newaxis, :] 202 | return self.sess.run(self.v, {self.tfs: s})[0, 0] 203 | 204 | def preceive(self, state, action, reward, dec, alr, clr): 205 | self.replay_memory.append((state, action, reward)) 206 | if len(self.replay_memory) > self.memory_size: 207 | self.replay_memory.popleft() 208 | else: 209 | self.train_network(dec, alr, clr) 210 | 211 | def train_network(self, dec, alr, clr): 212 | mini_batch = random.sample(self.replay_memory, self.BATCH) 213 | state_batch = [data[0] for data in mini_batch] 214 | action_batch = [data[1] for data in mini_batch] 215 | reward_batch = [data[2] for data in mini_batch] 216 | 217 | self.update(state_batch, action_batch, reward_batch, dec, alr, clr, 0) # update() also expects an epoch index; it is only used for the (currently disabled) checkpointing, so 0 is passed here 218 | -------------------------------------------------------------------------------- /IPDPS2020/README.md: -------------------------------------------------------------------------------- 1 | The dataset comes from the following website: 2 | https://users.ugent.be/~jvdrhoof/dataset-4g/logs/ 3 | 4 | For IPDPS 2020: 5 | Experience-Driven Computational Resource Allocation of Federated Learning by Deep Reinforcement Learning.
Yufeng Zhan, Peng Li, and Song Guo 6 | -------------------------------------------------------------------------------- /IPDPS2020/continuousEnv.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from collections import defaultdict 3 | 4 | class EnvArgs(object): 5 | def __init__(self, user_num, his_len, info_num, bandwidth, C, D, alpha, tau, epsilon): 6 | self.user_num = user_num 7 | self.his_len = his_len 8 | self.info_num = 1 #info_num 9 | self.bandwidth = bandwidth 10 | self.C = C 11 | self.D = D 12 | self.alpha = alpha 13 | self.tau = tau 14 | self.epsilon = epsilon 15 | 16 | 17 | class ContinuousEnv(object): 18 | state_map = None # state pool 19 | his_len = 5 20 | 21 | def __init__(self, env_args): 22 | self.user_num = env_args.user_num 23 | self.his_len = env_args.his_len 24 | self.info_num = env_args.info_num 25 | self.bandwidth = env_args.bandwidth 26 | self.state = np.zeros( 27 | (self.user_num, self.his_len, self.info_num), 'float32') 28 | self.reward = 0 29 | self.global_step = 0 30 | self.global_time = 0 31 | self.C = env_args.C 32 | self.D = env_args.D 33 | self.alpha = env_args.alpha 34 | self.tau = env_args.tau 35 | self.epsilon = env_args.epsilon 36 | self.cur_delta = np.zeros(self.user_num, "float32") 37 | self.cur_B = np.zeros(self.user_num, "float32") 38 | self.cur_user_T = np.zeros(self.user_num, "float32") 39 | self.cur_T = 0 40 | 41 | def reset(self): 42 | self.global_time = 0 43 | self.global_step = int(self.global_time / 4) 44 | for i in range(self.user_num): 45 | idx = i % 5 46 | for j in range(self.his_len): 47 | self.state[i,j,0] = 0 48 | for k in range(4): 49 | self.state[i,j,0] += np.clip(self.bandwidth[idx][ 50 | ((self.global_step+j-self.his_len) * 4 - k) % len(self.bandwidth[idx])], 51 | 0.2,10) 52 | self.state[i,j,0] = self.state[i,j,0] / 4 53 | # self.state[i,j,1] = np.random.rand() 54 | self.cur_B[i] = self.state[i,-1,0] 55 | self.cur_delta[i] = np.random.rand() # self.state[i,-1,1] 56 | self.reward = 0 57 | self.cur_T, self.cur_user_T = self.count_T(self.cur_delta, self.cur_B) 58 | return self.state 59 | 60 | def count_T(self, delta, B): 61 | user_T = np.zeros(self.user_num, "float32") 62 | for i in range(self.user_num): 63 | idx = i % 5 64 | user_T[i] = self.C[idx] * self.D[idx] / delta[i] * self.tau + self.epsilon / B[i] 65 | 66 | return np.max(user_T), user_T 67 | 68 | def step(self, delta): # continuous action 69 | self.global_time += self.cur_T 70 | self.global_step = int(self.global_time / 4) 71 | self.state[:,:-1,:] = self.state[:,1:,:] 72 | self.reward = 0 73 | for i in range(self.user_num): 74 | # self.state[i,-1,0] = 0 75 | idx = i % 5 76 | tmp_step = int((self.C[idx] * self.D[idx] / delta[idx] * self.tau + self.global_time)/4) 77 | for his in range(self.his_len): 78 | tmp_step = tmp_step - 4 79 | for k in range(4): 80 | self.state[i,-his-1,0] += np.clip(self.bandwidth[idx][ 81 | (tmp_step * 4 - k) % len(self.bandwidth[idx])],0.2,10) 82 | # self.state[i,-1,0] = self.state[i,-1,0] / 4 83 | # self.state[i,-1,1] = delta[i] 84 | self.state[i,:,0] = self.state[i,:,0] / 4 85 | # self.state[i,:,1] = self.state[i,:,0] 86 | self.cur_B[i] = self.state[i,-1,0] 87 | self.cur_delta[i] = delta[i] # self.state[i,-1,1] 88 | self.reward += self.alpha[idx] / 2 * self.C[idx] * self.D[idx] * delta[i] * delta[i] 89 | self.cur_T, self.cur_user_T = self.count_T(self.cur_delta, self.cur_B) 90 | self.reward += 1 * self.cur_T 91 | 92 | return self.state, -self.reward / 10, self.cur_T , self.reward - 1 * 
self.cur_T 93 | -------------------------------------------------------------------------------- /IPDPS2020/main.py: -------------------------------------------------------------------------------- 1 | import os 2 | os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' 3 | from continuousEnv import ContinuousEnv 4 | import numpy as np 5 | import random 6 | import matplotlib.pyplot as plt 7 | from DNC_PPO import PPO 8 | import csv 9 | 10 | 11 | def main(): 12 | train_ppo() 13 | 14 | 15 | def train_ppo(): 16 | user_num = 5 17 | his_len = 5 18 | info_num = 2 19 | A_DIM, S_DIM = user_num, user_num * his_len * info_num 20 | BATCH = 20 21 | A_UPDATE_STEPS = 5 22 | C_UPDATE_STEPS = 5 23 | A_LR = 0.00003 24 | C_LR = 0.00003 25 | v_s = np.zeros(user_num) 26 | env = ContinuousEnv(user_num,his_len,info_num,bandwidth) 27 | GAMMA = 0.95 28 | EP_MAX = 1000 29 | EP_LEN = 400 30 | dec = 0.5 31 | action = np.zeros(user_num) 32 | Algs = "dnc" 33 | 34 | # max_r = 0 35 | # max_a = np.random.random(user_num) 36 | 37 | ppo = PPO(S_DIM,A_DIM,BATCH,A_UPDATE_STEPS,C_UPDATE_STEPS,False,0) 38 | csvFile1 = open("Rewards_" + Algs + 39 | "_" + str(user_num) + ".csv", 'w', newline='') 40 | writer1 = csv.writer(csvFile1) 41 | csvFile2 = open("Actions_" + Algs + 42 | "_" + str(user_num) + ".csv", 'w', newline='') 43 | writer2 = csv.writer(csvFile2) 44 | csvFile3 = open("Aloss_" + Algs + 45 | "_" + str(user_num) + ".csv", 'w', newline='') 46 | writer3 = csv.writer(csvFile3) 47 | csvFile4 = open("Closs_" + Algs + 48 | "_" + str(user_num) + ".csv", 'w', newline='') 49 | writer4 = csv.writer(csvFile4) 50 | 51 | rewards = [] 52 | actions = [] 53 | closs = [] 54 | aloss = [] 55 | cur_state = env.reset() 56 | for ep in range(EP_MAX): 57 | if ep % 50 == 0: 58 | dec = dec * 1 59 | A_LR = A_LR * 0.8 60 | C_LR = C_LR * 0.8 61 | buffer_s = [] 62 | buffer_a = [] 63 | buffer_r = [] 64 | sum_reward = np.zeros(user_num) 65 | sum_action = np.zeros(user_num) 66 | sum_closs = np.zeros(user_num) 67 | sum_aloss = np.zeros(user_num) 68 | for t in range(EP_LEN): 69 | action = ppo.choose_action(cur_state, dec) 70 | # Greedy algorithm 71 | # if np.random.random() < 0.1: 72 | # action[i] = np.random.random() 73 | # else: 74 | # action[i] = max_a[i] 75 | # action[i] = np.random.random() 76 | 77 | next_state, reward = env.step(action) 78 | sum_reward += reward 79 | sum_action += action 80 | 81 | # Greedy algorithm 82 | # for i in range(user_num): 83 | # if reward[i] > max_r[i]: 84 | # max_r[i] = reward[i] 85 | # max_a[i] = action[i] 86 | # if max_a[i] == action[i]: 87 | # max_r[i] = reward[i] 88 | 89 | v_s = ppo.get_v(next_state) 90 | 91 | buffer_a.append(action) 92 | buffer_s.append(cur_state) 93 | buffer_r.append(reward) 94 | 95 | cur_state = next_state 96 | # update ppo 97 | if (t + 1) % BATCH == 0: 98 | discounted_r = np.zeros(len(buffer_r), 'float32') 99 | v_s = ppo.get_v(next_state) 100 | running_add = v_s 101 | 102 | for rd in reversed(range(len(buffer_r))): 103 | running_add = running_add * GAMMA + buffer_r[rd] 104 | discounted_r[rd] = running_add 105 | 106 | discounted_r = discounted_r[np.newaxis, :] 107 | discounted_r = np.transpose(discounted_r) 108 | ppo.update(np.vstack(buffer_s), np.vstack(buffer_a), discounted_r, dec, A_LR, C_LR, ep) 109 | 110 | if ep % 10 == 0: 111 | print('instant ep:', ep) 112 | print("instant reward:", reward) 113 | print("instant action:", action) 114 | rewards.append(sum_reward / EP_LEN) 115 | actions.append(sum_action / EP_LEN) 116 | closs.append(sum_closs / EP_LEN) 117 | aloss.append(sum_aloss / EP_LEN) 118 | 
print("average reward:", sum_reward / EP_LEN) 119 | print("average action:", sum_action / EP_LEN) 120 | print("average closs:", sum_closs / EP_LEN) 121 | print("average aloss:", sum_aloss / EP_LEN) 122 | 123 | plt.plot(rewards) 124 | plt.show() 125 | writer1.writerow(rewards) 126 | for i in range(len(actions)): 127 | writer2.writerow(actions[i]) 128 | writer3.writerow(closs) 129 | writer4.writerow(aloss) 130 | csvFile1.close() 131 | csvFile2.close() 132 | csvFile3.close() 133 | csvFile4.close() 134 | 135 | 136 | if __name__ == '__main__': 137 | main() 138 | -------------------------------------------------------------------------------- /IPDPS2020/process_data.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 10, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "name": "stdout", 10 | "output_type": "stream", 11 | "text": [ 12 | "403\n", 13 | "619\n", 14 | "529\n", 15 | "422\n", 16 | "585\n" 17 | ] 18 | } 19 | ], 20 | "source": [ 21 | "# -*- coding: utf-8 -*-\n", 22 | "import json\n", 23 | "import numpy as np\n", 24 | "import os\n", 25 | "import pandas as pd\n", 26 | "import matplotlib.pyplot as plt#matplotlib inline\n", 27 | "import math\n", 28 | "\n", 29 | "main_path = \"./Dataset\"\n", 30 | "file_list = os.listdir(main_path)\n", 31 | "bandwidth = {}\n", 32 | "count = 0\n", 33 | "bandwidth[0] = []\n", 34 | "for f in file_list:\n", 35 | " if (f.startswith(\"report_foot_\") == True and count < 5):\n", 36 | " with open(main_path + '/' + f, 'r') as file_to_read:\n", 37 | " while True:\n", 38 | " lines = file_to_read.readline()\n", 39 | " if not lines:\n", 40 | " break\n", 41 | " item = [i for i in lines.split()]\n", 42 | " bandwidth[count].append(float(item[-2])/1000/1000)\n", 43 | " count += 1\n", 44 | " bandwidth[count] = []\n", 45 | " print(len(bandwidth[count-1]))\n", 46 | "# print(\"Number of user data: \", len(bandwidth))\n", 47 | "# print(bandwidth)\n", 48 | "\n" 49 | ] 50 | }, 51 | { 52 | "cell_type": "code", 53 | "execution_count": null, 54 | "metadata": {}, 55 | "outputs": [], 56 | "source": [] 57 | } 58 | ], 59 | "metadata": { 60 | "kernelspec": { 61 | "display_name": "Python 3", 62 | "language": "python", 63 | "name": "python3" 64 | }, 65 | "language_info": { 66 | "codemirror_mode": { 67 | "name": "ipython", 68 | "version": 3 69 | }, 70 | "file_extension": ".py", 71 | "mimetype": "text/x-python", 72 | "name": "python", 73 | "nbconvert_exporter": "python", 74 | "pygments_lexer": "ipython3", 75 | "version": "3.7.4" 76 | } 77 | }, 78 | "nbformat": 4, 79 | "nbformat_minor": 2 80 | } 81 | -------------------------------------------------------------------------------- /IPDPS2020/reward.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bitzj2015/DRL-Networking/f42184639c4e54951e8919a04a4d617d1cdd21ea/IPDPS2020/reward.png -------------------------------------------------------------------------------- /IPDPS2020/test.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import json 3 | import numpy as np 4 | filename="report_bus_0001.log" 5 | bandwidth = [] 6 | with open(filename, 'r') as file_to_read: 7 | while True: 8 | lines = file_to_read.readline() 9 | if not lines: 10 | break 11 | item = [i for i in lines.split()] 12 | bandwidth.append(float(item[-2])/1000) 13 | 14 | import pandas as pd 15 | import matplotlib.pyplot as plt#matplotlib inline 
16 | import math 17 | dataset=[] 18 | # for data in np.arange(0, 3, .01): 19 | # data = math.sin(data*math.pi) 20 | # dataset.append(data) 21 | for i in range(len(bandwidth)-9): 22 | dataset.append(sum(bandwidth[i:i+10])/10) 23 | dataset=np.array(dataset) 24 | dataset = dataset.astype('float32') 25 | max_value = np.max(dataset) 26 | min_value = np.min(dataset) 27 | scalar = max_value - min_value 28 | dataset = list(map(lambda x: x / scalar, dataset)) 29 | 30 | 31 | 32 | def create_dataset(dataset, look_back=8): 33 | dataX, dataY = [], [] 34 | for i in range(len(dataset) - look_back): 35 | a = dataset[i:(i + look_back)] 36 | dataX.append(a) 37 | dataY.append(dataset[i + look_back]) 38 | return np.array(dataX), np.array(dataY) 39 | 40 | data_X, data_Y = create_dataset(dataset) 41 | 42 | train_size = int(len(data_X) * 0.7) 43 | test_size = len(data_X) - train_size 44 | train_X = data_X[:train_size] 45 | train_Y = data_Y[:train_size] 46 | test_X = data_X[train_size:] 47 | test_Y = data_Y[train_size:] 48 | 49 | import torch 50 | 51 | train_X = train_X.reshape(-1, 1, 8) 52 | train_Y = train_Y.reshape(-1, 1, 1) 53 | test_X = test_X.reshape(-1, 1, 8) 54 | 55 | train_x = torch.from_numpy(train_X) 56 | train_y = torch.from_numpy(train_Y) 57 | test_x = torch.from_numpy(test_X) 58 | from torch import nn 59 | from torch.autograd import Variable 60 | # define the LSTM regression model 61 | class lstm_reg(nn.Module): 62 | def __init__(self, input_size, hidden_size, output_size=1, num_layers=2): 63 | super(lstm_reg, self).__init__() 64 | 65 | self.rnn = nn.LSTM(input_size, hidden_size, num_layers) # rnn 66 | self.reg = nn.Linear(hidden_size, output_size) # regression (output) layer 67 | 68 | def forward(self, x): 69 | x, _ = self.rnn(x) # (seq, batch, hidden) 70 | s, b, h = x.shape 71 | x = x.view(s*b, h) # flatten to the shape expected by the linear layer 72 | x = self.reg(x) 73 | x = x.view(s, b, -1) 74 | return x 75 | net = lstm_reg(8, 20) 76 | 77 | criterion = nn.MSELoss() 78 | optimizer = torch.optim.Adam(net.parameters(), lr=1e-2) 79 | # start training 80 | for e in range(1000): 81 | var_x = Variable(train_x) 82 | var_y = Variable(train_y) 83 | # forward pass 84 | out = net(var_x) 85 | loss = criterion(out, var_y) 86 | # backward pass 87 | optimizer.zero_grad() 88 | loss.backward() 89 | optimizer.step() 90 | if (e + 1) % 100 == 0: # print the loss every 100 epochs 91 | print('Epoch: {}, Loss: {:.10f}'.format(e + 1, loss.item())) 92 | net = net.eval() # switch to evaluation mode 93 | data_X = data_X.reshape(-1, 1, 8) 94 | data_X = torch.from_numpy(data_X) 95 | var_data = Variable(data_X) 96 | pred_test = net(var_data) # predictions over the whole dataset 97 | # reshape the output into a flat numpy array 98 | pred_test = pred_test.view(-1).data.numpy() 99 | # plot the real values against the predictions 100 | plt.plot(pred_test, 'r', label='prediction') 101 | plt.plot(dataset[7:], 'b', label='real') 102 | plt.legend(loc='best') 103 | plt.show() -------------------------------------------------------------------------------- /IPDPS2020/train.py: -------------------------------------------------------------------------------- 1 | import os 2 | os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' 3 | from continuousEnv import ContinuousEnv, EnvArgs 4 | import numpy as np 5 | import random 6 | from DNC_PPO import PPO 7 | import csv 8 | import matplotlib.pyplot as plt#matplotlib inline 9 | import math 10 | 11 | 12 | def get_bandwidth(main_path): 13 | file_list = os.listdir(main_path) 14 | bandwidth = {} 15 | count = 0 16 | bandwidth[0] = [] 17 | for f in file_list: 18 | if (f.startswith("report_foot_") == True and count < 5): 19 | with open(main_path + '/' + f, 'r') as file_to_read: 20 | while True: 21 | lines = file_to_read.readline() 22 | if not lines: 23 |
break 24 | item = [i for i in lines.split()] 25 | bandwidth[count].append(float(item[-2])/1000/1000) 26 | count += 1 27 | bandwidth[count] = [] 28 | return bandwidth 29 | 30 | # def main(): 31 | # train_ppo() 32 | 33 | # set the experiment environment 34 | user_num = 100 35 | his_len = 5 36 | info_num = 1 37 | main_path = "./Dataset" 38 | bandwidth = get_bandwidth(main_path) 39 | C = np.array([18,20,22,24,26]).astype("float") 40 | D = np.array([0.08, 0.06, 0.07, 0.06, 0.09]).astype("float") 41 | alpha = np.array([1,1,1,1,1]) / 50 42 | tau = 2 43 | epsilon = 5 44 | env_args = EnvArgs(user_num, his_len, info_num, bandwidth, C, D, alpha, tau, epsilon) 45 | env = ContinuousEnv(env_args) 46 | 47 | # set the DRL agent 48 | A_DIM, S_DIM = user_num, user_num * his_len * info_num 49 | BATCH = 20 50 | A_UPDATE_STEPS = 5 51 | C_UPDATE_STEPS = 5 52 | HAVE_TRAIN = False 53 | A_LR = 0.00003 54 | C_LR = 0.00003 55 | v_s = np.zeros(user_num) 56 | GAMMA = 0.95 57 | EP_MAX = 500 58 | EP_LEN = 400 59 | dec = 0.3 60 | action = np.zeros(user_num) 61 | ppo = PPO(S_DIM, A_DIM, BATCH, A_UPDATE_STEPS, C_UPDATE_STEPS, HAVE_TRAIN, 0) 62 | 63 | # define csvfiles for writing results 64 | Algs = "dnc" 65 | csvFile1 = open("test-lambda=0.5-Rewards_" + Algs + "_" + str(user_num) + ".csv", 'w', newline='') 66 | writer1 = csv.writer(csvFile1) 67 | csvFile2 = open("test-lambda=0.5-Actions_" + Algs + "_" + str(user_num) + ".csv", 'w', newline='') 68 | writer2 = csv.writer(csvFile2) 69 | csvFile3 = open("test-lambda=0.5-Aloss_" + Algs + "_" + str(user_num) + ".csv", 'w', newline='') 70 | writer3 = csv.writer(csvFile3) 71 | csvFile4 = open("test-lambda=0.5-Closs_" + Algs + "_" + str(user_num) + ".csv", 'w', newline='') 72 | writer4 = csv.writer(csvFile4) 73 | 74 | rewards = [] 75 | actions = [] 76 | closses = [] 77 | alosses = [] 78 | Ts = [] 79 | Es = [] 80 | cur_state = env.reset() 81 | for ep in range(EP_MAX): 82 | # cur_state = env.reset() 83 | if ep % 50 == 0: 84 | dec = dec * 0.95 85 | A_LR = A_LR * 0.85 86 | C_LR = C_LR * 0.85 87 | buffer_s = [] 88 | buffer_a = [] 89 | buffer_r = [] 90 | sum_reward = 0 91 | sum_action = 0 92 | sum_closs = 0 93 | sum_aloss = 0 94 | sum_T = 0 95 | sum_E = 0 96 | for t in range(EP_LEN): 97 | action = ppo.choose_action(cur_state.reshape(-1,S_DIM), dec) 98 | # action = np.random.random(np.shape(action)) 99 | next_state, reward, T, E= env.step(1 + action * 1) 100 | # print(action,T,E) 101 | 102 | sum_reward += reward 103 | sum_action += action 104 | sum_T += T 105 | sum_E += E 106 | buffer_a.append(action.copy()) 107 | buffer_s.append(cur_state.reshape(-1,S_DIM).copy()) 108 | buffer_r.append(reward) 109 | 110 | cur_state = next_state 111 | # update ppo 112 | if (t + 1) % BATCH == 0: 113 | discounted_r = np.zeros(len(buffer_r), 'float32') 114 | v_s = ppo.get_v(next_state.reshape(-1, S_DIM)) 115 | running_add = v_s 116 | 117 | for rd in reversed(range(len(buffer_r))): 118 | running_add = running_add * GAMMA + buffer_r[rd] 119 | discounted_r[rd] = running_add 120 | 121 | discounted_r = discounted_r[np.newaxis, :] 122 | discounted_r = np.transpose(discounted_r) 123 | if HAVE_TRAIN == False: 124 | closs, aloss = ppo.update(np.vstack(buffer_s), np.vstack(buffer_a), discounted_r, dec, A_LR, C_LR, ep) 125 | sum_closs += closs 126 | sum_aloss += aloss 127 | if ep % 10 == 0: 128 | print('instant ep:', ep) 129 | print("instant reward:", reward) 130 | print("instant action:", action) 131 | rewards.append(sum_reward / EP_LEN) 132 | actions.append(sum_action / EP_LEN) 133 | closses.append(sum_closs / 
EP_LEN) 134 | alosses.append(sum_aloss / EP_LEN) 135 | Ts.append(sum_T / EP_LEN) 136 | Es.append(sum_E / EP_LEN) 137 | print("average reward:", sum_reward / EP_LEN) 138 | print("average T:", sum_T / EP_LEN) 139 | print("average E:", sum_E / EP_LEN) 140 | print("average action:", sum_action / EP_LEN) 141 | print("average closs:", sum_closs / EP_LEN) 142 | print("average aloss:", sum_aloss / EP_LEN) 143 | 144 | plt.plot(rewards) 145 | plt.show() 146 | writer1.writerow(-rewards * 10) 147 | writer1.writerow(Ts) 148 | writer1.writerow(Es) 149 | for i in range(len(actions)): 150 | writer2.writerow(actions[i]) 151 | writer3.writerow(closses) 152 | writer4.writerow(alosses) 153 | csvFile1.close() 154 | csvFile2.close() 155 | csvFile3.close() 156 | csvFile4.close() 157 | 158 | 159 | # if __name__ == '__main__': 160 | # main() 161 | writer1.writerow(rewards * (-10)) 162 | writer1.writerow(Ts) 163 | writer1.writerow(Es) 164 | for i in range(len(actions)): 165 | writer2.writerow(actions[i]) 166 | writer3.writerow(closses) 167 | writer4.writerow(alosses) 168 | csvFile1.close() 169 | csvFile2.close() 170 | csvFile3.close() 171 | csvFile4.close() 172 | tmp = [] 173 | fig = plt.figure() 174 | for i in range(len(alosses)): 175 | tmp.append(-sum(alosses[0:0+i+1])/len(alosses[0:0+i+1])) 176 | plt.plot(tmp) 177 | plt.show() 178 | fig.savefig("aloss.png") 179 | print(tmp) -------------------------------------------------------------------------------- /IPDPS2020/train200.py: -------------------------------------------------------------------------------- 1 | import os 2 | os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' 3 | from continuousEnv import ContinuousEnv, EnvArgs 4 | import numpy as np 5 | import random 6 | from DNC_PPO import PPO 7 | import csv 8 | import matplotlib.pyplot as plt#matplotlib inline 9 | import math 10 | 11 | 12 | def get_bandwidth(main_path): 13 | file_list = os.listdir(main_path) 14 | bandwidth = {} 15 | count = 0 16 | bandwidth[0] = [] 17 | for f in file_list: 18 | if (f.startswith("report_foot_") == True and count < 5): 19 | with open(main_path + '/' + f, 'r') as file_to_read: 20 | while True: 21 | lines = file_to_read.readline() 22 | if not lines: 23 | break 24 | item = [i for i in lines.split()] 25 | bandwidth[count].append(float(item[-2])/1000/1000) 26 | count += 1 27 | bandwidth[count] = [] 28 | return bandwidth 29 | 30 | # def main(): 31 | # train_ppo() 32 | 33 | # set the experiment environment 34 | user_num = 200 35 | his_len = 5 36 | info_num = 1 37 | main_path = "./Dataset" 38 | bandwidth = get_bandwidth(main_path) 39 | C = np.array([18,20,22,24,26]).astype("float") 40 | D = np.array([0.08, 0.06, 0.07, 0.06, 0.09]).astype("float") 41 | alpha = np.array([1,1,1,1,1]) / 50 42 | tau = 2 43 | epsilon = 5 44 | env_args = EnvArgs(user_num, his_len, info_num, bandwidth, C, D, alpha, tau, epsilon) 45 | env = ContinuousEnv(env_args) 46 | 47 | # set the DRL agent 48 | A_DIM, S_DIM = user_num, user_num * his_len * info_num 49 | BATCH = 20 50 | A_UPDATE_STEPS = 5 51 | C_UPDATE_STEPS = 5 52 | HAVE_TRAIN = False 53 | A_LR = 0.00003 54 | C_LR = 0.00003 55 | v_s = np.zeros(user_num) 56 | GAMMA = 0.95 57 | EP_MAX = 500 58 | EP_LEN = 400 59 | dec = 0.3 60 | action = np.zeros(user_num) 61 | ppo = PPO(S_DIM, A_DIM, BATCH, A_UPDATE_STEPS, C_UPDATE_STEPS, HAVE_TRAIN, 0) 62 | 63 | # define csvfiles for writing results 64 | Algs = "dnc" 65 | csvFile1 = open("test-lambda=0.5-Rewards_" + Algs + "_" + str(user_num) + ".csv", 'w', newline='') 66 | writer1 = csv.writer(csvFile1) 67 | csvFile2 = 
open("test-lambda=0.5-Actions_" + Algs + "_" + str(user_num) + ".csv", 'w', newline='') 68 | writer2 = csv.writer(csvFile2) 69 | csvFile3 = open("test-lambda=0.5-Aloss_" + Algs + "_" + str(user_num) + ".csv", 'w', newline='') 70 | writer3 = csv.writer(csvFile3) 71 | csvFile4 = open("test-lambda=0.5-Closs_" + Algs + "_" + str(user_num) + ".csv", 'w', newline='') 72 | writer4 = csv.writer(csvFile4) 73 | 74 | rewards = [] 75 | actions = [] 76 | closses = [] 77 | alosses = [] 78 | Ts = [] 79 | Es = [] 80 | cur_state = env.reset() 81 | for ep in range(EP_MAX): 82 | # cur_state = env.reset() 83 | if ep % 50 == 0: 84 | dec = dec * 0.95 85 | A_LR = A_LR * 0.85 86 | C_LR = C_LR * 0.85 87 | buffer_s = [] 88 | buffer_a = [] 89 | buffer_r = [] 90 | sum_reward = 0 91 | sum_action = 0 92 | sum_closs = 0 93 | sum_aloss = 0 94 | sum_T = 0 95 | sum_E = 0 96 | for t in range(EP_LEN): 97 | action = ppo.choose_action(cur_state.reshape(-1,S_DIM), dec) 98 | # action = np.random.random(np.shape(action)) 99 | next_state, reward, T, E= env.step(1 + action * 1) 100 | # print(action,T,E) 101 | 102 | sum_reward += reward 103 | sum_action += action 104 | sum_T += T 105 | sum_E += E 106 | buffer_a.append(action.copy()) 107 | buffer_s.append(cur_state.reshape(-1,S_DIM).copy()) 108 | buffer_r.append(reward) 109 | 110 | cur_state = next_state 111 | # update ppo 112 | if (t + 1) % BATCH == 0: 113 | discounted_r = np.zeros(len(buffer_r), 'float32') 114 | v_s = ppo.get_v(next_state.reshape(-1, S_DIM)) 115 | running_add = v_s 116 | 117 | for rd in reversed(range(len(buffer_r))): 118 | running_add = running_add * GAMMA + buffer_r[rd] 119 | discounted_r[rd] = running_add 120 | 121 | discounted_r = discounted_r[np.newaxis, :] 122 | discounted_r = np.transpose(discounted_r) 123 | if HAVE_TRAIN == False: 124 | closs, aloss = ppo.update(np.vstack(buffer_s), np.vstack(buffer_a), discounted_r, dec, A_LR, C_LR, ep) 125 | sum_closs += closs 126 | sum_aloss += aloss 127 | if ep % 10 == 0: 128 | print('instant ep:', ep) 129 | print("instant reward:", reward) 130 | print("instant action:", action) 131 | rewards.append(sum_reward / EP_LEN) 132 | actions.append(sum_action / EP_LEN) 133 | closses.append(sum_closs / EP_LEN) 134 | alosses.append(sum_aloss / EP_LEN) 135 | Ts.append(sum_T / EP_LEN) 136 | Es.append(sum_E / EP_LEN) 137 | print("average reward:", sum_reward / EP_LEN) 138 | print("average T:", sum_T / EP_LEN) 139 | print("average E:", sum_E / EP_LEN) 140 | print("average action:", sum_action / EP_LEN) 141 | print("average closs:", sum_closs / EP_LEN) 142 | print("average aloss:", sum_aloss / EP_LEN) 143 | 144 | plt.plot(rewards) 145 | plt.show() 146 | writer1.writerow(-rewards * 10) 147 | writer1.writerow(Ts) 148 | writer1.writerow(Es) 149 | for i in range(len(actions)): 150 | writer2.writerow(actions[i]) 151 | writer3.writerow(closses) 152 | writer4.writerow(alosses) 153 | csvFile1.close() 154 | csvFile2.close() 155 | csvFile3.close() 156 | csvFile4.close() 157 | 158 | 159 | # if __name__ == '__main__': 160 | # main() 161 | writer1.writerow(rewards * (-10)) 162 | writer1.writerow(Ts) 163 | writer1.writerow(Es) 164 | for i in range(len(actions)): 165 | writer2.writerow(actions[i]) 166 | writer3.writerow(closses) 167 | writer4.writerow(alosses) 168 | csvFile1.close() 169 | csvFile2.close() 170 | csvFile3.close() 171 | csvFile4.close() 172 | tmp = [] 173 | fig = plt.figure() 174 | for i in range(len(alosses)): 175 | tmp.append(-sum(alosses[0:0+i+1])/len(alosses[0:0+i+1])) 176 | plt.plot(tmp) 177 | plt.show() 178 | 
fig.savefig("aloss.png") 179 | print(tmp) -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Project 2 | ## Overview 3 | Research on DRL + Networking systems, including mobile crowdsensing systems, edge computing, and federated learning. 4 | ## References 5 | [[1]](https://ieeexplore.ieee.org/document/8967118) Y. Zhan, S. Guo, P. Li and J. Zhang, "A Deep Reinforcement Learning Based Offloading Game in Edge Computing," in IEEE Transactions on Computers, vol. 69, no. 6, pp. 883-893, 1 June 2020, doi: 10.1109/TC.2020.2969148. 6 | 7 | [[2]](https://infocom2020.ieee-infocom.org/accepted-paper-list-main-conference) Y. Zhan, J. Zhang, "An Incentive Mechanism Design for Efficient Edge Learning by Deep Reinforcement Learning Approach," in INFOCOM 2020. 8 | 9 | [[3]](https://ieeexplore.ieee.org/document/9139873) Y. Zhan, P. Li and S. Guo, "Experience-Driven Computational Resource Allocation of Federated Learning by Deep Reinforcement Learning," 2020 IEEE International Parallel and Distributed Processing Symposium (IPDPS), New Orleans, LA, USA, 2020, pp. 234-243, doi: 10.1109/IPDPS47924.2020.00033. 10 | --------------------------------------------------------------------------------
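For quick orientation, here is a minimal sketch of how the 4G throughput logs under IPDPS2020/Dataset can be turned into per-user bandwidth traces. It mirrors the parsing done by `get_bandwidth()` in IPDPS2020/train.py, where the second-to-last field of each log line is the byte count for the reporting interval; the helper name, directory path, and defaults below are illustrative and not part of the original scripts.

```python
import os


def load_bandwidth_traces(dataset_dir="IPDPS2020/Dataset", prefix="report_foot_", max_users=5):
    """Return one throughput trace (MB per ~1 s reporting interval) per matching log file."""
    traces = []
    for name in sorted(os.listdir(dataset_dir)):
        if not name.startswith(prefix) or len(traces) >= max_users:
            continue
        trace = []
        with open(os.path.join(dataset_dir, name)) as log:
            for line in log:
                fields = line.split()
                if len(fields) < 2:
                    continue
                # second-to-last field: bytes received in the interval; scaled as in train.py
                trace.append(float(fields[-2]) / 1000 / 1000)
        traces.append(trace)
    return traces


if __name__ == "__main__":
    traces = load_bandwidth_traces()
    print([len(t) for t in traces])  # one length per pedestrian trace
```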