├── README.md ├── cloud ├── Executer │ ├── excuteVgg16.py │ ├── excuter.py │ ├── resnet50.py │ ├── utils.py │ └── vgg16.py ├── README.md ├── model │ ├── detectmodel.py │ ├── models.py │ └── record.py ├── network │ ├── client.py │ └── server.py ├── process │ └── processor.py └── utils.py ├── edge ├── Executer │ ├── excuteVgg16.py │ ├── excuter.py │ ├── resnet50.py │ ├── utils.py │ └── vgg16.py ├── README.md ├── model │ ├── detectmodel.py │ ├── models.py │ └── record.py ├── network │ ├── client.py │ └── server.py ├── process │ └── processor.py └── utils.py ├── iot ├── Executer │ ├── excuteResnet50.py │ ├── excuteVgg16.py │ ├── excuter.py │ ├── resnet50.py │ ├── utils.py │ ├── vgg16.py │ └── vgg16boostvgg19.py ├── README.md ├── code_algor.py ├── controle.py ├── elephant.jpg ├── log_100328.txt ├── log_100353.txt ├── log_22315.txt ├── log_22381.txt ├── log_controle_58952.txt ├── log_controle_58967.txt ├── log_controle_98333.txt ├── log_controle_98487.txt ├── log_controle_98861.txt ├── log_controle_98889.txt ├── log_controle_9927.txt ├── log_controle_99439.txt ├── log_controle_99494.txt ├── log_controle_99539.txt ├── log_controle_99750.txt ├── log_controle_99907.txt ├── model │ ├── detectmodel.py │ ├── files │ │ └── network.txt │ ├── models.py │ └── record.py ├── network │ ├── client.py │ └── server.py ├── offloading.py ├── process │ └── processor.py ├── test │ ├── testExcute.py │ ├── testServer.py │ └── testrecord.py ├── utils.py └── 思路流程图.vsdx └── schedule_results
/README.md: -------------------------------------------------------------------------------- 1 | # task-merging 2 | Code for the paper "Task Merging and Scheduling for Parallel Deep Learning Applications in Mobile Edge Computing". 3 | 4 | ## Description 5 | IOT is the platform code for the mobile device, edge is the platform code for the edge device, and cloud is the platform code for the remote cloud. 6 | 7 | 8 | ## The scheduling results are in the log files of IOT --------------------------------------------------------------------------------
/cloud/Executer/excuteVgg16.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | @author: longxin 4 | @version: 1.0 5 | @date: 6 | @changeVersion: 7 | @changeAuthor: 8 | @description: execution of the partitioned VGG16 operations 9 | ''' 10 | class operation: 11 | 12 | def __init__(self, operation_id, generate_operation_model, input_shape,weights_model): 13 | import numpy as np 14 | 15 | self.operation_id = operation_id 16 | self.operation_model = generate_operation_model(input_shape) 17 | self.input_shape = input_shape 18 | 19 | 'load the weight' 20 | for layer in self.operation_model.layers: 21 | try: 22 | if weights_model.get_layer(layer.name) != None: 23 | layer.set_weights(weights_model.get_layer(name=layer.name).get_weights()) 24 | except Exception as e: 25 | print("cannot find the layer {0} in the vgg model and exception is {1}".format(layer.name, 26 | e)) 27 | pass 28 | if type(input_shape) == list: 29 | testdata = [] 30 | for tmp in input_shape: 31 | testdata.append([np.zeros(shape=tmp, dtype=np.float32)]) 32 | self.operation_model.predict(testdata) 33 | pass 34 | else: 35 | self.operation_model.predict(np.array([np.zeros(shape=input_shape, dtype=np.float32)])) 36 | 37 | def excute(self, input): 38 | import numpy as np 39 | 40 | x_input = input 41 | # if np.shape(input)[0] == self.input_shape[0]: 42 | # x_input = [input] 43 | if type(self.input_shape) != list: 44 | x_input = np.array(x_input) 45 | 46 | if type(self.input_shape) == list: 47 | input_data = [] 48 |
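# Clarifying note (added): when self.input_shape is a list, this operation
# fuses the outputs of several predecessor tasks -- `input` is expected to be
# a list with one array per declared input shape, and the entries are gathered
# into `input_data` for a single multi-input predict() call. When
# self.input_shape is a plain tuple, the branch above only converts the
# payload to an ndarray via np.array(); the batch dimension is expected to be
# supplied by the caller.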
print("the raw shape of the input is {0} of operation {1}".format(np.shape(input), 49 | self.operation_id)) 50 | for i in range(len(self.input_shape)): 51 | print("operation {0} the input shape is {1}".format(self.operation_id, 52 | np.shape(input[i]))) 53 | # x_input.append(np.array(input[i])) 54 | input_data.append(input[i]) 55 | print("operation {0} the input shape is {1}".format(self.operation_id, 56 | np.shape(input_data[i]))) 57 | 58 | print("the operation {0} input shape is:{1}".format(self.operation_id, np.shape(x_input))) 59 | embedding = self.operation_model.predict(input_data) 60 | return embedding 61 | print("the operation {0} input shape is:{1}".format(self.operation_id, np.shape(x_input))) 62 | embedding = self.operation_model.predict(x_input) 63 | print("the operation {0} output shape is {1}".format(self.operation_id, np.shape(embedding))) 64 | 65 | return embedding 66 | pass 67 | 68 | class excuteVgg16: 69 | 70 | def __func0__(self, input_shape): 71 | from keras.models import Model 72 | from keras.layers import Flatten 73 | from keras.layers import Dense 74 | from keras.layers import Input 75 | from keras.layers import Conv2D 76 | from keras.layers import MaxPooling2D 77 | from keras.utils.data_utils import get_file 78 | 79 | img_input = Input(shape=input_shape) 80 | 81 | # Block 1 82 | x = Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv1')(img_input) 83 | x = Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv2')(x) 84 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block1_pool')(x) 85 | 86 | model = Model(inputs=img_input, outputs=x) 87 | return model 88 | 89 | def __func1__(self, input_shape): 90 | from keras.layers import Flatten 91 | from keras.layers import Dense 92 | from keras.layers import Input 93 | from keras.layers import Conv2D 94 | from keras.layers import MaxPooling2D 95 | from keras.utils.data_utils import get_file 96 | from keras.models import Model 97 | 98 | 99 | input = Input(shape=input_shape) 100 | # Block 2 101 | x = Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv1')(input) 102 | x = Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv2')(x) 103 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block2_pool')(x) 104 | 105 | model = Model(inputs=input, outputs=x) 106 | return model 107 | 108 | def __func2__(self, input_shape): 109 | from keras.layers import Flatten 110 | from keras.layers import Dense 111 | from keras.layers import Input 112 | from keras.layers import Conv2D 113 | from keras.layers import MaxPooling2D 114 | from keras.utils.data_utils import get_file 115 | from keras.models import Model 116 | 117 | input = Input(shape=input_shape) 118 | # Block 3 119 | x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv1')(input) 120 | x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv2')(x) 121 | x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv3')(x) 122 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block3_pool')(x) 123 | 124 | model = Model(inputs=input, outputs=x) 125 | return model 126 | 127 | def __func3__(self, input_shape): 128 | from keras.layers import Flatten 129 | from keras.layers import Dense 130 | from keras.layers import Input 131 | from keras.layers import Conv2D 132 | from keras.layers import MaxPooling2D 133 | from keras.utils.data_utils import get_file 134 | from keras.models import Model 135 | 136 | input = Input(shape=input_shape) 137 | # Block 4 138 | x = 
Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv1')(input) 139 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv2')(x) 140 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv3')(x) 141 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block4_pool')(x) 142 | 143 | model = Model(inputs=input, outputs=x) 144 | return model 145 | 146 | def __func4__(self, input_shape): 147 | from keras.layers import Flatten 148 | from keras.layers import Dense 149 | from keras.layers import Input 150 | from keras.layers import Conv2D 151 | from keras.layers import MaxPooling2D 152 | from keras.utils.data_utils import get_file 153 | from keras.models import Model 154 | 155 | input = Input(shape=input_shape) 156 | # Block 5 157 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv1')(input) 158 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv2')(x) 159 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv3')(x) 160 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block5_pool')(x) 161 | model = Model(inputs=input, outputs=x) 162 | return model 163 | 164 | def __func5__(self, input_shape): 165 | from keras.layers import Flatten 166 | from keras.layers import Dense 167 | from keras.layers import Input 168 | from keras.layers import Conv2D 169 | from keras.layers import MaxPooling2D 170 | from keras.utils.data_utils import get_file 171 | from keras.models import Model 172 | 173 | input = Input(shape=input_shape) 174 | # Classification block 175 | x = Flatten(name='flatten')(input) 176 | x = Dense(4096, activation='relu', name='fc1')(x) 177 | x = Dense(4096, activation='relu', name='fc2')(x) 178 | x = Dense(1000, activation='softmax', name='predictions')(x) 179 | 180 | model = Model(inputs=input, outputs=x) 181 | return model 182 | 183 | 184 | def __init__(self): 185 | from Executer.vgg16 import vgg16 186 | self.operations = [] 187 | weights_model = vgg16(input_shape=(224,224, 3), 188 | classes=1000).model 189 | 190 | operation0 = operation(0, self.__func0__, (224, 224, 3), weights_model) 191 | operation1 = operation(1, self.__func1__, (112, 112, 64), weights_model) 192 | operation2 = operation(2, self.__func2__, (56, 56, 128), weights_model) 193 | operation3 = operation(3, self.__func3__, (28, 28, 256), weights_model) 194 | operation4 = operation(4, self.__func4__, (14, 14, 512), weights_model) 195 | operation5 = operation(5, self.__func5__, (7, 7, 512), weights_model) 196 | 197 | self.operations.append(operation0) 198 | self.operations.append(operation1) 199 | self.operations.append(operation2) 200 | self.operations.append(operation3) 201 | self.operations.append(operation4) 202 | self.operations.append(operation5) 203 | 204 | 205 | def excute(self, operationid, inputdata): 206 | return self.operations[operationid].excute(inputdata) 207 | -------------------------------------------------------------------------------- /cloud/Executer/excuter.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | @author: longxin 4 | @version: 1.0 5 | @date: 6 | @changeVersion: 7 | @changeAuthor: 8 | @description: 9 | ''' 10 | 11 | class operation: 12 | 13 | def __init__(self, operationid, operationfuction): 14 | self.operationid = operationid 15 | self.operationfunction = operationfuction 16 | 17 | def checkid(self, operationid): 18 | if self.operationid == operationid: 19 | return True 20 | else: 21 | 
return False 22 | 23 | def getid(self): 24 | return self.operationid 25 | 26 | def excute(self, inputdata): 27 | return self.operationfunction(inputdata) 28 | 29 | 30 | class ExecuteAgent: 31 | 32 | # 定义函数 33 | def __func0__(self, input): 34 | tmp = input[0] + 1 35 | return tmp 36 | 37 | def __func1__(self, input): 38 | tmp = input[0] - 1 39 | return tmp 40 | 41 | def __func2__(self, input): 42 | tmp = input[0] * 2 43 | return tmp 44 | 45 | def __func3__(self, input): 46 | tmp = input[0] * input[1] 47 | return tmp 48 | 49 | def __func4__(self, input): 50 | tmp = input[0] * 0.5 51 | return tmp 52 | 53 | def __func5__(self, input): 54 | tmp = input[0] * input[1] 55 | return tmp 56 | 57 | def __func6__(self, input): 58 | tmp = input[0] + input[1] 59 | return tmp 60 | 61 | 62 | 63 | def __init__(self): 64 | self.operations = [] 65 | 66 | operation0 = operation(0, self.__func0__) 67 | operation1 = operation(1, self.__func1__) 68 | operation2 = operation(2, self.__func2__) 69 | operation3 = operation(3, self.__func3__) 70 | operation4 = operation(4, self.__func4__) 71 | operation5 = operation(5, self.__func5__) 72 | operation6 = operation(6, self.__func6__) 73 | 74 | 75 | self.operations.append(operation0) 76 | self.operations.append(operation1) 77 | self.operations.append(operation2) 78 | self.operations.append(operation3) 79 | self.operations.append(operation4) 80 | self.operations.append(operation5) 81 | self.operations.append(operation6) 82 | 83 | 84 | def excute(self, operationid, inputdata): 85 | # #检查是否有操作id 检查输入数据格式 86 | # if int(operationid) >= len(self.operations)-1 or operationid < 0: 87 | # return None 88 | # 89 | # if not isinstance(inputdata, list): 90 | # return None 91 | 92 | return self.operations[operationid].excute(inputdata) 93 | 94 | -------------------------------------------------------------------------------- /cloud/Executer/utils.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import tensorflow as tf 3 | from keras.layers import Conv2D, ZeroPadding2D, Activation 4 | from keras.layers.normalization import BatchNormalization 5 | from numpy import genfromtxt 6 | 7 | _FLOATX = 'float32' 8 | 9 | # def variable(value, dtype=_FLOATX, name=None): 10 | # v = tf.Variable(np.asarray(value, dtype=dtype), name=name) 11 | # _get_session().run(v.initializer) 12 | # return v 13 | 14 | def shape(x): 15 | return x.get_shape() 16 | 17 | def square(x): 18 | return tf.square(x) 19 | 20 | # def zeros(shape, dtype=_FLOATX, name=None): 21 | # return variable(np.zeros(shape), dtype, name) 22 | 23 | def concatenate(tensors, axis=-1): 24 | if axis < 0: 25 | axis = axis % len(tensors[0].get_shape()) 26 | return tf.concat(axis, tensors) 27 | 28 | def LRN2D(x): 29 | return tf.nn.lrn(x, alpha=1e-4, beta=0.75) 30 | 31 | def conv2d_bn( 32 | x, 33 | layer=None, 34 | cv1_out=None, 35 | cv1_filter=(1, 1), 36 | cv1_strides=(1, 1), 37 | cv2_out=None, 38 | cv2_filter=(3, 3), 39 | cv2_strides=(1, 1), 40 | padding=None, 41 | ): 42 | num = '' if cv2_out == None else '1' 43 | tensor = Conv2D(cv1_out, cv1_filter, strides=cv1_strides, name=layer+'_conv'+num)(x) 44 | tensor = BatchNormalization(axis=3, epsilon=0.00001, name=layer+'_bn'+num)(tensor) 45 | tensor = Activation('relu')(tensor) 46 | if padding == None: 47 | return tensor 48 | tensor = ZeroPadding2D(padding=padding)(tensor) 49 | if cv2_out == None: 50 | return tensor 51 | tensor = Conv2D(cv2_out, cv2_filter, strides=cv2_strides, name=layer+'_conv'+'2')(tensor) 52 | tensor = 
BatchNormalization(axis=3, epsilon=0.00001, name=layer+'_bn'+'2')(tensor) 53 | tensor = Activation('relu')(tensor) 54 | return tensor 55 | 56 | weights = [ 57 | 'conv1', 'bn1', 'conv2', 'bn2', 'conv3', 'bn3', 58 | 'inception_3a_1x1_conv', 'inception_3a_1x1_bn', 59 | 'inception_3a_pool_conv', 'inception_3a_pool_bn', 60 | 'inception_3a_5x5_conv1', 'inception_3a_5x5_conv2', 'inception_3a_5x5_bn1', 'inception_3a_5x5_bn2', 61 | 'inception_3a_3x3_conv1', 'inception_3a_3x3_conv2', 'inception_3a_3x3_bn1', 'inception_3a_3x3_bn2', 62 | 'inception_3b_3x3_conv1', 'inception_3b_3x3_conv2', 'inception_3b_3x3_bn1', 'inception_3b_3x3_bn2', 63 | 'inception_3b_5x5_conv1', 'inception_3b_5x5_conv2', 'inception_3b_5x5_bn1', 'inception_3b_5x5_bn2', 64 | 'inception_3b_pool_conv', 'inception_3b_pool_bn', 65 | 'inception_3b_1x1_conv', 'inception_3b_1x1_bn', 66 | 'inception_3c_3x3_conv1', 'inception_3c_3x3_conv2', 'inception_3c_3x3_bn1', 'inception_3c_3x3_bn2', 67 | 'inception_3c_5x5_conv1', 'inception_3c_5x5_conv2', 'inception_3c_5x5_bn1', 'inception_3c_5x5_bn2', 68 | 'inception_4a_3x3_conv1', 'inception_4a_3x3_conv2', 'inception_4a_3x3_bn1', 'inception_4a_3x3_bn2', 69 | 'inception_4a_5x5_conv1', 'inception_4a_5x5_conv2', 'inception_4a_5x5_bn1', 'inception_4a_5x5_bn2', 70 | 'inception_4a_pool_conv', 'inception_4a_pool_bn', 71 | 'inception_4a_1x1_conv', 'inception_4a_1x1_bn', 72 | 'inception_4e_3x3_conv1', 'inception_4e_3x3_conv2', 'inception_4e_3x3_bn1', 'inception_4e_3x3_bn2', 73 | 'inception_4e_5x5_conv1', 'inception_4e_5x5_conv2', 'inception_4e_5x5_bn1', 'inception_4e_5x5_bn2', 74 | 'inception_5a_3x3_conv1', 'inception_5a_3x3_conv2', 'inception_5a_3x3_bn1', 'inception_5a_3x3_bn2', 75 | 'inception_5a_pool_conv', 'inception_5a_pool_bn', 76 | 'inception_5a_1x1_conv', 'inception_5a_1x1_bn', 77 | 'inception_5b_3x3_conv1', 'inception_5b_3x3_conv2', 'inception_5b_3x3_bn1', 'inception_5b_3x3_bn2', 78 | 'inception_5b_pool_conv', 'inception_5b_pool_bn', 79 | 'inception_5b_1x1_conv', 'inception_5b_1x1_bn', 80 | 'dense_layer' 81 | ] 82 | 83 | conv_shape = { 84 | 'conv1': [64, 3, 7, 7], 85 | 'conv2': [64, 64, 1, 1], 86 | 'conv3': [192, 64, 3, 3], 87 | 'inception_3a_1x1_conv': [64, 192, 1, 1], 88 | 'inception_3a_pool_conv': [32, 192, 1, 1], 89 | 'inception_3a_5x5_conv1': [16, 192, 1, 1], 90 | 'inception_3a_5x5_conv2': [32, 16, 5, 5], 91 | 'inception_3a_3x3_conv1': [96, 192, 1, 1], 92 | 'inception_3a_3x3_conv2': [128, 96, 3, 3], 93 | 'inception_3b_3x3_conv1': [96, 256, 1, 1], 94 | 'inception_3b_3x3_conv2': [128, 96, 3, 3], 95 | 'inception_3b_5x5_conv1': [32, 256, 1, 1], 96 | 'inception_3b_5x5_conv2': [64, 32, 5, 5], 97 | 'inception_3b_pool_conv': [64, 256, 1, 1], 98 | 'inception_3b_1x1_conv': [64, 256, 1, 1], 99 | 'inception_3c_3x3_conv1': [128, 320, 1, 1], 100 | 'inception_3c_3x3_conv2': [256, 128, 3, 3], 101 | 'inception_3c_5x5_conv1': [32, 320, 1, 1], 102 | 'inception_3c_5x5_conv2': [64, 32, 5, 5], 103 | 'inception_4a_3x3_conv1': [96, 640, 1, 1], 104 | 'inception_4a_3x3_conv2': [192, 96, 3, 3], 105 | 'inception_4a_5x5_conv1': [32, 640, 1, 1,], 106 | 'inception_4a_5x5_conv2': [64, 32, 5, 5], 107 | 'inception_4a_pool_conv': [128, 640, 1, 1], 108 | 'inception_4a_1x1_conv': [256, 640, 1, 1], 109 | 'inception_4e_3x3_conv1': [160, 640, 1, 1], 110 | 'inception_4e_3x3_conv2': [256, 160, 3, 3], 111 | 'inception_4e_5x5_conv1': [64, 640, 1, 1], 112 | 'inception_4e_5x5_conv2': [128, 64, 5, 5], 113 | 'inception_5a_3x3_conv1': [96, 1024, 1, 1], 114 | 'inception_5a_3x3_conv2': [384, 96, 3, 3], 115 | 'inception_5a_pool_conv': [96, 1024, 
1, 1], 116 | 'inception_5a_1x1_conv': [256, 1024, 1, 1], 117 | 'inception_5b_3x3_conv1': [96, 736, 1, 1], 118 | 'inception_5b_3x3_conv2': [384, 96, 3, 3], 119 | 'inception_5b_pool_conv': [96, 736, 1, 1], 120 | 'inception_5b_1x1_conv': [256, 736, 1, 1], 121 | } 122 | 123 | def load_weights_by_name(name): 124 | import os 125 | # Set weights path 126 | local_dir_path = r'C:\Users\derfei\Desktop\distribute cloud\disbributed-deep-learning-cloud\Executer' 127 | dirPath = os.path.join(local_dir_path, 'weights') 128 | fileNames = filter(lambda f: not f.startswith('.'), os.listdir(dirPath)) 129 | paths = {} 130 | weights_dict = {} 131 | 132 | for n in fileNames: 133 | paths[n.replace('.csv', '')] = dirPath + '/' + n 134 | 135 | 136 | if 'conv' in name: 137 | conv_w = genfromtxt(paths[name + '_w'], delimiter=',', dtype=None) 138 | conv_w = np.reshape(conv_w, conv_shape[name]) 139 | conv_w = np.transpose(conv_w, (2, 3, 1, 0)) 140 | conv_b = genfromtxt(paths[name + '_b'], delimiter=',', dtype=None) 141 | weights_dict[name] = [conv_w, conv_b] 142 | elif 'bn' in name: 143 | bn_w = genfromtxt(paths[name + '_w'], delimiter=',', dtype=None) 144 | bn_b = genfromtxt(paths[name + '_b'], delimiter=',', dtype=None) 145 | bn_m = genfromtxt(paths[name + '_m'], delimiter=',', dtype=None) 146 | bn_v = genfromtxt(paths[name + '_v'], delimiter=',', dtype=None) 147 | weights_dict[name] = [bn_w, bn_b, bn_m, bn_v] 148 | elif 'dense' in name: 149 | dense_w = genfromtxt(dirPath + '/dense_w.csv', delimiter=',', dtype=None) 150 | dense_w = np.reshape(dense_w, (128, 736)) 151 | dense_w = np.transpose(dense_w, (1, 0)) 152 | dense_b = genfromtxt(dirPath + '/dense_b.csv', delimiter=',', dtype=None) 153 | weights_dict[name] = [dense_w, dense_b] 154 | 155 | return weights_dict 156 | 157 | def load_weights(): 158 | import os 159 | # Set weights path 160 | local_dir_path = r'C:\Users\derfei\Desktop\distribute cloud\disbributed-deep-learning-cloud\Executer' 161 | dirPath = os.path.join(local_dir_path, 'weights') 162 | fileNames = filter(lambda f: not f.startswith('.'), os.listdir(dirPath)) 163 | paths = {} 164 | weights_dict = {} 165 | 166 | for n in fileNames: 167 | paths[n.replace('.csv', '')] = dirPath + '/' + n 168 | 169 | for name in weights: 170 | if 'conv' in name: 171 | conv_w = genfromtxt(paths[name + '_w'], delimiter=',', dtype=None) 172 | conv_w = np.reshape(conv_w, conv_shape[name]) 173 | conv_w = np.transpose(conv_w, (2, 3, 1, 0)) 174 | conv_b = genfromtxt(paths[name + '_b'], delimiter=',', dtype=None) 175 | weights_dict[name] = [conv_w, conv_b] 176 | elif 'bn' in name: 177 | bn_w = genfromtxt(paths[name + '_w'], delimiter=',', dtype=None) 178 | bn_b = genfromtxt(paths[name + '_b'], delimiter=',', dtype=None) 179 | bn_m = genfromtxt(paths[name + '_m'], delimiter=',', dtype=None) 180 | bn_v = genfromtxt(paths[name + '_v'], delimiter=',', dtype=None) 181 | weights_dict[name] = [bn_w, bn_b, bn_m, bn_v] 182 | elif 'dense' in name: 183 | dense_w = genfromtxt(dirPath+'/dense_w.csv', delimiter=',', dtype=None) 184 | dense_w = np.reshape(dense_w, (128, 736)) 185 | dense_w = np.transpose(dense_w, (1, 0)) 186 | dense_b = genfromtxt(dirPath+'/dense_b.csv', delimiter=',', dtype=None) 187 | weights_dict[name] = [dense_w, dense_b] 188 | 189 | return weights_dict 190 | 191 | -------------------------------------------------------------------------------- /cloud/Executer/vgg16.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | @author: longxin 4 | @version: 1.0 5 
| @date: 6 | @changeVersion: 7 | @changeAuthor: 8 | @description: 9 | ''' 10 | 11 | class utils_vgg16: 12 | 13 | @classmethod 14 | def load_weight(cls, vgg_model, model): 15 | 16 | for layer in model.layers: 17 | try: 18 | if vgg_model.get_layer(layer.name) != None: 19 | layer.set_weights(vgg_model.get_layer(name=layer.name).get_weights()) 20 | except Exception as e: 21 | print("cannot find the layer {0} in the vgg model and exception is {1}".format(layer.name, 22 | e)) 23 | pass 24 | 25 | class vgg16: 26 | 27 | def __init__(self, input_shape, classes): 28 | from keras.models import Model 29 | from keras.layers import Flatten 30 | from keras.layers import Dense 31 | from keras.layers import Input 32 | from keras.layers import Conv2D 33 | from keras.layers import MaxPooling2D 34 | from keras.utils.data_utils import get_file 35 | 36 | WEIGHTS_PATH = 'https://github.com/fchollet/deep-learning-models/releases/download/v0.1/vgg16_weights_tf_dim_ordering_tf_kernels.h5' 37 | 38 | img_input = Input(shape=input_shape) 39 | 40 | # Block 1 41 | x = Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv1')(img_input) 42 | x = Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv2')(x) 43 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block1_pool')(x) 44 | 45 | # Block 2 46 | x = Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv1')(x) 47 | x = Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv2')(x) 48 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block2_pool')(x) 49 | 50 | # Block 3 51 | x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv1')(x) 52 | x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv2')(x) 53 | x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv3')(x) 54 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block3_pool')(x) 55 | 56 | # Block 4 57 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv1')(x) 58 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv2')(x) 59 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv3')(x) 60 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block4_pool')(x) 61 | 62 | # Block 5 63 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv1')(x) 64 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv2')(x) 65 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv3')(x) 66 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block5_pool')(x) 67 | 68 | # Classification block 69 | x = Flatten(name='flatten')(x) 70 | x = Dense(4096, activation='relu', name='fc1')(x) 71 | x = Dense(4096, activation='relu', name='fc2')(x) 72 | x = Dense(classes, activation='softmax', name='predictions')(x) 73 | 74 | self.model = Model(inputs=img_input, output=x, name='vgg16') 75 | 76 | 'load model weights' 77 | weights_path = get_file('vgg16_weights_tf_dim_ordering_tf_kernels.h5', 78 | WEIGHTS_PATH, 79 | cache_subdir='models') 80 | self.model.load_weights(weights_path) 81 | 82 | 83 | def plot_model(self): 84 | from keras.utils import plot_model 85 | 86 | plot_model(model=self.model, to_file='modelvgg16.png', show_shapes=True, show_layer_names=True) 87 | 88 | if __name__ == "__main__": 89 | import numpy as np 90 | from keras.models import Model 91 | from keras.layers import Flatten 92 | from keras.layers import Dense 93 | from keras.layers import Input 94 | from keras.layers 
import Conv2D 95 | from keras.layers import MaxPooling2D 96 | from keras.utils.data_utils import get_file 97 | from keras.preprocessing import image 98 | from keras.applications.imagenet_utils import decode_predictions 99 | from keras.applications.imagenet_utils import preprocess_input 100 | 101 | img_input = Input(shape=(224, 224, 3)) 102 | 103 | # Block 1 104 | x = Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv1')(img_input) 105 | x = Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv2')(x) 106 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block1_pool')(x) 107 | 108 | # Block 2 109 | x = Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv1')(x) 110 | x = Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv2')(x) 111 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block2_pool')(x) 112 | 113 | # Block 3 114 | x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv1')(x) 115 | x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv2')(x) 116 | x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv3')(x) 117 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block3_pool')(x) 118 | 119 | # Block 4 120 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv1')(x) 121 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv2')(x) 122 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv3')(x) 123 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block4_pool')(x) 124 | 125 | # Block 5 126 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv1')(x) 127 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv2')(x) 128 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv3')(x) 129 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block5_pool')(x) 130 | 131 | # Classification block 132 | x = Flatten(name='flatten')(x) 133 | x = Dense(4096, activation='relu', name='fc1')(x) 134 | x = Dense(4096, activation='relu', name='fc2')(x) 135 | x = Dense(1000, activation='softmax', name='predictions')(x) 136 | 137 | model = Model(inputs=img_input, outputs=x) 138 | vgg16model = vgg16(input_shape=(224, 224, 3), classes=1000) 139 | utils_vgg16.load_weight(vgg16model.model, model) 140 | 141 | # 'try to predict' 142 | # input_data = np.array([np.zeros(shape=(224, 224, 3))]) 143 | # output = model.predict(input_data) 144 | # 145 | # print("the output is ", output) 146 | 147 | img_path = 'elephant.jpg' 148 | img = image.load_img(img_path, target_size=(224, 224)) 149 | x = image.img_to_array(img) 150 | x = np.expand_dims(x, axis=0) 151 | x = preprocess_input(x) 152 | print('Input image shape:', x.shape) 153 | 154 | preds = model.predict(x) 155 | print('Predicted:', decode_predictions(preds)) 156 | preds_resnet50 = vgg16model.model.predict(x) 157 | print('Predicted:', decode_predictions(preds_resnet50)) 158 | 159 | 160 | 161 | 162 | 163 | 164 | -------------------------------------------------------------------------------- /cloud/README.md: -------------------------------------------------------------------------------- 1 | # distributeed-deep-learning/edge 2 | 3 | ## 分布式神经网络的Edge服务器项目 4 | 该项目还有其他的三个兄弟项目 分别是 IoT Android Remote Cloud 分别运行于 5 | 其他的三种物理设备上面 -------------------------------------------------------------------------------- /cloud/model/detectmodel.py: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/Derfei/task-merging/0002b5b0c62bc4c4cc8f754474d9c750ccf026e4/cloud/model/detectmodel.py -------------------------------------------------------------------------------- /cloud/model/models.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | @author: longxin 4 | @version: 1.0 5 | @date: 6 | @changeVersion: 7 | @changeAuthor: 8 | @description: 所有任务实体 9 | ''' 10 | import json 11 | class MyEncoder(json.JSONEncoder): 12 | 13 | def default(self, o): 14 | import numpy 15 | if isinstance(o, bytes): 16 | return str(o, encoding='utf-8') 17 | if isinstance(o, numpy.ndarray): 18 | return o.tolist() 19 | 20 | return json.JSONEncoder.default(o) 21 | 22 | class task: 23 | 24 | def __init__(self, requestdeviceid, applicationid, offloadingpolicyid, taskid, operationid, inputdata, formertasklist, 25 | nexttasklist, timecostlist): 26 | self.requestdevicdid = requestdeviceid 27 | self.applicationid = applicationid 28 | self.offloadingpolicyid = offloadingpolicyid 29 | self.taskid = taskid 30 | self.operationid = operationid 31 | self.inputdata = inputdata 32 | self.formertasklist = formertasklist 33 | self.nexttasklist = nexttasklist 34 | self.timecostlist = timecostlist 35 | 36 | @classmethod 37 | def initfromdict(cls, taskdict): 38 | tmptask = task(taskdict['requestdeviceid'], taskdict['applicationid'], taskdict['offloadingpolicyid'], taskdict['taskid'], 39 | taskdict['operationid'], taskdict['inputdata'], taskdict['formertasklist'], taskdict['nexttasklist'], taskdict['timecostlist']) 40 | return tmptask 41 | 42 | @classmethod 43 | def initfromstring(cls, taskstring): 44 | import json 45 | taskdict = json.loads(taskstring) 46 | return task.initfromdict(taskdict=taskdict) 47 | 48 | def todict(self): 49 | tmpdict = {} 50 | tmpdict['requestdeviceid'] = self.requestdevicdid 51 | tmpdict['applicationid'] = self.applicationid 52 | tmpdict['offloadingpolicyid'] = self.offloadingpolicyid 53 | tmpdict['taskid'] = self.taskid 54 | tmpdict['operationid'] = self.operationid 55 | tmpdict['inputdata'] = self.inputdata 56 | tmpdict['formertasklist'] = self.formertasklist 57 | tmpdict['nexttasklist'] = self.nexttasklist 58 | tmpdict['timecostlist'] = self.timecostlist 59 | return tmpdict 60 | 61 | def tostring(self): 62 | import json 63 | tmpdict = self.todict() 64 | return json.dumps(tmpdict, ensure_ascii=True, cls=MyEncoder).encode() 65 | 66 | class msg: 67 | ''' 68 | requestdeviceid: 代表的是发送信息的设备编号 69 | senddeviceid: 发送的目标设备的id 70 | ''' 71 | 72 | def __init__(self, requestdeviceid, senddeviceid, sendtime, sendmsgtype, sendmsgcontent): 73 | self.requestdeviceid = requestdeviceid 74 | self.senddeviceid = senddeviceid 75 | self.sendtime = sendtime 76 | self.sendmsgtype = sendmsgtype 77 | self.sendmsgcontent = sendmsgcontent 78 | 79 | @classmethod 80 | def initfromdict(cls, msgdict): 81 | tmpmsg = msg(msgdict['requestdeviceid'], msgdict['senddeviceid'], msgdict['sendtime'], msgdict['sendmsgtype'], 82 | msgdict['sendmsgcontent']) 83 | return tmpmsg 84 | 85 | @classmethod 86 | def initfromstring(cls, msgstring): 87 | import json 88 | msgdict = json.loads(msgstring) 89 | return msg.initfromdict(msgdict) 90 | 91 | def todict(self): 92 | msgdict = {} 93 | msgdict['requestdeviceid'] = self.requestdeviceid 94 | msgdict['senddeviceid'] = self.senddeviceid 95 | msgdict['sendtime'] = self.sendtime 96 | msgdict['sendmsgtype'] = self.sendmsgtype 97 | 
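# Clarifying note (added): sendmsgcontent usually carries a nested dict
# produced by task.todict(), application.todict() or offloadingPolicy.todict();
# any bytes or numpy arrays inside it are converted by MyEncoder when
# tostring() serializes the message for the HTTP request.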
msgdict['sendmsgcontent'] = self.sendmsgcontent 98 | 99 | return msgdict 100 | 101 | 102 | def tostring(self): 103 | import json 104 | tmpdict = self.todict() 105 | return json.dumps(tmpdict, ensure_ascii=True, cls=MyEncoder).encode() 106 | 107 | 108 | class offloadingPolicy: 109 | 110 | def __init__(self, offloadingpolicyid, requestdeviceid, applicationid, taskid, excutedeviceid): 111 | self.offloadingpolicyid = offloadingpolicyid 112 | self.requestdeviceid = requestdeviceid 113 | self.applicationid = applicationid 114 | self.taskid = taskid 115 | self.excutedeviceid = excutedeviceid 116 | 117 | @classmethod 118 | def initfromdict(cls, offloadingpolicydict): 119 | tmpoffloadingpolicy = offloadingPolicy(offloadingpolicydict['offloadingpolicyid'], offloadingpolicydict['requestdeviceid'], 120 | offloadingpolicydict['applicationid'], offloadingpolicydict['taskid'],offloadingpolicydict['excutedeviceid']) 121 | return tmpoffloadingpolicy 122 | 123 | @classmethod 124 | def initfromstring(cls, offloadingpolicystring): 125 | import json 126 | tmpdict = json.loads(offloadingpolicystring) 127 | return offloadingPolicy.initfromdict(tmpdict) 128 | 129 | def todict(self): 130 | tmpdict = {} 131 | tmpdict['offloadingpolicyid'] = self.offloadingpolicyid 132 | tmpdict['requestdeviceid'] = self.requestdeviceid 133 | tmpdict['applicationid'] = self.applicationid 134 | tmpdict['taskid'] = self.taskid 135 | tmpdict['excutedeviceid'] = self.excutedeviceid 136 | 137 | return tmpdict 138 | 139 | def tostring(self): 140 | import json 141 | tmpdict = self.todict() 142 | return json.dumps(tmpdict, cls=MyEncoder, ensure_ascii=True).encode() 143 | 144 | 145 | class application: 146 | 147 | def __init__(self, requestdeviceid, applicationid, taskidlist, formertasklist, nexttasklist, operationlist): 148 | self.requestdeviceid = requestdeviceid 149 | self.applicationid = applicationid 150 | self.taskidlist = taskidlist 151 | self.formertasklist = formertasklist 152 | self.nexttasklist = nexttasklist 153 | self.operationlist = operationlist 154 | 155 | @classmethod 156 | def initfromdict(cls, applicationdict): 157 | tmpapplication = application(applicationdict['requestdeviceid'], applicationdict['applicationid'], applicationdict['taskidlist'], 158 | applicationdict['formertasklist'], applicationdict['nexttasklist'], applicationdict['operationidlist']) 159 | return tmpapplication 160 | 161 | @classmethod 162 | def initfromstring(cls, applicationstring): 163 | import json 164 | tmpdict = json.loads(applicationstring) 165 | return application.initfromdict(tmpdict) 166 | 167 | @classmethod 168 | def initfromString(cls, applicationstringlines): 169 | # 将文本中的内转换为application对象 170 | firstline = applicationstringlines[0] 171 | requestdeviceid = firstline.split()[0] 172 | applicationid = firstline.split()[1] 173 | taskidlist = [] 174 | formertasklist = [] 175 | nexttasklist = [] 176 | operationidlist = [] 177 | for line in applicationstringlines: 178 | taskidlist.append(int(line.split()[2])) 179 | formertasklist.append([int(tmp) for tmp in line.split()[3].split(',')]) 180 | nexttasklist.append([int(tmp) for tmp in line.split()[4].split(',')]) 181 | operationidlist.append(int(line.split()[5])) 182 | return application(requestdeviceid, applicationid, taskidlist, formertasklist, 183 | nexttasklist, operationidlist) 184 | 185 | 186 | def todict(self): 187 | tmpdict = {} 188 | tmpdict['requestdeviceid'] = self.requestdeviceid 189 | tmpdict['applicationid'] = self.applicationid 190 | tmpdict['taskidlist'] = self.taskidlist 191 | 
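# Clarifying note (added): formertasklist and nexttasklist are parallel to
# taskidlist -- entry i holds the predecessor / successor task ids of
# taskidlist[i]. Note that the dict key written below is 'operationidlist'
# while the attribute is named operationlist; initfromdict() reads the
# 'operationidlist' key, so the two spellings must stay in sync.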
tmpdict['formertasklist'] = self.formertasklist 192 | tmpdict['nexttasklist'] = self.nexttasklist 193 | tmpdict['operationidlist'] = self.operationlist 194 | 195 | return tmpdict 196 | 197 | def tostring(self): 198 | import json 199 | tmpdict = self.todict() 200 | return json.dumps(tmpdict, cls=MyEncoder, ensure_ascii=True).encode() 201 | 202 | 203 | class networkinfo: 204 | 205 | def __init__(self, deviceid, devicetype, ip, port): 206 | self.deviceid = deviceid 207 | self.devicetype = devicetype 208 | self.ip = ip 209 | self.port = port 210 | 211 | @classmethod 212 | def initfromdict(cls, networkinfodict): 213 | tmpnetworkinfo = networkinfo(networkinfodict['deviceid'], networkinfodict['devicetype'], 214 | networkinfodict['ip'], networkinfodict['port']) 215 | return tmpnetworkinfo 216 | 217 | @classmethod 218 | def initfromstring(cls, networkinfostring): 219 | import json 220 | tmpnetworkinfodict = json.loads(networkinfostring) 221 | return networkinfo.initfromdict(tmpnetworkinfodict) 222 | 223 | @classmethod 224 | def initfromString(cls, networkinfoString): 225 | content = networkinfoString.split() 226 | # print("When init from String the networkinfo, the len of the content is:", len(content), content) 227 | tmpnetworkinfo = networkinfo(content[0], content[1], content[2], content[3]) 228 | return tmpnetworkinfo 229 | 230 | def todict(self): 231 | tmpdict = {} 232 | tmpdict['deviceid'] = self.deviceid 233 | tmpdict['devicetype'] = self.devicetype 234 | tmpdict['ip'] = self.ip 235 | tmpdict['port'] = self.port 236 | return tmpdict 237 | 238 | def toString(self): 239 | tmpdict = self.todict() 240 | return str(tmpdict) 241 | 242 | def tostring(self): 243 | import json 244 | tmpdict = self.todict() 245 | return json.dumps(tmpdict, cls=MyEncoder, ensure_ascii=True).encode() 246 | 247 | 248 | 249 | 250 | 251 | 252 | -------------------------------------------------------------------------------- /cloud/model/record.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | @author: longxin 4 | @version: 1.0 5 | @date: 6 | @changeVersion: 7 | @changeAuthor: 8 | @description: 写入和读取离线文件 9 | ''' 10 | recordbasedir = r"C:\Users\derfei\Desktop\distribute cloud\disbributed-deep-learning-cloud\model\files" 11 | from model.models import networkinfo 12 | from model.models import * 13 | def writeoffloadingpolicy(requestdeviceid, applicationid, offloadingpolicyid, offloadingpolicy): 14 | ''' 15 | offloadingpolicy 离线保存格式为: 16 | offloaindpolicy_requestdeviceid_applicationid_offloadingpolicyid 17 | offloading: 格式为: 18 | offloadingpolicyid requestdeviceid applicationid, executedeviceid 19 | :param requestdeviceid: 20 | :param applicationid: 21 | :param offloadingpolicyid: 22 | :param offloadingpolicy: 23 | :return: 24 | ''' 25 | import os 26 | filepath = os.path.join(recordbasedir, 'offloadingpolicy_'+str(requestdeviceid)+"_"+str(applicationid)+"_"+str(offloadingpolicyid)+".txt") 27 | 28 | # 写入文件 覆盖式 29 | with open(filepath, "w+") as file: 30 | for policy in offloadingpolicy: 31 | line = "{0}\t{1}\t{2}\t{3}\t{4}\n".format(offloadingpolicyid, requestdeviceid, applicationid, policy['taskid'], policy['excutedeviceid']) 32 | file.write(line) 33 | 34 | def writenetworkinfo(networkinfo_list): 35 | ''' 36 | 将传回的networkinfolist 数据写入文件当中 37 | :param networkinfo_list: 38 | :return: 39 | ''' 40 | import os 41 | import json 42 | filepath = os.path.join(recordbasedir, "network.txt") 43 | 44 | with open(filepath, "w+") as file: 45 | for networkinfo in 
networkinfo_list: 46 | if not isinstance(networkinfo, dict): 47 | networkinfo = json.loads(networkinfo) 48 | line = "{0}\t{1}\t{2}\t{3}\n".format(networkinfo['deviceid'], networkinfo['devicetype'], 49 | networkinfo['ip'], networkinfo['port']) 50 | file.write(line) 51 | 52 | 53 | def getnetworkinfo(deviceid): 54 | ''' 55 | 从离线网络中获取网络信息 56 | :param deviceid: 如果为-1则为获取全部的网络信息 否则为获取一个网络信息 57 | :return: [type: networkinfo] (type: ip, type: port) 58 | ''' 59 | import os 60 | filepath = os.path.join(recordbasedir, "network.txt") 61 | 62 | with open(filepath, "r+") as file: 63 | lines = file.readlines() 64 | networkinfolist = [] 65 | 66 | for line in lines: 67 | line = line.replace('\n', '') 68 | line = line.strip() 69 | if len(line) != 0: 70 | networkinfolist.append(networkinfo.initfromString(line).todict()) 71 | 72 | # find the deviceid and return the url and the port 73 | 74 | if int(deviceid) == -1: 75 | deviceiplist = [] 76 | deviceidlist = [] 77 | devicetypelist = [] 78 | deviceportlist = [] 79 | 80 | for device in networkinfolist: 81 | deviceidlist.append(device['deviceid']) 82 | deviceiplist.append(device['ip']) 83 | devicetypelist.append(device['devicetype']) 84 | deviceportlist.append(device['port']) 85 | 86 | 87 | devicelist = [networkinfo(deviceidlist[tmp], devicetypelist[tmp], deviceiplist[tmp], deviceportlist[tmp]) for tmp in range(0, len(deviceiplist))] 88 | 89 | return devicelist 90 | else: 91 | for device in networkinfolist: 92 | if int(device['deviceid']) == int(deviceid): 93 | return device['ip'], device['port'] 94 | return None, None 95 | 96 | 97 | def getapplicationinfo(taskid, requestdeviceid, applicationid): 98 | import os 99 | filepath = os.path.join(recordbasedir, "applicationinfo_"+str(requestdeviceid)+"_" 100 | +str(applicationid)+".txt") 101 | 102 | print("Begin to read the application file", filepath) 103 | # 获取应用信息 104 | try: 105 | with open(filepath, "r+") as file: 106 | lines = file.readlines() 107 | tmpapplication = application.initfromString(lines) 108 | 109 | # 查找相应的应用 110 | formertasklist = None 111 | nexttasklist = None 112 | operationid = None 113 | 114 | tmpapplicationdict = tmpapplication.todict() 115 | for i, tmptaskid in enumerate(tmpapplicationdict['taskidlist']): 116 | if int(tmptaskid) == int(taskid): 117 | formertasklist = tmpapplicationdict['formertasklist'][i] 118 | nexttasklist = tmpapplicationdict['nexttasklist'][i] 119 | operationid = tmpapplicationdict['operationidlist'][i] 120 | 121 | return formertasklist, nexttasklist, operationid 122 | 123 | return formertasklist, nexttasklist, operationid 124 | except Exception as e: 125 | return None, None, None 126 | 127 | def getapplicationdict(requestdeviceid, applicationid): 128 | import os 129 | filepath = os.path.join(recordbasedir, "applicationinfo_"+str(requestdeviceid)+"_"+ 130 | str(applicationid)+".txt") 131 | 132 | # 获取全部的应用信息 不存在应用为空的情况 133 | try: 134 | with open(filepath, "r+") as file: 135 | lines = file.readlines() 136 | 137 | tmpapplication = application.initfromString(lines) 138 | 139 | return tmpapplication.todict() 140 | except Exception as e: 141 | return None 142 | 143 | def writeapplication(tmpapplication): 144 | ''' 145 | 将应用直接写入文件当中 146 | :param tmpapplication: 147 | :return: 148 | ''' 149 | tmpapplicationdict = tmpapplication.todict() 150 | 151 | writeapplicationinfo(tmpapplicationdict['requestdeviceid'], tmpapplicationdict['applicationid'], tmpapplicationdict['taskidlist'], 152 | tmpapplicationdict['formertasklist'], tmpapplicationdict['nexttasklist'], 
tmpapplicationdict['operationidlist']) 153 | 154 | 155 | 156 | def writeapplicationinfo(requestdeviceid, applicationid, taskidlist, formertaskidlist, 157 | nexttasklist, operationidlist): 158 | ''' 159 | 写入应用信息 160 | :param requestdeviceid: 请求设备id 161 | :param applicationid: 应用id 162 | :param taskidlist: 任务id list 163 | :param formetaskidlist: the percessortask list 164 | :param nextdeviceidlist: the nextdevice list 165 | :param operationlist: the operation list 166 | :return: 167 | ''' 168 | import os 169 | filepath = os.path.join(recordbasedir, "applicationinfo_"+str(requestdeviceid)+"_" 170 | +str(applicationid)+".txt") 171 | 172 | 173 | with open(filepath, "w+") as file: 174 | for i in range(0, len(taskidlist)): 175 | line = "{0}\t{1}\t{2}\t{3}\t{4}\t{5}\n".format(requestdeviceid, applicationid, 176 | taskidlist[i], ','.join([str(tmp) for tmp in formertaskidlist[i]]), 177 | ','.join([str(tmp) for tmp in nexttasklist[i]]), str(operationidlist[i])) 178 | file.write(line) 179 | 180 | 181 | 182 | def getoffloadingpolicyinfo(taskid, requestdeviceid, applicationid, offloadingpolicyid): 183 | import os 184 | 185 | filepath = os.path.join(recordbasedir,"offloadingpolicy_"+str(requestdeviceid)+"_"+str(applicationid) 186 | +"_"+str(offloadingpolicyid)+".txt") 187 | 188 | try: 189 | with open(filepath, 'r+') as file: 190 | lines = file.readlines() 191 | 192 | if int(taskid) != -1: 193 | # 查找对应的task 194 | for line in lines: 195 | line = line.replace('\n', '') 196 | if int(line.split('\t')[3]) == int(taskid): 197 | return int(line.split('\t')[4]) 198 | else: 199 | # 获取全部的调度策略 200 | taskidlist = [] 201 | excuteddeviceidlist = [] 202 | 203 | for line in lines: 204 | line = line.replace('\n', '') 205 | 206 | taskidlist.append(line.split('\t')[3]) 207 | excuteddeviceidlist.append(line.split('\t')[4]) 208 | 209 | # 构建调度策略应用 210 | offloadingpolicylist = [] 211 | 212 | for i in range(0, len(taskidlist)): 213 | tmpoffloadingpolciy = offloadingPolicy(offloadingpolicyid, requestdeviceid, applicationid, taskidlist[i], 214 | excuteddeviceidlist[i]) 215 | offloadingpolicylist.append(tmpoffloadingpolciy) 216 | 217 | return offloadingpolicylist 218 | except Exception as e: 219 | return None 220 | 221 | def getformertaskinfo(taskid, requestdeviceid, applicationid, offloadingpolicyid): 222 | ''' 223 | 这里有错误 还需要知道是谁的任务idlist 224 | 获取前置任务的处理结果 225 | :param taskid: 需要查询任务id 226 | :param requestdeviceid: 应用请求设备id 227 | :param applicationid: 应用id号 228 | :return: 返回字典 229 | ''' 230 | import os 231 | import json 232 | import numpy as np 233 | formertaskfilepath = os.path.join(recordbasedir, 234 | "formertaskinfo_{0}_{1}_{2}_{3}.txt".format(taskid, requestdeviceid, applicationid, offloadingpolicyid)) 235 | try: 236 | with open(formertaskfilepath, 'r+') as file: 237 | taskdictlist = [] 238 | 239 | lines = file.readlines() 240 | for line in lines: 241 | line = line.replace('\n', '') 242 | # print("The line split len is ", len(line.split('\t'))) 243 | tmpdict = {} 244 | tmpdict['taskid'] = line.split('\t')[0] 245 | tmpdict['requestdeviceid'] = line.split('\t')[1] 246 | tmpdict['applicationid'] = line.split('\t')[2] 247 | tmpdict['offloadingpolicyid'] = line.split('\t')[3] 248 | tmpdict['formertaskid'] = line.split('\t')[4] 249 | # tmpdict['inputdata'] = list(line.split('\t')[5]) 250 | # print("The tmp inputdata is {0} and the format is {1}".format(json.loads(line.split('\t')[5]), type(json.loads(line.split('\t')[5])))) 251 | tmpdict['inputdata'] = json.loads(line.split('\t')[5]) 252 | tmpdict['timecost'] = 
json.loads(line.split('\t')[6]) 253 | 254 | 255 | taskdictlist.append(tmpdict) 256 | return taskdictlist 257 | except Exception as e: 258 | print("There is a exception happend, when get the formertaskinfo", e) 259 | return None 260 | 261 | def writeformertaskinfo(taskid, requestdeviceid, applicationid, offloadingpolicyid, taskdictlist): 262 | ''' 263 | 将前置任务的信息写入离线数据中 264 | :param taskid: 需要写入前置任务的任务id 265 | :param requestdeviceid: 提出应用的请求id 266 | :param applicationid: 应用id 267 | :param offloadingpolicyid: 迁移策略id 268 | :param taskdictlist: 任务字典列表上 269 | :return: 270 | ''' 271 | import os 272 | import numpy as np 273 | import json 274 | formertaskfilepath = os.path.join(recordbasedir, 275 | "formertaskinfo_{0}_{1}_{2}_{3}.txt".format(taskid, requestdeviceid, applicationid, 276 | offloadingpolicyid)) 277 | with open(formertaskfilepath, 'a+') as file: 278 | for tmp in taskdictlist: 279 | file.write("{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6}\n".format(taskid, requestdeviceid, applicationid,offloadingpolicyid, 280 | tmp['formertaskid'], json.dumps(tmp['inputdata']), json.dumps(tmp['timecost']))) 281 | -------------------------------------------------------------------------------- /cloud/network/client.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | @author: longxin 4 | @version: 1.0 5 | @date: 6 | @changeVersion: 7 | @changeAuthor: 8 | @description: 9 | ''' 10 | from model.record import * 11 | from network.server import printOut 12 | LOCAL_DEVICEID = 3 13 | def sendOffloadingpolicyRequest(requestdeviceid, applicationid, offloadingpolicyid): 14 | import json 15 | import requests 16 | import datetime 17 | ''' 18 | 发送获得迁移策略请求 获得结果 写入离线文件 19 | ''' 20 | # 根据请求设备 获取ip 和端口号 21 | requestDeviceIp, requestDevicePort = getnetworkinfo(requestdeviceid) 22 | if requestDeviceIp == None: 23 | sendNetworkinfoRequest() 24 | requestDeviceIp, requestDevicePort = getnetworkinfo(requestdeviceid) 25 | 26 | requestUrl = "http://{0}:{1}/getOffloadingPolicy".format(requestDeviceIp, requestDevicePort) 27 | 28 | # 发送请求 29 | tmpOffloadingPolicy = offloadingPolicy(offloadingpolicyid, requestdeviceid, applicationid, 30 | -1, -1) 31 | tmpMsg = msg(LOCAL_DEVICEID, requestdeviceid, datetime.datetime.now().__str__(), 32 | 'query', tmpOffloadingPolicy.todict()) 33 | rtnMsg = requests.post(url=requestUrl, data=tmpMsg.tostring()) 34 | 35 | #将信息写入离线文件 36 | # printOut("the rtnMsg is {0} 请求网络路径为 {1}".format(rtnMsg, requestUrl)) 37 | rtnData = json.loads(rtnMsg.text) 38 | writeoffloadingpolicy(requestdeviceid, applicationid, offloadingpolicyid, 39 | rtnData) 40 | 41 | def sendApplicationRequest(requestdeviceid, applicationid): 42 | import json 43 | import requests 44 | import datetime 45 | 46 | # 找到requestdeviceid 的 ip 和端口 47 | tmpdeviceip, tmpdeviceport = getnetworkinfo(requestdeviceid) 48 | 49 | if tmpdeviceip == None: 50 | sendNetworkinfoRequest() 51 | tmpdeviceip, tmpdeviceport = getnetworkinfo(requestdeviceid) 52 | 53 | requrl = "http://{0}:{1}/getApplicationInfo".format(tmpdeviceip, tmpdeviceport) 54 | 55 | tmpapplicationinfo = application(-1, applicationid, [], [], [], []) 56 | tmpmsg = msg(1, requestdeviceid, datetime.datetime.now().__str__(), 'qury', tmpapplicationinfo.todict()) 57 | # 发送请求 58 | req = requests.post(url=requrl, data=tmpmsg.tostring()) 59 | 60 | applicationdict = json.loads(req.text) 61 | 62 | writeapplicationinfo(requestdeviceid=applicationdict['requestdeviceid'], applicationid=applicationdict['applicationid'], 63 | 
taskidlist=applicationdict['taskidlist'], formertaskidlist=applicationdict['formertasklist'], 64 | nexttasklist=applicationdict['nexttasklist'], operationidlist=applicationdict['operationidlist'])# 写入文件 65 | 66 | 67 | 68 | def sendNetworkinfoRequest(): 69 | import requests 70 | import json 71 | 72 | try: 73 | requrl = "http://10.21.23.103:8000/getInternetInfo" 74 | 75 | req = requests.post(url=requrl) 76 | networkinfolist = json.loads(req) 77 | 78 | writenetworkinfo(networkinfolist) 79 | 80 | return True 81 | except Exception as e: 82 | printOut("写入网络信息返回结果出错") 83 | return False 84 | 85 | def SendTask(requestdeviceid, applicationid, offloadingpolicyid, 86 | nexttaskid, localdeviceid, newtask): 87 | import threading 88 | thSendTask = threading.Thread(target=sendTask, args=(requestdeviceid, applicationid, offloadingpolicyid, 89 | nexttaskid, localdeviceid, newtask)) 90 | thSendTask.run() 91 | def sendTask(requestdeviceid, applicationid, offloadingpolicyid, 92 | nexttaskid, localdeviceid, newtask): 93 | import json 94 | import requests 95 | import datetime 96 | 97 | objectdeviceid = -1 98 | 99 | # 根据调度信息获取执行设备 error !!!!! 100 | objectdeviceid = getoffloadingpolicyinfo(nexttaskid, requestdeviceid, applicationid, 101 | offloadingpolicyid) 102 | 103 | if objectdeviceid == None: 104 | sendOffloadingpolicyRequest(requestdeviceid, applicationid, offloadingpolicyid) # 请求调度信息 105 | objectdeviceid = getoffloadingpolicyinfo(nexttaskid, requestdeviceid, applicationid, 106 | offloadingpolicyid) 107 | 108 | # 获取网络信息 109 | tmpdeviceip, tmpdeviceport = getnetworkinfo(objectdeviceid) 110 | if tmpdeviceip == None: 111 | sendNetworkinfoRequest() 112 | tmpdeviceip, tmpdeviceport = getnetworkinfo(objectdeviceid) 113 | 114 | # 发送网络请求 115 | requlr = "http://{0}:{1}/dojob".format(tmpdeviceip, tmpdeviceport) 116 | tmpmsg = msg(localdeviceid, objectdeviceid, datetime.datetime.now().__str__(), 'dojob', newtask.todict()) 117 | 118 | requests.post(url=requlr, data=tmpmsg.tostring()) 119 | 120 | printOut("向{0}发送任务{1}成功".format(requlr, nexttaskid)) 121 | 122 | return requlr 123 | 124 | 125 | def sendFinal(objectdeviceid, localdeviceid, newtask): 126 | import json 127 | import requests 128 | import datetime 129 | 130 | # 获取网络信息 131 | tmpdeviceip, tmpdeviceport = getnetworkinfo(objectdeviceid) 132 | if tmpdeviceip == None: 133 | sendNetworkinfoRequest() 134 | tmpdeviceip, tmpdeviceport = getnetworkinfo(objectdeviceid) 135 | 136 | # 发送网络请求 137 | requlr = "http://{0}:{1}/getFinalResult".format(tmpdeviceip, tmpdeviceport) 138 | tmpmsg = msg(localdeviceid, objectdeviceid, datetime.datetime.now().__str__(), 'finalresult', newtask.todict()) 139 | 140 | requests.post(url=requlr, data=tmpmsg.tostring()) 141 | -------------------------------------------------------------------------------- /cloud/network/server.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | @author: longxin 4 | @version: 1.0 5 | @date: 6 | @changeVersion: 7 | @changeAuthor: 8 | @description: 服务器 9 | ''' 10 | from flask import Flask 11 | from flask import request 12 | from process.processor import * 13 | from model.record import * 14 | from Executer.executerDeepLearning import excuterDeepLearning 15 | # from Executer.excuter import ExecuteAgent 16 | # from flask.views import request 17 | from Executer.excuteVgg16 import excuteVgg16 18 | from Executer.excuteResnet50 import excuteResnet50 19 | from Executer.excuteVgg16boostVgg19 import excuteVgg16boostVgg19 20 | from 
Executer.excuteDistributedDeepLearning import excuteDistributedDeepLearningAgent 21 | app = Flask(__name__) 22 | localdeviceid = 3 23 | 24 | # set the excute agent for global 25 | print("Begin to load set the execute agent") 26 | # excuteagent = excuterDeepLearning() 27 | excuteagent = excuteDistributedDeepLearningAgent() 28 | # excuteagent = excuteVgg16() 29 | # excuteagent = excuteResnet50() 30 | # excuteagent = excuteVgg16boostVgg19() 31 | print("End to load set the execute agent") 32 | def printOut(msg): 33 | app.logger.info(msg) 34 | @app.route('/dojob', methods=['POST']) 35 | def dojob(): 36 | import json 37 | import datetime 38 | import time 39 | import numpy as np 40 | # 提取任务信息 41 | # app.logger.info("Do Job get data {0} \t the data content is {1}".format(request.get_data(), type(request.get_data()))) 42 | # app.logger.info("After change the data to string, the string is {0}".format(str(request.get_data()))) 43 | data = json.loads(request.get_data().decode(encoding='utf-8')) 44 | data = data['sendmsgcontent'] 45 | 46 | requestdeviceid = data['requestdeviceid'] 47 | applicationid = data['applicationid'] 48 | offloadingpolicyid = data['offloadingpolicyid'] 49 | taskid = data['taskid'] 50 | operationid = data['operationid'] 51 | inputdata = data['inputdata'] 52 | formertasklist = data['formertasklist'] 53 | nexttasklist = data['nexttasklist'] 54 | timecloselist = data['timecostlist'] 55 | 56 | # 应用信息中获取该任务的所有的前置任务 57 | actualformertasklist = gettaskFormertask(requestdeviceid, applicationid, taskid) 58 | # attention 任务结束时间这里需要进行重新设计 应该设计为任务结束的时间 59 | # 将任务写入前置任务中 60 | tmptaskdict = {} 61 | tmptaskdict['formertaskid'] = formertasklist[0] 62 | tmptaskdict['inputdata'] = inputdata 63 | tmptaskdict['timecost'] = timecloselist 64 | writeformertaskinfo(taskid=taskid, requestdeviceid=requestdeviceid, applicationid=applicationid, offloadingpolicyid=offloadingpolicyid, 65 | taskdictlist=[tmptaskdict]) 66 | # app.logger.info("Task {0} 写入前置任务 {1} 到离线文件成功".format(taskid, formertasklist)) 67 | 68 | # 确认前置任务数据已经全部完成 69 | if len(actualformertasklist) != 1: 70 | formertaskdictlist = getformertaskinfo(taskid, requestdeviceid, applicationid, offloadingpolicyid) 71 | # app.logger.info("该任务需要等待前置任务{0}完成,现在只有{1}完成".format(actualformertasklist, [tmpFormerTask['formertaskid'] for tmpFormerTask 72 | # in formertaskdictlist])) 73 | 74 | if len(formertaskdictlist) == len(actualformertasklist): # 任务已经全部完成 完成任务 75 | # 执行任务 76 | #excuteagent = ExecuteAgent() 77 | 78 | inputdatalist = [] # 整理输入数据按照任务id大小进行排序 79 | for i in range(len(formertaskdictlist)-1): 80 | for j in range(len(formertaskdictlist)-i-1): 81 | if int(formertaskdictlist[j]['formertaskid']) > int(formertaskdictlist[j+1]['formertaskid']): 82 | tmp = formertaskdictlist[j] 83 | formertaskdictlist[j] = formertaskdictlist[j+1] 84 | formertaskdictlist[j+1] = tmp 85 | 86 | for tmp in formertaskdictlist: 87 | # inputdatalist.append(tmp['inputdata'][0]) 88 | inputdatalist.append(tmp['inputdata']) 89 | 90 | # 合并任务完成时间 91 | tmpTimeCost = [tmpTime for tmpTime in timecloselist] 92 | for taskindex in range(len(timecloselist)): 93 | for tmpformertask in formertaskdictlist: 94 | if int(tmpformertask['timecost'][taskindex][0]) != 0: 95 | tmpTimeCost[taskindex] = tmpformertask['timecost'][taskindex] 96 | break 97 | timecloselist = tmpTimeCost 98 | timecloselist[int(taskid)][0] = time.time() 99 | print("operation id is: {0} and shape of input is {1}".format(operationid, np.shape(inputdatalist))) 100 | output = excuteagent.excute(operationid, inputdatalist) 101 | 
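# Clarifying note (added): timecostlist keeps one [start, end] pair per task
# of the application. The merge loop above copies pairs already filled in by
# predecessor tasks (the nested loops before it are a bubble sort by
# formertaskid; formertaskdictlist.sort(key=lambda d: int(d['formertaskid']))
# would be an equivalent one-liner). Index 0 of this task's pair was stamped
# with time.time() just before excute(), and index 1 is stamped on the next
# line, so the difference is this task's execution time on this device.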
timecloselist[int(taskid)][1] = time.time() 102 | # app.logger.info("任务{0}已经完成 nexttasklist 为: {1} 输出为 {2}".format(taskid, nexttasklist, np.shape(output))) 103 | 104 | # 判断是不是最后一个任务 105 | if len(nexttasklist) == 1 and int(nexttasklist[0]) == -1: 106 | tmpnewtask = produce_newtask(taskid, timecloselist, taskid, output, requestdeviceid, applicationid, 107 | offloadingpolicyid) 108 | sendFinal(requestdeviceid, localdeviceid, tmpnewtask) 109 | 110 | else: 111 | # 生成新的任务 112 | for tmp in nexttasklist: 113 | # app.logger.info("开始生成新的任务{0}".format(tmp)) 114 | tmpnewtask = produce_newtask(taskid, timecloselist, tmp, output, requestdeviceid, applicationid, 115 | offloadingpolicyid) 116 | # app.logger.info("生成新的任务为{0}".format(tmpnewtask.todict())) 117 | SendTask(requestdeviceid, applicationid, offloadingpolicyid, tmp, 118 | localdeviceid, tmpnewtask) # 发送任务到另外的服务器 119 | 120 | else: # 任务还没有全部完成 121 | # app.logger.info("任务{0}进入等待中".format(taskid)) 122 | pass 123 | else: # 任务已经全部完成 124 | formertaskdictlist = getformertaskinfo(taskid, requestdeviceid, applicationid, offloadingpolicyid) 125 | # 执行任务 126 | #excuteagent = ExecuteAgent() 127 | 128 | inputdatalist = [] # 整理输入数据按照任务id大小进行排序 129 | for i in range(len(formertaskdictlist) - 1): 130 | for j in range(len(formertaskdictlist) - i - 1): 131 | if int(formertaskdictlist[j]['formertaskid']) > int(formertaskdictlist[j + 1]['formertaskid']): 132 | tmp = formertaskdictlist[j] 133 | formertaskdictlist[j] = formertaskdictlist[j + 1] 134 | formertaskdictlist[j + 1] = tmp 135 | 136 | for tmp in formertaskdictlist: 137 | # inputdatalist.append(tmp['inputdata'][0]) 138 | inputdatalist.append(tmp['inputdata']) 139 | # 合并任务完成时间 140 | tmpTimeCost = [tmpTime for tmpTime in timecloselist] 141 | for taskindex in range(len(timecloselist)): 142 | for tmpformertask in formertaskdictlist: 143 | if int(tmpformertask['timecost'][taskindex][0]) != 0: 144 | tmpTimeCost[taskindex] = tmpformertask['timecost'][taskindex] 145 | break 146 | timecloselist = tmpTimeCost 147 | timecloselist[int(taskid)][0] = time.time() 148 | # app.logger.info("operation id is: {0}".format(operationid)) 149 | if len(formertaskdictlist) == 1: 150 | inputdatalist = inputdatalist[0] 151 | print("operation id is: {0} and shape of input is {1}".format(operationid, np.shape(inputdatalist))) 152 | output = excuteagent.excute(operationid, inputdatalist) 153 | timecloselist[int(taskid)][1] = time.time() 154 | # app.logger.info("任务{0}已经完成 nexttasklist 为: {1} 输出为 {2}".format(taskid, nexttasklist, np.shape(output))) 155 | 156 | # 判断是不是最后一个任务 157 | if len(nexttasklist) == 1 and int(nexttasklist[0]) == -1: 158 | tmpnewtask = produce_newtask(taskid, timecloselist, taskid, output, requestdeviceid, applicationid, 159 | offloadingpolicyid) 160 | sendFinal(requestdeviceid, localdeviceid, tmpnewtask) 161 | else: 162 | # 生成新的任务 163 | for tmp in nexttasklist: 164 | tmpnewtask = produce_newtask(taskid, timecloselist, tmp, output, requestdeviceid, applicationid, 165 | offloadingpolicyid) 166 | # 根据id获取应该执行的设备 167 | # 根据id获取应该执行的设备 168 | reqUrl = SendTask(requestdeviceid, applicationid, offloadingpolicyid, tmp, localdeviceid, 169 | tmpnewtask) # 发送任务到另外的服务器 170 | 171 | # app.logger.info("从 设备 {0} 发送任务 {1} 任务内容为 {2} 到设备{3} 执行完任务 {4}".format(localdeviceid, tmp, 172 | # tmpnewtask.todict(), reqUrl, 173 | # taskid)) 174 | 175 | return 'OK' 176 | 177 | 178 | 179 | @app.route('/getOffloadingPolicy', methods=['POST']) 180 | def getoffloadingpolicy(): 181 | import json 182 | # 从数据请求中获取 应用设备id 应用id 调度策略id 183 | tmpoffloadingpolicydict 
= json.loads(request.get_data().decode('utf-8')) 184 | tmpoffloadingpolicydict = tmpoffloadingpolicydict['sendmsgcontent'] 185 | applicationdeviceid = tmpoffloadingpolicydict['requestdeviceid'] 186 | applicationid = tmpoffloadingpolicydict['applicationid'] 187 | offloadingpolicyid = tmpoffloadingpolicydict['offloadingpolicyid'] 188 | 189 | # 从离线数据中获取迁移策略 190 | offloadingpolicylist = getoffloadingpolicyinfo(taskid=-1, requestdeviceid=applicationdeviceid, applicationid=applicationid, 191 | offloadingpolicyid=offloadingpolicyid) 192 | offloadingpolicylist = [tmp.todict() for tmp in offloadingpolicylist] 193 | 194 | # 返回offloading策略 195 | return json.dumps(offloadingpolicylist, cls=MyEncoder) 196 | 197 | 198 | @app.route('/getInternetInfo', methods=['POST']) 199 | def getinternetinfo(): 200 | import json 201 | # 从离线数据读取网络信息 202 | networkinfolist = getnetworkinfo(-1) 203 | 204 | # 返回信息 205 | networkinfolist = [tmp.todict() for tmp in networkinfolist] 206 | 207 | return json.dumps(networkinfolist, cls=MyEncoder) 208 | 209 | @app.route('/updateInternetInfo', methods=['POST']) 210 | def updateinternetinfo(): 211 | import json 212 | # 读取网络信息 213 | data = json.loads(request.get_data()) 214 | data = data['sendmsgcontent'] 215 | 216 | # 将网络信息写入到离线文件当中 217 | writenetworkinfo(data) 218 | 219 | return "更新成功" 220 | 221 | 222 | @app.route('/getApplicationInfo', methods=['POST']) 223 | def getApplicationInfo(): 224 | import json 225 | 226 | data = json.loads(request.get_data().decode(encoding='utf-8')) 227 | 228 | # 获取本设备的设备编号 229 | senddeviceid = data['senddeviceid'] 230 | 231 | # 获取需要获取的应用id 232 | tmpapplication = data['sendmsgcontent'] 233 | applicationid = tmpapplication['applicationid'] 234 | 235 | # 处理器进行处理 读取离线数据 转成json格式 进行发送 236 | applicationdict = getapplicationdict(senddeviceid, applicationid) 237 | 238 | applicationobject = application.initfromdict(applicationdict) 239 | 240 | return applicationobject.tostring() 241 | 242 | 243 | @app.route('/getFinalResult', methods=['POST']) 244 | def getFinalResult(): 245 | import json 246 | 247 | data = json.loads(request.get_data()) 248 | data = data['sendmsgcontent'] 249 | 250 | tmpapplicationid = data['applicationid'] 251 | tmprequestdeviceid = data['requestdeviceid'] 252 | tmpoffloadingpolicyid = data['offloadingpolicyid'] 253 | tmpinputdata = data['inputdata'] 254 | tmptimecostlist = data['timecostlist'] 255 | 256 | # app.logger.info("应用编号{0}\t请求设备号{1}\t调度号{2}\t返回结果为{3}\t时间花费为{4} 完成任务".format(tmpapplicationid, tmprequestdeviceid, 257 | # tmpoffloadingpolicyid, tmpinputdata,tmptimecostlist)) 258 | if __name__ == "__main__": 259 | print("Begin the app run") 260 | app.run(host='0.0.0.0', port=8002, debug=True, threaded=True) -------------------------------------------------------------------------------- /cloud/process/processor.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | @author: longxin 4 | @version: 1.0 5 | @date: 6 | @changeVersion: 7 | @changeAuthor: 8 | @description: 9 | ''' 10 | from model.models import * 11 | from model.record import * 12 | from network.client import * 13 | def processor_dojob(task): 14 | pass 15 | 16 | def processor_getoffloadingpolicy(): 17 | pass 18 | 19 | def processor_getinternetinfo(): 20 | pass 21 | 22 | def processor_updateinternetinfo(): 23 | pass 24 | 25 | def processor_getapplicationinfo(): 26 | pass 27 | 28 | def produce_newtask(thistaskid, thistimecostlist, newtaskid,outputdata, requestdeviceid, applicatonid, offloadingpolicyid): 29 | ''' 
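    Build the successor task once the current task has finished: the new task keeps the same
    request-device, application and offloading-policy ids, takes this task's output as its input,
    and looks up its operation id and next-task list from the cached application info (requesting
    the info from the application device first if no local copy exists).

    Illustrative call (the numeric ids are hypothetical; `output` and `timecostlist` stand for the
    finished task's result and its per-task time log):

        newtask = produce_newtask(thistaskid=0, thistimecostlist=timecostlist, newtaskid=1,
                                  outputdata=output, requestdeviceid=3, applicatonid=1,
                                  offloadingpolicyid=0)
        SendTask(3, 1, 0, 1, localdeviceid, newtask)  # forward to the device chosen by the policy
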
30 | 生成新的任务 31 | :param thistaskid: 已经完成的任务id 32 | :param outputdata: 完成任务输出的大小 33 | :param requestdeviceid: 应用请求设备的id 34 | :param applicatonid: 应用编号 35 | :param offloadingpolicyid: 迁移策略id 36 | :return: 37 | ''' 38 | tmprequestdeviceid = requestdeviceid # 请求设备id与上一个设备相同 39 | tmpapplicationid = applicatonid # 应用编号与上一个任务相同 40 | tmpoffloadingpolicyid = offloadingpolicyid # 调度策略与上一个任务相同 41 | tmptaskid = newtaskid 42 | 43 | # 通过查询应用信息获取该任务的操作编号 44 | tmpapplcation = getapplicationdict(requestdeviceid, applicatonid) 45 | 46 | if tmpapplcation == None: 47 | sendApplicationRequest(requestdeviceid, applicatonid) # 客户端发送应用请求信息 48 | print("由于应用信息不存在,向设备{0}发送请求应用{1}信息 更新设备信息".format(requestdeviceid, applicatonid)) 49 | tmpapplcation = getapplicationdict(requestdeviceid, applicatonid) 50 | 51 | tmpinputdata = outputdata 52 | 53 | tmpformertask = [thistaskid] 54 | 55 | 56 | # 根据应用信息获得nexttask operationid 57 | tmptaskidlist = tmpapplcation['taskidlist'] 58 | tmptaskidindex = 0 59 | for i in range(len(tmptaskidlist)): 60 | # print("Type of the tmptaskidlist is{0} Type of the tmptaskid is{1}".format(type(tmptaskidlist[i]), type(tmptaskid))) 61 | if int(tmptaskidlist[i]) == int(tmptaskid): 62 | tmptaskidindex = i 63 | break 64 | tmpnexttasklist = tmpapplcation['nexttasklist'][tmptaskidindex] 65 | tmpoperationid = tmpapplcation['operationidlist'][tmptaskidindex] 66 | 67 | tmptimecostlist = thistimecostlist 68 | 69 | tmptask = task(tmprequestdeviceid, tmpapplicationid, tmpoffloadingpolicyid, tmptaskid, tmpoperationid, tmpinputdata, 70 | tmpformertask, tmpnexttasklist, tmptimecostlist) 71 | 72 | return tmptask 73 | 74 | def gettaskFormertask(requestdeviceid, applicationid, taskid): 75 | ''' 76 | 获取特定任务的前置任务 77 | :param requestdeviceid: 应用请求设备编号 78 | :param applicationid: 应用编号 79 | :param taskid: 任务编号 80 | :return: 81 | ''' 82 | tmpapplication = getapplicationdict(requestdeviceid, applicationid) 83 | 84 | if tmpapplication == None: 85 | sendApplicationRequest(requestdeviceid, applicationid) 86 | tmpapplication = getapplicationdict(requestdeviceid, applicationid) 87 | 88 | tmptaskidlist = tmpapplication['taskidlist'] 89 | # tmptaskindex = lambda i: int(tmptaskidlist[i])==int(taskid) 90 | tmptaskindex = 0 91 | for i in range(len(tmptaskidlist)): 92 | if int(tmptaskidlist[i])==int(taskid): 93 | tmptaskindex = i 94 | break 95 | return tmpapplication['formertasklist'][tmptaskindex] -------------------------------------------------------------------------------- /cloud/utils.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | @author: longxin 4 | @version: 1.0 5 | @date: 6 | @changeVersion: 7 | @changeAuthor: 8 | @description: 9 | ''' 10 | def getRandomId(): 11 | import random 12 | return random.randint(0, 200000) -------------------------------------------------------------------------------- /edge/Executer/excuteVgg16.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | @author: longxin 4 | @version: 1.0 5 | @date: 6 | @changeVersion: 7 | @changeAuthor: 8 | @description: vgg16的执行 9 | ''' 10 | class operation: 11 | 12 | def __init__(self, operation_id, generate_operation_model, input_shape,weights_model): 13 | import numpy as np 14 | 15 | self.operation_id = operation_id 16 | self.operation_model = generate_operation_model(input_shape) 17 | self.input_shape = input_shape 18 | 19 | 'load the weight' 20 | for layer in self.operation_model.layers: 21 | try: 22 | if 
weights_model.get_layer(layer.name) != None: 23 | layer.set_weights(weights_model.get_layer(name=layer.name).get_weights()) 24 | except Exception as e: 25 | print("cannot find the layer {0} in the vgg model and exception is {1}".format(layer.name, 26 | e)) 27 | pass 28 | if type(input_shape) == list: 29 | testdata = [] 30 | for tmp in input_shape: 31 | testdata.append([np.zeros(shape=tmp, dtype=np.float32)]) 32 | self.operation_model.predict(testdata) 33 | pass 34 | else: 35 | self.operation_model.predict(np.array([np.zeros(shape=input_shape, dtype=np.float32)])) 36 | 37 | def excute(self, input): 38 | import numpy as np 39 | 40 | x_input = input 41 | # if np.shape(input)[0] == self.input_shape[0]: 42 | # x_input = [input] 43 | if type(self.input_shape) != list: 44 | x_input = np.array(x_input) 45 | 46 | if type(self.input_shape) == list: 47 | input_data = [] 48 | print("the raw shape of the input is {0} of operation {1}".format(np.shape(input), 49 | self.operation_id)) 50 | for i in range(len(self.input_shape)): 51 | print("operation {0} the input shape is {1}".format(self.operation_id, 52 | np.shape(input[i]))) 53 | # x_input.append(np.array(input[i])) 54 | input_data.append(input[i]) 55 | print("operation {0} the input shape is {1}".format(self.operation_id, 56 | np.shape(input_data[i]))) 57 | 58 | print("the operation {0} input shape is:{1}".format(self.operation_id, np.shape(x_input))) 59 | embedding = self.operation_model.predict(input_data) 60 | return embedding 61 | print("the operation {0} input shape is:{1}".format(self.operation_id, np.shape(x_input))) 62 | embedding = self.operation_model.predict(x_input) 63 | print("the operation {0} output shape is {1}".format(self.operation_id, np.shape(embedding))) 64 | return embedding 65 | pass 66 | 67 | class excuteVgg16: 68 | 69 | def __func0__(self, input_shape): 70 | from keras.models import Model 71 | from keras.layers import Input 72 | from keras.layers import Conv2D 73 | from keras.layers import MaxPooling2D 74 | 75 | img_input = Input(shape=input_shape) 76 | 77 | # Block 1 78 | x = Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv1')(img_input) 79 | x = Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv2')(x) 80 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block1_pool')(x) 81 | 82 | model = Model(inputs=img_input, outputs=x) 83 | return model 84 | 85 | def __func1__(self, input_shape): 86 | from keras.layers import Input 87 | from keras.layers import Conv2D 88 | from keras.layers import MaxPooling2D 89 | from keras.models import Model 90 | 91 | 92 | input = Input(shape=input_shape) 93 | # Block 2 94 | x = Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv1')(input) 95 | x = Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv2')(x) 96 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block2_pool')(x) 97 | 98 | model = Model(inputs=input, outputs=x) 99 | return model 100 | 101 | def __func2__(self, input_shape): 102 | from keras.layers import Input 103 | from keras.layers import Conv2D 104 | from keras.layers import MaxPooling2D 105 | from keras.models import Model 106 | 107 | input = Input(shape=input_shape) 108 | # Block 3 109 | x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv1')(input) 110 | x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv2')(x) 111 | x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv3')(x) 112 | x = MaxPooling2D((2, 2), strides=(2, 2), 
name='block3_pool')(x) 113 | 114 | model = Model(inputs=input, outputs=x) 115 | return model 116 | 117 | def __func3__(self, input_shape): 118 | from keras.layers import Input 119 | from keras.layers import Conv2D 120 | from keras.layers import MaxPooling2D 121 | from keras.models import Model 122 | 123 | input = Input(shape=input_shape) 124 | # Block 4 125 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv1')(input) 126 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv2')(x) 127 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv3')(x) 128 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block4_pool')(x) 129 | 130 | model = Model(inputs=input, outputs=x) 131 | return model 132 | 133 | def __func4__(self, input_shape): 134 | from keras.layers import Input 135 | from keras.layers import Conv2D 136 | from keras.layers import MaxPooling2D 137 | from keras.models import Model 138 | 139 | input = Input(shape=input_shape) 140 | # Block 5 141 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv1')(input) 142 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv2')(x) 143 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv3')(x) 144 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block5_pool')(x) 145 | model = Model(inputs=input, outputs=x) 146 | return model 147 | 148 | def __func5__(self, input_shape): 149 | from keras.layers import Flatten 150 | from keras.layers import Dense 151 | from keras.layers import Input 152 | from keras.models import Model 153 | 154 | input = Input(shape=input_shape) 155 | # Classification block 156 | x = Flatten(name='flatten')(input) 157 | x = Dense(4096, activation='relu', name='fc1')(x) 158 | x = Dense(4096, activation='relu', name='fc2')(x) 159 | x = Dense(1000, activation='softmax', name='predictions')(x) 160 | 161 | model = Model(inputs=input, outputs=x) 162 | return model 163 | 164 | 165 | def __init__(self): 166 | from Executer.vgg16 import vgg16 167 | self.operations = [] 168 | weights_model = vgg16(input_shape=(224,224, 3), 169 | classes=1000).model 170 | 171 | operation0 = operation(0, self.__func0__, (224, 224, 3), weights_model) 172 | operation1 = operation(1, self.__func1__, (112, 112, 64), weights_model) 173 | operation2 = operation(2, self.__func2__, (56, 56, 128), weights_model) 174 | operation3 = operation(3, self.__func3__, (28, 28, 256), weights_model) 175 | operation4 = operation(4, self.__func4__, (14, 14, 512), weights_model) 176 | operation5 = operation(5, self.__func5__, (7, 7, 512), weights_model) 177 | 178 | self.operations.append(operation0) 179 | self.operations.append(operation1) 180 | self.operations.append(operation2) 181 | self.operations.append(operation3) 182 | self.operations.append(operation4) 183 | self.operations.append(operation5) 184 | 185 | 186 | def excute(self, operationid, inputdata): 187 | return self.operations[operationid].excute(inputdata) 188 | 189 | -------------------------------------------------------------------------------- /edge/Executer/excuter.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | @author: longxin 4 | @version: 1.0 5 | @date: 6 | @changeVersion: 7 | @changeAuthor: 8 | @description: 9 | ''' 10 | 11 | class operation: 12 | 13 | def __init__(self, operationid, operationfuction): 14 | self.operationid = operationid 15 | self.operationfunction = operationfuction 16 | 17 
| def checkid(self, operationid): 18 | if self.operationid == operationid: 19 | return True 20 | else: 21 | return False 22 | 23 | def getid(self): 24 | return self.operationid 25 | 26 | def excute(self, inputdata): 27 | return self.operationfunction(inputdata) 28 | 29 | 30 | class ExecuteAgent: 31 | 32 | # 定义函数 33 | def __func0__(self, input): 34 | tmp = input[0] + 1 35 | return tmp 36 | 37 | def __func1__(self, input): 38 | tmp = input[0] - 1 39 | return tmp 40 | 41 | def __func2__(self, input): 42 | tmp = input[0] * 2 43 | return tmp 44 | 45 | def __func3__(self, input): 46 | tmp = input[0] * input[1] 47 | return tmp 48 | 49 | def __func4__(self, input): 50 | tmp = input[0] * 0.5 51 | return tmp 52 | 53 | def __func5__(self, input): 54 | tmp = input[0] * input[1] 55 | return tmp 56 | 57 | def __func6__(self, input): 58 | tmp = input[0] + input[1] 59 | return tmp 60 | 61 | 62 | 63 | def __init__(self): 64 | self.operations = [] 65 | 66 | operation0 = operation(0, self.__func0__) 67 | operation1 = operation(1, self.__func1__) 68 | operation2 = operation(2, self.__func2__) 69 | operation3 = operation(3, self.__func3__) 70 | operation4 = operation(4, self.__func4__) 71 | operation5 = operation(5, self.__func5__) 72 | operation6 = operation(6, self.__func6__) 73 | 74 | 75 | self.operations.append(operation0) 76 | self.operations.append(operation1) 77 | self.operations.append(operation2) 78 | self.operations.append(operation3) 79 | self.operations.append(operation4) 80 | self.operations.append(operation5) 81 | self.operations.append(operation6) 82 | 83 | 84 | def excute(self, operationid, inputdata): 85 | # #检查是否有操作id 检查输入数据格式 86 | # if int(operationid) >= len(self.operations)-1 or operationid < 0: 87 | # return None 88 | # 89 | # if not isinstance(inputdata, list): 90 | # return None 91 | 92 | return self.operations[operationid].excute(inputdata) 93 | 94 | -------------------------------------------------------------------------------- /edge/Executer/utils.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | import numpy as np 3 | import os 4 | from numpy import genfromtxt 5 | from keras.layers import Conv2D, ZeroPadding2D, Activation, Input, concatenate 6 | from keras.models import Model 7 | from keras.layers.normalization import BatchNormalization 8 | from keras.layers.pooling import MaxPooling2D, AveragePooling2D 9 | 10 | 11 | _FLOATX = 'float32' 12 | 13 | # def variable(value, dtype=_FLOATX, name=None): 14 | # v = tf.Variable(np.asarray(value, dtype=dtype), name=name) 15 | # _get_session().run(v.initializer) 16 | # return v 17 | 18 | def shape(x): 19 | return x.get_shape() 20 | 21 | def square(x): 22 | return tf.square(x) 23 | 24 | # def zeros(shape, dtype=_FLOATX, name=None): 25 | # return variable(np.zeros(shape), dtype, name) 26 | 27 | def concatenate(tensors, axis=-1): 28 | if axis < 0: 29 | axis = axis % len(tensors[0].get_shape()) 30 | return tf.concat(axis, tensors) 31 | 32 | def LRN2D(x): 33 | return tf.nn.lrn(x, alpha=1e-4, beta=0.75) 34 | 35 | def conv2d_bn( 36 | x, 37 | layer=None, 38 | cv1_out=None, 39 | cv1_filter=(1, 1), 40 | cv1_strides=(1, 1), 41 | cv2_out=None, 42 | cv2_filter=(3, 3), 43 | cv2_strides=(1, 1), 44 | padding=None, 45 | ): 46 | num = '' if cv2_out == None else '1' 47 | tensor = Conv2D(cv1_out, cv1_filter, strides=cv1_strides, name=layer+'_conv'+num)(x) 48 | tensor = BatchNormalization(axis=3, epsilon=0.00001, name=layer+'_bn'+num)(tensor) 49 | tensor = Activation('relu')(tensor) 50 | if padding 
== None: 51 | return tensor 52 | tensor = ZeroPadding2D(padding=padding)(tensor) 53 | if cv2_out == None: 54 | return tensor 55 | tensor = Conv2D(cv2_out, cv2_filter, strides=cv2_strides, name=layer+'_conv'+'2')(tensor) 56 | tensor = BatchNormalization(axis=3, epsilon=0.00001, name=layer+'_bn'+'2')(tensor) 57 | tensor = Activation('relu')(tensor) 58 | return tensor 59 | 60 | weights = [ 61 | 'conv1', 'bn1', 'conv2', 'bn2', 'conv3', 'bn3', 62 | 'inception_3a_1x1_conv', 'inception_3a_1x1_bn', 63 | 'inception_3a_pool_conv', 'inception_3a_pool_bn', 64 | 'inception_3a_5x5_conv1', 'inception_3a_5x5_conv2', 'inception_3a_5x5_bn1', 'inception_3a_5x5_bn2', 65 | 'inception_3a_3x3_conv1', 'inception_3a_3x3_conv2', 'inception_3a_3x3_bn1', 'inception_3a_3x3_bn2', 66 | 'inception_3b_3x3_conv1', 'inception_3b_3x3_conv2', 'inception_3b_3x3_bn1', 'inception_3b_3x3_bn2', 67 | 'inception_3b_5x5_conv1', 'inception_3b_5x5_conv2', 'inception_3b_5x5_bn1', 'inception_3b_5x5_bn2', 68 | 'inception_3b_pool_conv', 'inception_3b_pool_bn', 69 | 'inception_3b_1x1_conv', 'inception_3b_1x1_bn', 70 | 'inception_3c_3x3_conv1', 'inception_3c_3x3_conv2', 'inception_3c_3x3_bn1', 'inception_3c_3x3_bn2', 71 | 'inception_3c_5x5_conv1', 'inception_3c_5x5_conv2', 'inception_3c_5x5_bn1', 'inception_3c_5x5_bn2', 72 | 'inception_4a_3x3_conv1', 'inception_4a_3x3_conv2', 'inception_4a_3x3_bn1', 'inception_4a_3x3_bn2', 73 | 'inception_4a_5x5_conv1', 'inception_4a_5x5_conv2', 'inception_4a_5x5_bn1', 'inception_4a_5x5_bn2', 74 | 'inception_4a_pool_conv', 'inception_4a_pool_bn', 75 | 'inception_4a_1x1_conv', 'inception_4a_1x1_bn', 76 | 'inception_4e_3x3_conv1', 'inception_4e_3x3_conv2', 'inception_4e_3x3_bn1', 'inception_4e_3x3_bn2', 77 | 'inception_4e_5x5_conv1', 'inception_4e_5x5_conv2', 'inception_4e_5x5_bn1', 'inception_4e_5x5_bn2', 78 | 'inception_5a_3x3_conv1', 'inception_5a_3x3_conv2', 'inception_5a_3x3_bn1', 'inception_5a_3x3_bn2', 79 | 'inception_5a_pool_conv', 'inception_5a_pool_bn', 80 | 'inception_5a_1x1_conv', 'inception_5a_1x1_bn', 81 | 'inception_5b_3x3_conv1', 'inception_5b_3x3_conv2', 'inception_5b_3x3_bn1', 'inception_5b_3x3_bn2', 82 | 'inception_5b_pool_conv', 'inception_5b_pool_bn', 83 | 'inception_5b_1x1_conv', 'inception_5b_1x1_bn', 84 | 'dense_layer' 85 | ] 86 | 87 | conv_shape = { 88 | 'conv1': [64, 3, 7, 7], 89 | 'conv2': [64, 64, 1, 1], 90 | 'conv3': [192, 64, 3, 3], 91 | 'inception_3a_1x1_conv': [64, 192, 1, 1], 92 | 'inception_3a_pool_conv': [32, 192, 1, 1], 93 | 'inception_3a_5x5_conv1': [16, 192, 1, 1], 94 | 'inception_3a_5x5_conv2': [32, 16, 5, 5], 95 | 'inception_3a_3x3_conv1': [96, 192, 1, 1], 96 | 'inception_3a_3x3_conv2': [128, 96, 3, 3], 97 | 'inception_3b_3x3_conv1': [96, 256, 1, 1], 98 | 'inception_3b_3x3_conv2': [128, 96, 3, 3], 99 | 'inception_3b_5x5_conv1': [32, 256, 1, 1], 100 | 'inception_3b_5x5_conv2': [64, 32, 5, 5], 101 | 'inception_3b_pool_conv': [64, 256, 1, 1], 102 | 'inception_3b_1x1_conv': [64, 256, 1, 1], 103 | 'inception_3c_3x3_conv1': [128, 320, 1, 1], 104 | 'inception_3c_3x3_conv2': [256, 128, 3, 3], 105 | 'inception_3c_5x5_conv1': [32, 320, 1, 1], 106 | 'inception_3c_5x5_conv2': [64, 32, 5, 5], 107 | 'inception_4a_3x3_conv1': [96, 640, 1, 1], 108 | 'inception_4a_3x3_conv2': [192, 96, 3, 3], 109 | 'inception_4a_5x5_conv1': [32, 640, 1, 1,], 110 | 'inception_4a_5x5_conv2': [64, 32, 5, 5], 111 | 'inception_4a_pool_conv': [128, 640, 1, 1], 112 | 'inception_4a_1x1_conv': [256, 640, 1, 1], 113 | 'inception_4e_3x3_conv1': [160, 640, 1, 1], 114 | 'inception_4e_3x3_conv2': [256, 160, 3, 3], 115 
| 'inception_4e_5x5_conv1': [64, 640, 1, 1], 116 | 'inception_4e_5x5_conv2': [128, 64, 5, 5], 117 | 'inception_5a_3x3_conv1': [96, 1024, 1, 1], 118 | 'inception_5a_3x3_conv2': [384, 96, 3, 3], 119 | 'inception_5a_pool_conv': [96, 1024, 1, 1], 120 | 'inception_5a_1x1_conv': [256, 1024, 1, 1], 121 | 'inception_5b_3x3_conv1': [96, 736, 1, 1], 122 | 'inception_5b_3x3_conv2': [384, 96, 3, 3], 123 | 'inception_5b_pool_conv': [96, 736, 1, 1], 124 | 'inception_5b_1x1_conv': [256, 736, 1, 1], 125 | } 126 | 127 | def load_weights(): 128 | import os 129 | # Set weights path 130 | local_dir_path = r'/home/derfei/Desktop/edge/Executer' 131 | dirPath = os.path.join(local_dir_path, 'weights') 132 | fileNames = filter(lambda f: not f.startswith('.'), os.listdir(dirPath)) 133 | paths = {} 134 | weights_dict = {} 135 | 136 | for n in fileNames: 137 | paths[n.replace('.csv', '')] = dirPath + '/' + n 138 | 139 | for name in weights: 140 | if 'conv' in name: 141 | conv_w = genfromtxt(paths[name + '_w'], delimiter=',', dtype=None) 142 | conv_w = np.reshape(conv_w, conv_shape[name]) 143 | conv_w = np.transpose(conv_w, (2, 3, 1, 0)) 144 | conv_b = genfromtxt(paths[name + '_b'], delimiter=',', dtype=None) 145 | weights_dict[name] = [conv_w, conv_b] 146 | elif 'bn' in name: 147 | bn_w = genfromtxt(paths[name + '_w'], delimiter=',', dtype=None) 148 | bn_b = genfromtxt(paths[name + '_b'], delimiter=',', dtype=None) 149 | bn_m = genfromtxt(paths[name + '_m'], delimiter=',', dtype=None) 150 | bn_v = genfromtxt(paths[name + '_v'], delimiter=',', dtype=None) 151 | weights_dict[name] = [bn_w, bn_b, bn_m, bn_v] 152 | elif 'dense' in name: 153 | dense_w = genfromtxt(dirPath+'/dense_w.csv', delimiter=',', dtype=None) 154 | dense_w = np.reshape(dense_w, (128, 736)) 155 | dense_w = np.transpose(dense_w, (1, 0)) 156 | dense_b = genfromtxt(dirPath+'/dense_b.csv', delimiter=',', dtype=None) 157 | weights_dict[name] = [dense_w, dense_b] 158 | 159 | return weights_dict 160 | 161 | -------------------------------------------------------------------------------- /edge/Executer/vgg16.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | @author: longxin 4 | @version: 1.0 5 | @date: 6 | @changeVersion: 7 | @changeAuthor: 8 | @description: 9 | ''' 10 | 11 | class utils_vgg16: 12 | 13 | @classmethod 14 | def load_weight(cls, vgg_model, model): 15 | 16 | for layer in model.layers: 17 | try: 18 | if vgg_model.get_layer(layer.name) != None: 19 | layer.set_weights(vgg_model.get_layer(name=layer.name).get_weights()) 20 | except Exception as e: 21 | print("cannot find the layer {0} in the vgg model and exception is {1}".format(layer.name, 22 | e)) 23 | pass 24 | 25 | class vgg16: 26 | 27 | def __init__(self, input_shape, classes): 28 | from keras.models import Model 29 | from keras.layers import Flatten 30 | from keras.layers import Dense 31 | from keras.layers import Input 32 | from keras.layers import Conv2D 33 | from keras.layers import MaxPooling2D 34 | from keras.utils.data_utils import get_file 35 | 36 | WEIGHTS_PATH = 'https://github.com/fchollet/deep-learning-models/releases/download/v0.1/vgg16_weights_tf_dim_ordering_tf_kernels.h5' 37 | 38 | img_input = Input(shape=input_shape) 39 | 40 | # Block 1 41 | x = Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv1')(img_input) 42 | x = Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv2')(x) 43 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block1_pool')(x) 44 | 45 | # Block 2 
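        # Note: the filter counts run 64, 128, 256, 512, 512 across blocks 1-5, while each block's
        # 2x2 / stride-2 max-pooling halves the spatial size (224 -> 112 -> 56 -> 28 -> 14 -> 7).
        # excuteVgg16 relies on exactly this progression when it builds its per-block operations
        # with input shapes (112, 112, 64), (56, 56, 128), (28, 28, 256), (14, 14, 512) and
        # (7, 7, 512): each operation consumes the feature map produced by the previous block.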
46 | x = Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv1')(x) 47 | x = Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv2')(x) 48 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block2_pool')(x) 49 | 50 | # Block 3 51 | x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv1')(x) 52 | x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv2')(x) 53 | x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv3')(x) 54 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block3_pool')(x) 55 | 56 | # Block 4 57 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv1')(x) 58 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv2')(x) 59 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv3')(x) 60 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block4_pool')(x) 61 | 62 | # Block 5 63 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv1')(x) 64 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv2')(x) 65 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv3')(x) 66 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block5_pool')(x) 67 | 68 | # Classification block 69 | x = Flatten(name='flatten')(x) 70 | x = Dense(4096, activation='relu', name='fc1')(x) 71 | x = Dense(4096, activation='relu', name='fc2')(x) 72 | x = Dense(classes, activation='softmax', name='predictions')(x) 73 | 74 | self.model = Model(inputs=img_input, output=x, name='vgg16') 75 | 76 | 'load model weights' 77 | weights_path = get_file('vgg16_weights_tf_dim_ordering_tf_kernels.h5', 78 | WEIGHTS_PATH, 79 | cache_subdir='models') 80 | self.model.load_weights(weights_path) 81 | 82 | 83 | def plot_model(self): 84 | from keras.utils import plot_model 85 | 86 | plot_model(model=self.model, to_file='modelvgg16.png', show_shapes=True, show_layer_names=True) 87 | 88 | if __name__ == "__main__": 89 | import numpy as np 90 | from keras.models import Model 91 | from keras.layers import Flatten 92 | from keras.layers import Dense 93 | from keras.layers import Input 94 | from keras.layers import Conv2D 95 | from keras.layers import MaxPooling2D 96 | from keras.utils.data_utils import get_file 97 | from keras.preprocessing import image 98 | from keras.applications.imagenet_utils import decode_predictions 99 | from keras.applications.imagenet_utils import preprocess_input 100 | 101 | img_input = Input(shape=(224, 224, 3)) 102 | 103 | # Block 1 104 | x = Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv1')(img_input) 105 | x = Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv2')(x) 106 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block1_pool')(x) 107 | 108 | # Block 2 109 | x = Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv1')(x) 110 | x = Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv2')(x) 111 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block2_pool')(x) 112 | 113 | # Block 3 114 | x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv1')(x) 115 | x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv2')(x) 116 | x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv3')(x) 117 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block3_pool')(x) 118 | 119 | # Block 4 120 | x = Conv2D(512, (3, 3), activation='relu', 
padding='same', name='block4_conv1')(x) 121 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv2')(x) 122 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv3')(x) 123 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block4_pool')(x) 124 | 125 | # Block 5 126 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv1')(x) 127 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv2')(x) 128 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv3')(x) 129 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block5_pool')(x) 130 | 131 | # Classification block 132 | x = Flatten(name='flatten')(x) 133 | x = Dense(4096, activation='relu', name='fc1')(x) 134 | x = Dense(4096, activation='relu', name='fc2')(x) 135 | x = Dense(1000, activation='softmax', name='predictions')(x) 136 | 137 | model = Model(inputs=img_input, outputs=x) 138 | vgg16model = vgg16(input_shape=(224, 224, 3), classes=1000) 139 | utils_vgg16.load_weight(vgg16model.model, model) 140 | 141 | # 'try to predict' 142 | # input_data = np.array([np.zeros(shape=(224, 224, 3))]) 143 | # output = model.predict(input_data) 144 | # 145 | # print("the output is ", output) 146 | 147 | img_path = 'elephant.jpg' 148 | img = image.load_img(img_path, target_size=(224, 224)) 149 | x = image.img_to_array(img) 150 | x = np.expand_dims(x, axis=0) 151 | x = preprocess_input(x) 152 | print('Input image shape:', x.shape) 153 | 154 | preds = model.predict(x) 155 | print('Predicted:', decode_predictions(preds)) 156 | preds_resnet50 = vgg16model.model.predict(x) 157 | print('Predicted:', decode_predictions(preds_resnet50)) 158 | 159 | 160 | 161 | 162 | 163 | 164 | -------------------------------------------------------------------------------- /edge/README.md: -------------------------------------------------------------------------------- 1 | # distributeed-deep-learning/edge 2 | 3 | ## 分布式神经网络的Edge服务器项目 4 | 该项目还有其他的三个兄弟项目 分别是 IoT Android Remote Cloud 分别运行于 5 | 其他的三种物理设备上面 -------------------------------------------------------------------------------- /edge/model/detectmodel.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Derfei/task-merging/0002b5b0c62bc4c4cc8f754474d9c750ccf026e4/edge/model/detectmodel.py -------------------------------------------------------------------------------- /edge/model/models.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | @author: longxin 4 | @version: 1.0 5 | @date: 6 | @changeVersion: 7 | @changeAuthor: 8 | @description: 所有任务实体 9 | ''' 10 | import json 11 | class MyEncoder(json.JSONEncoder): 12 | 13 | def default(self, o): 14 | import numpy 15 | if isinstance(o, bytes): 16 | return str(o, encoding='utf-8') 17 | if isinstance(o, numpy.ndarray): 18 | return o.tolist() 19 | 20 | return json.JSONEncoder.default(o) 21 | 22 | class task: 23 | 24 | def __init__(self, requestdeviceid, applicationid, offloadingpolicyid, taskid, operationid, inputdata, formertasklist, 25 | nexttasklist, timecostlist): 26 | self.requestdevicdid = requestdeviceid 27 | self.applicationid = applicationid 28 | self.offloadingpolicyid = offloadingpolicyid 29 | self.taskid = taskid 30 | self.operationid = operationid 31 | self.inputdata = inputdata 32 | self.formertasklist = formertasklist 33 | self.nexttasklist = nexttasklist 34 | self.timecostlist = timecostlist 35 | 36 | @classmethod 37 | 
def initfromdict(cls, taskdict): 38 | tmptask = task(taskdict['requestdeviceid'], taskdict['applicationid'], taskdict['offloadingpolicyid'], taskdict['taskid'], 39 | taskdict['operationid'], taskdict['inputdata'], taskdict['formertasklist'], taskdict['nexttasklist'], taskdict['timecostlist']) 40 | return tmptask 41 | 42 | @classmethod 43 | def initfromstring(cls, taskstring): 44 | import json 45 | taskdict = json.loads(taskstring) 46 | return task.initfromdict(taskdict=taskdict) 47 | 48 | def todict(self): 49 | tmpdict = {} 50 | tmpdict['requestdeviceid'] = self.requestdevicdid 51 | tmpdict['applicationid'] = self.applicationid 52 | tmpdict['offloadingpolicyid'] = self.offloadingpolicyid 53 | tmpdict['taskid'] = self.taskid 54 | tmpdict['operationid'] = self.operationid 55 | tmpdict['inputdata'] = self.inputdata 56 | tmpdict['formertasklist'] = self.formertasklist 57 | tmpdict['nexttasklist'] = self.nexttasklist 58 | tmpdict['timecostlist'] = self.timecostlist 59 | return tmpdict 60 | 61 | def tostring(self): 62 | import json 63 | tmpdict = self.todict() 64 | return json.dumps(tmpdict, ensure_ascii=True, cls=MyEncoder).encode() 65 | 66 | class msg: 67 | ''' 68 | requestdeviceid: 代表的是发送信息的设备编号 69 | senddeviceid: 发送的目标设备的id 70 | ''' 71 | 72 | def __init__(self, requestdeviceid, senddeviceid, sendtime, sendmsgtype, sendmsgcontent): 73 | self.requestdeviceid = requestdeviceid 74 | self.senddeviceid = senddeviceid 75 | self.sendtime = sendtime 76 | self.sendmsgtype = sendmsgtype 77 | self.sendmsgcontent = sendmsgcontent 78 | 79 | @classmethod 80 | def initfromdict(cls, msgdict): 81 | tmpmsg = msg(msgdict['requestdeviceid'], msgdict['senddeviceid'], msgdict['sendtime'], msgdict['sendmsgtype'], 82 | msgdict['sendmsgcontent']) 83 | return tmpmsg 84 | 85 | @classmethod 86 | def initfromstring(cls, msgstring): 87 | import json 88 | msgdict = json.loads(msgstring) 89 | return msg.initfromdict(msgdict) 90 | 91 | def todict(self): 92 | msgdict = {} 93 | msgdict['requestdeviceid'] = self.requestdeviceid 94 | msgdict['senddeviceid'] = self.senddeviceid 95 | msgdict['sendtime'] = self.sendtime 96 | msgdict['sendmsgtype'] = self.sendmsgtype 97 | msgdict['sendmsgcontent'] = self.sendmsgcontent 98 | 99 | return msgdict 100 | 101 | 102 | def tostring(self): 103 | import json 104 | tmpdict = self.todict() 105 | return json.dumps(tmpdict, ensure_ascii=True, cls=MyEncoder).encode() 106 | 107 | 108 | class offloadingPolicy: 109 | 110 | def __init__(self, offloadingpolicyid, requestdeviceid, applicationid, taskid, excutedeviceid): 111 | self.offloadingpolicyid = offloadingpolicyid 112 | self.requestdeviceid = requestdeviceid 113 | self.applicationid = applicationid 114 | self.taskid = taskid 115 | self.excutedeviceid = excutedeviceid 116 | 117 | @classmethod 118 | def initfromdict(cls, offloadingpolicydict): 119 | tmpoffloadingpolicy = offloadingPolicy(offloadingpolicydict['offloadingpolicyid'], offloadingpolicydict['requestdeviceid'], 120 | offloadingpolicydict['applicationid'], offloadingpolicydict['taskid'],offloadingpolicydict['excutedeviceid']) 121 | return tmpoffloadingpolicy 122 | 123 | @classmethod 124 | def initfromstring(cls, offloadingpolicystring): 125 | import json 126 | tmpdict = json.loads(offloadingpolicystring) 127 | return offloadingPolicy.initfromdict(tmpdict) 128 | 129 | def todict(self): 130 | tmpdict = {} 131 | tmpdict['offloadingpolicyid'] = self.offloadingpolicyid 132 | tmpdict['requestdeviceid'] = self.requestdeviceid 133 | tmpdict['applicationid'] = self.applicationid 134 | 
tmpdict['taskid'] = self.taskid 135 | tmpdict['excutedeviceid'] = self.excutedeviceid 136 | 137 | return tmpdict 138 | 139 | def tostring(self): 140 | import json 141 | tmpdict = self.todict() 142 | return json.dumps(tmpdict, cls=MyEncoder, ensure_ascii=True).encode() 143 | 144 | 145 | class application: 146 | 147 | def __init__(self, requestdeviceid, applicationid, taskidlist, formertasklist, nexttasklist, operationlist): 148 | self.requestdeviceid = requestdeviceid 149 | self.applicationid = applicationid 150 | self.taskidlist = taskidlist 151 | self.formertasklist = formertasklist 152 | self.nexttasklist = nexttasklist 153 | self.operationlist = operationlist 154 | 155 | @classmethod 156 | def initfromdict(cls, applicationdict): 157 | tmpapplication = application(applicationdict['requestdeviceid'], applicationdict['applicationid'], applicationdict['taskidlist'], 158 | applicationdict['formertasklist'], applicationdict['nexttasklist'], applicationdict['operationidlist']) 159 | return tmpapplication 160 | 161 | @classmethod 162 | def initfromstring(cls, applicationstring): 163 | import json 164 | tmpdict = json.loads(applicationstring) 165 | return application.initfromdict(tmpdict) 166 | 167 | @classmethod 168 | def initfromString(cls, applicationstringlines): 169 | # 将文本中的内转换为application对象 170 | firstline = applicationstringlines[0] 171 | requestdeviceid = firstline.split()[0] 172 | applicationid = firstline.split()[1] 173 | taskidlist = [] 174 | formertasklist = [] 175 | nexttasklist = [] 176 | operationidlist = [] 177 | for line in applicationstringlines: 178 | taskidlist.append(int(line.split()[2])) 179 | formertasklist.append([int(tmp) for tmp in line.split()[3].split(',')]) 180 | nexttasklist.append([int(tmp) for tmp in line.split()[4].split(',')]) 181 | operationidlist.append(int(line.split()[5])) 182 | return application(requestdeviceid, applicationid, taskidlist, formertasklist, 183 | nexttasklist, operationidlist) 184 | 185 | 186 | def todict(self): 187 | tmpdict = {} 188 | tmpdict['requestdeviceid'] = self.requestdeviceid 189 | tmpdict['applicationid'] = self.applicationid 190 | tmpdict['taskidlist'] = self.taskidlist 191 | tmpdict['formertasklist'] = self.formertasklist 192 | tmpdict['nexttasklist'] = self.nexttasklist 193 | tmpdict['operationidlist'] = self.operationlist 194 | 195 | return tmpdict 196 | 197 | def tostring(self): 198 | import json 199 | tmpdict = self.todict() 200 | return json.dumps(tmpdict, cls=MyEncoder, ensure_ascii=True).encode() 201 | 202 | 203 | class networkinfo: 204 | 205 | def __init__(self, deviceid, devicetype, ip, port): 206 | self.deviceid = deviceid 207 | self.devicetype = devicetype 208 | self.ip = ip 209 | self.port = port 210 | 211 | @classmethod 212 | def initfromdict(cls, networkinfodict): 213 | tmpnetworkinfo = networkinfo(networkinfodict['deviceid'], networkinfodict['devicetype'], 214 | networkinfodict['ip'], networkinfodict['port']) 215 | return tmpnetworkinfo 216 | 217 | @classmethod 218 | def initfromstring(cls, networkinfostring): 219 | import json 220 | tmpnetworkinfodict = json.loads(networkinfostring) 221 | return networkinfo.initfromdict(tmpnetworkinfodict) 222 | 223 | @classmethod 224 | def initfromString(cls, networkinfoString): 225 | content = networkinfoString.split() 226 | # print("When init from String the networkinfo, the len of the content is:", len(content), content) 227 | tmpnetworkinfo = networkinfo(content[0], content[1], content[2], content[3]) 228 | return tmpnetworkinfo 229 | 230 | def todict(self): 231 | tmpdict = 
{} 232 | tmpdict['deviceid'] = self.deviceid 233 | tmpdict['devicetype'] = self.devicetype 234 | tmpdict['ip'] = self.ip 235 | tmpdict['port'] = self.port 236 | return tmpdict 237 | 238 | def toString(self): 239 | tmpdict = self.todict() 240 | return str(tmpdict) 241 | 242 | def tostring(self): 243 | import json 244 | tmpdict = self.todict() 245 | return json.dumps(tmpdict, cls=MyEncoder, ensure_ascii=True).encode() 246 | 247 | 248 | 249 | 250 | 251 | 252 | -------------------------------------------------------------------------------- /edge/model/record.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | @author: longxin 4 | @version: 1.0 5 | @date: 6 | @changeVersion: 7 | @changeAuthor: 8 | @description: 写入和读取离线文件 9 | ''' 10 | recordbasedir = r"/home/derfei/Desktop/edge/model/files" 11 | from model.models import networkinfo 12 | from model.models import * 13 | def writeoffloadingpolicy(requestdeviceid, applicationid, offloadingpolicyid, offloadingpolicy): 14 | ''' 15 | offloadingpolicy 离线保存格式为: 16 | offloaindpolicy_requestdeviceid_applicationid_offloadingpolicyid 17 | offloading: 格式为: 18 | offloadingpolicyid requestdeviceid applicationid, executedeviceid 19 | :param requestdeviceid: 20 | :param applicationid: 21 | :param offloadingpolicyid: 22 | :param offloadingpolicy: 23 | :return: 24 | ''' 25 | import os 26 | filepath = os.path.join(recordbasedir, 'offloadingpolicy_'+str(requestdeviceid)+"_"+str(applicationid)+"_"+str(offloadingpolicyid)+".txt") 27 | 28 | # 写入文件 覆盖式 29 | with open(filepath, "w+") as file: 30 | for policy in offloadingpolicy: 31 | line = "{0}\t{1}\t{2}\t{3}\t{4}\n".format(offloadingpolicyid, requestdeviceid, applicationid, policy['taskid'], policy['excutedeviceid']) 32 | file.write(line) 33 | 34 | def writenetworkinfo(networkinfo_list): 35 | ''' 36 | 将传回的networkinfolist 数据写入文件当中 37 | :param networkinfo_list: 38 | :return: 39 | ''' 40 | import os 41 | import json 42 | filepath = os.path.join(recordbasedir, "network.txt") 43 | 44 | with open(filepath, "w+") as file: 45 | for networkinfo in networkinfo_list: 46 | if not isinstance(networkinfo, dict): 47 | networkinfo = json.loads(networkinfo) 48 | line = "{0}\t{1}\t{2}\t{3}\n".format(networkinfo['deviceid'], networkinfo['devicetype'], 49 | networkinfo['ip'], networkinfo['port']) 50 | file.write(line) 51 | 52 | 53 | def getnetworkinfo(deviceid): 54 | ''' 55 | 从离线网络中获取网络信息 56 | :param deviceid: 如果为-1则为获取全部的网络信息 否则为获取一个网络信息 57 | :return: [type: networkinfo] (type: ip, type: port) 58 | ''' 59 | import os 60 | filepath = os.path.join(recordbasedir, "network.txt") 61 | 62 | with open(filepath, "r+") as file: 63 | lines = file.readlines() 64 | networkinfolist = [] 65 | 66 | for line in lines: 67 | line = line.replace('\n', '') 68 | line = line.strip() 69 | if len(line) != 0: 70 | networkinfolist.append(networkinfo.initfromString(line).todict()) 71 | 72 | # find the deviceid and return the url and the port 73 | 74 | if int(deviceid) == -1: 75 | deviceiplist = [] 76 | deviceidlist = [] 77 | devicetypelist = [] 78 | deviceportlist = [] 79 | 80 | for device in networkinfolist: 81 | deviceidlist.append(device['deviceid']) 82 | deviceiplist.append(device['ip']) 83 | devicetypelist.append(device['devicetype']) 84 | deviceportlist.append(device['port']) 85 | 86 | 87 | devicelist = [networkinfo(deviceidlist[tmp], devicetypelist[tmp], deviceiplist[tmp], deviceportlist[tmp]) for tmp in range(0, len(deviceiplist))] 88 | 89 | return devicelist 90 | else: 91 | for 
device in networkinfolist: 92 | if int(device['deviceid']) == int(deviceid): 93 | return device['ip'], device['port'] 94 | return None, None 95 | 96 | 97 | def getapplicationinfo(taskid, requestdeviceid, applicationid): 98 | import os 99 | filepath = os.path.join(recordbasedir, "applicationinfo_"+str(requestdeviceid)+"_" 100 | +str(applicationid)+".txt") 101 | 102 | print("Begin to read the application file", filepath) 103 | # 获取应用信息 104 | try: 105 | with open(filepath, "r+") as file: 106 | lines = file.readlines() 107 | tmpapplication = application.initfromString(lines) 108 | 109 | # 查找相应的应用 110 | formertasklist = None 111 | nexttasklist = None 112 | operationid = None 113 | 114 | tmpapplicationdict = tmpapplication.todict() 115 | for i, tmptaskid in enumerate(tmpapplicationdict['taskidlist']): 116 | if int(tmptaskid) == int(taskid): 117 | formertasklist = tmpapplicationdict['formertasklist'][i] 118 | nexttasklist = tmpapplicationdict['nexttasklist'][i] 119 | operationid = tmpapplicationdict['operationidlist'][i] 120 | 121 | return formertasklist, nexttasklist, operationid 122 | 123 | return formertasklist, nexttasklist, operationid 124 | except Exception as e: 125 | return None, None, None 126 | 127 | def getapplicationdict(requestdeviceid, applicationid): 128 | import os 129 | filepath = os.path.join(recordbasedir, "applicationinfo_"+str(requestdeviceid)+"_"+ 130 | str(applicationid)+".txt") 131 | 132 | # 获取全部的应用信息 不存在应用为空的情况 133 | try: 134 | with open(filepath, "r+") as file: 135 | lines = file.readlines() 136 | 137 | tmpapplication = application.initfromString(lines) 138 | 139 | return tmpapplication.todict() 140 | except Exception as e: 141 | return None 142 | 143 | def writeapplication(tmpapplication): 144 | ''' 145 | 将应用直接写入文件当中 146 | :param tmpapplication: 147 | :return: 148 | ''' 149 | tmpapplicationdict = tmpapplication.todict() 150 | 151 | writeapplicationinfo(tmpapplicationdict['requestdeviceid'], tmpapplicationdict['applicationid'], tmpapplicationdict['taskidlist'], 152 | tmpapplicationdict['formertasklist'], tmpapplicationdict['nexttasklist'], tmpapplicationdict['operationidlist']) 153 | 154 | 155 | 156 | def writeapplicationinfo(requestdeviceid, applicationid, taskidlist, formertaskidlist, 157 | nexttasklist, operationidlist): 158 | ''' 159 | 写入应用信息 160 | :param requestdeviceid: 请求设备id 161 | :param applicationid: 应用id 162 | :param taskidlist: 任务id list 163 | :param formetaskidlist: the percessortask list 164 | :param nextdeviceidlist: the nextdevice list 165 | :param operationlist: the operation list 166 | :return: 167 | ''' 168 | import os 169 | filepath = os.path.join(recordbasedir, "applicationinfo_"+str(requestdeviceid)+"_" 170 | +str(applicationid)+".txt") 171 | 172 | 173 | with open(filepath, "w+") as file: 174 | for i in range(0, len(taskidlist)): 175 | line = "{0}\t{1}\t{2}\t{3}\t{4}\t{5}\n".format(requestdeviceid, applicationid, 176 | taskidlist[i], ','.join([str(tmp) for tmp in formertaskidlist[i]]), 177 | ','.join([str(tmp) for tmp in nexttasklist[i]]), str(operationidlist[i])) 178 | file.write(line) 179 | 180 | 181 | 182 | def getoffloadingpolicyinfo(taskid, requestdeviceid, applicationid, offloadingpolicyid): 183 | import os 184 | 185 | filepath = os.path.join(recordbasedir,"offloadingpolicy_"+str(requestdeviceid)+"_"+str(applicationid) 186 | +"_"+str(offloadingpolicyid)+".txt") 187 | 188 | try: 189 | with open(filepath, 'r+') as file: 190 | lines = file.readlines() 191 | 192 | if int(taskid) != -1: 193 | # 查找对应的task 194 | for line in lines: 195 | line = 
line.replace('\n', '') 196 | if int(line.split('\t')[3]) == int(taskid): 197 | return int(line.split('\t')[4]) 198 | else: 199 | # 获取全部的调度策略 200 | taskidlist = [] 201 | excuteddeviceidlist = [] 202 | 203 | for line in lines: 204 | line = line.replace('\n', '') 205 | 206 | taskidlist.append(line.split('\t')[3]) 207 | excuteddeviceidlist.append(line.split('\t')[4]) 208 | 209 | # 构建调度策略应用 210 | offloadingpolicylist = [] 211 | 212 | for i in range(0, len(taskidlist)): 213 | tmpoffloadingpolciy = offloadingPolicy(offloadingpolicyid, requestdeviceid, applicationid, taskidlist[i], 214 | excuteddeviceidlist[i]) 215 | offloadingpolicylist.append(tmpoffloadingpolciy) 216 | 217 | return offloadingpolicylist 218 | except Exception as e: 219 | return None 220 | 221 | def getformertaskinfo(taskid, requestdeviceid, applicationid, offloadingpolicyid): 222 | ''' 223 | 这里有错误 还需要知道是谁的任务idlist 224 | 获取前置任务的处理结果 225 | :param taskid: 需要查询任务id 226 | :param requestdeviceid: 应用请求设备id 227 | :param applicationid: 应用id号 228 | :return: 返回字典 229 | ''' 230 | import os 231 | import json 232 | import numpy as np 233 | formertaskfilepath = os.path.join(recordbasedir, 234 | "formertaskinfo_{0}_{1}_{2}_{3}.txt".format(taskid, requestdeviceid, applicationid, offloadingpolicyid)) 235 | try: 236 | with open(formertaskfilepath, 'r+') as file: 237 | taskdictlist = [] 238 | 239 | lines = file.readlines() 240 | for line in lines: 241 | line = line.replace('\n', '') 242 | # print("The line split len is ", len(line.split('\t'))) 243 | tmpdict = {} 244 | tmpdict['taskid'] = line.split('\t')[0] 245 | tmpdict['requestdeviceid'] = line.split('\t')[1] 246 | tmpdict['applicationid'] = line.split('\t')[2] 247 | tmpdict['offloadingpolicyid'] = line.split('\t')[3] 248 | tmpdict['formertaskid'] = line.split('\t')[4] 249 | # tmpdict['inputdata'] = list(line.split('\t')[5]) 250 | # print("The tmp inputdata is {0} and the format is {1}".format(json.loads(line.split('\t')[5]), type(json.loads(line.split('\t')[5])))) 251 | tmpdict['inputdata'] = json.loads(line.split('\t')[5]) 252 | tmpdict['timecost'] = json.loads(line.split('\t')[6]) 253 | 254 | 255 | taskdictlist.append(tmpdict) 256 | return taskdictlist 257 | except Exception as e: 258 | print("There is a exception happend, when get the formertaskinfo", e) 259 | return None 260 | 261 | def writeformertaskinfo(taskid, requestdeviceid, applicationid, offloadingpolicyid, taskdictlist): 262 | ''' 263 | 将前置任务的信息写入离线数据中 264 | :param taskid: 需要写入前置任务的任务id 265 | :param requestdeviceid: 提出应用的请求id 266 | :param applicationid: 应用id 267 | :param offloadingpolicyid: 迁移策略id 268 | :param taskdictlist: 任务字典列表上 269 | :return: 270 | ''' 271 | import os 272 | import numpy as np 273 | import json 274 | formertaskfilepath = os.path.join(recordbasedir, 275 | "formertaskinfo_{0}_{1}_{2}_{3}.txt".format(taskid, requestdeviceid, applicationid, 276 | offloadingpolicyid)) 277 | with open(formertaskfilepath, 'a+') as file: 278 | for tmp in taskdictlist: 279 | file.write("{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6}\n".format(taskid, requestdeviceid, applicationid,offloadingpolicyid, 280 | tmp['formertaskid'], json.dumps(tmp['inputdata']), json.dumps(tmp['timecost']))) 281 | -------------------------------------------------------------------------------- /edge/network/client.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | @author: longxin 4 | @version: 1.0 5 | @date: 6 | @changeVersion: 7 | @changeAuthor: 8 | @description: 9 | ''' 10 | from model.record 
import * 11 | from network.server import printOut 12 | LOCAL_DEVICEID = 2 13 | def sendOffloadingpolicyRequest(requestdeviceid, applicationid, offloadingpolicyid): 14 | import json 15 | import requests 16 | import datetime 17 | ''' 18 | 发送获得迁移策略请求 获得结果 写入离线文件 19 | ''' 20 | # 根据请求设备 获取ip 和端口号 21 | requestDeviceIp, requestDevicePort = getnetworkinfo(requestdeviceid) 22 | if requestDeviceIp == None: 23 | sendNetworkinfoRequest() 24 | requestDeviceIp, requestDevicePort = getnetworkinfo(requestdeviceid) 25 | 26 | requestUrl = "http://{0}:{1}/getOffloadingPolicy".format(requestDeviceIp, requestDevicePort) 27 | 28 | # 发送请求 29 | tmpOffloadingPolicy = offloadingPolicy(offloadingpolicyid, requestdeviceid, applicationid, 30 | -1, -1) 31 | tmpMsg = msg(LOCAL_DEVICEID, requestdeviceid, datetime.datetime.now().__str__(), 32 | 'query', tmpOffloadingPolicy.todict()) 33 | rtnMsg = requests.post(url=requestUrl, data=tmpMsg.tostring()) 34 | 35 | #将信息写入离线文件 36 | # printOut("the rtnMsg is {0} 请求网络路径为 {1}".format(rtnMsg, requestUrl)) 37 | rtnData = json.loads(rtnMsg.text) 38 | writeoffloadingpolicy(requestdeviceid, applicationid, offloadingpolicyid, 39 | rtnData) 40 | 41 | def sendApplicationRequest(requestdeviceid, applicationid): 42 | import json 43 | import requests 44 | import datetime 45 | 46 | # 找到requestdeviceid 的 ip 和端口 47 | tmpdeviceip, tmpdeviceport = getnetworkinfo(requestdeviceid) 48 | 49 | if tmpdeviceip == None: 50 | sendNetworkinfoRequest() 51 | tmpdeviceip, tmpdeviceport = getnetworkinfo(requestdeviceid) 52 | 53 | requrl = "http://{0}:{1}/getApplicationInfo".format(tmpdeviceip, tmpdeviceport) 54 | 55 | tmpapplicationinfo = application(-1, applicationid, [], [], [], []) 56 | tmpmsg = msg(1, requestdeviceid, datetime.datetime.now().__str__(), 'qury', tmpapplicationinfo.todict()) 57 | # 发送请求 58 | req = requests.post(url=requrl, data=tmpmsg.tostring()) 59 | 60 | applicationdict = json.loads(req.text) 61 | 62 | writeapplicationinfo(requestdeviceid=applicationdict['requestdeviceid'], applicationid=applicationdict['applicationid'], 63 | taskidlist=applicationdict['taskidlist'], formertaskidlist=applicationdict['formertasklist'], 64 | nexttasklist=applicationdict['nexttasklist'], operationidlist=applicationdict['operationidlist'])# 写入文件 65 | 66 | 67 | 68 | def sendNetworkinfoRequest(): 69 | import requests 70 | import json 71 | 72 | try: 73 | requrl = "http://10.21.23.103:8000/getInternetInfo" 74 | 75 | req = requests.post(url=requrl) 76 | networkinfolist = json.loads(req) 77 | 78 | writenetworkinfo(networkinfolist) 79 | 80 | return True 81 | except Exception as e: 82 | printOut("写入网络信息返回结果出错") 83 | return False 84 | 85 | def SendTask(requestdeviceid, applicationid, offloadingpolicyid, 86 | nexttaskid, localdeviceid, newtask): 87 | import threading 88 | thSendTask = threading.Thread(target=sendTask, args=(requestdeviceid, applicationid, offloadingpolicyid, 89 | nexttaskid, localdeviceid, newtask)) 90 | thSendTask.run() 91 | def sendTask(requestdeviceid, applicationid, offloadingpolicyid, 92 | nexttaskid, localdeviceid, newtask): 93 | import json 94 | import requests 95 | import datetime 96 | 97 | objectdeviceid = -1 98 | 99 | # 根据调度信息获取执行设备 error !!!!! 
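    # getoffloadingpolicyinfo() with a concrete task id reads the cached
    # offloadingpolicy_<requestdeviceid>_<applicationid>_<offloadingpolicyid>.txt file and returns
    # the executing device id recorded for that task (the fifth tab-separated column); it returns
    # None when the policy cannot be found locally, in which case sendOffloadingpolicyRequest()
    # below fetches the policy from the request device and the lookup is retried.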
100 | objectdeviceid = getoffloadingpolicyinfo(nexttaskid, requestdeviceid, applicationid, 101 | offloadingpolicyid) 102 | 103 | if objectdeviceid == None: 104 | sendOffloadingpolicyRequest(requestdeviceid, applicationid, offloadingpolicyid) # 请求调度信息 105 | objectdeviceid = getoffloadingpolicyinfo(nexttaskid, requestdeviceid, applicationid, 106 | offloadingpolicyid) 107 | 108 | # 获取网络信息 109 | tmpdeviceip, tmpdeviceport = getnetworkinfo(objectdeviceid) 110 | if tmpdeviceip == None: 111 | sendNetworkinfoRequest() 112 | tmpdeviceip, tmpdeviceport = getnetworkinfo(objectdeviceid) 113 | 114 | # 发送网络请求 115 | requlr = "http://{0}:{1}/dojob".format(tmpdeviceip, tmpdeviceport) 116 | tmpmsg = msg(localdeviceid, objectdeviceid, datetime.datetime.now().__str__(), 'dojob', newtask.todict()) 117 | 118 | requests.post(url=requlr, data=tmpmsg.tostring()) 119 | 120 | printOut("向{0}发送任务{1}成功".format(requlr, nexttaskid)) 121 | 122 | return requlr 123 | 124 | 125 | def sendFinal(objectdeviceid, localdeviceid, newtask): 126 | import json 127 | import requests 128 | import datetime 129 | 130 | # 获取网络信息 131 | tmpdeviceip, tmpdeviceport = getnetworkinfo(objectdeviceid) 132 | if tmpdeviceip == None: 133 | sendNetworkinfoRequest() 134 | tmpdeviceip, tmpdeviceport = getnetworkinfo(objectdeviceid) 135 | 136 | # 发送网络请求 137 | requlr = "http://{0}:{1}/getFinalResult".format(tmpdeviceip, tmpdeviceport) 138 | tmpmsg = msg(localdeviceid, objectdeviceid, datetime.datetime.now().__str__(), 'finalresult', newtask.todict()) 139 | 140 | requests.post(url=requlr, data=tmpmsg.tostring()) 141 | -------------------------------------------------------------------------------- /edge/network/server.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | @author: longxin 4 | @version: 1.0 5 | @date: 6 | @changeVersion: 7 | @changeAuthor: 8 | @description: 服务器 9 | ''' 10 | from flask import Flask 11 | from flask import request 12 | from process.processor import * 13 | from model.record import * 14 | from Executer.executerDeepLearning import excuterDeepLearning 15 | from Executer.excuteDistributedDeepLearning import excuteDistributedDeepLearningAgent 16 | from Executer.excuteVgg16 import excuteVgg16 17 | from Executer.excuter import ExecuteAgent 18 | from Executer.excuteResnet50 import excuteResnet50 19 | from Executer.excuteVgg16boostVgg19 import excuteVgg16boostVgg19 20 | # from flask.views import request 21 | app = Flask(__name__) 22 | localdeviceid = 2 23 | 24 | # set the excute agent for global 25 | print("Begin to load set the execute agent") 26 | # excuteagent = excuterDeepLearning() 27 | excuteagent = excuteDistributedDeepLearningAgent() 28 | # excuteagent = excuteVgg16() 29 | # excuteagent = excuteResnet50() 30 | # excuteagent = excuteVgg16boostVgg19() 31 | print("End to load set the execute agent") 32 | def printOut(msg): 33 | app.logger.info(msg) 34 | @app.route('/dojob', methods=['POST']) 35 | def dojob(): 36 | import json 37 | import datetime 38 | import time 39 | import numpy as np 40 | # 提取任务信息 41 | # app.logger.info("Do Job get data {0} \t the data content is {1}".format(request.get_data(), type(request.get_data()))) 42 | # app.logger.info("After change the data to string, the string is {0}".format(str(request.get_data()))) 43 | data = json.loads(request.get_data().decode(encoding='utf-8')) 44 | data = data['sendmsgcontent'] 45 | 46 | requestdeviceid = data['requestdeviceid'] 47 | applicationid = data['applicationid'] 48 | offloadingpolicyid = 
data['offloadingpolicyid'] 49 | taskid = data['taskid'] 50 | operationid = data['operationid'] 51 | inputdata = data['inputdata'] 52 | formertasklist = data['formertasklist'] 53 | nexttasklist = data['nexttasklist'] 54 | timecloselist = data['timecostlist'] 55 | 56 | # 应用信息中获取该任务的所有的前置任务 57 | actualformertasklist = gettaskFormertask(requestdeviceid, applicationid, taskid) 58 | # attention 任务结束时间这里需要进行重新设计 应该设计为任务结束的时间 59 | # 将任务写入前置任务中 60 | tmptaskdict = {} 61 | tmptaskdict['formertaskid'] = formertasklist[0] 62 | tmptaskdict['inputdata'] = inputdata 63 | tmptaskdict['timecost'] = timecloselist 64 | writeformertaskinfo(taskid=taskid, requestdeviceid=requestdeviceid, applicationid=applicationid, offloadingpolicyid=offloadingpolicyid, 65 | taskdictlist=[tmptaskdict]) 66 | # app.logger.info("Task {0} 写入前置任务 {1} 到离线文件成功".format(taskid, formertasklist)) 67 | 68 | # 确认前置任务数据已经全部完成 69 | if len(actualformertasklist) != 1: 70 | formertaskdictlist = getformertaskinfo(taskid, requestdeviceid, applicationid, offloadingpolicyid) 71 | # app.logger.info("该任务需要等待前置任务{0}完成,现在只有{1}完成".format(actualformertasklist, [tmpFormerTask['formertaskid'] for tmpFormerTask 72 | # in formertaskdictlist])) 73 | 74 | if len(formertaskdictlist) == len(actualformertasklist): # 任务已经全部完成 完成任务 75 | # 执行任务 76 | #excuteagent = ExecuteAgent() 77 | 78 | inputdatalist = [] # 整理输入数据按照任务id大小进行排序 79 | for i in range(len(formertaskdictlist)-1): 80 | for j in range(len(formertaskdictlist)-i-1): 81 | if int(formertaskdictlist[j]['formertaskid']) > int(formertaskdictlist[j+1]['formertaskid']): 82 | tmp = formertaskdictlist[j] 83 | formertaskdictlist[j] = formertaskdictlist[j+1] 84 | formertaskdictlist[j+1] = tmp 85 | 86 | for tmp in formertaskdictlist: 87 | # inputdatalist.append(tmp['inputdata'][0]) 88 | inputdatalist.append(tmp['inputdata']) 89 | 90 | # 合并任务完成时间 91 | tmpTimeCost = [tmpTime for tmpTime in timecloselist] 92 | for taskindex in range(len(timecloselist)): 93 | for tmpformertask in formertaskdictlist: 94 | 95 | 'debug: get cut the send time and exute time' 96 | if int(tmpformertask['timecost'][taskindex][0] ) != 0: 97 | tmpTimeCost[taskindex] = tmpformertask['timecost'][taskindex] 98 | break 99 | # if int(tmpformertask['timecost'][taskindex]) != 0: 100 | # tmpTimeCost[taskindex] = tmpformertask['timecost'][taskindex] 101 | # break 102 | timecloselist = tmpTimeCost 103 | # print("前置任务不唯一,但是已经完成") 104 | timecloselist[int(taskid)][0] = time.time() 105 | print("operation id is: {0} and shape of input is {1}".format(operationid, np.shape(inputdatalist))) 106 | output = excuteagent.excute(operationid, inputdatalist) 107 | timecloselist[int(taskid)][1] = time.time() 108 | # app.logger.info("任务{0}已经完成 nexttasklist 为: {1} 输出为 {2}".format(taskid, nexttasklist, np.shape(output))) 109 | 110 | # 判断是不是最后一个任务 111 | if len(nexttasklist) == 1 and int(nexttasklist[0]) == -1: 112 | tmpnewtask = produce_newtask(taskid, timecloselist, taskid, output, requestdeviceid, applicationid, 113 | offloadingpolicyid) 114 | sendFinal(requestdeviceid, localdeviceid, tmpnewtask) 115 | 116 | else: 117 | # 生成新的任务 118 | for tmp in nexttasklist: 119 | # app.logger.info("开始生成新的任务{0}".format(tmp)) 120 | tmpnewtask = produce_newtask(taskid, timecloselist, tmp, output, requestdeviceid, applicationid, 121 | offloadingpolicyid) 122 | # app.logger.info("生成新的任务为{0}".format(tmpnewtask.todict())) 123 | SendTask(requestdeviceid, applicationid, offloadingpolicyid, tmp, 124 | localdeviceid, tmpnewtask) # 发送任务到另外的服务器 125 | 126 | else: # 任务还没有全部完成 127 | 
app.logger.info("任务{0}进入等待中".format(taskid)) 128 | pass 129 | else: # 任务已经全部完成 130 | formertaskdictlist = getformertaskinfo(taskid, requestdeviceid, applicationid, offloadingpolicyid) 131 | # 执行任务 132 | #excuteagent = ExecuteAgent() 133 | 134 | inputdatalist = [] # 整理输入数据按照任务id大小进行排序 135 | for i in range(len(formertaskdictlist) - 1): 136 | for j in range(len(formertaskdictlist) - i - 1): 137 | if int(formertaskdictlist[j]['formertaskid']) > int(formertaskdictlist[j + 1]['formertaskid']): 138 | tmp = formertaskdictlist[j] 139 | formertaskdictlist[j] = formertaskdictlist[j + 1] 140 | formertaskdictlist[j + 1] = tmp 141 | 142 | for tmp in formertaskdictlist: 143 | # inputdatalist.append(tmp['inputdata'][0]) 144 | inputdatalist.append(tmp['inputdata']) 145 | # 合并任务完成时间 146 | tmpTimeCost = [tmpTime for tmpTime in timecloselist] 147 | for taskindex in range(len(timecloselist)): 148 | for tmpformertask in formertaskdictlist: 149 | 'debug the time cut the time into network time and cpu time' 150 | if int(tmpformertask['timecost'][taskindex][0]) != 0: 151 | tmpTimeCost[taskindex] = tmpformertask['timecost'][taskindex] 152 | break 153 | # if int(tmpformertask['timecost'][taskindex]) != 0: 154 | # tmpTimeCost[taskindex] = tmpformertask['timecost'][taskindex] 155 | # break 156 | timecloselist = tmpTimeCost 157 | timecloselist[int(taskid)][0] = time.time() 158 | if len(formertaskdictlist) == 1: 159 | inputdatalist = inputdatalist[0] 160 | print("operation id is: {0} and shape of input is {1}".format(operationid, np.shape(inputdatalist))) 161 | output = excuteagent.excute(operationid, inputdatalist) 162 | timecloselist[int(taskid)][1] = time.time() 163 | # app.logger.info("任务{0}已经完成 nexttasklist 为: {1} 输出为 {2}".format(taskid, nexttasklist, np.shape(output))) 164 | 165 | # 判断是不是最后一个任务 166 | if len(nexttasklist) == 1 and int(nexttasklist[0]) == -1: 167 | tmpnewtask = produce_newtask(taskid, timecloselist, taskid, output, requestdeviceid, applicationid, 168 | offloadingpolicyid) 169 | sendFinal(requestdeviceid, localdeviceid, tmpnewtask) 170 | else: 171 | # 生成新的任务 172 | for tmp in nexttasklist: 173 | tmpnewtask = produce_newtask(taskid, timecloselist, tmp, output, requestdeviceid, applicationid, 174 | offloadingpolicyid) 175 | # 根据id获取应该执行的设备 176 | # 根据id获取应该执行的设备 177 | reqUrl = SendTask(requestdeviceid, applicationid, offloadingpolicyid, tmp, localdeviceid, 178 | tmpnewtask) # 发送任务到另外的服务器 179 | 180 | # app.logger.info("从 设备 {0} 发送任务 {1} 任务内容为 {2} 到设备{3} 执行完任务 {4}".format(localdeviceid, tmp, 181 | # tmpnewtask.todict(), reqUrl, 182 | # taskid)) 183 | 184 | return 'OK' 185 | 186 | 187 | 188 | @app.route('/getOffloadingPolicy', methods=['POST']) 189 | def getoffloadingpolicy(): 190 | import json 191 | # 从数据请求中获取 应用设备id 应用id 调度策略id 192 | tmpoffloadingpolicydict = json.loads(request.get_data().decode('utf-8')) 193 | tmpoffloadingpolicydict = tmpoffloadingpolicydict['sendmsgcontent'] 194 | applicationdeviceid = tmpoffloadingpolicydict['requestdeviceid'] 195 | applicationid = tmpoffloadingpolicydict['applicationid'] 196 | offloadingpolicyid = tmpoffloadingpolicydict['offloadingpolicyid'] 197 | 198 | # 从离线数据中获取迁移策略 199 | offloadingpolicylist = getoffloadingpolicyinfo(taskid=-1, requestdeviceid=applicationdeviceid, applicationid=applicationid, 200 | offloadingpolicyid=offloadingpolicyid) 201 | offloadingpolicylist = [tmp.todict() for tmp in offloadingpolicylist] 202 | 203 | # 返回offloading策略 204 | return json.dumps(offloadingpolicylist, cls=MyEncoder) 205 | 206 | 207 | @app.route('/getInternetInfo', 
methods=['POST']) 208 | def getinternetinfo(): 209 | import json 210 | # 从离线数据读取网络信息 211 | networkinfolist = getnetworkinfo(-1) 212 | 213 | # 返回信息 214 | networkinfolist = [tmp.todict() for tmp in networkinfolist] 215 | 216 | return json.dumps(networkinfolist, cls=MyEncoder) 217 | 218 | @app.route('/updateInternetInfo', methods=['POST']) 219 | def updateinternetinfo(): 220 | import json 221 | # 读取网络信息 222 | data = json.loads(request.get_data()) 223 | data = data['sendmsgcontent'] 224 | 225 | # 将网络信息写入到离线文件当中 226 | writenetworkinfo(data) 227 | 228 | return "更新成功" 229 | 230 | 231 | @app.route('/getApplicationInfo', methods=['POST']) 232 | def getApplicationInfo(): 233 | import json 234 | 235 | data = json.loads(request.get_data().decode(encoding='utf-8')) 236 | 237 | # 获取本设备的设备编号 238 | senddeviceid = data['senddeviceid'] 239 | 240 | # 获取需要获取的应用id 241 | tmpapplication = data['sendmsgcontent'] 242 | applicationid = tmpapplication['applicationid'] 243 | 244 | # 处理器进行处理 读取离线数据 转成json格式 进行发送 245 | applicationdict = getapplicationdict(senddeviceid, applicationid) 246 | 247 | applicationobject = application.initfromdict(applicationdict) 248 | 249 | return applicationobject.tostring() 250 | 251 | 252 | @app.route('/getFinalResult', methods=['POST']) 253 | def getFinalResult(): 254 | import json 255 | 256 | data = json.loads(request.get_data()) 257 | data = data['sendmsgcontent'] 258 | 259 | tmpapplicationid = data['applicationid'] 260 | tmprequestdeviceid = data['requestdeviceid'] 261 | tmpoffloadingpolicyid = data['offloadingpolicyid'] 262 | tmpinputdata = data['inputdata'] 263 | tmptimecostlist = data['timecostlist'] 264 | 265 | # app.logger.info("应用编号{0}\t请求设备号{1}\t调度号{2}\t返回结果为{3}\t时间花费为{4} 完成任务".format(tmpapplicationid, tmprequestdeviceid, 266 | # tmpoffloadingpolicyid, tmpinputdata,tmptimecostlist)) 267 | if __name__ == "__main__": 268 | print("Begin the app run") 269 | app.run(host='0.0.0.0', port=8001, debug=True, threaded=True) -------------------------------------------------------------------------------- /edge/process/processor.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | @author: longxin 4 | @version: 1.0 5 | @date: 6 | @changeVersion: 7 | @changeAuthor: 8 | @description: 9 | ''' 10 | from model.models import * 11 | from model.record import * 12 | from network.client import * 13 | def processor_dojob(task): 14 | pass 15 | 16 | def processor_getoffloadingpolicy(): 17 | pass 18 | 19 | def processor_getinternetinfo(): 20 | pass 21 | 22 | def processor_updateinternetinfo(): 23 | pass 24 | 25 | def processor_getapplicationinfo(): 26 | pass 27 | 28 | def produce_newtask(thistaskid, thistimecostlist, newtaskid,outputdata, requestdeviceid, applicatonid, offloadingpolicyid): 29 | ''' 30 | 生成新的任务 31 | :param thistaskid: 已经完成的任务id 32 | :param outputdata: 完成任务输出的大小 33 | :param requestdeviceid: 应用请求设备的id 34 | :param applicatonid: 应用编号 35 | :param offloadingpolicyid: 迁移策略id 36 | :return: 37 | ''' 38 | tmprequestdeviceid = requestdeviceid # 请求设备id与上一个设备相同 39 | tmpapplicationid = applicatonid # 应用编号与上一个任务相同 40 | tmpoffloadingpolicyid = offloadingpolicyid # 调度策略与上一个任务相同 41 | tmptaskid = newtaskid 42 | 43 | # 通过查询应用信息获取该任务的操作编号 44 | tmpapplcation = getapplicationdict(requestdeviceid, applicatonid) 45 | 46 | if tmpapplcation == None: 47 | sendApplicationRequest(requestdeviceid, applicatonid) # 客户端发送应用请求信息 48 | print("由于应用信息不存在,向设备{0}发送请求应用{1}信息 更新设备信息".format(requestdeviceid, applicatonid)) 49 | tmpapplcation = 
getapplicationdict(requestdeviceid, applicatonid) 50 | 51 | tmpinputdata = outputdata 52 | 53 | tmpformertask = [thistaskid] 54 | 55 | 56 | # 根据应用信息获得nexttask operationid 57 | tmptaskidlist = tmpapplcation['taskidlist'] 58 | tmptaskidindex = 0 59 | for i in range(len(tmptaskidlist)): 60 | # print("Type of the tmptaskidlist is{0} Type of the tmptaskid is{1}".format(type(tmptaskidlist[i]), type(tmptaskid))) 61 | if int(tmptaskidlist[i]) == int(tmptaskid): 62 | tmptaskidindex = i 63 | break 64 | tmpnexttasklist = tmpapplcation['nexttasklist'][tmptaskidindex] 65 | tmpoperationid = tmpapplcation['operationidlist'][tmptaskidindex] 66 | 67 | tmptimecostlist = thistimecostlist 68 | 69 | tmptask = task(tmprequestdeviceid, tmpapplicationid, tmpoffloadingpolicyid, tmptaskid, tmpoperationid, tmpinputdata, 70 | tmpformertask, tmpnexttasklist, tmptimecostlist) 71 | 72 | return tmptask 73 | 74 | def gettaskFormertask(requestdeviceid, applicationid, taskid): 75 | ''' 76 | 获取特定任务的前置任务 77 | :param requestdeviceid: 应用请求设备编号 78 | :param applicationid: 应用编号 79 | :param taskid: 任务编号 80 | :return: 81 | ''' 82 | tmpapplication = getapplicationdict(requestdeviceid, applicationid) 83 | 84 | if tmpapplication == None: 85 | sendApplicationRequest(requestdeviceid, applicationid) 86 | tmpapplication = getapplicationdict(requestdeviceid, applicationid) 87 | 88 | tmptaskidlist = tmpapplication['taskidlist'] 89 | # tmptaskindex = lambda i: int(tmptaskidlist[i])==int(taskid) 90 | tmptaskindex = 0 91 | for i in range(len(tmptaskidlist)): 92 | if int(tmptaskidlist[i])==int(taskid): 93 | tmptaskindex = i 94 | break 95 | return tmpapplication['formertasklist'][tmptaskindex] -------------------------------------------------------------------------------- /edge/utils.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | @author: longxin 4 | @version: 1.0 5 | @date: 6 | @changeVersion: 7 | @changeAuthor: 8 | @description: 9 | ''' 10 | def getRandomId(): 11 | import random 12 | return random.randint(0, 200000) -------------------------------------------------------------------------------- /iot/Executer/excuteVgg16.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | @author: longxin 4 | @version: 1.0 5 | @date: 6 | @changeVersion: 7 | @changeAuthor: 8 | @description: vgg16的执行 9 | ''' 10 | class operation: 11 | 12 | def __init__(self, operation_id, generate_operation_model, input_shape,weights_model): 13 | import numpy as np 14 | 15 | self.operation_id = operation_id 16 | self.operation_model = generate_operation_model(input_shape) 17 | self.input_shape = input_shape 18 | 19 | 'load the weight' 20 | for layer in self.operation_model.layers: 21 | try: 22 | if weights_model.get_layer(layer.name) != None: 23 | layer.set_weights(weights_model.get_layer(name=layer.name).get_weights()) 24 | except Exception as e: 25 | print("cannot find the layer {0} in the vgg model and exception is {1}".format(layer.name, 26 | e)) 27 | pass 28 | if type(input_shape) == list: 29 | testdata = [] 30 | for tmp in input_shape: 31 | testdata.append([np.zeros(shape=tmp, dtype=np.float32)]) 32 | self.operation_model.predict(testdata) 33 | pass 34 | else: 35 | self.operation_model.predict(np.array([np.zeros(shape=input_shape, dtype=np.float32)])) 36 | 37 | def excute(self, input): 38 | import numpy as np 39 | 40 | x_input = input 41 | # if np.shape(input)[0] == self.input_shape[0]: 42 | # x_input = [input] 43 | if 
type(self.input_shape) != list: 44 | x_input = np.array(x_input) 45 | 46 | if type(self.input_shape) == list: 47 | input_data = [] 48 | print("the raw shape of the input is {0} of operation {1}".format(np.shape(input), 49 | self.operation_id)) 50 | for i in range(len(self.input_shape)): 51 | print("operation {0} the input shape is {1}".format(self.operation_id, 52 | np.shape(input[i]))) 53 | # x_input.append(np.array(input[i])) 54 | input_data.append(input[i]) 55 | print("operation {0} the input shape is {1}".format(self.operation_id, 56 | np.shape(input_data[i]))) 57 | 58 | print("the operation {0} input shape is:{1}".format(self.operation_id, np.shape(x_input))) 59 | embedding = self.operation_model.predict(input_data) 60 | return embedding 61 | print("the operation {0} input shape is:{1}".format(self.operation_id, np.shape(x_input))) 62 | embedding = self.operation_model.predict(x_input) 63 | print("the operation {0} output shape is {1}".format(self.operation_id, np.shape(embedding))) 64 | 65 | return embedding 66 | pass 67 | 68 | class excuteVgg16: 69 | 70 | def __func0__(self, input_shape): 71 | from keras.models import Model 72 | from keras.layers import Flatten 73 | from keras.layers import Dense 74 | from keras.layers import Input 75 | from keras.layers import Conv2D 76 | from keras.layers import MaxPooling2D 77 | from keras.utils.data_utils import get_file 78 | 79 | img_input = Input(shape=input_shape) 80 | 81 | # Block 1 82 | x = Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv1')(img_input) 83 | x = Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv2')(x) 84 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block1_pool')(x) 85 | 86 | model = Model(inputs=img_input, outputs=x) 87 | return model 88 | 89 | def __func1__(self, input_shape): 90 | from keras.layers import Flatten 91 | from keras.layers import Dense 92 | from keras.layers import Input 93 | from keras.layers import Conv2D 94 | from keras.layers import MaxPooling2D 95 | from keras.utils.data_utils import get_file 96 | from keras.models import Model 97 | 98 | 99 | input = Input(shape=input_shape) 100 | # Block 2 101 | x = Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv1')(input) 102 | x = Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv2')(x) 103 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block2_pool')(x) 104 | 105 | model = Model(inputs=input, outputs=x) 106 | return model 107 | 108 | def __func2__(self, input_shape): 109 | from keras.layers import Flatten 110 | from keras.layers import Dense 111 | from keras.layers import Input 112 | from keras.layers import Conv2D 113 | from keras.layers import MaxPooling2D 114 | from keras.utils.data_utils import get_file 115 | from keras.models import Model 116 | 117 | input = Input(shape=input_shape) 118 | # Block 3 119 | x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv1')(input) 120 | x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv2')(x) 121 | x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv3')(x) 122 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block3_pool')(x) 123 | 124 | model = Model(inputs=input, outputs=x) 125 | return model 126 | 127 | def __func3__(self, input_shape): 128 | from keras.layers import Flatten 129 | from keras.layers import Dense 130 | from keras.layers import Input 131 | from keras.layers import Conv2D 132 | from keras.layers import MaxPooling2D 133 | from 
keras.utils.data_utils import get_file 134 | from keras.models import Model 135 | 136 | input = Input(shape=input_shape) 137 | # Block 4 138 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv1')(input) 139 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv2')(x) 140 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv3')(x) 141 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block4_pool')(x) 142 | 143 | model = Model(inputs=input, outputs=x) 144 | return model 145 | 146 | def __func4__(self, input_shape): 147 | from keras.layers import Flatten 148 | from keras.layers import Dense 149 | from keras.layers import Input 150 | from keras.layers import Conv2D 151 | from keras.layers import MaxPooling2D 152 | from keras.utils.data_utils import get_file 153 | from keras.models import Model 154 | 155 | input = Input(shape=input_shape) 156 | # Block 5 157 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv1')(input) 158 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv2')(x) 159 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv3')(x) 160 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block5_pool')(x) 161 | model = Model(inputs=input, outputs=x) 162 | return model 163 | 164 | def __func5__(self, input_shape): 165 | from keras.layers import Flatten 166 | from keras.layers import Dense 167 | from keras.layers import Input 168 | from keras.layers import Conv2D 169 | from keras.layers import MaxPooling2D 170 | from keras.utils.data_utils import get_file 171 | from keras.models import Model 172 | 173 | input = Input(shape=input_shape) 174 | # Classification block 175 | x = Flatten(name='flatten')(input) 176 | x = Dense(4096, activation='relu', name='fc1')(x) 177 | x = Dense(4096, activation='relu', name='fc2')(x) 178 | x = Dense(1000, activation='softmax', name='predictions')(x) 179 | 180 | model = Model(inputs=input, outputs=x) 181 | return model 182 | 183 | 184 | def __init__(self): 185 | from Executer.vgg16 import vgg16 186 | self.operations = [] 187 | weights_model = vgg16(input_shape=(224,224, 3), 188 | classes=1000).model 189 | 190 | operation0 = operation(0, self.__func0__, (224, 224, 3), weights_model) 191 | operation1 = operation(1, self.__func1__, (112, 112, 64), weights_model) 192 | operation2 = operation(2, self.__func2__, (56, 56, 128), weights_model) 193 | operation3 = operation(3, self.__func3__, (28, 28, 256), weights_model) 194 | operation4 = operation(4, self.__func4__, (14, 14, 512), weights_model) 195 | operation5 = operation(5, self.__func5__, (7, 7, 512), weights_model) 196 | 197 | self.operations.append(operation0) 198 | self.operations.append(operation1) 199 | self.operations.append(operation2) 200 | self.operations.append(operation3) 201 | self.operations.append(operation4) 202 | self.operations.append(operation5) 203 | 204 | 205 | def excute(self, operationid, inputdata): 206 | return self.operations[operationid].excute(inputdata) 207 | -------------------------------------------------------------------------------- /iot/Executer/excuter.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | @author: longxin 4 | @version: 1.0 5 | @date: 6 | @changeVersion: 7 | @changeAuthor: 8 | @description: 9 | ''' 10 | 11 | class operation: 12 | 13 | def __init__(self, operationid, operationfuction): 14 | self.operationid = operationid 15 | 
self.operationfunction = operationfuction 16 | 17 | def checkid(self, operationid): 18 | if self.operationid == operationid: 19 | return True 20 | else: 21 | return False 22 | 23 | def getid(self): 24 | return self.operationid 25 | 26 | def excute(self, inputdata): 27 | return self.operationfunction(inputdata) 28 | 29 | 30 | class ExecuteAgent: 31 | 32 | # 定义函数 33 | def __func0__(self, input): 34 | tmp = input[0] + 1 35 | return tmp 36 | 37 | def __func1__(self, input): 38 | tmp = input[0] - 1 39 | return tmp 40 | 41 | def __func2__(self, input): 42 | tmp = input[0] * 2 43 | return tmp 44 | 45 | def __func3__(self, input): 46 | tmp = input[0] * input[1] 47 | return tmp 48 | 49 | def __func4__(self, input): 50 | tmp = input[0] * 0.5 51 | return tmp 52 | 53 | def __func5__(self, input): 54 | tmp = input[0] * input[1] 55 | return tmp 56 | 57 | def __func6__(self, input): 58 | tmp = input[0] + input[1] 59 | return tmp 60 | 61 | 62 | 63 | def __init__(self): 64 | self.operations = [] 65 | 66 | operation0 = operation(0, self.__func0__) 67 | operation1 = operation(1, self.__func1__) 68 | operation2 = operation(2, self.__func2__) 69 | operation3 = operation(3, self.__func3__) 70 | operation4 = operation(4, self.__func4__) 71 | operation5 = operation(5, self.__func5__) 72 | operation6 = operation(6, self.__func6__) 73 | 74 | 75 | self.operations.append(operation0) 76 | self.operations.append(operation1) 77 | self.operations.append(operation2) 78 | self.operations.append(operation3) 79 | self.operations.append(operation4) 80 | self.operations.append(operation5) 81 | self.operations.append(operation6) 82 | 83 | 84 | def excute(self, operationid, inputdata): 85 | # #检查是否有操作id 检查输入数据格式 86 | # if int(operationid) >= len(self.operations)-1 or operationid < 0: 87 | # return None 88 | # 89 | # if not isinstance(inputdata, list): 90 | # return None 91 | 92 | return self.operations[operationid].excute(inputdata) 93 | 94 | -------------------------------------------------------------------------------- /iot/Executer/utils.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import tensorflow as tf 3 | from keras.layers import Conv2D, ZeroPadding2D, Activation 4 | from keras.layers.normalization import BatchNormalization 5 | from numpy import genfromtxt 6 | 7 | _FLOATX = 'float32' 8 | 9 | # def variable(value, dtype=_FLOATX, name=None): 10 | # v = tf.Variable(np.asarray(value, dtype=dtype), name=name) 11 | # _get_session().run(v.initializer) 12 | # return v 13 | 14 | def shape(x): 15 | return x.get_shape() 16 | 17 | def square(x): 18 | return tf.square(x) 19 | 20 | # def zeros(shape, dtype=_FLOATX, name=None): 21 | # return variable(np.zeros(shape), dtype, name) 22 | 23 | def concatenate(tensors, axis=-1): 24 | if axis < 0: 25 | axis = axis % len(tensors[0].get_shape()) 26 | return tf.concat(axis, tensors) 27 | 28 | def LRN2D(x): 29 | return tf.nn.lrn(x, alpha=1e-4, beta=0.75) 30 | 31 | def conv2d_bn( 32 | x, 33 | layer=None, 34 | cv1_out=None, 35 | cv1_filter=(1, 1), 36 | cv1_strides=(1, 1), 37 | cv2_out=None, 38 | cv2_filter=(3, 3), 39 | cv2_strides=(1, 1), 40 | padding=None, 41 | ): 42 | num = '' if cv2_out == None else '1' 43 | tensor = Conv2D(cv1_out, cv1_filter, strides=cv1_strides, name=layer+'_conv'+num)(x) 44 | tensor = BatchNormalization(axis=3, epsilon=0.00001, name=layer+'_bn'+num)(tensor) 45 | tensor = Activation('relu')(tensor) 46 | if padding == None: 47 | return tensor 48 | tensor = ZeroPadding2D(padding=padding)(tensor) 49 | if cv2_out 
== None: 50 | return tensor 51 | tensor = Conv2D(cv2_out, cv2_filter, strides=cv2_strides, name=layer+'_conv'+'2')(tensor) 52 | tensor = BatchNormalization(axis=3, epsilon=0.00001, name=layer+'_bn'+'2')(tensor) 53 | tensor = Activation('relu')(tensor) 54 | return tensor 55 | 56 | weights = [ 57 | 'conv1', 'bn1', 'conv2', 'bn2', 'conv3', 'bn3', 58 | 'inception_3a_1x1_conv', 'inception_3a_1x1_bn', 59 | 'inception_3a_pool_conv', 'inception_3a_pool_bn', 60 | 'inception_3a_5x5_conv1', 'inception_3a_5x5_conv2', 'inception_3a_5x5_bn1', 'inception_3a_5x5_bn2', 61 | 'inception_3a_3x3_conv1', 'inception_3a_3x3_conv2', 'inception_3a_3x3_bn1', 'inception_3a_3x3_bn2', 62 | 'inception_3b_3x3_conv1', 'inception_3b_3x3_conv2', 'inception_3b_3x3_bn1', 'inception_3b_3x3_bn2', 63 | 'inception_3b_5x5_conv1', 'inception_3b_5x5_conv2', 'inception_3b_5x5_bn1', 'inception_3b_5x5_bn2', 64 | 'inception_3b_pool_conv', 'inception_3b_pool_bn', 65 | 'inception_3b_1x1_conv', 'inception_3b_1x1_bn', 66 | 'inception_3c_3x3_conv1', 'inception_3c_3x3_conv2', 'inception_3c_3x3_bn1', 'inception_3c_3x3_bn2', 67 | 'inception_3c_5x5_conv1', 'inception_3c_5x5_conv2', 'inception_3c_5x5_bn1', 'inception_3c_5x5_bn2', 68 | 'inception_4a_3x3_conv1', 'inception_4a_3x3_conv2', 'inception_4a_3x3_bn1', 'inception_4a_3x3_bn2', 69 | 'inception_4a_5x5_conv1', 'inception_4a_5x5_conv2', 'inception_4a_5x5_bn1', 'inception_4a_5x5_bn2', 70 | 'inception_4a_pool_conv', 'inception_4a_pool_bn', 71 | 'inception_4a_1x1_conv', 'inception_4a_1x1_bn', 72 | 'inception_4e_3x3_conv1', 'inception_4e_3x3_conv2', 'inception_4e_3x3_bn1', 'inception_4e_3x3_bn2', 73 | 'inception_4e_5x5_conv1', 'inception_4e_5x5_conv2', 'inception_4e_5x5_bn1', 'inception_4e_5x5_bn2', 74 | 'inception_5a_3x3_conv1', 'inception_5a_3x3_conv2', 'inception_5a_3x3_bn1', 'inception_5a_3x3_bn2', 75 | 'inception_5a_pool_conv', 'inception_5a_pool_bn', 76 | 'inception_5a_1x1_conv', 'inception_5a_1x1_bn', 77 | 'inception_5b_3x3_conv1', 'inception_5b_3x3_conv2', 'inception_5b_3x3_bn1', 'inception_5b_3x3_bn2', 78 | 'inception_5b_pool_conv', 'inception_5b_pool_bn', 79 | 'inception_5b_1x1_conv', 'inception_5b_1x1_bn', 80 | 'dense_layer' 81 | ] 82 | 83 | conv_shape = { 84 | 'conv1': [64, 3, 7, 7], 85 | 'conv2': [64, 64, 1, 1], 86 | 'conv3': [192, 64, 3, 3], 87 | 'inception_3a_1x1_conv': [64, 192, 1, 1], 88 | 'inception_3a_pool_conv': [32, 192, 1, 1], 89 | 'inception_3a_5x5_conv1': [16, 192, 1, 1], 90 | 'inception_3a_5x5_conv2': [32, 16, 5, 5], 91 | 'inception_3a_3x3_conv1': [96, 192, 1, 1], 92 | 'inception_3a_3x3_conv2': [128, 96, 3, 3], 93 | 'inception_3b_3x3_conv1': [96, 256, 1, 1], 94 | 'inception_3b_3x3_conv2': [128, 96, 3, 3], 95 | 'inception_3b_5x5_conv1': [32, 256, 1, 1], 96 | 'inception_3b_5x5_conv2': [64, 32, 5, 5], 97 | 'inception_3b_pool_conv': [64, 256, 1, 1], 98 | 'inception_3b_1x1_conv': [64, 256, 1, 1], 99 | 'inception_3c_3x3_conv1': [128, 320, 1, 1], 100 | 'inception_3c_3x3_conv2': [256, 128, 3, 3], 101 | 'inception_3c_5x5_conv1': [32, 320, 1, 1], 102 | 'inception_3c_5x5_conv2': [64, 32, 5, 5], 103 | 'inception_4a_3x3_conv1': [96, 640, 1, 1], 104 | 'inception_4a_3x3_conv2': [192, 96, 3, 3], 105 | 'inception_4a_5x5_conv1': [32, 640, 1, 1,], 106 | 'inception_4a_5x5_conv2': [64, 32, 5, 5], 107 | 'inception_4a_pool_conv': [128, 640, 1, 1], 108 | 'inception_4a_1x1_conv': [256, 640, 1, 1], 109 | 'inception_4e_3x3_conv1': [160, 640, 1, 1], 110 | 'inception_4e_3x3_conv2': [256, 160, 3, 3], 111 | 'inception_4e_5x5_conv1': [64, 640, 1, 1], 112 | 'inception_4e_5x5_conv2': [128, 64, 5, 5], 113 | 
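# Shape convention for the entries above and below: [out_channels, in_channels,
# kernel_h, kernel_w], matching the flattened CSV weight files. load_weights()
# and load_weights_by_name() below reshape each 'conv' weight to this shape and
# then apply np.transpose(conv_w, (2, 3, 1, 0)) to obtain Keras'
# (kernel_h, kernel_w, in_channels, out_channels) kernel layout.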
'inception_5a_3x3_conv1': [96, 1024, 1, 1], 114 | 'inception_5a_3x3_conv2': [384, 96, 3, 3], 115 | 'inception_5a_pool_conv': [96, 1024, 1, 1], 116 | 'inception_5a_1x1_conv': [256, 1024, 1, 1], 117 | 'inception_5b_3x3_conv1': [96, 736, 1, 1], 118 | 'inception_5b_3x3_conv2': [384, 96, 3, 3], 119 | 'inception_5b_pool_conv': [96, 736, 1, 1], 120 | 'inception_5b_1x1_conv': [256, 736, 1, 1], 121 | } 122 | 123 | def load_weights_by_name(name): 124 | import os 125 | # Set weights path 126 | local_dir_path = r'/home/pi/Desktop/IoT/Executer' 127 | dirPath = os.path.join(local_dir_path, 'weights') 128 | fileNames = filter(lambda f: not f.startswith('.'), os.listdir(dirPath)) 129 | paths = {} 130 | weights_dict = {} 131 | 132 | for n in fileNames: 133 | paths[n.replace('.csv', '')] = dirPath + '/' + n 134 | 135 | 136 | if 'conv' in name: 137 | conv_w = genfromtxt(paths[name + '_w'], delimiter=',', dtype=None) 138 | conv_w = np.reshape(conv_w, conv_shape[name]) 139 | conv_w = np.transpose(conv_w, (2, 3, 1, 0)) 140 | conv_b = genfromtxt(paths[name + '_b'], delimiter=',', dtype=None) 141 | weights_dict[name] = [conv_w, conv_b] 142 | elif 'bn' in name: 143 | bn_w = genfromtxt(paths[name + '_w'], delimiter=',', dtype=None) 144 | bn_b = genfromtxt(paths[name + '_b'], delimiter=',', dtype=None) 145 | bn_m = genfromtxt(paths[name + '_m'], delimiter=',', dtype=None) 146 | bn_v = genfromtxt(paths[name + '_v'], delimiter=',', dtype=None) 147 | weights_dict[name] = [bn_w, bn_b, bn_m, bn_v] 148 | elif 'dense' in name: 149 | dense_w = genfromtxt(dirPath + '/dense_w.csv', delimiter=',', dtype=None) 150 | dense_w = np.reshape(dense_w, (128, 736)) 151 | dense_w = np.transpose(dense_w, (1, 0)) 152 | dense_b = genfromtxt(dirPath + '/dense_b.csv', delimiter=',', dtype=None) 153 | weights_dict[name] = [dense_w, dense_b] 154 | 155 | return weights_dict 156 | 157 | def load_weights(): 158 | import os 159 | # Set weights path 160 | local_dir_path = r'/home/pi/Desktop/IoT/Executer' 161 | dirPath = os.path.join(local_dir_path, 'weights') 162 | fileNames = filter(lambda f: not f.startswith('.'), os.listdir(dirPath)) 163 | paths = {} 164 | weights_dict = {} 165 | 166 | for n in fileNames: 167 | paths[n.replace('.csv', '')] = dirPath + '/' + n 168 | 169 | for name in weights: 170 | if 'conv' in name: 171 | conv_w = genfromtxt(paths[name + '_w'], delimiter=',', dtype=None) 172 | conv_w = np.reshape(conv_w, conv_shape[name]) 173 | conv_w = np.transpose(conv_w, (2, 3, 1, 0)) 174 | conv_b = genfromtxt(paths[name + '_b'], delimiter=',', dtype=None) 175 | weights_dict[name] = [conv_w, conv_b] 176 | elif 'bn' in name: 177 | bn_w = genfromtxt(paths[name + '_w'], delimiter=',', dtype=None) 178 | bn_b = genfromtxt(paths[name + '_b'], delimiter=',', dtype=None) 179 | bn_m = genfromtxt(paths[name + '_m'], delimiter=',', dtype=None) 180 | bn_v = genfromtxt(paths[name + '_v'], delimiter=',', dtype=None) 181 | weights_dict[name] = [bn_w, bn_b, bn_m, bn_v] 182 | elif 'dense' in name: 183 | dense_w = genfromtxt(dirPath+'/dense_w.csv', delimiter=',', dtype=None) 184 | dense_w = np.reshape(dense_w, (128, 736)) 185 | dense_w = np.transpose(dense_w, (1, 0)) 186 | dense_b = genfromtxt(dirPath+'/dense_b.csv', delimiter=',', dtype=None) 187 | weights_dict[name] = [dense_w, dense_b] 188 | 189 | return weights_dict 190 | 191 | -------------------------------------------------------------------------------- /iot/Executer/vgg16.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | 
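Builds the full Keras VGG16 network (ImageNet weights fetched with
keras.utils.data_utils.get_file) and serves as the weight donor for the
partitioned operations in excuteVgg16.py, which copy layer weights from this
model by layer name.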
@author: longxin 4 | @version: 1.0 5 | @date: 6 | @changeVersion: 7 | @changeAuthor: 8 | @description: 9 | ''' 10 | 11 | class utils_vgg16: 12 | 13 | @classmethod 14 | def load_weight(cls, vgg_model, model): 15 | 16 | for layer in model.layers: 17 | try: 18 | if vgg_model.get_layer(layer.name) != None: 19 | layer.set_weights(vgg_model.get_layer(name=layer.name).get_weights()) 20 | except Exception as e: 21 | print("cannot find the layer {0} in the vgg model and exception is {1}".format(layer.name, 22 | e)) 23 | pass 24 | 25 | class vgg16: 26 | 27 | def __init__(self, input_shape, classes): 28 | from keras.models import Model 29 | from keras.layers import Flatten 30 | from keras.layers import Dense 31 | from keras.layers import Input 32 | from keras.layers import Conv2D 33 | from keras.layers import MaxPooling2D 34 | from keras.utils.data_utils import get_file 35 | 36 | WEIGHTS_PATH = 'https://github.com/fchollet/deep-learning-models/releases/download/v0.1/vgg16_weights_tf_dim_ordering_tf_kernels.h5' 37 | 38 | img_input = Input(shape=input_shape) 39 | 40 | # Block 1 41 | x = Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv1')(img_input) 42 | x = Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv2')(x) 43 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block1_pool')(x) 44 | 45 | # Block 2 46 | x = Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv1')(x) 47 | x = Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv2')(x) 48 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block2_pool')(x) 49 | 50 | # Block 3 51 | x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv1')(x) 52 | x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv2')(x) 53 | x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv3')(x) 54 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block3_pool')(x) 55 | 56 | # Block 4 57 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv1')(x) 58 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv2')(x) 59 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv3')(x) 60 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block4_pool')(x) 61 | 62 | # Block 5 63 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv1')(x) 64 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv2')(x) 65 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv3')(x) 66 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block5_pool')(x) 67 | 68 | # Classification block 69 | x = Flatten(name='flatten')(x) 70 | x = Dense(4096, activation='relu', name='fc1')(x) 71 | x = Dense(4096, activation='relu', name='fc2')(x) 72 | x = Dense(classes, activation='softmax', name='predictions')(x) 73 | 74 | self.model = Model(inputs=img_input, output=x, name='vgg16') 75 | 76 | 'load model weights' 77 | weights_path = get_file('vgg16_weights_tf_dim_ordering_tf_kernels.h5', 78 | WEIGHTS_PATH, 79 | cache_subdir='models') 80 | self.model.load_weights(weights_path) 81 | 82 | 83 | def plot_model(self): 84 | from keras.utils import plot_model 85 | 86 | plot_model(model=self.model, to_file='modelvgg16.png', show_shapes=True, show_layer_names=True) 87 | 88 | if __name__ == "__main__": 89 | import numpy as np 90 | from keras.models import Model 91 | from keras.layers import Flatten 92 | from keras.layers import Dense 93 | from keras.layers 
import Input 94 | from keras.layers import Conv2D 95 | from keras.layers import MaxPooling2D 96 | from keras.utils.data_utils import get_file 97 | from keras.preprocessing import image 98 | from keras.applications.imagenet_utils import decode_predictions 99 | from keras.applications.imagenet_utils import preprocess_input 100 | 101 | img_input = Input(shape=(224, 224, 3)) 102 | 103 | # Block 1 104 | x = Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv1')(img_input) 105 | x = Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv2')(x) 106 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block1_pool')(x) 107 | 108 | # Block 2 109 | x = Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv1')(x) 110 | x = Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv2')(x) 111 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block2_pool')(x) 112 | 113 | # Block 3 114 | x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv1')(x) 115 | x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv2')(x) 116 | x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv3')(x) 117 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block3_pool')(x) 118 | 119 | # Block 4 120 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv1')(x) 121 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv2')(x) 122 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv3')(x) 123 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block4_pool')(x) 124 | 125 | # Block 5 126 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv1')(x) 127 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv2')(x) 128 | x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv3')(x) 129 | x = MaxPooling2D((2, 2), strides=(2, 2), name='block5_pool')(x) 130 | 131 | # Classification block 132 | x = Flatten(name='flatten')(x) 133 | x = Dense(4096, activation='relu', name='fc1')(x) 134 | x = Dense(4096, activation='relu', name='fc2')(x) 135 | x = Dense(1000, activation='softmax', name='predictions')(x) 136 | 137 | model = Model(inputs=img_input, outputs=x) 138 | vgg16model = vgg16(input_shape=(224, 224, 3), classes=1000) 139 | utils_vgg16.load_weight(vgg16model.model, model) 140 | 141 | # 'try to predict' 142 | # input_data = np.array([np.zeros(shape=(224, 224, 3))]) 143 | # output = model.predict(input_data) 144 | # 145 | # print("the output is ", output) 146 | 147 | img_path = 'elephant.jpg' 148 | img = image.load_img(img_path, target_size=(224, 224)) 149 | x = image.img_to_array(img) 150 | x = np.expand_dims(x, axis=0) 151 | x = preprocess_input(x) 152 | print('Input image shape:', x.shape) 153 | 154 | preds = model.predict(x) 155 | print('Predicted:', decode_predictions(preds)) 156 | preds_resnet50 = vgg16model.model.predict(x) 157 | print('Predicted:', decode_predictions(preds_resnet50)) 158 | 159 | 160 | 161 | 162 | 163 | 164 | -------------------------------------------------------------------------------- /iot/README.md: -------------------------------------------------------------------------------- 1 | # distribut 2 | -- ed-deep-learning/IoT 3 | 4 | 分布式神经网络在嵌入式设备上需要运行的代码 5 | 6 | ## Bug 日志 7 | ### 2018-12-07 8 | 在IoT端的Server中有以下的错误 9 | * 在dojob接口中,nexttask被用错了,nexttask是交给我执行的任务的nexttask,而不是该任务的前置任务 10 | * 在dojob接口中,formertask是前面完成任务的id,而不是需要完成任务的formertasklist 11 | * 
在dojob接口中,对于最后一个任务的处理需要注意。 12 | 13 | 14 | ### 2018-12-08 15 | 在Iot端上的错误仔细思考发现有一些是没有错的 16 | * 在dojob接口中,nexttask并没有被用错 17 | 18 | ### 2018-12-14 19 | 今天最终的任务结果已经跑出来来了 但是发现当初设计中存在的问题,第一个是 20 | 等待前置任务的时候任务完成时间没有记录的问题,所以在当有多个前置任务,获得前置 21 | 任务的过程过程中我们需要将前置任务的任务执行时间也是需要合并的,并不是只合并 22 | 输入数据。第二个就是,使用http服务器虽然可以简化网络层的设计,但是同样也存在 23 | 问题,问题就是数据发送方必须等待服务器将结果返回才能发送下个任务,这个过程中会有 24 | 等待的时延,显然这个时延试不必要的,所以解决办法是,将返回结果没有意义的函数直接 25 | 抽象出来,然后使用多线程进行执行,That is a great idea~~~ 26 | 27 | 28 | 总的来说主要的Bug分为两个: 29 | * 前置任务的完成时间的合并问题 30 | * 因为Http请求导致等待的问题,我们需要一个异步信息机制,也就是我们根本不需要等待 31 | 返回的请求,或者说我们对返回结果没有时间要求 32 | 33 | 34 | ### 2018-12-15 35 | 下面来说主要进行两个方面的测试:第一个方面就是使用keras,使用的是不同层,使用现有的 36 | 网络模型进行测试,然后做出可视化界面,看看时间上的问题,进行offloading加速的速 37 | 率问题。下面一步就是找一个dependcy更加强的模型进行应用,查看offloading的效果, 38 | 刚刚突然想到一个idea就是在多个个设备的情况,如何要对这两个数据进行协同的话, 39 | 我们如何在offloading的时候同时考虑数据的协同,这是一个问题。总的来说接下来的工作 40 | 安排是这样的: 41 | * 第一步: 使用线性的模型,做出可视化的界面 42 | * 第二步: 使用dependency的模型,进行测试 43 | * 第三步: 思考多设备情况下的系统表现 44 | 45 | ## 思考 46 | ### 2018-12-07 47 | 仔细思考,其实IoT端的程序,Edge端的程序,远程云端的程序在代码上其实没有什么区别, 48 | 三者的代码应该是可以通用的,但是还需要进行下一个深入的测试,可能当初在写 49 | 某些函数的时候想着三者代码会不一样,所以异常处理可能有些问题。 50 | Iot端的程序主要是用来生成应用,进行调度的主题。突然想到一问题就是: 51 | * 调度角色在不同的设备上进行运行的时候,系统会有怎样的表现 52 | * 应用的生成不一定全是在IoT端,也可以在Edge端,也可以在云端。一般来说数据的采 53 | 者往往也是应用的产生者,IoT可以成为数据的来源,但是其他设备也是可以成为数据的 54 | 来源,我们可以假设有这样一种情形: Iot应用的数据来源不仅需要来自本省产生的数据, 55 | 也需要来自Edge的数据,那我们应该如何进行调度? 56 | * 数据流的调度和计算能力的调度是否可以进行分离 两者之间是否可以产生 57 | 某种联系,某种关系 58 | 59 | ### 2018-12-14 60 | 仔细思考,如果客户端对服务端的请求返回的结果并没有直接的动作响应,或者说对于服务端 61 | 的请求结果返回我们可以等待更多的时间,那我们该如何编写请求处理? 62 | * 第一种办法是使用多线程,将请求处理放到线程当中 63 | * 第二种办法就是使用异步信息机制,这个是我们目前还没有用过的,I need a try!!! 64 | 65 | 66 | ### 2018-12-15 67 | 对于上面的异步信息机制,已经使用多线程得到解决,每次发送任务的时候,对于不需要 68 | 及时获得信息的任务使用线程的方式进行发送,做到异步信息处理,这样不必等待请回回答 69 | 就可以执行下一步的操作。 70 | 71 | 对于多设备,多数据情况下的offloading问题,如何根据需要多数据协同设计一个比较 72 | 适用的offloading算法,这是一个问题 -------------------------------------------------------------------------------- /iot/elephant.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Derfei/task-merging/0002b5b0c62bc4c4cc8f754474d9c750ccf026e4/iot/elephant.jpg -------------------------------------------------------------------------------- /iot/log_100328.txt: -------------------------------------------------------------------------------- 1 | [[1571757622.3199124, 1571757622.35398], [1571757622.7262022, 1571757622.7374132], [1571757622.8634048, 1571757622.8738673], [1571757622.9830923, 1571757623.004926], [1571757623.1234894, 1571757623.1395001], [1571757623.4793797, 1571757623.499022], [1571757623.8777192, 1571757623.8864808], [1571757623.9403064, 1571757623.946769], [1571757624.0366786, 1571757624.0421996], [1571757623.9870372, 1571757623.9925551], [1571757624.1316543, 1571757624.1356235], [1571757624.275561, 1571757624.2833626], [1571757624.327795, 1571757624.3355005], [1571757624.3807197, 1571757624.3856761], [1571757624.477584, 1571757624.4800391], [1571757624.5841913, 1571757624.5885623], [1571757624.6177025, 1571757624.6227698], [1571757624.6877005, 1571757624.691387], [1571757624.6530683, 1571757624.6580515], [1571757624.7596612, 1571757624.7621331], [1571757624.9144225, 1571757624.9191031], [1571757624.839484, 1571757624.8432255], [1571757624.865966, 1571757624.8688028], [1571757624.988275, 1571757624.9942214], [1571757625.0962803, 1571757625.103646], [1571757625.1338294, 1571757625.1408737], [1571757625.181726, 1571757625.1896825], [1571757625.2409503, 1571757625.2466223], [1571757625.33155, 1571757625.3334239], 
[1571757625.3598974, 1571757625.3654993], [1571757625.3988924, 1571757625.406246], [1571757625.4451547, 1571757625.4518588], [1571757625.5098858, 1571757625.5111806], [1571757622.0560138, 1571757622.0560138]] 2 | -------------------------------------------------------------------------------- /iot/log_100353.txt: -------------------------------------------------------------------------------- 1 | [[1571751720.3496287, 1571751720.3807344], [1571751720.5363073, 1571751720.5430229], [1571751720.6389937, 1571751720.6431293], [1571751720.5926325, 1571751720.5960429], [1571751720.6904366, 1571751720.694447], [1571751720.7733116, 1571751720.7766814], [1571751720.922558, 1571751720.930088], [1571751720.9749687, 1571751720.9807749], [1571751721.0276544, 1571751721.0322325], [1571751721.0714977, 1571751721.0762742], [1571751721.1774807, 1571751721.1814532], [1571751721.3155155, 1571751721.3232865], [1571751721.3697362, 1571751721.3751578], [1571751721.4188192, 1571751721.4239273], [1571751721.5078323, 1571751721.5101686], [1571751721.5974987, 1571751721.6043205], [1571751721.6825461, 1571751721.6862195], [1571751721.7163384, 1571751721.7196221], [1571751721.648758, 1571751721.6531167], [1571751721.7813642, 1571751721.7838504], [1571751721.9006357, 1571751721.9052963], [1571751721.9295733, 1571751721.933288], [1571751721.9565306, 1571751721.9593935], [1571751721.99641, 1571751721.998068], [1571751722.0548983, 1571751722.0584414], [1571751722.088738, 1571751722.093063], [1571751722.073187, 1571751722.0756974], [1571751722.1361191, 1571751722.1376026], [1571751722.1942935, 1571751722.1972575], [1571751722.205691, 1571751722.2099657], [1571751722.214499, 1571751722.217842], [1571751722.283933, 1571751722.2908936], [1571751722.3463027, 1571751722.3542635], [1571751720.0603244, 1571751720.0603244]] 2 | -------------------------------------------------------------------------------- /iot/log_22315.txt: -------------------------------------------------------------------------------- 1 | [[1571801706.8428829, 1571801706.9581327], [1571801707.6378112, 1571801707.7375329], [1571801708.5438497, 1571801708.6146493], [1571801712.3165722, 1571801712.673142], [1571801715.2115045, 1571801715.338645], [1571801718.8752372, 1571801719.2350047], [1571801722.0950847, 1571801722.244924], [1571801725.7382917, 1571801726.099881], [1571801728.9432352, 1571801729.0583527], [1571801730.356245, 1571801730.4513159], [1571801732.356431, 1571801732.5428543], [1571801733.67435, 1571801733.767637], [1571801735.3919935, 1571801735.5788367], [1571801736.8135948, 1571801736.8874717], [1571801738.5525708, 1571801738.739509], [1571801739.8927367, 1571801739.9712193], [1571801741.63789, 1571801741.8214269], [1571801743.267921, 1571801743.3317742], [1571801744.0060127, 1571801744.060754], [1571801744.992553, 1571801745.0871823], [1571801745.602021, 1571801745.6499403], [1571801746.5019808, 1571801746.5978525], [1571801747.1506946, 1571801747.2078977], [1571801748.0633094, 1571801748.1576982], [1571801748.6732862, 1571801748.7172008], [1571801749.5627387, 1571801749.6559756], [1571801750.1259825, 1571801750.1742995], [1571801751.0172594, 1571801751.110837], [1571801751.6398237, 1571801751.6888635], [1571801752.5477357, 1571801752.6422036], [1571801753.2958472, 1571801753.3402927], [1571801753.6282175, 1571801753.6589122], [1571801754.1488907, 1571801754.1979423], [1571801754.450734, 1571801754.4939907], [1571801754.8582745, 1571801754.906756], [1571801755.127438, 1571801755.1673093], [1571801756.0035367, 1571801756.245733], 
[1571801756.5577188, 1571801756.605757], [1571801705.8850305, 1571801705.8850305]] 2 | -------------------------------------------------------------------------------- /iot/log_22381.txt: -------------------------------------------------------------------------------- 1 | [[1571802044.5554237, 1571802044.5899973], [1571802044.7486138, 1571802044.7548616], [1571802044.8025289, 1571802044.8065333], [1571802044.9099948, 1571802044.9134483], [1571802044.8549755, 1571802044.8590589], [1571802044.9979749, 1571802045.001244], [1571802045.1429424, 1571802045.150266], [1571802045.1937463, 1571802045.199487], [1571802045.2469947, 1571802045.2519994], [1571802045.2952275, 1571802045.3011243], [1571802045.391087, 1571802045.395013], [1571802045.5251296, 1571802045.533019], [1571802045.577781, 1571802045.583347], [1571802045.6251955, 1571802045.629867], [1571802045.7252967, 1571802045.72775], [1571802045.8428264, 1571802045.8472126], [1571802045.9150765, 1571802045.9182718], [1571802045.8765411, 1571802045.8798444], [1571802045.9449089, 1571802045.9486365], [1571802046.0419729, 1571802046.0443587], [1571802046.1291258, 1571802046.1335838], [1571802046.2124667, 1571802046.2160103], [1571802046.1759412, 1571802046.1786788], [1571802046.2547252, 1571802046.2562997], [1571802046.3170276, 1571802046.3261106], [1571802046.3507254, 1571802046.3594375], [1571802046.376886, 1571802046.3792582], [1571802046.4310346, 1571802046.4324598], [1571802046.5127044, 1571802046.5154338], [1571802046.5010095, 1571802046.5027008], [1571802046.4746354, 1571802046.485765], [1571802046.5536554, 1571802046.5602322], [1571802046.618256, 1571802046.6213272], [1571802044.3132825, 1571802044.3132825]] 2 | -------------------------------------------------------------------------------- /iot/log_controle_58952.txt: -------------------------------------------------------------------------------- 1 | [4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 2 | 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 3. 3.] 100000 3 8.84 None 3 | -------------------------------------------------------------------------------- /iot/log_controle_58967.txt: -------------------------------------------------------------------------------- 1 | [4. 4. 4. 4. 3. 3. 4. 4.] 100000 1 6.43 None 2 | -------------------------------------------------------------------------------- /iot/log_controle_98333.txt: -------------------------------------------------------------------------------- 1 | [4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 2 | 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 3. 3.] 100000 3 15.34 None 3 | -------------------------------------------------------------------------------- /iot/log_controle_98487.txt: -------------------------------------------------------------------------------- 1 | [4. 4. 4. 4. 4. 4. 3. 4.] 100000 2 0.09 None 2 | -------------------------------------------------------------------------------- /iot/log_controle_98861.txt: -------------------------------------------------------------------------------- 1 | [4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 2 | 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 3. 3.] 100000 3 7.59 None 3 | -------------------------------------------------------------------------------- /iot/log_controle_98889.txt: -------------------------------------------------------------------------------- 1 | [4. 4. 4. 4. 3. 3. 4. 4.] 
100000 1 5.3 None 2 | -------------------------------------------------------------------------------- /iot/log_controle_9927.txt: -------------------------------------------------------------------------------- 1 | [4. 4. 4. 4. 3. 3. 4. 4.] 100000 1 15.6 None 2 | -------------------------------------------------------------------------------- /iot/log_controle_99439.txt: -------------------------------------------------------------------------------- 1 | [1. 1. 1. 3. 1. 1. 1. 1. 1. 1. 1. 3. 3. 3. 3. 3. 3. 3. 3. 3. 3. 3. 3. 3. 2 | 3. 3. 3. 3. 3. 3. 3. 3. 3. 3. 3. 3. 3. 3.] 100000 3 6.43 None 3 | -------------------------------------------------------------------------------- /iot/log_controle_99494.txt: -------------------------------------------------------------------------------- 1 | [4. 4. 4. 4. 3. 3. 4. 4.] 100000 1 0.09 None 2 | -------------------------------------------------------------------------------- /iot/log_controle_99539.txt: -------------------------------------------------------------------------------- 1 | [4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 2 | 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 3. 3.] 100000 3 6.43 None 3 | -------------------------------------------------------------------------------- /iot/log_controle_99750.txt: -------------------------------------------------------------------------------- 1 | [4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 2 | 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 3. 3.] 100000 3 0.07 None 3 | -------------------------------------------------------------------------------- /iot/log_controle_99907.txt: -------------------------------------------------------------------------------- 1 | [4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 4. 2 | 4. 4. 4. 4. 4. 4. 3. 3. 3.] 
100000 4 4.47 None 3 | -------------------------------------------------------------------------------- /iot/model/detectmodel.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Derfei/task-merging/0002b5b0c62bc4c4cc8f754474d9c750ccf026e4/iot/model/detectmodel.py -------------------------------------------------------------------------------- /iot/model/files/network.txt: -------------------------------------------------------------------------------- 1 | 1 M 192.168.1.101 8000 2 | 2 E 192.168.1.103 8001 3 | 3 C 192.168.1.100 8002 4 | -------------------------------------------------------------------------------- /iot/model/models.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | @author: longxin 4 | @version: 1.0 5 | @date: 6 | @changeVersion: 7 | @changeAuthor: 8 | @description: 所有任务实体 9 | ''' 10 | import json 11 | class MyEncoder(json.JSONEncoder): 12 | 13 | def default(self, o): 14 | import numpy 15 | if isinstance(o, bytes): 16 | return str(o, encoding='utf-8') 17 | if isinstance(o, numpy.ndarray): 18 | return o.tolist() 19 | 20 | return json.JSONEncoder.default(o) 21 | 22 | class task: 23 | 24 | def __init__(self, requestdeviceid, applicationid, offloadingpolicyid, taskid, operationid, inputdata, formertasklist, 25 | nexttasklist, timecostlist): 26 | self.requestdevicdid = requestdeviceid 27 | self.applicationid = applicationid 28 | self.offloadingpolicyid = offloadingpolicyid 29 | self.taskid = taskid 30 | self.operationid = operationid 31 | self.inputdata = inputdata 32 | self.formertasklist = formertasklist 33 | self.nexttasklist = nexttasklist 34 | self.timecostlist = timecostlist 35 | 36 | @classmethod 37 | def initfromdict(cls, taskdict): 38 | tmptask = task(taskdict['requestdeviceid'], taskdict['applicationid'], taskdict['offloadingpolicyid'], taskdict['taskid'], 39 | taskdict['operationid'], taskdict['inputdata'], taskdict['formertasklist'], taskdict['nexttasklist'], taskdict['timecostlist']) 40 | return tmptask 41 | 42 | @classmethod 43 | def initfromstring(cls, taskstring): 44 | import json 45 | taskdict = json.loads(taskstring) 46 | return task.initfromdict(taskdict=taskdict) 47 | 48 | def todict(self): 49 | tmpdict = {} 50 | tmpdict['requestdeviceid'] = self.requestdevicdid 51 | tmpdict['applicationid'] = self.applicationid 52 | tmpdict['offloadingpolicyid'] = self.offloadingpolicyid 53 | tmpdict['taskid'] = self.taskid 54 | tmpdict['operationid'] = self.operationid 55 | tmpdict['inputdata'] = self.inputdata 56 | tmpdict['formertasklist'] = self.formertasklist 57 | tmpdict['nexttasklist'] = self.nexttasklist 58 | tmpdict['timecostlist'] = self.timecostlist 59 | return tmpdict 60 | 61 | def tostring(self): 62 | import json 63 | tmpdict = self.todict() 64 | return json.dumps(tmpdict, ensure_ascii=True, cls=MyEncoder).encode() 65 | 66 | class msg: 67 | ''' 68 | requestdeviceid: 代表的是发送信息的设备编号 69 | senddeviceid: 发送的目标设备的id 70 | ''' 71 | 72 | def __init__(self, requestdeviceid, senddeviceid, sendtime, sendmsgtype, sendmsgcontent): 73 | self.requestdeviceid = requestdeviceid 74 | self.senddeviceid = senddeviceid 75 | self.sendtime = sendtime 76 | self.sendmsgtype = sendmsgtype 77 | self.sendmsgcontent = sendmsgcontent 78 | 79 | @classmethod 80 | def initfromdict(cls, msgdict): 81 | tmpmsg = msg(msgdict['requestdeviceid'], msgdict['senddeviceid'], msgdict['sendtime'], msgdict['sendmsgtype'], 82 | msgdict['sendmsgcontent']) 83 | return 
tmpmsg 84 | 85 | @classmethod 86 | def initfromstring(cls, msgstring): 87 | import json 88 | msgdict = json.loads(msgstring) 89 | return msg.initfromdict(msgdict) 90 | 91 | def todict(self): 92 | msgdict = {} 93 | msgdict['requestdeviceid'] = self.requestdeviceid 94 | msgdict['senddeviceid'] = self.senddeviceid 95 | msgdict['sendtime'] = self.sendtime 96 | msgdict['sendmsgtype'] = self.sendmsgtype 97 | msgdict['sendmsgcontent'] = self.sendmsgcontent 98 | 99 | return msgdict 100 | 101 | 102 | def tostring(self): 103 | import json 104 | tmpdict = self.todict() 105 | return json.dumps(tmpdict, ensure_ascii=True, cls=MyEncoder).encode() 106 | 107 | 108 | class offloadingPolicy: 109 | 110 | def __init__(self, offloadingpolicyid, requestdeviceid, applicationid, taskid, excutedeviceid): 111 | self.offloadingpolicyid = offloadingpolicyid 112 | self.requestdeviceid = requestdeviceid 113 | self.applicationid = applicationid 114 | self.taskid = taskid 115 | self.excutedeviceid = excutedeviceid 116 | 117 | @classmethod 118 | def initfromdict(cls, offloadingpolicydict): 119 | tmpoffloadingpolicy = offloadingPolicy(offloadingpolicydict['offloadingpolicyid'], offloadingpolicydict['requestdeviceid'], 120 | offloadingpolicydict['applicationid'], offloadingpolicydict['taskid'],offloadingpolicydict['excutedeviceid']) 121 | return tmpoffloadingpolicy 122 | 123 | @classmethod 124 | def initfromstring(cls, offloadingpolicystring): 125 | import json 126 | tmpdict = json.loads(offloadingpolicystring) 127 | return offloadingPolicy.initfromdict(tmpdict) 128 | 129 | def todict(self): 130 | tmpdict = {} 131 | tmpdict['offloadingpolicyid'] = self.offloadingpolicyid 132 | tmpdict['requestdeviceid'] = self.requestdeviceid 133 | tmpdict['applicationid'] = self.applicationid 134 | tmpdict['taskid'] = self.taskid 135 | tmpdict['excutedeviceid'] = self.excutedeviceid 136 | 137 | return tmpdict 138 | 139 | def tostring(self): 140 | import json 141 | tmpdict = self.todict() 142 | return json.dumps(tmpdict, cls=MyEncoder).encode() 143 | 144 | 145 | class application: 146 | 147 | def __init__(self, requestdeviceid, applicationid, taskidlist, formertasklist, nexttasklist, operationlist): 148 | self.requestdeviceid = requestdeviceid 149 | self.applicationid = applicationid 150 | self.taskidlist = taskidlist 151 | self.formertasklist = formertasklist 152 | self.nexttasklist = nexttasklist 153 | self.operationlist = operationlist 154 | 155 | @classmethod 156 | def initfromdict(cls, applicationdict): 157 | tmpapplication = application(applicationdict['requestdeviceid'], applicationdict['applicationid'], applicationdict['taskidlist'], 158 | applicationdict['formertasklist'], applicationdict['nexttasklist'], applicationdict['operationidlist']) 159 | return tmpapplication 160 | 161 | @classmethod 162 | def initfromstring(cls, applicationstring): 163 | import json 164 | tmpdict = json.loads(applicationstring) 165 | return application.initfromdict(tmpdict) 166 | 167 | @classmethod 168 | def initfromString(cls, applicationstringlines): 169 | # 将文本中的内转换为application对象 170 | firstline = applicationstringlines[0] 171 | requestdeviceid = firstline.split()[0] 172 | applicationid = firstline.split()[1] 173 | taskidlist = [] 174 | formertasklist = [] 175 | nexttasklist = [] 176 | operationidlist = [] 177 | for line in applicationstringlines: 178 | taskidlist.append(int(line.split()[2])) 179 | formertasklist.append([int(tmp) for tmp in line.split()[3].split(',')]) 180 | nexttasklist.append([int(tmp) for tmp in line.split()[4].split(',')]) 181 | 
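# Application file layout, one whitespace/tab-separated record per line:
#   requestdeviceid  applicationid  taskid  formertaskids(comma-sep)
#   nexttaskids(comma-sep)  operationid
# requestdeviceid and applicationid are taken from the first line only; the
# remaining fields are read from every line. This mirrors the format written
# by writeapplicationinfo() in model/record.py.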
operationidlist.append(int(line.split()[5])) 182 | return application(requestdeviceid, applicationid, taskidlist, formertasklist, 183 | nexttasklist, operationidlist) 184 | 185 | 186 | def todict(self): 187 | tmpdict = {} 188 | tmpdict['requestdeviceid'] = self.requestdeviceid 189 | tmpdict['applicationid'] = self.applicationid 190 | tmpdict['taskidlist'] = self.taskidlist 191 | tmpdict['formertasklist'] = self.formertasklist 192 | tmpdict['nexttasklist'] = self.nexttasklist 193 | tmpdict['operationidlist'] = self.operationlist 194 | 195 | return tmpdict 196 | 197 | def tostring(self): 198 | import json 199 | tmpdict = self.todict() 200 | return json.dumps(tmpdict, cls=MyEncoder).encode() 201 | 202 | 203 | class networkinfo: 204 | 205 | def __init__(self, deviceid, devicetype, ip, port): 206 | self.deviceid = deviceid 207 | self.devicetype = devicetype 208 | self.ip = ip 209 | self.port = port 210 | 211 | @classmethod 212 | def initfromdict(cls, networkinfodict): 213 | tmpnetworkinfo = networkinfo(networkinfodict['deviceid'], networkinfodict['devicetype'], 214 | networkinfodict['ip'], networkinfodict['port']) 215 | return tmpnetworkinfo 216 | 217 | @classmethod 218 | def initfromstring(cls, networkinfostring): 219 | import json 220 | tmpnetworkinfodict = json.loads(networkinfostring) 221 | return networkinfo.initfromdict(tmpnetworkinfodict) 222 | 223 | @classmethod 224 | def initfromString(cls, networkinfoString): 225 | content = networkinfoString.split() 226 | tmpnetworkinfo = networkinfo(content[0], content[1], content[2], content[3]) 227 | return tmpnetworkinfo 228 | 229 | def todict(self): 230 | tmpdict = {} 231 | tmpdict['deviceid'] = self.deviceid 232 | tmpdict['devicetype'] = self.devicetype 233 | tmpdict['ip'] = self.ip 234 | tmpdict['port'] = self.port 235 | return tmpdict 236 | 237 | def toString(self): 238 | tmpdict = self.todict() 239 | return str(tmpdict) 240 | 241 | def tostring(self): 242 | import json 243 | tmpdict = self.todict() 244 | return json.dumps(tmpdict, cls=MyEncoder).encode() 245 | 246 | 247 | 248 | 249 | 250 | 251 | -------------------------------------------------------------------------------- /iot/model/record.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | @author: longxin 4 | @version: 1.0 5 | @date: 6 | @changeVersion: 7 | @changeAuthor: 8 | @description: 写入和读取离线文件 9 | ''' 10 | recordbasedir = r"/home/pi/Desktop/IoT/model/files" 11 | from model.models import networkinfo 12 | from model.models import * 13 | def writeoffloadingpolicy(requestdeviceid, applicationid, offloadingpolicyid, offloadingpolicy): 14 | ''' 15 | offloadingpolicy 离线保存格式为: 16 | offloaindpolicy_requestdeviceid_applicationid_offloadingpolicyid 17 | offloading: 格式为: 18 | offloadingpolicyid requestdeviceid applicationid, executedeviceid 19 | :param requestdeviceid: 20 | :param applicationid: 21 | :param offloadingpolicyid: 22 | :param offloadingpolicy: 23 | :return: 24 | ''' 25 | import os 26 | filepath = os.path.join(recordbasedir, 'offloadingpolicy_'+str(requestdeviceid)+"_"+str(applicationid)+"_"+str(offloadingpolicyid)+".txt") 27 | 28 | # 写入文件 覆盖式 29 | with open(filepath, "w+") as file: 30 | for policy in offloadingpolicy: 31 | line = "{0}\t{1}\t{2}\t{3}\t{4}\n".format(offloadingpolicyid, requestdeviceid, applicationid, policy['taskid'], policy['excuteddeviceid']) 32 | file.write(line) 33 | 34 | def writenetworkinfo(networkinfo_list): 35 | ''' 36 | 将传回的networkinfolist 数据写入文件当中 37 | :param networkinfo_list: 38 | 
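    list of network-info entries; each element is a dict (or a JSON string
    that decodes to one) with the keys deviceid, devicetype, ip and port.
    Entries are written tab-separated, one device per line, to
    model/files/network.txt in the format read back by getnetworkinfo().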
:return: 39 | ''' 40 | import os 41 | import json 42 | filepath = os.path.join(recordbasedir, "network.txt") 43 | 44 | with open(filepath, "w+") as file: 45 | for networkinfo in networkinfo_list: 46 | if not isinstance(networkinfo, dict): 47 | networkinfo = json.loads(networkinfo) 48 | line = "{0}\t{1}\t{2}\t{3}\n".format(networkinfo['deviceid'], networkinfo['devicetype'], 49 | networkinfo['ip'], networkinfo['port']) 50 | file.write(line) 51 | 52 | 53 | def getnetworkinfo(deviceid): 54 | ''' 55 | 从离线网络中获取网络信息 56 | :param deviceid: 如果为-1则为获取全部的网络信息 否则为获取一个网络信息 57 | :return: [type: networkinfo] (type: ip, type: port) 58 | ''' 59 | import os 60 | filepath = os.path.join(recordbasedir, "network.txt") 61 | 62 | with open(filepath, "r+") as file: 63 | lines = file.readlines() 64 | networkinfolist = [] 65 | 66 | for line in lines: 67 | line = line.replace('\n', '') 68 | line = line.strip() 69 | if len(line) != 0: 70 | networkinfolist.append(networkinfo.initfromString(line).todict()) 71 | 72 | # find the deviceid and return the url and the port 73 | 74 | if int(deviceid) == -1: 75 | deviceiplist = [] 76 | deviceidlist = [] 77 | devicetypelist = [] 78 | deviceportlist = [] 79 | 80 | for device in networkinfolist: 81 | deviceidlist.append(device['deviceid']) 82 | deviceiplist.append(device['ip']) 83 | devicetypelist.append(device['devicetype']) 84 | deviceportlist.append(device['port']) 85 | 86 | 87 | devicelist = [networkinfo(deviceidlist[tmp], devicetypelist[tmp], deviceiplist[tmp], deviceportlist[tmp]) for tmp in range(0, len(deviceiplist))] 88 | 89 | return devicelist 90 | else: 91 | for device in networkinfolist: 92 | if int(device['deviceid']) == int(deviceid): 93 | return device['ip'], device['port'] 94 | return None, None 95 | 96 | 97 | def getapplicationinfo(taskid, requestdeviceid, applicationid): 98 | import os 99 | filepath = os.path.join(recordbasedir, "applicationinfo_"+str(requestdeviceid)+"_" 100 | +str(applicationid)+".txt") 101 | 102 | print("Begin to read the application file", filepath) 103 | # 获取应用信息 104 | try: 105 | with open(filepath, "r+") as file: 106 | lines = file.readlines() 107 | tmpapplication = application.initfromString(lines) 108 | 109 | # 查找相应的应用 110 | formertasklist = None 111 | nexttasklist = None 112 | operationid = None 113 | 114 | tmpapplicationdict = tmpapplication.todict() 115 | for i, tmptaskid in enumerate(tmpapplicationdict['taskidlist']): 116 | if int(tmptaskid) == int(taskid): 117 | formertasklist = tmpapplicationdict['formertasklist'][i] 118 | nexttasklist = tmpapplicationdict['nexttasklist'][i] 119 | operationid = tmpapplicationdict['operationidlist'][i] 120 | 121 | return formertasklist, nexttasklist, operationid 122 | 123 | return formertasklist, nexttasklist, operationid 124 | except Exception as e: 125 | return None, None, None 126 | 127 | def getapplicationdict(requestdeviceid, applicationid): 128 | import os 129 | filepath = os.path.join(recordbasedir, "applicationinfo_"+str(requestdeviceid)+"_"+ 130 | str(applicationid)+".txt") 131 | 132 | # 获取全部的应用信息 不存在应用为空的情况 133 | try: 134 | with open(filepath, "r+") as file: 135 | lines = file.readlines() 136 | 137 | tmpapplication = application.initfromString(lines) 138 | 139 | return tmpapplication.todict() 140 | except Exception as e: 141 | return None 142 | 143 | def writeapplication(tmpapplication): 144 | ''' 145 | 将应用直接写入文件当中 146 | :param tmpapplication: 147 | :return: 148 | ''' 149 | tmpapplicationdict = tmpapplication.todict() 150 | 151 | writeapplicationinfo(tmpapplicationdict['requestdeviceid'], 
tmpapplicationdict['applicationid'], tmpapplicationdict['taskidlist'], 152 | tmpapplicationdict['formertasklist'], tmpapplicationdict['nexttasklist'], tmpapplicationdict['operationidlist']) 153 | 154 | 155 | 156 | def writeapplicationinfo(requestdeviceid, applicationid, taskidlist, formertaskidlist, 157 | nexttasklist, operationidlist): 158 | ''' 159 | 写入应用信息 160 | :param requestdeviceid: 请求设备id 161 | :param applicationid: 应用id 162 | :param taskidlist: 任务id list 163 | :param formetaskidlist: the percessortask list 164 | :param nextdeviceidlist: the nextdevice list 165 | :param operationlist: the operation list 166 | :return: 167 | ''' 168 | import os 169 | filepath = os.path.join(recordbasedir, "applicationinfo_"+str(requestdeviceid)+"_" 170 | +str(applicationid)+".txt") 171 | 172 | 173 | with open(filepath, "w+") as file: 174 | for i in range(0, len(taskidlist)): 175 | line = "{0}\t{1}\t{2}\t{3}\t{4}\t{5}\n".format(requestdeviceid, applicationid, 176 | taskidlist[i], ','.join([str(tmp) for tmp in formertaskidlist[i]]), 177 | ','.join([str(tmp) for tmp in nexttasklist[i]]), str(operationidlist[i])) 178 | file.write(line) 179 | 180 | 181 | 182 | def getoffloadingpolicyinfo(taskid, requestdeviceid, applicationid, offloadingpolicyid): 183 | import os 184 | 185 | filepath = os.path.join(recordbasedir,"offloadingpolicy_"+str(requestdeviceid)+"_"+str(applicationid) 186 | +"_"+str(offloadingpolicyid)+".txt") 187 | 188 | try: 189 | with open(filepath, 'r+') as file: 190 | lines = file.readlines() 191 | 192 | if int(taskid) != -1: 193 | # 查找对应的task 194 | for line in lines: 195 | line = line.replace('\n', '') 196 | if int(line.split('\t')[3]) == int(taskid): 197 | return int(line.split('\t')[4]) 198 | else: 199 | # 获取全部的调度策略 200 | taskidlist = [] 201 | excuteddeviceidlist = [] 202 | 203 | for line in lines: 204 | line = line.replace('\n', '') 205 | 206 | taskidlist.append(line.split('\t')[3]) 207 | excuteddeviceidlist.append(line.split('\t')[4]) 208 | 209 | # 构建调度策略应用 210 | offloadingpolicylist = [] 211 | 212 | for i in range(0, len(taskidlist)): 213 | tmpoffloadingpolciy = offloadingPolicy(offloadingpolicyid, requestdeviceid, applicationid, taskidlist[i], 214 | excuteddeviceidlist[i]) 215 | offloadingpolicylist.append(tmpoffloadingpolciy) 216 | 217 | return offloadingpolicylist 218 | except Exception as e: 219 | return None 220 | 221 | def getformertaskinfo(taskid, requestdeviceid, applicationid, offloadingpolicyid): 222 | ''' 223 | 这里有错误 还需要知道是谁的任务idlist 224 | 获取前置任务的处理结果 225 | :param taskid: 需要查询任务id 226 | :param requestdeviceid: 应用请求设备id 227 | :param applicationid: 应用id号 228 | :return: 返回字典 229 | ''' 230 | import os 231 | import json 232 | import numpy as np 233 | formertaskfilepath = os.path.join(recordbasedir, 234 | "formertaskinfo_{0}_{1}_{2}_{3}.txt".format(taskid, requestdeviceid, applicationid, offloadingpolicyid)) 235 | try: 236 | with open(formertaskfilepath, 'r+') as file: 237 | taskdictlist = [] 238 | 239 | lines = file.readlines() 240 | for line in lines: 241 | line = line.replace('\n', '') 242 | print("The line split len is ", len(line.split('\t'))) 243 | tmpdict = {} 244 | tmpdict['taskid'] = line.split('\t')[0] 245 | tmpdict['requestdeviceid'] = line.split('\t')[1] 246 | tmpdict['applicationid'] = line.split('\t')[2] 247 | tmpdict['offloadingpolicyid'] = line.split('\t')[3] 248 | tmpdict['formertaskid'] = line.split('\t')[4] 249 | # tmpdict['inputdata'] = list(line.split('\t')[5]) 250 | # print("The tmp inputdata is {0} and the format is {1}".format(json.loads(line.split('\t')[5]), 
type(json.loads(line.split('\t')[5])))) 251 | tmpdict['inputdata'] = json.loads(line.split('\t')[5]) 252 | tmpdict['timecost'] = json.loads(line.split('\t')[6]) 253 | 254 | 255 | taskdictlist.append(tmpdict) 256 | return taskdictlist 257 | except Exception as e: 258 | print("An exception happened when getting the formertaskinfo", e) 259 | return None 260 | 261 | def writeformertaskinfo(taskid, requestdeviceid, applicationid, offloadingpolicyid, taskdictlist): 262 | ''' 263 | Write the information of the former tasks into the offline files 264 | :param taskid: id of the task whose former-task information is written 265 | :param requestdeviceid: id of the device that requested the application 266 | :param applicationid: application id 267 | :param offloadingpolicyid: offloading policy id 268 | :param taskdictlist: list of task dicts 269 | :return: 270 | ''' 271 | import os 272 | import numpy as np 273 | import json 274 | formertaskfilepath = os.path.join(recordbasedir, 275 | "formertaskinfo_{0}_{1}_{2}_{3}.txt".format(taskid, requestdeviceid, applicationid, 276 | offloadingpolicyid)) 277 | with open(formertaskfilepath, 'a+') as file: 278 | for tmp in taskdictlist: 279 | file.write("{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6}\n".format(taskid, requestdeviceid, applicationid, offloadingpolicyid, 280 | tmp['formertaskid'], json.dumps(tmp['inputdata']), json.dumps(tmp['timecost']))) 281 | -------------------------------------------------------------------------------- /iot/network/client.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | @author: longxin 4 | @version: 1.0 5 | @date: 6 | @changeVersion: 7 | @changeAuthor: 8 | @description: 9 | ''' 10 | from model.record import * 11 | LOCAL_DEVICEID = 1 12 | def sendOffloadingpolicyRequest(requestdeviceid, applicationid, offloadingpolicyid): 13 | import json 14 | import requests 15 | import datetime 16 | ''' 17 | Send a request for the offloading policy, then write the returned policy into the offline files 18 | ''' 19 | # look up the ip and port of the request device 20 | requestDeviceIp, requestDevicePort = getnetworkinfo(requestdeviceid) 21 | if requestDeviceIp is None: 22 | sendNetworkinfoRequest() 23 | requestDeviceIp, requestDevicePort = getnetworkinfo(requestdeviceid) 24 | 25 | requestUrl = "http://{0}:{1}/getOffloadingPolicy".format(requestDeviceIp, requestDevicePort) 26 | 27 | # send the request 28 | tmpOffloadingPolicy = offloadingPolicy(offloadingpolicyid, requestdeviceid, applicationid, 29 | -1, -1) 30 | tmpMsg = msg(LOCAL_DEVICEID, requestdeviceid, datetime.datetime.now().__str__(), 31 | 'query', tmpOffloadingPolicy.todict()) 32 | rtnMsg = requests.post(url=requestUrl, data=tmpMsg.tostring()) 33 | 34 | # write the returned policy into the offline file (parse the response body, not the Response object) 35 | rtnData = json.loads(rtnMsg.text) 36 | writeoffloadingpolicy(requestdeviceid, applicationid, offloadingpolicyid, 37 | rtnData) 38 | 39 | def sendApplicationRequest(requestdeviceid, applicationid): 40 | import json 41 | import requests 42 | import datetime 43 | 44 | # find the ip and port of requestdeviceid 45 | tmpdeviceip, tmpdeviceport = getnetworkinfo(requestdeviceid) 46 | 47 | if tmpdeviceip is None: 48 | sendNetworkinfoRequest() 49 | tmpdeviceip, tmpdeviceport = getnetworkinfo(requestdeviceid) 50 | 51 | requrl = "http://{0}:{1}/getApplicationInfo".format(tmpdeviceip, tmpdeviceport) 52 | 53 | tmpapplicationinfo = application(-1, applicationid, [], [], [], []) 54 | tmpmsg = msg(1, requestdeviceid, datetime.datetime.now().__str__(), 'query', tmpapplicationinfo.todict()) 55 | # send the request 56 | req = requests.post(url=requrl, data=tmpmsg.tostring()) 57 | 58 | applicationdict = json.loads(req.text) 59 | 60 | writeapplicationinfo(requestdeviceid=applicationdict['requestdeviceid'], applicationid=applicationdict['applicationid'], 61 | 
taskidlist=applicationdict['taskidlist'], formertaskidlist=applicationdict['formertasklist'], 62 | nexttasklist=applicationdict['nexttasklist'], operationidlist=applicationdict['operationidlist'])# write to the offline file 63 | 64 | 65 | 66 | def sendNetworkinfoRequest(): 67 | import requests 68 | import json 69 | 70 | try: 71 | requrl = "http://10.21.23.103:8000/getInternetInfo" 72 | 73 | req = requests.post(url=requrl) 74 | networkinfolist = json.loads(req.text) 75 | 76 | writenetworkinfo(networkinfolist) 77 | 78 | return True 79 | except Exception as e: 80 | # print("failed to write the returned network info") 81 | return False 82 | 83 | def SendTask(requestdeviceid, applicationid, offloadingpolicyid, 84 | nexttaskid, localdeviceid, newtask): 85 | import threading 86 | thSendTask = threading.Thread(target=sendTask, args=(requestdeviceid, applicationid, offloadingpolicyid, 87 | nexttaskid, localdeviceid, newtask)) 88 | thSendTask.start() # start() sends in the background; run() would block the caller 89 | 90 | 91 | def sendTask(requestdeviceid, applicationid, offloadingpolicyid, 92 | nexttaskid, localdeviceid, newtask): 93 | import json 94 | import requests 95 | import datetime 96 | 97 | objectdeviceid = -1 98 | 99 | # get the executing device according to the offloading policy  error !!!!! 100 | objectdeviceid = getoffloadingpolicyinfo(nexttaskid, requestdeviceid, applicationid, 101 | offloadingpolicyid) 102 | 103 | if objectdeviceid is None: 104 | sendOffloadingpolicyRequest(requestdeviceid, applicationid, offloadingpolicyid) # request the offloading policy 105 | objectdeviceid = getoffloadingpolicyinfo(nexttaskid, requestdeviceid, applicationid, 106 | offloadingpolicyid) 107 | 108 | # get the network info 109 | tmpdeviceip, tmpdeviceport = getnetworkinfo(objectdeviceid) 110 | if tmpdeviceip is None: 111 | sendNetworkinfoRequest() 112 | tmpdeviceip, tmpdeviceport = getnetworkinfo(objectdeviceid) 113 | 114 | # send the http request 115 | requrl = "http://{0}:{1}/dojob".format(tmpdeviceip, tmpdeviceport) 116 | tmpmsg = msg(localdeviceid, objectdeviceid, datetime.datetime.now().__str__(), 'dojob', newtask.todict()) 117 | 118 | requests.post(url=requrl, data=tmpmsg.tostring()) 119 | 120 | return requrl 121 | 122 | 123 | def sendFinal(objectdeviceid, localdeviceid, newtask): 124 | import json 125 | import requests 126 | import datetime 127 | 128 | # get the network info 129 | tmpdeviceip, tmpdeviceport = getnetworkinfo(objectdeviceid) 130 | if tmpdeviceip is None: 131 | sendNetworkinfoRequest() 132 | tmpdeviceip, tmpdeviceport = getnetworkinfo(objectdeviceid) 133 | 134 | # send the http request 135 | requrl = "http://{0}:{1}/getFinalResult".format(tmpdeviceip, tmpdeviceport) 136 | tmpmsg = msg(localdeviceid, objectdeviceid, datetime.datetime.now().__str__(), 'finalresult', newtask.todict()) 137 | 138 | requests.post(url=requrl, data=tmpmsg.tostring()) 139 | -------------------------------------------------------------------------------- /iot/offloading.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | @author: longxin 4 | @version: 1.0 5 | @date: 6 | @changeVersion: 7 | @changeAuthor: 8 | @description: 9 | ''' 10 | from model.record import * 11 | from utils import * 12 | from code_algor import get_offloading_result 13 | 14 | 15 | def Offloading(workloadlist, datasizelist, formertasklist, nexttaskList, taskidList, 16 | applicationid, requestdeviceid, algor_type, buget_type): 17 | 18 | # 'vgg 16 test policy' 19 | offloadingpolicy = [2 for tmp in formertasklist] 20 | offloadingpolicy = get_offloading_result(len(taskidList), formertasklist, workloadlist, datasizelist, algor_type, buget_type) 21 | 22 | 23 | offloadingpolicyid = getRandomId() 24 | 25 | policy = [] 26 
| for i in range(0, len(taskidList)): 27 | tmpdict = {} 28 | tmpdict['taskid'] = taskidList[i] 29 | tmpdict['excuteddeviceid'] = offloadingpolicy[i] 30 | 31 | policy.append(tmpdict) 32 | 33 | # 将迁移策略写入文件固化层 34 | writeoffloadingpolicy(requestdeviceid=requestdeviceid, applicationid=applicationid, 35 | offloadingpolicyid=offloadingpolicyid, offloadingpolicy=policy) 36 | 37 | return policy, offloadingpolicyid 38 | def offloading(workloadlist, datasizelist, formertasklist, nexttaskList, taskidList, 39 | applicationid, requestdeviceid): 40 | ''' 41 | 该调度策略将前面几个任务放在了IoT端进行 其他的任务放在了其他设备进行 42 | :param workloadlist: 43 | :param datasizelist: 44 | :param formertasklist: 45 | :param nexttaskList: 46 | :param taskidList: 47 | :param offloadingdeviceList: 48 | :param applicationid: 49 | :param requestdeviceid: 50 | :return: 51 | ''' 52 | '根据各项参数进行任务的迁移' 53 | # 将任务调度结果写入离线文本 54 | offloadingpolicy = [3 for tmp in formertasklist] 55 | 56 | 57 | 58 | offloadingpolicyid = getRandomId() 59 | 60 | 61 | policy = [] 62 | for i in range(0, len(taskidList)): 63 | tmpdict = {} 64 | tmpdict['taskid'] = taskidList[i] 65 | tmpdict['excuteddeviceid'] = offloadingpolicy[i] 66 | 67 | policy.append(tmpdict) 68 | 69 | # 将迁移策略写入文件固化层 70 | writeoffloadingpolicy(requestdeviceid=requestdeviceid,applicationid=applicationid, offloadingpolicyid=offloadingpolicyid, offloadingpolicy=policy) 71 | 72 | return policy, offloadingpolicyid 73 | 74 | def offloading_all_tocloud(workloadlist, datasizelist, formertasklist, nexttaskList, taskidList, 75 | applicationid, requestdeviceid, algor_type, budget_type): 76 | 77 | offloadingpolicy = [1 for tmp in formertasklist] 78 | 79 | offloadingpolicyid = getRandomId() 80 | 81 | policy = [] 82 | for i in range(0, len(taskidList)): 83 | tmpdict = {} 84 | tmpdict['taskid'] = taskidList[i] 85 | tmpdict['excuteddeviceid'] = offloadingpolicy[i] 86 | 87 | policy.append(tmpdict) 88 | 89 | # 将迁移策略写入文件固化层 90 | writeoffloadingpolicy(requestdeviceid=requestdeviceid, applicationid=applicationid, 91 | offloadingpolicyid=offloadingpolicyid, offloadingpolicy=policy) 92 | 93 | return policy, offloadingpolicyid 94 | -------------------------------------------------------------------------------- /iot/process/processor.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | @author: longxin 4 | @version: 1.0 5 | @date: 6 | @changeVersion: 7 | @changeAuthor: 8 | @description: 9 | ''' 10 | from model.models import * 11 | from model.record import * 12 | from network.client import * 13 | def processor_dojob(task): 14 | pass 15 | 16 | def processor_getoffloadingpolicy(): 17 | pass 18 | 19 | def processor_getinternetinfo(): 20 | pass 21 | 22 | def processor_updateinternetinfo(): 23 | pass 24 | 25 | def processor_getapplicationinfo(): 26 | pass 27 | 28 | def produce_newtask(thistaskid, thistimecostlist, newtaskid,outputdata, requestdeviceid, applicatonid, offloadingpolicyid): 29 | ''' 30 | 生成新的任务 31 | :param thistaskid: 已经完成的任务id 32 | :param outputdata: 完成任务输出的大小 33 | :param requestdeviceid: 应用请求设备的id 34 | :param applicatonid: 应用编号 35 | :param offloadingpolicyid: 迁移策略id 36 | :return: 37 | ''' 38 | tmprequestdeviceid = requestdeviceid # 请求设备id与上一个设备相同 39 | tmpapplicationid = applicatonid # 应用编号与上一个任务相同 40 | tmpoffloadingpolicyid = offloadingpolicyid # 调度策略与上一个任务相同 41 | tmptaskid = newtaskid 42 | 43 | # 通过查询应用信息获取该任务的操作编号 44 | tmpapplcation = getapplicationdict(requestdeviceid, applicatonid) 45 | 46 | if tmpapplcation == None: 47 | 
sendApplicationRequest(requestdeviceid, applicatonid) # 客户端发送应用请求信息 48 | print("由于应用信息不存在,向设备{0}发送请求应用{1}信息 更新设备信息".format(requestdeviceid, applicatonid)) 49 | tmpapplcation = getapplicationdict(requestdeviceid, applicatonid) 50 | 51 | tmpinputdata = outputdata 52 | 53 | tmpformertask = [thistaskid] 54 | 55 | 56 | # 根据应用信息获得nexttask operationid 57 | tmptaskidlist = tmpapplcation['taskidlist'] 58 | tmptaskidindex = 0 59 | for i in range(len(tmptaskidlist)): 60 | if int(tmptaskidlist[i]) == int(tmptaskid): 61 | tmptaskidindex = i 62 | break 63 | tmpnexttasklist = tmpapplcation['nexttasklist'][tmptaskidindex] 64 | tmpoperationid = tmpapplcation['operationidlist'][tmptaskidindex] 65 | 66 | tmptimecostlist = thistimecostlist 67 | 68 | tmptask = task(tmprequestdeviceid, tmpapplicationid, tmpoffloadingpolicyid, tmptaskid, tmpoperationid, tmpinputdata, 69 | tmpformertask, tmpnexttasklist, tmptimecostlist) 70 | 71 | return tmptask 72 | 73 | def gettaskFormertask(requestdeviceid, applicationid, taskid): 74 | ''' 75 | 获取特定任务的前置任务 76 | :param requestdeviceid: 应用请求设备编号 77 | :param applicationid: 应用编号 78 | :param taskid: 任务编号 79 | :return: 80 | ''' 81 | tmpapplication = getapplicationdict(requestdeviceid, applicationid) 82 | 83 | if tmpapplication == None: 84 | sendApplicationRequest(requestdeviceid, applicationid) 85 | tmpapplication = getapplicationdict(requestdeviceid, applicationid) 86 | 87 | tmptaskidlist = tmpapplication['taskidlist'] 88 | # tmptaskindex = lambda i: int(tmptaskidlist[i])==int(taskid) 89 | tmptaskindex = 0 90 | for i in range(len(tmptaskidlist)): 91 | if int(tmptaskidlist[i])==int(taskid): 92 | tmptaskindex = i 93 | break 94 | return tmpapplication['formertasklist'][tmptaskindex] -------------------------------------------------------------------------------- /iot/test/testExcute.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | @author: longxin 4 | @version: 1.0 5 | @date: 6 | @changeVersion: 7 | @changeAuthor: 8 | @description: 测试执行器 9 | ''' 10 | from executer.excuter import * 11 | from unittest import TestCase 12 | 13 | class testExcute(TestCase): 14 | 15 | def test_excuteedgent(self): 16 | tmpagent= ExecuteAgent() 17 | print("when take the operation {0}, the answer is: {1}".format(0, 18 | tmpagent.excute(0, [1]))) 19 | -------------------------------------------------------------------------------- /iot/test/testServer.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | @author: longxin 4 | @version: 1.0 5 | @date: 6 | @changeVersion: 7 | @changeAuthor: 8 | @description:0 9 | ''' 10 | from unittest import TestCase 11 | from model.models import * 12 | class testserver(TestCase): 13 | 14 | def test_getapplicationinfo(self): 15 | import json 16 | import requests 17 | import datetime 18 | requrl = "http://10.21.23.103:8000/getApplicationInfo" 19 | 20 | tmpapplicationinfo = application(1, 60294, [], [], [], []) 21 | tmpmsg = msg(1, 1, datetime.datetime.now().__str__(), 'query', tmpapplicationinfo.todict()) 22 | 23 | req = requests.post(url=requrl, data=tmpmsg.tostring()) 24 | 25 | print("测试返回的数据为: ", req.json()) 26 | 27 | 28 | def test_updatenetworkinfo(self): 29 | import requests 30 | import datetime 31 | 32 | tmpnetworkinfo1 = networkinfo(1, 'M', '10.21.23.103', 8000) 33 | tmpnetworkinfo2 = networkinfo(2, 'E', '10.21.23.103', 8001) 34 | tmpnetworkinfo3 = networkinfo(3, 'C', '10.21.23.103', 8003) 35 | 36 | tmpnetworkinfo = 
[tmpnetworkinfo1.todict(), tmpnetworkinfo2.todict(), tmpnetworkinfo3.todict()] 37 | 38 | requrl = "http://10.21.23.103:8000/updateInternetInfo" 39 | 40 | tmpmsg = msg(1, 1, datetime.datetime.now().__str__(), 'update', tmpnetworkinfo) 41 | 42 | req = requests.post(url=requrl, data=tmpmsg.tostring()) 43 | 44 | print("测试更新数据返回的数据为: ", req.text) 45 | 46 | def test_getnetworkinfo(self): 47 | 48 | import requests 49 | import datetime 50 | 51 | requrl = "http://10.21.23.103:8000/getInternetInfo" 52 | 53 | tmpmsg = msg(1, 1, datetime.datetime.now().__str__(), 'query', "") 54 | 55 | req = requests.post(url=requrl, data=tmpmsg.tostring()) 56 | 57 | print("测试获取网络信息数据为: ", req.json()) 58 | 59 | def test_getoffloadingpolicy(self): 60 | import requests 61 | import datetime 62 | 63 | requrl = "http://10.21.23.103:8000/getOffloadingPolicy" 64 | 65 | tmpoffloadingpolicy = offloadingPolicy(taskid=-1, requestdeviceid=1, applicationid=16066, 66 | offloadingpolicyid=186643, excutedeviceid=-1) 67 | 68 | tmpmsg = msg(1, 1, datetime.datetime.now().__str__(), "query", tmpoffloadingpolicy.todict()) 69 | 70 | req = requests.post(url=requrl, data=tmpmsg.tostring()) 71 | 72 | print("测试获取调度信息,调度接口返回的结果为: ", req.json()) 73 | 74 | 75 | def test_dojob(self): 76 | 77 | # 构造应用 78 | 79 | # 进行调度 80 | 81 | # 根据调度结果发送任务 82 | pass 83 | 84 | 85 | 86 | 87 | 88 | 89 | -------------------------------------------------------------------------------- /iot/test/testrecord.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | @author: longxin 4 | @version: 1.0 5 | @date: 6 | @changeVersion: 7 | @changeAuthor: 8 | @description: 9 | ''' 10 | import unittest 11 | from unittest import TestCase 12 | from model.record import * 13 | from utils import getRandomId 14 | class testRecord(TestCase): 15 | 16 | def test_writeoffloadingpolicy(self): 17 | writeoffloadingpolicy(1, getRandomId(), getRandomId(), [{'taskid': 0, 'excuteddeviceid': 1}, 18 | {'taskid': 1, 'excuteddeviceid': 1}, 19 | {'taskid': 2, 'excuteddeviceid': 1}]) 20 | 21 | 22 | def test_getNetworkinfo(self): 23 | tmp = getnetworkinfo(1) 24 | print("the answer is:", tmp) 25 | 26 | 27 | def test_writeNetworkinfo(self): 28 | tmpnewworkinfo1 = networkinfo(1, 'M', "10.21.23.103", 8000) 29 | tmpnewworkinfo2 = networkinfo(2, 'E', "10.21.23.110", 8001) 30 | tmpnewworkinfo3 = networkinfo(3, 'C', "10.21.23.107", 8002) 31 | 32 | networkinfolist = [tmpnewworkinfo1, tmpnewworkinfo2, tmpnewworkinfo3] 33 | networkinfolist = [tmp.todict() for tmp in networkinfolist] 34 | 35 | writenetworkinfo(networkinfolist) 36 | 37 | def test_writeapplication(self): 38 | requestdeviceid = 1 39 | applicationid = getRandomId() 40 | taskidlist = [0, 1, 2, 3] 41 | formertasklist = [[-1], [0], [0], [1, 2]] 42 | nexttaskidlist = [[1, 2], [3], [3], [-1]] 43 | operationidlist = [0, 1, 2, 3] 44 | 45 | 46 | writeapplicationinfo(requestdeviceid, applicationid, taskidlist, formertasklist, nexttaskidlist, 47 | operationidlist) 48 | print("生成application", applicationid) 49 | 50 | 51 | def test_getapplicationinfo(self): 52 | taskid = 3 53 | requestdeviceid = 1 54 | applicationid = 134298 55 | 56 | formetasklist, nexttasklist, operationid = getapplicationinfo(taskid, requestdeviceid, 57 | applicationid) 58 | 59 | print("When get the appilication info, the result is: {0}, {1}, {2}".format(formetasklist, nexttasklist, operationid)) 60 | 61 | def test_getoffloadingpolicy(self): 62 | pass 63 | 64 | def test_formertaskinfo(self): 65 | taskid = 1 66 | requestdeviceid 
= 1 67 | applicationid = 16066 68 | offloadingPolicyid = 186643 69 | 70 | print("get the task {0} the offloading device is {1}".format(1, getoffloadingpolicyinfo(1, requestdeviceid, 71 | applicationid, offloadingPolicyid))) 72 | print("get the task {0} the offloading device is {1}".format(-1, getoffloadingpolicyinfo(-1, requestdeviceid, 73 | applicationid, 74 | offloadingPolicyid))) 75 | 76 | 77 | if __name__ == "__main__": 78 | unittest.main() 79 | 80 | 81 | -------------------------------------------------------------------------------- /iot/utils.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | @author: longxin 4 | @version: 1.0 5 | @date: 6 | @changeVersion: 7 | @changeAuthor: 8 | @description: 9 | ''' 10 | def getRandomId(): 11 | import random 12 | return random.randint(0, 200000) -------------------------------------------------------------------------------- /iot/思路流程图.vsdx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Derfei/task-merging/0002b5b0c62bc4c4cc8f754474d9c750ccf026e4/iot/思路流程图.vsdx -------------------------------------------------------------------------------- /schedule_results: -------------------------------------------------------------------------------- 1 | 2 | --------------------------------------------------------------------------------
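Usage sketch (illustrative only, not a file in the repository): the snippet below shows how the IoT-side helpers above fit together — write a small task DAG with writeapplicationinfo, derive a schedule with the simple offloading() policy, and read a task's execution device back with getoffloadingpolicyinfo. It is a minimal sketch under assumptions: it is run from the iot/ directory on the device, recordbasedir (model/files) exists and is writable, and the workload and data-size numbers are made up for illustration.

# illustrative sketch -- the imported names exist in the files above; the numeric values are assumptions
from model.record import writeapplicationinfo, getoffloadingpolicyinfo
from offloading import offloading
from utils import getRandomId

requestdeviceid = 1
applicationid = getRandomId()

# a 4-task diamond DAG: 0 -> {1, 2} -> 3 (same shape as test_writeapplication)
taskidlist = [0, 1, 2, 3]
formertasklist = [[-1], [0], [0], [1, 2]]
nexttasklist = [[1, 2], [3], [3], [-1]]
operationidlist = [0, 1, 2, 3]

writeapplicationinfo(requestdeviceid, applicationid, taskidlist,
                     formertasklist, nexttasklist, operationidlist)

# illustrative per-task workloads and output data sizes; real values would come from profiling
workloadlist = [10.0, 25.0, 25.0, 5.0]
datasizelist = [224.0, 64.0, 64.0, 1.0]

# offloading() is the fixed test policy; Offloading() would call get_offloading_result instead
policy, offloadingpolicyid = offloading(workloadlist, datasizelist, formertasklist,
                                        nexttasklist, taskidlist,
                                        applicationid, requestdeviceid)

# look up which device task 3 is scheduled to run on
print(getoffloadingpolicyinfo(3, requestdeviceid, applicationid, offloadingpolicyid))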