├── celery_projects
│   ├── IoT
│   │   ├── __init__.py
│   │   ├── __pycache__
│   │   │   ├── celery.cpython-35.pyc
│   │   │   ├── neuron.cpython-35.pyc
│   │   │   └── __init__.cpython-35.pyc
│   │   ├── celery.py
│   │   └── neuron.py
│   ├── jpgs
│   │   ├── flower1.jpg
│   │   └── flower2.jpg
│   ├── __pycache__
│   │   └── celeryconfig.cpython-35.pyc
│   ├── start_workers.sh
│   ├── celeryconfig.py
│   ├── restart_swarm.sh
│   ├── IoT_as_Brain.md
│   ├── IoT as Brain - en.ipynb
│   └── IoT as Brain.ipynb
├── Celery_config_plotter
│   ├── test.py
│   ├── celeryconfig.xls
│   ├── celeryconfig.py
│   ├── CeleryConfigPlotter.py
│   └── CeleryConfigPlotter.ipynb
└── README.md

/celery_projects/IoT/__init__.py:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/Celery_config_plotter/test.py:
--------------------------------------------------------------------------------
import CeleryConfigPlotter

CeleryConfigPlotter.genConfigFile()
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
[README.md](https://github.com/Wei1234c/IOTasBrain/blob/master/celery_projects/IoT_as_Brain.md)
--------------------------------------------------------------------------------
/celery_projects/jpgs/flower1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Wei1234c/IOTasBrain/HEAD/celery_projects/jpgs/flower1.jpg
--------------------------------------------------------------------------------
/celery_projects/jpgs/flower2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Wei1234c/IOTasBrain/HEAD/celery_projects/jpgs/flower2.jpg
--------------------------------------------------------------------------------
/Celery_config_plotter/celeryconfig.xls:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Wei1234c/IOTasBrain/HEAD/Celery_config_plotter/celeryconfig.xls
--------------------------------------------------------------------------------
/celery_projects/IoT/__pycache__/celery.cpython-35.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Wei1234c/IOTasBrain/HEAD/celery_projects/IoT/__pycache__/celery.cpython-35.pyc
--------------------------------------------------------------------------------
/celery_projects/IoT/__pycache__/neuron.cpython-35.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Wei1234c/IOTasBrain/HEAD/celery_projects/IoT/__pycache__/neuron.cpython-35.pyc
--------------------------------------------------------------------------------
/celery_projects/IoT/__pycache__/__init__.cpython-35.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Wei1234c/IOTasBrain/HEAD/celery_projects/IoT/__pycache__/__init__.cpython-35.pyc
--------------------------------------------------------------------------------
/celery_projects/__pycache__/celeryconfig.cpython-35.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Wei1234c/IOTasBrain/HEAD/celery_projects/__pycache__/celeryconfig.cpython-35.pyc
--------------------------------------------------------------------------------
/celery_projects/IoT/celery.py:
--------------------------------------------------------------------------------
from celery import Celery
from kombu import Exchange, Queue


app = Celery()
app.config_from_object('celeryconfig')

if __name__ == '__main__':
    app.start()
--------------------------------------------------------------------------------
/celery_projects/start_workers.sh:
--------------------------------------------------------------------------------
# ./start_workers.sh

PROJECT='IoT'   # project name
CONCURRENCY=1   # number of subprocesses per worker



echo "Starting Redis, Flower _________________________________________________"
eval $(docker-machine env master01)

docker run -dit -p 6379:6379 --name=redis -v /data:/data hypriot/rpi-redis
docker run -d -p 5555:5555 --name=flower --volume=/data/celery_projects:/celery_projects wei1234c/celery_armv7 /bin/sh -c "cd /celery_projects && celery -A ${PROJECT} flower"



echo "Starting Celery cluster containers _________________________________________________"
eval $(docker-machine env --swarm master01)

for id in 'x' 'y' 'h1' 'h2' 'h3' 'z'
do
    docker run -d --name=neuron_${id} --hostname=neuron_${id} --volume=/data/celery_projects:/celery_projects wei1234c/celery_armv7 /bin/sh -c "cd /celery_projects && celery -A ${PROJECT} worker -n %h -Q neuron_${id} --concurrency=${CONCURRENCY} --loglevel=INFO"
done

--------------------------------------------------------------------------------
/celery_projects/celeryconfig.py:
--------------------------------------------------------------------------------

#_____________________________Import Kombu classes_____________________________
from kombu import Exchange, Queue

#___________________________CELERY_TIMEZONE & Misc.____________________________
CELERY_TIMEZONE = 'Asia/Taipei'
CELERYD_POOL_RESTARTS = True

#__________________________________BROKER_URL__________________________________
BROKER_URL = 'redis://netbrain.noip.me:6379/0'

#____________________________CELERY_RESULT_BACKEND_____________________________
CELERY_RESULT_BACKEND = 'redis://netbrain.noip.me:6379/1'

#________________________________CELERY_IMPORTS________________________________
CELERY_IMPORTS = ('IoT.neuron',)

#________________________________CELERY_QUEUES_________________________________
CELERY_QUEUES = (
    Queue('neuron_x', Exchange('celery', type = 'direct'), routing_key='neuron_x'),
    Queue('neuron_y', Exchange('celery', type = 'direct'), routing_key='neuron_y'),
    Queue('neuron_h1', Exchange('celery', type = 'direct'), routing_key='neuron_h1'),
    Queue('neuron_h2', Exchange('celery', type = 'direct'), routing_key='neuron_h2'),
    Queue('neuron_h3', Exchange('celery', type = 'direct'), routing_key='neuron_h3'),
    Queue('neuron_z', Exchange('celery', type = 'direct'), routing_key='neuron_z'),
)
--------------------------------------------------------------------------------
/celery_projects/restart_swarm.sh:
--------------------------------------------------------------------------------
echo "# Set Docker hosts IPs ____________________________________"
rpi201='192.168.0.109'
rpi202='192.168.0.114'
master01=${rpi202}
node01=${rpi201}
echo ${master01}
echo ${node01}



eval $(docker-machine env master01)

echo "# Create Consul server ____________________________________"
docker run -d --restart=always -p 8500:8500 --name=consul --hostname=consul nimblestratus/rpi-consul -server -bootstrap



echo "# Create Swarm manager ____________________________________"
docker run -d \
    --restart=always \
    --name swarm-agent-master \
    -p 3376:3376 \
    -v /etc/docker:/etc/docker \
    hypriot/rpi-swarm \
    manage \
    --tlsverify \
    --tlscacert=/etc/docker/ca.pem \
    --tlscert=/etc/docker/server.pem \
    --tlskey=/etc/docker/server-key.pem \
    -H tcp://0.0.0.0:3376 \
    --strategy spread consul://${master01}:8500

docker run -d \
    --restart=always \
    --name swarm-agent \
    hypriot/rpi-swarm \
    join --advertise ${master01}:2376 consul://${master01}:8500


echo "# Create Swarm node ____________________________________"
eval $(docker-machine env node01)
docker run -d \
    --restart=always \
    --name swarm-agent \
    hypriot/rpi-swarm \
    join --advertise ${node01}:2376 consul://${master01}:8500



docker $(docker-machine config --swarm master01) info
--------------------------------------------------------------------------------
/Celery_config_plotter/celeryconfig.py:
--------------------------------------------------------------------------------

#_____________________________Import Kombu classes_____________________________
from kombu import Exchange, Queue

#___________________________CELERY_TIMEZONE & Misc.____________________________
CELERY_TIMEZONE = 'Asia/Taipei'
CELERYD_POOL_RESTARTS = True

#__________________________________BROKER_URL__________________________________
BROKER_URL = 'redis://weilin.noip.me:6379/0'

#____________________________CELERY_RESULT_BACKEND_____________________________
CELERY_RESULT_BACKEND = 'redis://weilin.noip.me:6379/1'

#________________________________CELERY_IMPORTS________________________________
CELERY_IMPORTS = ('IoT.neuron',)

#________________________________CELERY_QUEUES_________________________________
CELERY_QUEUES = (
    Queue('neuron_x', Exchange('celery', type = 'direct'), routing_key='neuron_x'),
    Queue('neuron_y', Exchange('celery', type = 'direct'), routing_key='neuron_y'),
    Queue('neuron_h1', Exchange('celery', type = 'direct'), routing_key='neuron_h1'),
    Queue('neuron_h2', Exchange('celery', type = 'direct'), routing_key='neuron_h2'),
    Queue('neuron_h3', Exchange('celery', type = 'direct'), routing_key='neuron_h3'),
    Queue('neuron_z', Exchange('celery', type = 'direct'), routing_key='neuron_z'),
)


#_______________________________Workers Scripts________________________________
#[Node - localhost] : celery -A IoT worker -n worker1.%h -Q neuron_x --concurrency=1 --loglevel=INFO
#[Node - localhost] : celery -A IoT worker -n worker1.%h -Q neuron_y --concurrency=1 --loglevel=INFO
#[Node - localhost] : celery -A IoT worker -n worker1.%h -Q neuron_h1 --concurrency=1 --loglevel=INFO
#[Node - localhost] : celery -A IoT worker -n worker1.%h -Q neuron_h2 --concurrency=1 --loglevel=INFO
#[Node - localhost] : celery -A IoT worker -n worker1.%h -Q neuron_h3 --concurrency=1 --loglevel=INFO
#[Node - localhost] : celery -A IoT worker -n worker1.%h -Q neuron_z --concurrency=1 --loglevel=INFO

#____________________________________FLOWER____________________________________
#[Flower] : celery -A IoT flower
--------------------------------------------------------------------------------
/celery_projects/IoT/neuron.py:
--------------------------------------------------------------------------------
# https://en.wikipedia.org/wiki/Neuron
# https://en.wikipedia.org/wiki/Action_potential
# https://en.wikipedia.org/wiki/Artificial_neuron
# https://en.wikipedia.org/wiki/Neural_coding

from celery import group
from IoT.celery import app
import pickle
import os
import datetime

DEBUG = True
CONFIG_FILE = os.path.join(os.environ['HOME'], 'neuron.cfg')
LOG_FILE = os.path.join(os.environ['HOME'], 'neuron.log')
INITIAL_WEIGHT = 0
ACTION_POTENTIAL = 1
RESTING_POTENTIAL = 0
# LASTING_SECONDS = 0.5
POLARIZATION_SECONDS = 0.5
REFRACTORY_PERIOD = 0.1
# ABSOLUTE_REFRACTORY_PERIOD = 0.5


@app.task
def getHostname():
    return os.environ['HOSTNAME'] if 'HOSTNAME' in os.environ else os.environ['COMPUTERNAME']


# @app.task
def pickleDump(content, fileName):
    with open(fileName, 'wb') as f:
        pickle.dump(content, f)


# @app.task
def pickleLoad(fileName):
    with open(fileName, 'rb') as f:
        return pickle.load(f)


# @app.task
def touchFiles():
    if not os.path.exists(CONFIG_FILE): emptyConfig()
    if not os.path.exists(LOG_FILE): emptyLog()


@app.task
def emptyConfig():
    config = {}
    config['inputs'] = {}
    config['output'] = {'value': RESTING_POTENTIAL, 'polarized_time': datetime.datetime(1970, 1, 1), 'lasting': datetime.timedelta(0, REFRACTORY_PERIOD)}
    setConfig(config)


@app.task
def emptyLog():
    content = []
    setLog(content)


@app.task
def setConfig(content):
    pickleDump(content, CONFIG_FILE)


# @app.task
def setLog(content):
    pickleDump(content, LOG_FILE)


@app.task
def getConfig():
    touchFiles()
    return pickleLoad(CONFIG_FILE)


@app.task
def getLog():
    touchFiles()
    return pickleLoad(LOG_FILE)


# @app.task
def log(message):
    logs = getLog()
    logs.append((datetime.datetime.now(), getHostname(), message))
    setLog(logs)


@app.task
def setConnections(connections):
    config = getConfig()
    config['connections'] = connections
    setConfig(config)


@app.task
def getConnections():
    return getConfig().get('connections', set())


@app.task
def addConnection(neuron_id):
    connections = getConnections()
    connections.add(neuron_id)
    setConnections(connections)


@app.task
def deleteConnection(neuron_id):
    connections = getConnections()
    connections.remove(neuron_id)
    setConnections(connections)


@app.task
def setWeights(weights):
    config = getConfig()
    config['weights'] = weights
    setConfig(config)


@app.task
def getWeights():
    return getConfig().get('weights', {})


@app.task
def setWeight(neuron_id, weight):
    weights = getWeights()
    weights[neuron_id] = weight
    setWeights(weights)


@app.task
def getWeight(neuron_id):
    return getWeights().get(neuron_id, INITIAL_WEIGHT)


@app.task
def deleteWeight(neuron_id):
    weights = getWeights()
    del weights[neuron_id]
    setWeights(weights)


@app.task
def setThreshold(threshold):
    config = getConfig()
    config['threshold'] = threshold
    setConfig(config)


@app.task
def getThreshold():
    return getConfig().get('threshold', float("inf"))


# @app.task
def in_refractory_period():
    output = getConfig()['output']

    # True while the output has not yet expired
    return output['polarized_time'] + output['lasting'] >= datetime.datetime.now()


# @app.task
def sumInputsAndWeights():
    config = getConfig()
    weights = config.get('weights', {})
    inputs = config.get('inputs', {})
    sum_of_weighted_inputs = 0
    currentTime = datetime.datetime.now()

    # sum weighted inputs
    for neuron in inputs:
        input = inputs[neuron]
        # keep this input's value only if it has not yet expired
        input['value'] = input.get('value', ACTION_POTENTIAL) if input['kick_time'] + input['lasting'] >= currentTime else RESTING_POTENTIAL
        sum_of_weighted_inputs += input['value'] * weights.get(neuron, INITIAL_WEIGHT)

    setConfig(config)

    return sum_of_weighted_inputs


@app.task
def setOutput(potential):
    # set the output to the given potential
    config = getConfig()
    config['output']['value'] = potential
    config['output']['polarized_time'] = datetime.datetime.now()
    # config['output']['lasting'] = datetime.timedelta(0, REFRACTORY_PERIOD)
    setConfig(config)


@app.task
def setOutputActive():
    log('Setting output of {0} to ACTION_POTENTIAL.'.format(getHostname()))
    setOutput(ACTION_POTENTIAL)


# @app.task
# def setOutputResting():
#     log('Setting output of {0} to RESTING_POTENTIAL.'.format(getHostname()))
#     setOutput(RESTING_POTENTIAL)


@app.task
def getOutput():
    if in_refractory_period():
        output = getConfig()['output'].get('value', RESTING_POTENTIAL)
    else:
        output = RESTING_POTENTIAL
    return output


@app.task
def receiveInput(neuron_id):
    # record the state of this input
    config = getConfig()
    inputs = config.get('inputs', {})
    input = inputs.get(neuron_id)

    # the time this input arrived
    currentTime = datetime.datetime.now()

    # no record of this source yet, so initialize one
    if input is None:
        input = {}
        input['value'] = RESTING_POTENTIAL
        input['kick_time'] = currentTime
        input['lasting'] = datetime.timedelta(0, POLARIZATION_SECONDS)
        inputs[neuron_id] = input

    remainingValue = input['value'] if input['kick_time'] + input['lasting'] >= currentTime else RESTING_POTENTIAL  # residual value of the previous input
    input['value'] = remainingValue + ACTION_POTENTIAL  # accumulated effect from the same source
    input['kick_time'] = currentTime
    input['lasting'] = datetime.timedelta(0, POLARIZATION_SECONDS)
    setConfig(config)


@app.task
def kick(neuron_id):
    myName = getHostname()
    log('{0} is kicking {1}.'.format(neuron_id, myName))

    # record the state of this input
    receiveInput(neuron_id)

    sum_of_weighted_inputs = sumInputsAndWeights()
    threshold = getThreshold()
    currentOutput = getOutput()

    if not in_refractory_period():
        # the refractory period is over, so re-evaluate
        if sum_of_weighted_inputs >= threshold:
            fire()
    else:
        # still within the refractory period
        log('{0} is still in refractory-period.'.format(myName))
        if currentOutput == ACTION_POTENTIAL:
            # currently at ACTION_POTENTIAL
            if sum_of_weighted_inputs >= threshold:
                log('{0} is still in refractory_period at action potential, then a neuron {1} kicks in, now sum_of_weighted_inputs >= threshold.'.format(myName, neuron_id))
            else:
                log('{0} is still in refractory_period at action potential, then a neuron {1} kicks in, now sum_of_weighted_inputs < threshold.'.format(myName, neuron_id))
                # setOutputResting()
        else:
            # currently at RESTING_POTENTIAL
            if sum_of_weighted_inputs >= threshold:
                log('{0} is still in refractory_period at resting potential, then a neuron {1} kicks in, now sum_of_weighted_inputs >= threshold.'.format(myName, neuron_id))
            else:
                log('{0} is still in refractory_period at resting potential, then a neuron {1} kicks in, now sum_of_weighted_inputs < threshold.'.format(myName, neuron_id))
                # setOutputResting()


@app.task
def fire():
    myName = getHostname()
    log('{0} fires.'.format(myName))
    setOutputActive()

    # kick downstream neurons
    connections = getConnections()
    group([kick.subtask((myName,), routing_key = connection) for connection in connections]).apply_async()
--------------------------------------------------------------------------------
/Celery_config_plotter/CeleryConfigPlotter.py:
--------------------------------------------------------------------------------

# coding: utf-8

# # Celery configuration planner

# In[1]:

from celery import Celery


# In[2]:

import pandas as pd
from pandas import Series, DataFrame


# In[3]:

import os
from pprint import pprint


# ### Default settings

# In[4]:

def listDefaultCeleryConfigurations():
    # list Celery's built-in default settings (relies on Celery 3.x internals)
    app = Celery()
    configs = app.conf.__dict__['_order'][2]
    configs = sorted([(k, v) for k, v in configs.items()])
    for k, v in configs:
        print ('{0} = {1}'.format(k, ("'" + v + "'") if isinstance(v, str) else v) )


# In[5]:

listDefaultCeleryConfigurations()


# ---
# ### Read the contents of the plan file

# In[6]:

def getExcelData(file):
    df = pd.read_excel(file)
    df.dropna(axis=0, how='all', inplace=True)

    return df


# ---
# #### Import Kombu classes

# In[7]:

def import_Kombu_classes(plan, summary):
    output = []
    output.extend(['', '#{0:_^78}'.format('Import Kombu classes')])
    output.append('{0}'.format('from kombu import Exchange, Queue'))
    summary.extend(output)

    return summary


# ---
# #### CELERY_TIMEZONE & Misc.

# In[8]:

def set_CELERY_TIMEZONE_Misc(plan, summary):
    # custom value
    CELERY_TIMEZONE = 'Asia/Taipei'

    output = []
    output.extend(['', '#{0:_^78}'.format('CELERY_TIMEZONE & Misc.')])
    output.append("CELERY_TIMEZONE = '{0}'".format(CELERY_TIMEZONE))
    output.append('CELERYD_POOL_RESTARTS = True')
    summary.extend(output)

    return summary


# ---
# #### BROKER_URL
# BROKER_URL = 'redis://netbrain.noip.me:6379/0'

# In[9]:

def set_BROKER_URL(plan, summary):
    BROKER_URL = plan.Broker.drop_duplicates()[0]

    output = []
    output.extend(['', '#{0:_^78}'.format('BROKER_URL')])
    output.append("BROKER_URL = '{0}'".format(BROKER_URL))
    summary.extend(output)

    return summary


# ---
# #### CELERY_RESULT_BACKEND
# CELERY_RESULT_BACKEND = 'redis://netbrain.noip.me:6379/1'

# In[10]:

def set_CELERY_RESULT_BACKEND(plan, summary):
    CELERY_RESULT_BACKEND = plan.Result_backend.drop_duplicates()[0]

    output = []
    output.extend(['', '#{0:_^78}'.format('CELERY_RESULT_BACKEND')])
    output.append("CELERY_RESULT_BACKEND = '{0}'".format(CELERY_RESULT_BACKEND))
    summary.extend(output)

    return summary


# ---
# #### CELERY_IMPORTS
# CELERY_IMPORTS = ('proj.tasks', )

# In[11]:

def set_CELERY_IMPORTS(plan, summary):
    # reset_index keeps the positional loop below valid even when
    # drop_duplicates() removes rows (it would otherwise keep the original labels)
    Celery_app_tasks = plan[['Celery_app', 'Tasks_module']].drop_duplicates().reset_index(drop = True)
    modules = ('{0}.{1}'.format(Celery_app_tasks.loc[i, 'Celery_app'], Celery_app_tasks.loc[i, 'Tasks_module']) for i in range(len(Celery_app_tasks)))
    CELERY_IMPORTS = tuple(modules)

    output = []
    output.extend(['', '#{0:_^78}'.format('CELERY_IMPORTS')])
    output.append('CELERY_IMPORTS = {0}'.format(CELERY_IMPORTS))
    summary.extend(output)

    return summary


# ---
# #### CELERY_QUEUES

# CELERY_QUEUES = (
#     Queue('feed_tasks', routing_key='feed.#'),
#     Queue('regular_tasks', routing_key='task.#'),
#     Queue('image_tasks', exchange=Exchange('mediatasks', type='direct'), routing_key='image.compress'),
# )
#
# CELERY_QUEUES = (
#     Queue('default', Exchange('default'), routing_key='default'),
#     Queue('videos', Exchange('media'), routing_key='media.video'),
#     Queue('images', Exchange('media'), routing_key='media.image'),
# )

# In[12]:

def set_CELERY_QUEUES(plan, summary):
    queues = plan[['Queue', 'Exchange', 'Exchange_Type', 'Routing_Key']].drop_duplicates().reset_index(drop = True)
    output = []
    output.extend(['', '#{0:_^78}'.format('CELERY_QUEUES')])

    output.append('CELERY_QUEUES = (')

    for i in range(len(queues)):
        output.append("    Queue('{queue}', Exchange('{exchange}', type = '{exchange_Type}'), routing_key='{routing_key}'),"
                      .format(queue = queues.loc[i, 'Queue'],
                              exchange = queues.loc[i, 'Exchange'],
                              exchange_Type = queues.loc[i, 'Exchange_Type'],
                              routing_key = queues.loc[i, 'Routing_Key']
                              )
                      )
    output.append(')')

    summary.extend(output)

    return summary


# ---
# #### CELERY_ROUTES

# CELERY_ROUTES = {
#     'feeds.tasks.import_feed': {
#         'queue': 'feed_tasks',
#         'routing_key': 'feed.import',
#     },
# }

# In[13]:

def set_CELERY_ROUTES(plan, summary):
    routes = plan[['Celery_app', 'Tasks_module', 'Task', 'Queue', 'Routing_Key']].drop_duplicates().reset_index(drop = True)
    output = []
    output.extend(['', '#{0:_^78}'.format('CELERY_ROUTES')])

    output.append('CELERY_ROUTES = {')

    for i in range(len(routes)):
        output.append("    '{app}.{module}.{task}': {{\n        'queue': '{queue}',\n        'routing_key': '{routing_key}',\n    }},"
                      .format(app = routes.loc[i, 'Celery_app'],
                              module = routes.loc[i, 'Tasks_module'],
                              task = routes.loc[i, 'Task'],
                              queue = routes.loc[i, 'Queue'],
                              routing_key = routes.loc[i, 'Routing_Key'])
                      )
    output.append('}')

    summary.extend(output)

    return summary


# ---
# #### WORKERS

# In[14]:

def set_Workers_Scripts(plan, summary):
    workers = plan[['Node', 'Celery_app', 'Worker', 'Queue', 'Concurrency', 'Log_level']].drop_duplicates().reset_index(drop = True)
    output = []
    output.extend(['', '#{0:_^78}'.format('Workers Scripts')])

    for i in range(len(workers)):
        output.append('#[Node - {node}] : celery -A {app} worker -n {worker} -Q {queue} --concurrency={concurrency} --loglevel={loglevel}'
                      .format(node = workers.loc[i, 'Node'],
                              app = workers.loc[i, 'Celery_app'],
                              worker = workers.loc[i, 'Worker'],
                              queue = workers.loc[i, 'Queue'],
                              concurrency = workers.loc[i, 'Concurrency'],
                              loglevel = workers.loc[i, 'Log_level']
                              )
                      )

    summary.extend(output)

    return summary


# ---
# #### FLOWER

# In[15]:

def set_FLOWER(plan, summary):
    app = plan.Celery_app.drop_duplicates()[0]
    output = []
    output.extend(['', '#{0:_^78}'.format('FLOWER')])

    output.append('#[Flower] : celery -A {app} flower'.format(app = app))
    summary.extend(output)

    return summary


# ## Summarize

# In[16]:

def summarizeConfigurations(planExcelFile):

    summary = []

    # listDefaultCeleryConfigurations()

    plan = getExcelData(planExcelFile)

    import_Kombu_classes(plan, summary)
    set_CELERY_TIMEZONE_Misc(plan, summary)
    set_BROKER_URL(plan, summary)
    set_CELERY_RESULT_BACKEND(plan, summary)
    set_CELERY_IMPORTS(plan, summary)
    set_CELERY_QUEUES(plan, summary)
    set_CELERY_ROUTES(plan, summary)

    set_Workers_Scripts(plan, summary)
    set_FLOWER(plan, summary)

    return summary


# ## Output Configuration File

# In[17]:

def writeConfigurationFile(summary, file = 'celeryconfig.py'):
    with open(file, 'w', encoding = 'utf8') as f:
        for line in summary: f.write(line + '\n')


# In[18]:

def genConfigFile():
    # locate the plan file
    folder = os.getcwd()
    files = [file for file in os.listdir(folder) if file.rpartition('.')[2] in ('xls', 'xlsx')]

    if len(files) == 1:
        file = os.path.join(folder, files[0])
        summary = summarizeConfigurations(file)
        for line in summary: print (line)
        writeConfigurationFile(summary)

    else:
        print('There must be one and only one plan Excel file.')


# ## Main

# In[19]:

if __name__ == '__main__':
    genConfigFile()


# In[ ]:



--------------------------------------------------------------------------------
/Celery_config_plotter/CeleryConfigPlotter.ipynb:
-------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Celery結構規劃" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": 1, 13 | "metadata": { 14 | "collapsed": true 15 | }, 16 | "outputs": [], 17 | "source": [ 18 | "from celery import Celery" 19 | ] 20 | }, 21 | { 22 | "cell_type": "code", 23 | "execution_count": 2, 24 | "metadata": { 25 | "collapsed": false 26 | }, 27 | "outputs": [], 28 | "source": [ 29 | "import pandas as pd\n", 30 | "from pandas import Series, DataFrame" 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": 3, 36 | "metadata": { 37 | "collapsed": true 38 | }, 39 | "outputs": [], 40 | "source": [ 41 | "import os\n", 42 | "from pprint import pprint" 43 | ] 44 | }, 45 | { 46 | "cell_type": "markdown", 47 | "metadata": {}, 48 | "source": [ 49 | "### 預設的參數" 50 | ] 51 | }, 52 | { 53 | "cell_type": "code", 54 | "execution_count": 4, 55 | "metadata": { 56 | "collapsed": false 57 | }, 58 | "outputs": [], 59 | "source": [ 60 | "def listDefaultCeleryConfigurations():\n", 61 | " app = Celery()\n", 62 | " configs = app.conf.__dict__['_order'][2]\n", 63 | " configs = sorted([(k, v) for k, v in configs.items()])\n", 64 | " for k, v in configs:\n", 65 | " print ('{0} = {1}'.format(k, (\"'\" + v + \"'\") if isinstance(v, str) else v) )" 66 | ] 67 | }, 68 | { 69 | "cell_type": "code", 70 | "execution_count": 5, 71 | "metadata": { 72 | "collapsed": false 73 | }, 74 | "outputs": [ 75 | { 76 | "name": "stdout", 77 | "output_type": "stream", 78 | "text": [ 79 | "ADMINS = ()\n", 80 | "BROKER_CONNECTION_MAX_RETRIES = 100\n", 81 | "BROKER_CONNECTION_RETRY = True\n", 82 | "BROKER_CONNECTION_TIMEOUT = 4\n", 83 | "BROKER_FAILOVER_STRATEGY = None\n", 84 | "BROKER_HEARTBEAT = None\n", 85 | "BROKER_HEARTBEAT_CHECKRATE = 3.0\n", 86 | "BROKER_HOST = None\n", 87 | "BROKER_LOGIN_METHOD = None\n", 88 | "BROKER_PASSWORD = None\n", 89 | "BROKER_POOL_LIMIT = 10\n", 90 | "BROKER_PORT = None\n", 91 | "BROKER_TRANSPORT = None\n", 92 | "BROKER_TRANSPORT_OPTIONS = {}\n", 93 | "BROKER_URL = None\n", 94 | "BROKER_USER = None\n", 95 | "BROKER_USE_SSL = False\n", 96 | "BROKER_VHOST = None\n", 97 | "CASSANDRA_COLUMN_FAMILY = None\n", 98 | "CASSANDRA_DETAILED_MODE = False\n", 99 | "CASSANDRA_KEYSPACE = None\n", 100 | "CASSANDRA_READ_CONSISTENCY = None\n", 101 | "CASSANDRA_SERVERS = None\n", 102 | "CASSANDRA_WRITE_CONSISTENCY = None\n", 103 | "CELERYBEAT_LOG_FILE = None\n", 104 | "CELERYBEAT_LOG_LEVEL = 'INFO'\n", 105 | "CELERYBEAT_MAX_LOOP_INTERVAL = 0\n", 106 | "CELERYBEAT_SCHEDULE = {}\n", 107 | "CELERYBEAT_SCHEDULER = 'celery.beat:PersistentScheduler'\n", 108 | "CELERYBEAT_SCHEDULE_FILENAME = 'celerybeat-schedule'\n", 109 | "CELERYBEAT_SYNC_EVERY = 0\n", 110 | "CELERYD_AGENT = None\n", 111 | "CELERYD_AUTORELOADER = 'celery.worker.autoreload:Autoreloader'\n", 112 | "CELERYD_AUTOSCALER = 'celery.worker.autoscale:Autoscaler'\n", 113 | "CELERYD_CONCURRENCY = 0\n", 114 | "CELERYD_CONSUMER = 'celery.worker.consumer:Consumer'\n", 115 | "CELERYD_FORCE_EXECV = False\n", 116 | "CELERYD_HIJACK_ROOT_LOGGER = True\n", 117 | "CELERYD_LOG_COLOR = None\n", 118 | "CELERYD_LOG_FILE = None\n", 119 | "CELERYD_LOG_FORMAT = '[%(asctime)s: %(levelname)s/%(processName)s] %(message)s'\n", 120 | "CELERYD_LOG_LEVEL = 'WARN'\n", 121 | "CELERYD_MAX_TASKS_PER_CHILD = None\n", 122 | "CELERYD_POOL = 'prefork'\n", 123 | "CELERYD_POOL_PUTLOCKS = True\n", 124 | "CELERYD_POOL_RESTARTS = 
False\n", 125 | "CELERYD_PREFETCH_MULTIPLIER = 4\n", 126 | "CELERYD_STATE_DB = None\n", 127 | "CELERYD_TASK_LOG_FORMAT = '[%(asctime)s: %(levelname)s/%(processName)s] %(task_name)s[%(task_id)s]: %(message)s'\n", 128 | "CELERYD_TASK_SOFT_TIME_LIMIT = None\n", 129 | "CELERYD_TASK_TIME_LIMIT = None\n", 130 | "CELERYD_TIMER = None\n", 131 | "CELERYD_TIMER_PRECISION = 1.0\n", 132 | "CELERYD_WORKER_LOST_WAIT = 10.0\n", 133 | "CELERYMON_LOG_FILE = None\n", 134 | "CELERYMON_LOG_FORMAT = '[%(asctime)s: %(levelname)s] %(message)s'\n", 135 | "CELERYMON_LOG_LEVEL = 'INFO'\n", 136 | "CELERY_ACCEPT_CONTENT = ['json', 'pickle', 'msgpack', 'yaml']\n", 137 | "CELERY_ACKS_LATE = False\n", 138 | "CELERY_ALWAYS_EAGER = False\n", 139 | "CELERY_ANNOTATIONS = None\n", 140 | "CELERY_BROADCAST_EXCHANGE = 'celeryctl'\n", 141 | "CELERY_BROADCAST_EXCHANGE_TYPE = 'fanout'\n", 142 | "CELERY_BROADCAST_QUEUE = 'celeryctl'\n", 143 | "CELERY_CACHE_BACKEND = None\n", 144 | "CELERY_CACHE_BACKEND_OPTIONS = {}\n", 145 | "CELERY_CHORD_PROPAGATES = True\n", 146 | "CELERY_COUCHBASE_BACKEND_SETTINGS = None\n", 147 | "CELERY_CREATE_MISSING_QUEUES = True\n", 148 | "CELERY_DEFAULT_DELIVERY_MODE = 2\n", 149 | "CELERY_DEFAULT_EXCHANGE = 'celery'\n", 150 | "CELERY_DEFAULT_EXCHANGE_TYPE = 'direct'\n", 151 | "CELERY_DEFAULT_QUEUE = 'celery'\n", 152 | "CELERY_DEFAULT_RATE_LIMIT = None\n", 153 | "CELERY_DEFAULT_ROUTING_KEY = 'celery'\n", 154 | "CELERY_DISABLE_RATE_LIMITS = False\n", 155 | "CELERY_EAGER_PROPAGATES_EXCEPTIONS = False\n", 156 | "CELERY_ENABLE_REMOTE_CONTROL = True\n", 157 | "CELERY_ENABLE_UTC = True\n", 158 | "CELERY_EVENT_QUEUE_EXPIRES = None\n", 159 | "CELERY_EVENT_QUEUE_TTL = None\n", 160 | "CELERY_EVENT_SERIALIZER = 'json'\n", 161 | "CELERY_IGNORE_RESULT = False\n", 162 | "CELERY_IMPORTS = ()\n", 163 | "CELERY_INCLUDE = ()\n", 164 | "CELERY_MAX_CACHED_RESULTS = 100\n", 165 | "CELERY_MESSAGE_COMPRESSION = None\n", 166 | "CELERY_MONGODB_BACKEND_SETTINGS = None\n", 167 | "CELERY_QUEUES = None\n", 168 | "CELERY_QUEUE_HA_POLICY = None\n", 169 | "CELERY_REDIRECT_STDOUTS = True\n", 170 | "CELERY_REDIRECT_STDOUTS_LEVEL = 'WARNING'\n", 171 | "CELERY_REDIS_DB = None\n", 172 | "CELERY_REDIS_HOST = None\n", 173 | "CELERY_REDIS_MAX_CONNECTIONS = None\n", 174 | "CELERY_REDIS_PASSWORD = None\n", 175 | "CELERY_REDIS_PORT = None\n", 176 | "CELERY_RESULT_BACKEND = None\n", 177 | "CELERY_RESULT_DBURI = None\n", 178 | "CELERY_RESULT_DB_SHORT_LIVED_SESSIONS = False\n", 179 | "CELERY_RESULT_DB_TABLENAMES = None\n", 180 | "CELERY_RESULT_ENGINE_OPTIONS = None\n", 181 | "CELERY_RESULT_EXCHANGE = 'celeryresults'\n", 182 | "CELERY_RESULT_EXCHANGE_TYPE = 'direct'\n", 183 | "CELERY_RESULT_PERSISTENT = None\n", 184 | "CELERY_RESULT_SERIALIZER = 'pickle'\n", 185 | "CELERY_ROUTES = None\n", 186 | "CELERY_SECURITY_CERTIFICATE = None\n", 187 | "CELERY_SECURITY_CERT_STORE = None\n", 188 | "CELERY_SECURITY_KEY = None\n", 189 | "CELERY_SEND_EVENTS = False\n", 190 | "CELERY_SEND_TASK_ERROR_EMAILS = False\n", 191 | "CELERY_SEND_TASK_SENT_EVENT = False\n", 192 | "CELERY_STORE_ERRORS_EVEN_IF_IGNORED = False\n", 193 | "CELERY_TASK_PUBLISH_RETRY = True\n", 194 | "CELERY_TASK_PUBLISH_RETRY_POLICY = {'max_retries': 3, 'interval_max': 1, 'interval_step': 0.2, 'interval_start': 0}\n", 195 | "CELERY_TASK_RESULT_EXPIRES = 1 day, 0:00:00\n", 196 | "CELERY_TASK_SERIALIZER = 'pickle'\n", 197 | "CELERY_TIMEZONE = None\n", 198 | "CELERY_TRACK_STARTED = False\n", 199 | "CELERY_WORKER_DIRECT = False\n", 200 | "EMAIL_HOST = 'localhost'\n", 201 | "EMAIL_HOST_PASSWORD = None\n", 
202 | "EMAIL_HOST_USER = None\n", 203 | "EMAIL_PORT = 25\n", 204 | "EMAIL_TIMEOUT = 2\n", 205 | "EMAIL_USE_SSL = False\n", 206 | "EMAIL_USE_TLS = False\n", 207 | "SERVER_EMAIL = 'celery@localhost'\n" 208 | ] 209 | } 210 | ], 211 | "source": [ 212 | "listDefaultCeleryConfigurations()" 213 | ] 214 | }, 215 | { 216 | "cell_type": "markdown", 217 | "metadata": {}, 218 | "source": [ 219 | "---\n", 220 | "### 抓取 規劃檔案 內容" 221 | ] 222 | }, 223 | { 224 | "cell_type": "code", 225 | "execution_count": 6, 226 | "metadata": { 227 | "collapsed": true 228 | }, 229 | "outputs": [], 230 | "source": [ 231 | "def getExcelData(file):\n", 232 | " df = pd.read_excel(file)\n", 233 | " df.dropna(axis=0, how='all', inplace=True)\n", 234 | "\n", 235 | " return df" 236 | ] 237 | }, 238 | { 239 | "cell_type": "markdown", 240 | "metadata": {}, 241 | "source": [ 242 | "---\n", 243 | "#### Import Kombu classes" 244 | ] 245 | }, 246 | { 247 | "cell_type": "code", 248 | "execution_count": 7, 249 | "metadata": { 250 | "collapsed": true 251 | }, 252 | "outputs": [], 253 | "source": [ 254 | "def import_Kombu_classes(plan, summary):\n", 255 | " output = [] \n", 256 | " output.extend(['', '#{0:_^78}'.format('Import Kombu classes')])\n", 257 | " output.append('{0}'.format('from kombu import Exchange, Queue'))\n", 258 | " summary.extend(output)\n", 259 | " \n", 260 | " return summary" 261 | ] 262 | }, 263 | { 264 | "cell_type": "markdown", 265 | "metadata": {}, 266 | "source": [ 267 | "---\n", 268 | "#### CELERY_TIMEZONE & Misc." 269 | ] 270 | }, 271 | { 272 | "cell_type": "code", 273 | "execution_count": 8, 274 | "metadata": { 275 | "collapsed": false 276 | }, 277 | "outputs": [], 278 | "source": [ 279 | "def set_CELERY_TIMEZONE_Misc(plan, summary):\n", 280 | " # 自訂的\n", 281 | " CELERY_TIMEZONE = 'Asia/Taipei' \n", 282 | "\n", 283 | " output = [] \n", 284 | " output.extend(['', '#{0:_^78}'.format('CELERY_TIMEZONE & Misc.')])\n", 285 | " output.append(\"CELERY_TIMEZONE = '{0}'\".format(CELERY_TIMEZONE))\n", 286 | " output.append('CELERYD_POOL_RESTARTS = True')\n", 287 | " summary.extend(output)\n", 288 | " \n", 289 | " return summary" 290 | ] 291 | }, 292 | { 293 | "cell_type": "markdown", 294 | "metadata": {}, 295 | "source": [ 296 | "---\n", 297 | "#### BROKER_URL\n", 298 | "BROKER_URL = 'redis://netbrain.noip.me:6379/0'" 299 | ] 300 | }, 301 | { 302 | "cell_type": "code", 303 | "execution_count": 9, 304 | "metadata": { 305 | "collapsed": false 306 | }, 307 | "outputs": [], 308 | "source": [ 309 | "def set_BROKER_URL(plan, summary):\n", 310 | " BROKER_URL = plan.Broker.drop_duplicates()[0]\n", 311 | "\n", 312 | " output = [] \n", 313 | " output.extend(['', '#{0:_^78}'.format('BROKER_URL')])\n", 314 | " output.append(\"BROKER_URL = '{0}'\".format(BROKER_URL))\n", 315 | " summary.extend(output)\n", 316 | " \n", 317 | " return summary" 318 | ] 319 | }, 320 | { 321 | "cell_type": "markdown", 322 | "metadata": {}, 323 | "source": [ 324 | "---\n", 325 | "#### CELERY_RESULT_BACKEND\n", 326 | "CELERY_RESULT_BACKEND = 'redis://netbrain.noip.me:6379/1'" 327 | ] 328 | }, 329 | { 330 | "cell_type": "code", 331 | "execution_count": 10, 332 | "metadata": { 333 | "collapsed": false 334 | }, 335 | "outputs": [], 336 | "source": [ 337 | "def set_CELERY_RESULT_BACKEND(plan, summary):\n", 338 | " CELERY_RESULT_BACKEND = plan.Result_backend.drop_duplicates()[0]\n", 339 | "\n", 340 | " output = [] \n", 341 | " output.extend(['', '#{0:_^78}'.format('CELERY_RESULT_BACKEND')])\n", 342 | " output.append(\"CELERY_RESULT_BACKEND = 
'{0}'\".format(CELERY_RESULT_BACKEND))\n", 343 | " summary.extend(output)\n", 344 | " \n", 345 | " return summary" 346 | ] 347 | }, 348 | { 349 | "cell_type": "markdown", 350 | "metadata": {}, 351 | "source": [ 352 | "---\n", 353 | "#### CELERY_IMPORTS\n", 354 | "CELERY_IMPORTS = ('proj.tasks', )" 355 | ] 356 | }, 357 | { 358 | "cell_type": "code", 359 | "execution_count": 11, 360 | "metadata": { 361 | "collapsed": false 362 | }, 363 | "outputs": [], 364 | "source": [ 365 | "def set_CELERY_IMPORTS(plan, summary):\n", 366 | " Celery_app_tasks = plan[['Celery_app', 'Tasks_module']].drop_duplicates()\n", 367 | " modules = ('{0}.{1}'.format(Celery_app_tasks.ix[i, 'Celery_app'], Celery_app_tasks.ix[i, 'Tasks_module']) for i in range(len(Celery_app_tasks)))\n", 368 | " CELERY_IMPORTS = tuple(modules)\n", 369 | "\n", 370 | " output = [] \n", 371 | " output.extend(['', '#{0:_^78}'.format('CELERY_IMPORTS')])\n", 372 | " output.append('CELERY_IMPORTS = {0}'.format(CELERY_IMPORTS))\n", 373 | " summary.extend(output)\n", 374 | " \n", 375 | " return summary" 376 | ] 377 | }, 378 | { 379 | "cell_type": "markdown", 380 | "metadata": {}, 381 | "source": [ 382 | "---\n", 383 | "#### CELERY_QUEUES" 384 | ] 385 | }, 386 | { 387 | "cell_type": "markdown", 388 | "metadata": {}, 389 | "source": [ 390 | " CELERY_QUEUES = (\n", 391 | " Queue('feed_tasks', routing_key='feed.#'),\n", 392 | " Queue('regular_tasks', routing_key='task.#'),\n", 393 | " Queue('image_tasks', exchange=Exchange('mediatasks', type='direct'), routing_key='image.compress'),\n", 394 | " )\n", 395 | "\n", 396 | " CELERY_QUEUES = (\n", 397 | " Queue('default', Exchange('default'), routing_key='default'),\n", 398 | " Queue('videos', Exchange('media'), routing_key='media.video'),\n", 399 | " Queue('images', Exchange('media'), routing_key='media.image'),\n", 400 | " )" 401 | ] 402 | }, 403 | { 404 | "cell_type": "code", 405 | "execution_count": 12, 406 | "metadata": { 407 | "collapsed": false 408 | }, 409 | "outputs": [], 410 | "source": [ 411 | "def set_CELERY_QUEUES(plan, summary):\n", 412 | " queues = plan[['Queue', 'Exchange', 'Exchange_Type', 'Routing_Key']].drop_duplicates()\n", 413 | " output = [] \n", 414 | " output.extend(['', '#{0:_^78}'.format('CELERY_QUEUES')])\n", 415 | "\n", 416 | " output.append('CELERY_QUEUES = (')\n", 417 | "\n", 418 | " for i in range(len(queues)):\n", 419 | " output.append(\" Queue('{queue}', Exchange('{exchange}', type = '{exchange_Type}'), routing_key='{routing_key}'),\"\\\n", 420 | " .format(queue = queues.ix[i, 'Queue'],\n", 421 | " exchange = queues.ix[i, 'Exchange'],\n", 422 | " exchange_Type = queues.ix[i, 'Exchange_Type'], \n", 423 | " routing_key = queues.ix[i, 'Routing_Key'] \n", 424 | " )\n", 425 | " )\n", 426 | " output.append(')')\n", 427 | "\n", 428 | " summary.extend(output)\n", 429 | " \n", 430 | " return summary" 431 | ] 432 | }, 433 | { 434 | "cell_type": "markdown", 435 | "metadata": {}, 436 | "source": [ 437 | "---\n", 438 | "#### CELERY_ROUTES" 439 | ] 440 | }, 441 | { 442 | "cell_type": "markdown", 443 | "metadata": {}, 444 | "source": [ 445 | " CELERY_ROUTES = {\n", 446 | " 'feeds.tasks.import_feed': {\n", 447 | " 'queue': 'feed_tasks',\n", 448 | " 'routing_key': 'feed.import',\n", 449 | " },\n", 450 | " }" 451 | ] 452 | }, 453 | { 454 | "cell_type": "code", 455 | "execution_count": 13, 456 | "metadata": { 457 | "collapsed": false 458 | }, 459 | "outputs": [], 460 | "source": [ 461 | "def set_CELERY_ROUTES(plan, summary):\n", 462 | " routes = plan[['Celery_app', 'Tasks_module', 'Task', 
'Queue', 'Routing_Key']].drop_duplicates()\n", 463 | " output = [] \n", 464 | " output.extend(['', '#{0:_^78}'.format('CELERY_ROUTES')])\n", 465 | "\n", 466 | " output.append('CELERY_ROUTES = {')\n", 467 | "\n", 468 | " for i in range(len(routes)):\n", 469 | " output.append(\" '{app}.{module}.{task}': {{\\n 'queue': '{queue}',\\n 'routing_key': '{routing_key}',\\n }},\"\\\n", 470 | " .format(app = routes.ix[i, 'Celery_app'],\n", 471 | " module = routes.ix[i, 'Tasks_module'],\n", 472 | " task = routes.ix[i, 'Task'], \n", 473 | " queue = routes.ix[i, 'Queue'], \n", 474 | " routing_key = routes.ix[i, 'Routing_Key'])\n", 475 | " )\n", 476 | " output.append('}')\n", 477 | "\n", 478 | " summary.extend(output)\n", 479 | " \n", 480 | " return summary" 481 | ] 482 | }, 483 | { 484 | "cell_type": "markdown", 485 | "metadata": {}, 486 | "source": [ 487 | "---\n", 488 | "#### WORKERS" 489 | ] 490 | }, 491 | { 492 | "cell_type": "code", 493 | "execution_count": 14, 494 | "metadata": { 495 | "collapsed": false 496 | }, 497 | "outputs": [], 498 | "source": [ 499 | "def set_Workers_Scripts(plan, summary):\n", 500 | " workers = plan[['Node', 'Celery_app', 'Worker', 'Queue', 'Concurrency', 'Log_level']].drop_duplicates()\n", 501 | " output = []\n", 502 | " output.extend(['', '#{0:_^78}'.format('Workers Scripts')])\n", 503 | "\n", 504 | " for i in range(len(workers)):\n", 505 | " output.append('#[Node - {node}] : celery -A {app} worker -n {worker} -Q {queue} --concurrency={concurrency} --loglevel={loglevel}'\\\n", 506 | " .format(node = workers.ix[i, 'Node'],\n", 507 | " app = workers.ix[i, 'Celery_app'],\n", 508 | " worker = workers.ix[i, 'Worker'], \n", 509 | " queue = workers.ix[i, 'Queue'], \n", 510 | " concurrency = workers.ix[i, 'Concurrency'], \n", 511 | " loglevel = workers.ix[i, 'Log_level']\n", 512 | " )\n", 513 | " )\n", 514 | "\n", 515 | " summary.extend(output)\n", 516 | " \n", 517 | " return summary" 518 | ] 519 | }, 520 | { 521 | "cell_type": "markdown", 522 | "metadata": {}, 523 | "source": [ 524 | "---\n", 525 | "#### FLOWER" 526 | ] 527 | }, 528 | { 529 | "cell_type": "code", 530 | "execution_count": 15, 531 | "metadata": { 532 | "collapsed": false 533 | }, 534 | "outputs": [], 535 | "source": [ 536 | "def set_FLOWER(plan, summary):\n", 537 | " app = plan.Celery_app.drop_duplicates()[0]\n", 538 | " output = [] \n", 539 | " output.extend(['', '#{0:_^78}'.format('FLOWER')])\n", 540 | " \n", 541 | " output.append('#[Flower] : celery -A {app} flower'.format(app = app))\n", 542 | " summary.extend(output)\n", 543 | " \n", 544 | " return summary" 545 | ] 546 | }, 547 | { 548 | "cell_type": "markdown", 549 | "metadata": {}, 550 | "source": [ 551 | "## Summarize" 552 | ] 553 | }, 554 | { 555 | "cell_type": "code", 556 | "execution_count": 16, 557 | "metadata": { 558 | "collapsed": true 559 | }, 560 | "outputs": [], 561 | "source": [ 562 | "def summarizeConfigurations(planExcelFile):\n", 563 | " \n", 564 | " summary = []\n", 565 | " \n", 566 | "# listDefaultCeleryConfigurations()\n", 567 | "\n", 568 | " plan = getExcelData(planExcelFile)\n", 569 | " \n", 570 | " import_Kombu_classes(plan, summary)\n", 571 | " set_CELERY_TIMEZONE_Misc(plan, summary)\n", 572 | " set_BROKER_URL(plan, summary)\n", 573 | " set_CELERY_RESULT_BACKEND(plan, summary)\n", 574 | " set_CELERY_IMPORTS(plan, summary)\n", 575 | " set_CELERY_QUEUES(plan, summary)\n", 576 | " set_CELERY_ROUTES(plan, summary)\n", 577 | " \n", 578 | " set_Workers_Scripts(plan, summary)\n", 579 | " set_FLOWER(plan, summary)\n", 580 | " \n", 581 | " 
return summary" 582 | ] 583 | }, 584 | { 585 | "cell_type": "markdown", 586 | "metadata": {}, 587 | "source": [ 588 | "## Output Configuration File" 589 | ] 590 | }, 591 | { 592 | "cell_type": "code", 593 | "execution_count": 17, 594 | "metadata": { 595 | "collapsed": false 596 | }, 597 | "outputs": [], 598 | "source": [ 599 | "def writeConfigurationFile(summary, file = 'celeryconfig.py'):\n", 600 | " with open(file, 'w', encoding = 'utf8') as f:\n", 601 | " for line in summary: f.write(line + '\\n')" 602 | ] 603 | }, 604 | { 605 | "cell_type": "code", 606 | "execution_count": 18, 607 | "metadata": { 608 | "collapsed": true 609 | }, 610 | "outputs": [], 611 | "source": [ 612 | "def genConfigFile():\n", 613 | " # 指定規劃檔案\n", 614 | " folder = os.getcwd()\n", 615 | " files = [file for file in os.listdir(folder) if file.rpartition('.')[2] in ('xls','xlsx')] \n", 616 | " \n", 617 | " if len(files) == 1 : \n", 618 | " file = os.path.join(folder, files[0])\n", 619 | " summary = summarizeConfigurations(file)\n", 620 | " for line in summary: print (line) \n", 621 | " writeConfigurationFile(summary)\n", 622 | " \n", 623 | " else:\n", 624 | " print('There must be one and only one plan Excel file.') " 625 | ] 626 | }, 627 | { 628 | "cell_type": "markdown", 629 | "metadata": {}, 630 | "source": [ 631 | "## Main" 632 | ] 633 | }, 634 | { 635 | "cell_type": "code", 636 | "execution_count": 19, 637 | "metadata": { 638 | "collapsed": false, 639 | "scrolled": true 640 | }, 641 | "outputs": [ 642 | { 643 | "name": "stdout", 644 | "output_type": "stream", 645 | "text": [ 646 | "\n", 647 | "#_____________________________Import Kombu classes_____________________________\n", 648 | "from kombu import Exchange, Queue\n", 649 | "\n", 650 | "#___________________________CELERY_TIMEZONE & Misc.____________________________\n", 651 | "CELERY_TIMEZONE = 'Asia/Taipei'\n", 652 | "CELERYD_POOL_RESTARTS = True\n", 653 | "\n", 654 | "#__________________________________BROKER_URL__________________________________\n", 655 | "BROKER_URL = 'redis://weilin.noip.me:6379/0'\n", 656 | "\n", 657 | "#____________________________CELERY_RESULT_BACKEND_____________________________\n", 658 | "CELERY_RESULT_BACKEND = 'redis://weilin.noip.me:6379/1'\n", 659 | "\n", 660 | "#________________________________CELERY_IMPORTS________________________________\n", 661 | "CELERY_IMPORTS = ('word_count.tasks',)\n", 662 | "\n", 663 | "#________________________________CELERY_QUEUES_________________________________\n", 664 | "CELERY_QUEUES = (\n", 665 | " Queue('word_counting', Exchange('celery', type = 'direct'), routing_key='word_counting'),\n", 666 | ")\n", 667 | "\n", 668 | "#________________________________CELERY_ROUTES_________________________________\n", 669 | "CELERY_ROUTES = {\n", 670 | " 'word_count.tasks.mapper': {\n", 671 | " 'queue': 'word_counting',\n", 672 | " 'routing_key': 'word_counting',\n", 673 | " },\n", 674 | "}\n", 675 | "\n", 676 | "#_______________________________Workers Scripts________________________________\n", 677 | "#[Node - localhost] : celery -A word_count worker -n worker1.%h -Q word_counting --concurrency=10 --loglevel=INFO\n", 678 | "\n", 679 | "#____________________________________FLOWER____________________________________\n", 680 | "#[Flower] : celery -A word_count flower\n" 681 | ] 682 | } 683 | ], 684 | "source": [ 685 | "if __name__ == '__main__':\n", 686 | " genConfigFile()" 687 | ] 688 | }, 689 | { 690 | "cell_type": "code", 691 | "execution_count": null, 692 | "metadata": { 693 | "collapsed": true 694 | }, 695 | 
"outputs": [], 696 | "source": [] 697 | } 698 | ], 699 | "metadata": { 700 | "kernelspec": { 701 | "display_name": "Python 3", 702 | "language": "python", 703 | "name": "python3" 704 | }, 705 | "language_info": { 706 | "codemirror_mode": { 707 | "name": "ipython", 708 | "version": 3 709 | }, 710 | "file_extension": ".py", 711 | "mimetype": "text/x-python", 712 | "name": "python", 713 | "nbconvert_exporter": "python", 714 | "pygments_lexer": "ipython3", 715 | "version": "3.5.1" 716 | } 717 | }, 718 | "nbformat": 4, 719 | "nbformat_minor": 0 720 | } 721 | -------------------------------------------------------------------------------- /celery_projects/IoT_as_Brain.md: -------------------------------------------------------------------------------- 1 | 2 | # 使用 Celery 於 Docker Swarm 之上 建構類似 Bluemix 的 IoT 平台 3 | ## Part II: IoT as Brain 4 | 5 | Wei Lin 6 | 20160128 7 | 8 | ## 緣起 9 | [上次](https://github.com/Wei1234c/CeleryOnDockerSwarm/blob/master/celery_projects/CeleryOnDockerSwarm.md) 在 2 台 Raspberry Pi 上面架了一個 Docker Swarm,然後在 Docker Swarm 裡面使用 Celery (distributed task queue) 的機制,利用 8 個 containers 跑 40 個 processes 執行 "Word Count" 程式,驗證 Celery + Docker Swarm 是可行的。 10 | 11 | 做上述實驗的原因,是為了想利用 Celery + Docker Swarm 快速的建構私有的 類似 [Bluemix](https://console.ng.bluemix.net/) 的 IoT 平台,讓其上的 devices 共同組成一個分散式的協同運算系統,視整個 IoT(Internet of Things) 為一體。 12 | 13 | Celery 所採用的機制,簡單來說就是: producer 發出要求運算的訊息到 queue 中排隊,眾多的 workers 紛紛到 queue 去撿出訊息來進行處理。類比於 Bluemix 和 [MQTT](http://cheng-min-i-taiwan.blogspot.tw/2015/03/raspberry-pimqtt-android.html): producer 就如同 publisher,queue 如同 topic 或 channel,consumer 如同 subscriber,我覺得兩者是十分類似的。 14 | 15 | 在 Bluemix 的架構中,IBM 把 clients 區分為 device 和 application 兩種角色,devices 扮演 publisher,負責發送資料到平台上,然後扮演 subscriber 的 applications 會收到資料進行運算處理,資料的產生與運算分離,這樣的設計顯得理所當然,Bluemix 所提供的功能也很完整且強大。 16 | 17 | 然而還有另外一種可能,或許 **資料的儲存 與 運算,其實可以是同一件事情**,就如同我們的大腦,資料的儲存與運算都是由 神經**網路** 來完成的。 18 | 19 | 我們可以把一個 device 視為一個 neuron,讓 IoT 中眾多的 devices (neurons) 互相連結,經過訓練的 **IoT網路** 就可以自行對環境做出反應,並不需要 集中式的 "邏輯 applications"。但是,這樣的 IoT 平台要如何設計呢? 
In the Bluemix architecture, IBM divides clients into two roles, devices and applications: devices act as publishers, pushing data onto the platform, while applications act as subscribers, receiving the data and doing the computation. Separating data generation from computation is a natural design, and the features Bluemix provides are complete and powerful.

There is another possibility, though: perhaps **storing data and computing on it can be the same thing**, just as in our brains, where both storage and computation are carried out by the neural **network**.

We can treat a device as a neuron and let the many devices (neurons) in the IoT connect to one another; a trained **IoT network** could then react to its environment on its own, with no need for centralized "logic applications". But how should such an IoT platform be designed?

In the history of **artificial neural networks**, the **[XOR network](https://en.wikipedia.org/wiki/Feedforward_neural_network#Multi-layer_perceptron)** is a famous case, so in this experiment we will try building an XOR network out of IoT devices. If this works, it should be possible to assemble more complex things as well.
![XOR network](https://upload.wikimedia.org/wikipedia/commons/thumb/7/7b/XOR_perceptron_net.png/250px-XOR_perceptron_net.png "XOR network (source: Wikipedia)")

## Experiment design and principles:
- Labels of the neurons in the figure above:
    - input layer: x, y
    - hidden layer: h1, h2, h3
    - output layer: z
- Two Raspberry Pis form one Docker Swarm.
- The Swarm runs 6 containers, and **each container plays the role of one device (neuron)**.
- These 6 devices (neurons) may be spread across different physical hosts, meaning they could be deployed anywhere on the Internet.
- The connections and weights between neurons are as shown in the figure. **Configuring the connections between neurons is really just configuring publisher/subscriber relationships.**
- **A message queue stands in for an MQTT "topic", and every neuron has its own dedicated message queue.** For example:
    - neuron h2 has its own message queue "neuron_h2"; if **neuron x in the input layer wants to send a message to neuron h2, it must send it to the message queue "neuron_h2"**, and neuron h2 will receive it.
    - in this example, **neuron x plays the publisher and neuron h2 the subscriber; neuron h1 is also a subscriber of neuron x**.
    - publisher/subscriber relationships can be many-to-many.
- Suppose neurons x and y in the figure are each wired to their own sensor receiving 0/1 data; every device (neuron) can attach several sensors of its own to sense the environment.
- The output of neuron z must equal XOR(x, y) at all times (see the sanity check right after this list).
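Before wiring real containers together, it is worth checking on paper that the weights and thresholds configured later in this article really compute XOR. A small stand-alone sketch, assuming each neuron is a step unit that fires (outputs 1) exactly when its weighted input sum reaches its threshold, which is how `IoT/neuron.py` decides whether to fire:

```python
def step(weighted_sum, threshold):
    # fire (1) when the weighted input sum reaches the threshold
    return 1 if weighted_sum >= threshold else 0

def xor_net(x, y):
    h1 = step(1 * x, 0.9)            # relays x
    h2 = step(1 * x + 1 * y, 1.9)    # fires only when x AND y
    h3 = step(1 * y, 0.9)            # relays y
    # h2 inhibits z (weight -2), so z fires for "x OR y" but not "x AND y"
    return step(1 * h1 - 2 * h2 + 1 * h3, 0.9)

for x in (0, 1):
    for y in (0, 1):
        print('XOR({0}, {1}) = {2}'.format(x, y, xor_net(x, y)))
# prints 0, 1, 1, 0 -- the XOR truth table
```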
## Implementation steps:

### Build the Docker Swarm
I had already followed this [post](https://www.facebook.com/groups/docker.taipei/permalink/1704032656498757) to build a Docker Swarm from two RPi2s.

#### The Swarm contains two Docker machines:
- host rpi202 (192.168.0.114) acts as the Swarm Manager; its Docker machine name is master01
- host rpi201 (192.168.0.109) acts as a Swarm Node; its Docker machine name is node01


```
HypriotOS: pi@rpi202 in ~
$ docker-machine ls
NAME       ACTIVE   DRIVER    STATE     URL                        SWARM
master01            hypriot   Running   tcp://192.168.0.114:2376   master01 (master)
node01              hypriot   Running   tcp://192.168.0.109:2376   master01
HypriotOS: pi@rpi202 in ~
$


# Nodes in the Swarm:

HypriotOS: pi@rpi202 in /data/celery_projects
$ docker $(docker-machine config --swarm master01) info
Containers: 4
Images: 51
Role: primary
Strategy: spread
Filters: health, port, dependency, affinity, constraint
Nodes: 2
 master01: 192.168.0.114:2376
  └ Status: Healthy
  └ Containers: 3
  └ Reserved CPUs: 0 / 4
  └ Reserved Memory: 0 B / 972 MiB
  └ Labels: executiondriver=native-0.2, kernelversion=4.1.8-hypriotos-v7+, operatingsystem=Raspbian GNU/Linux 8 (jessie), provider=hypriot, storagedriver=overlay
 node01: 192.168.0.109:2376
  └ Status: Healthy
  └ Containers: 1
  └ Reserved CPUs: 0 / 4
  └ Reserved Memory: 0 B / 972 MiB
  └ Labels: executiondriver=native-0.2, kernelversion=4.1.8-hypriotos-v7+, operatingsystem=Raspbian GNU/Linux 8 (jessie), provider=hypriot, storagedriver=overlay
CPUs: 8
Total Memory: 1.899 GiB
Name: fe30da0875d6
HypriotOS: pi@rpi202 in /data/celery_projects
$
```

### Copy celeryconfig.py, start_workers.sh, and the IoT folder into /data/celery_projects on both hosts

This can be done with SCP; see: http://www.hypexr.org/linux_scp_help.php

For example:

    $ scp -r /data/celery_projects root@rpi201:/data/


```
# Swarm manager

HypriotOS: pi@rpi202 in /data/celery_projects
$ ll
total 20
drwxr-xr-x 3 999 root 4096 Jan 28 10:08 ./
drwxr-xr-x 3 999 root 4096 Jan 28 11:02 ../
-rw-r--r-- 1 999 root 1469 Jan 28 10:48 celeryconfig.py
drwxr-xr-x 3 999 root 4096 Jan 28 10:08 IoT/
-rwxr-xr-x 1 999 root  963 Jan 28 10:28 start_workers.sh*   <-- the script that starts the containers; only needed on the Swarm Manager
HypriotOS: pi@rpi202 in /data/celery_projects
$


# Swarm node
HypriotOS: pi@rpi201 in /data/celery_projects
$ ll
total 16
drwxr-xr-x 3 root root 4096 Jan 28 12:54 ./
drwxr-xr-x 3 999  root 4096 Jan 25 22:55 ../
-rw-r--r-- 1 root root 1250 Jan 28 11:27 celeryconfig.py
drwxr-xr-x 3 root root 4096 Jan 28 12:54 IoT/
HypriotOS: pi@rpi201 in /data/celery_projects
$
```

#### Contents of start_workers.sh


```sh
# ./start_workers.sh

PROJECT='IoT'   # project name
CONCURRENCY=1   # number of subprocesses per worker



echo "Starting Redis, Flower _________________________________________________"
eval $(docker-machine env master01)

docker run -dit -p 6379:6379 --name=redis -v /data:/data hypriot/rpi-redis
docker run -d -p 5555:5555 --name=flower --volume=/data/celery_projects:/celery_projects wei1234c/celery_armv7 /bin/sh -c "cd /celery_projects && celery -A ${PROJECT} flower"



echo "Starting Celery cluster containers _________________________________________________"
eval $(docker-machine env --swarm master01)

for id in 'x' 'y' 'h1' 'h2' 'h3' 'z'
do
    docker run -d --name=neuron_${id} --hostname=neuron_${id} --volume=/data/celery_projects:/celery_projects wei1234c/celery_armv7 /bin/sh -c "cd /celery_projects && celery -A ${PROJECT} worker -n %h -Q neuron_${id} --concurrency=${CONCURRENCY} --loglevel=INFO"
done
```

### Run start_workers.sh, which does the following:
- creates the broker Celery needs, using Redis
- creates a Flower container for monitoring
- creates and deploys, through the Swarm Manager, the Celery worker containers that represent the neurons


```
HypriotOS: pi@rpi202 in /data/celery_projects
$ ./start_workers.sh
Starting Redis, Flower _________________________________________________
cb706da89689211601b931e88921df1564c939a2cdb3de7bda0f4fa878424553
e136f3f443a46b1a1082b26d367c6c146327d54a3eb9f16aa907dd48bce38a47
Starting Celery cluster containers _________________________________________________
684e3d7b84bfa4713a972d434507473d33adcbaad092e32518a291f7e095c86f
8608740a5a86977f82dc2943feb315575a2ca9e38ebb1a2c73842567c87a865d
1b5180f0284c8c2aa723c63b4297e57fd35cc5a2c0b1ef5dded3bc3026202f61
a6679a9bb651dc735725ecd7b4793f27d598efdd4179c008afaae1e4322b0a42
3a59323ae8f61e76e80595a67e98293c0d42f96b1ac47205aff48d863b321aba
6c0d6a8bb590961e9947bf1d15587f1c0017b114a466fc7061d2fe888740026e
HypriotOS: pi@rpi202 in /data/celery_projects
$
```
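As a quick smoke test (my addition, not part of the original procedure), every worker can be asked to report its container hostname over its own queue, reusing the getHostname task that IoT/neuron.py already defines. Run it from the celery_projects folder on a machine that can reach the Redis broker:

```python
from IoT.neuron import getHostname

# Each neuron listens only on its own queue, so the routing_key selects the container.
for neuron in ['neuron_x', 'neuron_y', 'neuron_h1', 'neuron_h2', 'neuron_h3', 'neuron_z']:
    print(neuron, '->', getHostname.apply_async(routing_key = neuron).get())
```

If all six names come back, the queues, the broker, and the workers are wired up correctly.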
174 | 
175 | #### There are 6 neurons in total
176 | Neurons x, y, z, h1, h2 were all deployed onto the Swarm node "node01",
177 | 
178 | 
179 | ```
180 | HypriotOS: pi@rpi201 in ~
181 | $ docker ps
182 | CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
183 | 6c0d6a8bb590 wei1234c/celery_armv7 "/bin/sh -c 'cd /cele" About a minute ago Up About a minute 5555/tcp neuron_z
184 | a6679a9bb651 wei1234c/celery_armv7 "/bin/sh -c 'cd /cele" About a minute ago Up About a minute 5555/tcp neuron_h2
185 | 1b5180f0284c wei1234c/celery_armv7 "/bin/sh -c 'cd /cele" About a minute ago Up About a minute 5555/tcp neuron_h1
186 | 8608740a5a86 wei1234c/celery_armv7 "/bin/sh -c 'cd /cele" About a minute ago Up About a minute 5555/tcp neuron_y
187 | 684e3d7b84bf wei1234c/celery_armv7 "/bin/sh -c 'cd /cele" 2 minutes ago Up 2 minutes 5555/tcp neuron_x
188 | ef0c519ae7da hypriot/rpi-swarm "/swarm join --advert" 4 minutes ago Up 4 minutes 2375/tcp swarm-agent
189 | HypriotOS: pi@rpi201 in ~
190 | $
191 | ```
192 | 
193 | Only h3 was placed on the Swarm manager machine "master01".
194 | 
195 | 
196 | ```
197 | HypriotOS: pi@rpi202 in /data/celery_projects
198 | $ docker ps
199 | CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
200 | 3a59323ae8f6 wei1234c/celery_armv7 "/bin/sh -c 'cd /cele" 35 seconds ago Up 32 seconds 5555/tcp neuron_h3
201 | e136f3f443a4 wei1234c/celery_armv7 "/bin/sh -c 'cd /cele" About a minute ago Up About a minute 0.0.0.0:5555->5555/tcp flower
202 | cb706da89689 hypriot/rpi-redis "/entrypoint.sh redis" About a minute ago Up About a minute 0.0.0.0:6379->6379/tcp redis
203 | 966928d0a37c hypriot/rpi-swarm "/swarm join --advert" 3 minutes ago Up 3 minutes 2375/tcp swarm-agent
204 | b01b05cbe323 hypriot/rpi-swarm "/swarm manage --tlsv" 4 minutes ago Up 4 minutes 2375/tcp, 0.0.0.0:3376->3376/tcp swarm-agent-master
205 | ab78ab3e5476 nimblestratus/rpi-consul "/bin/start -server -" 4 minutes ago Up 4 minutes 53/udp, 8300-8302/tcp, 8400/tcp, 0.0.0.0:8500->8500/tcp, 8301-8302/udp consul
206 | HypriotOS: pi@rpi202 in /data/celery_projects
207 | $
208 | ```
209 | 
210 | #### The whole picture, from the Swarm Manager's point of view
211 | 
212 | 
213 | ```
214 | HypriotOS: pi@rpi202 in /data/celery_projects
215 | $ docker $(docker-machine config --swarm master01) ps
216 | CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
217 | 6c0d6a8bb590 wei1234c/celery_armv7 "/bin/sh -c 'cd /cele" 2 minutes ago Up 2 minutes 5555/tcp node01/neuron_z
218 | 3a59323ae8f6 wei1234c/celery_armv7 "/bin/sh -c 'cd /cele" 2 minutes ago Up 2 minutes 5555/tcp master01/neuron_h3
219 | a6679a9bb651 wei1234c/celery_armv7 "/bin/sh -c 'cd /cele" 2 minutes ago Up 2 minutes 5555/tcp node01/neuron_h2
220 | 1b5180f0284c wei1234c/celery_armv7 "/bin/sh -c 'cd /cele" 2 minutes ago Up 2 minutes 5555/tcp node01/neuron_h1
221 | 8608740a5a86 wei1234c/celery_armv7 "/bin/sh -c 'cd /cele" 3 minutes ago Up 3 minutes 5555/tcp node01/neuron_y
222 | 684e3d7b84bf wei1234c/celery_armv7 "/bin/sh -c 'cd /cele" 3 minutes ago Up 3 minutes 5555/tcp node01/neuron_x
223 | e136f3f443a4 wei1234c/celery_armv7 "/bin/sh -c 'cd /cele" 3 minutes ago Up 3 minutes 192.168.0.114:5555->5555/tcp master01/flower
224 | cb706da89689 hypriot/rpi-redis "/entrypoint.sh redis" 3 minutes ago Up 3 minutes 192.168.0.114:6379->6379/tcp master01/redis
225 | ef0c519ae7da hypriot/rpi-swarm "/swarm join --advert" 5 minutes ago Up 5 minutes 2375/tcp node01/swarm-agent
226 | 966928d0a37c hypriot/rpi-swarm "/swarm join --advert" 5 minutes ago Up 5 minutes 2375/tcp master01/swarm-agent
227 | b01b05cbe323 hypriot/rpi-swarm "/swarm manage --tlsv" 5 minutes ago Up 5 minutes 2375/tcp, 192.168.0.114:3376->3376/tcp master01/swarm-agent-master
228 | ab78ab3e5476 nimblestratus/rpi-consul "/bin/start -server -" 6 minutes ago Up 6 minutes 53/udp, 8300-8302/tcp, 8301-8302/udp, 8400/tcp, 192.168.0.114:8500->8500/tcp master01/consul
229 | HypriotOS: pi@rpi202 in /data/celery_projects
230 | $
231 | ```
232 | 
233 | ---
234 | 
235 | ### Start issuing commands to configure the network
236 | 
237 | 
238 | ```python
239 | from IoT.neuron import * 
240 | from time import sleep
241 | import pandas as pd
242 | from pandas import DataFrame
243 | 
244 | pd.options.display.max_colwidth = 400
245 | REFRACTORY_PERIOD = 0.1 # 0.1 seconds
246 | ```
247 | 
248 | #### Note:
249 | The Python code defining a neuron's behavior lives in celery_projects/IoT/neuron.py; it is loaded when a Celery worker starts.
250 | neuron.py records a neuron's makeup and its state at any given moment in a pickle file, including its connections, weights, inputs, output...
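The behaviour described in this note is all the write-up tells us about neuron.py; the file itself is not reproduced here. Purely as an illustration, a minimal sketch along those lines might look as follows. Everything beyond the task names and the queue-per-neuron routing convention (the state-file layout, the helper names, the HOSTNAME assumption) is guesswork, not the repository's actual code:

```python
# HYPOTHETICAL sketch of celery_projects/IoT/neuron.py. The real file is not
# shown in this document; only the task names and the routing convention
# (one queue per neuron, routing_key == queue name) come from the text above.
import os
import pickle
from datetime import datetime

from IoT.celery import app   # the Celery app created in IoT/celery.py

ME = os.environ.get('HOSTNAME', 'neuron')   # container hostname == neuron name
STATE_FILE = ME + '.pickle'                 # assumed per-neuron state file
LOG_FILE = ME + '.log.pickle'               # assumed per-neuron log file


def _load(path, default):
    # read a pickle file, falling back to a default when it does not exist yet
    if os.path.exists(path):
        with open(path, 'rb') as f:
            return pickle.load(f)
    return default


def _save(path, obj):
    with open(path, 'wb') as f:
        pickle.dump(obj, f)


def _log(message):
    log = _load(LOG_FILE, [])
    log.append((datetime.now(), ME, message))
    _save(LOG_FILE, log)


@app.task
def getConfig():
    return _load(STATE_FILE, {})


@app.task
def getLog():
    return _load(LOG_FILE, [])


@app.task
def emptyLog():
    _save(LOG_FILE, [])


@app.task
def addConnection(subscriber):
    # "connecting" two neurons just records which queue to publish to later
    state = _load(STATE_FILE, {})
    state.setdefault('connections', set()).add(subscriber)
    _save(STATE_FILE, state)


@app.task
def setWeight(source, weight):
    state = _load(STATE_FILE, {})
    state.setdefault('weights', {})[source] = weight
    _save(STATE_FILE, state)


@app.task
def setThreshold(threshold):
    state = _load(STATE_FILE, {})
    state['threshold'] = threshold
    _save(STATE_FILE, state)


@app.task
def fire():
    # raise our own output, then "kick" every subscriber via its queue
    state = _load(STATE_FILE, {})
    _log('%s fires.' % ME)
    for subscriber in state.get('connections', set()):
        kick.apply_async([ME], routing_key = subscriber)


@app.task
def kick(source):
    # a presynaptic neuron kicked us: record its input, re-evaluate, maybe fire
    state = _load(STATE_FILE, {})
    _log('%s is kicking %s.' % (source, ME))
    state.setdefault('inputs', {})[source] = {'value': 1, 'kick_time': datetime.now()}
    _save(STATE_FILE, state)
    total = sum(state.get('weights', {}).get(s, 0) * i['value']
                for s, i in state.get('inputs', {}).items())
    if total >= state.get('threshold', 0.9):
        fire.apply_async(routing_key = ME)
```

The structural point is the last few lines of kick(): a neuron only ever reacts to messages arriving on its own queue and publishes onward by routing key, so the "network" exists purely as these publisher / subscriber mappings.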
251 | 
252 | 
253 | ```python
254 | # There are 6 neurons in total; each represents a Docker container, deployed onto an arbitrary machine in the Docker Swarm.
255 | neurons = ['neuron_x', 'neuron_y', 'neuron_h1', 'neuron_h2', 'neuron_h3', 'neuron_z'] 
256 | 
257 | # print the config of one neuron
258 | def printConfig(neuron):
259 |     print('{0:_^78}\n {1}\n'.format(neuron + " config:", getConfig.apply_async(routing_key = neuron).get()))
260 | 
261 | # clear the logs of all neurons
262 | def emptyLogs():
263 |     for neuron in neurons:
264 |         emptyLog.apply_async(routing_key = neuron)
265 | 
266 | # collect the logs: merge the logs of all neurons into a single pandas DataFrame
267 | def mergeLogs():
268 |     logs = []
269 | 
270 |     for neuron in neurons:
271 |         currentLog = getLog.apply_async(routing_key = neuron).get()
272 |         logs += currentLog
273 | 
274 |     df = DataFrame(list(logs), columns = ['time', 'neuron', 'message'])
275 |     df.set_index('time', inplace = True)
276 |     df.sort_index(inplace = True)
277 | 
278 |     return df
279 | ```
280 | 
281 | ### Clear the log files
282 | 
283 | 
284 | ```python
285 | # clear the logs of all neurons
286 | emptyLogs()
287 | ```
288 | 
289 | ### Configure connections
290 | #### Configuring the connections between neurons really just means configuring publisher / subscriber relationships
291 | 
292 | 
293 | ```python
294 | # input layer fan-out
295 | # neuron x
296 | addConnection.apply_async(['neuron_h1'], routing_key = 'neuron_x') # add a neuron_x -> neuron_h1 connection
297 | addConnection.apply_async(['neuron_h2'], routing_key = 'neuron_x')
298 | # neuron y
299 | addConnection.apply_async(['neuron_h2'], routing_key = 'neuron_y') # add a neuron_y -> neuron_h2 connection
300 | addConnection.apply_async(['neuron_h3'], routing_key = 'neuron_y')
301 | 
302 | # hidden layer fan-out
303 | addConnection.apply_async(['neuron_z'], routing_key = 'neuron_h1') # add a neuron_h1 -> neuron_z connection
304 | addConnection.apply_async(['neuron_z'], routing_key = 'neuron_h2')
305 | addConnection.apply_async(['neuron_z'], routing_key = 'neuron_h3')
306 | ```
307 | 
308 | 
309 | 
310 | 
311 | 
312 | 
313 | 
314 | 
315 | ### Configure weights
316 | 
317 | 
318 | ```python
319 | # hidden layer
320 | setWeight.apply_async(['neuron_x', 1], routing_key = 'neuron_h1') # set the weight of neuron_x -> neuron_h1 to 1
321 | setWeight.apply_async(['neuron_x', 1], routing_key = 'neuron_h2')
322 | setWeight.apply_async(['neuron_y', 1], routing_key = 'neuron_h2') # set the weight of neuron_y -> neuron_h2 to 1
323 | setWeight.apply_async(['neuron_y', 1], routing_key = 'neuron_h3')
324 | 
325 | # output layer
326 | setWeight.apply_async(['neuron_h1', 1], routing_key = 'neuron_z')
327 | setWeight.apply_async(['neuron_h2', -2], routing_key = 'neuron_z') # set the weight of neuron_h2 -> neuron_z to -2 (inhibitory)
328 | setWeight.apply_async(['neuron_h3', 1], routing_key = 'neuron_z')
329 | ```
330 | 
331 | 
332 | 
333 | 
334 | 
335 | 
336 | 
337 | 
338 | ### Configure thresholds
339 | 
340 | 
341 | ```python
342 | # input layer
343 | setThreshold.apply_async([0.9], routing_key = 'neuron_x') # set the threshold of neuron_x to 0.9
344 | setThreshold.apply_async([0.9], routing_key = 'neuron_y')
345 | 
346 | # hidden layer
347 | setThreshold.apply_async([0.9], routing_key = 'neuron_h1')
348 | setThreshold.apply_async([1.9], routing_key = 'neuron_h2') # set the threshold of neuron_h2 to 1.9
349 | setThreshold.apply_async([0.9], routing_key = 'neuron_h3')
350 | 
351 | # output layer
352 | setThreshold.apply_async([0.9], routing_key = 'neuron_z') # set the threshold of neuron_z to 0.9
353 | ```
354 | 
355 | 
356 | 
357 | 
358 | 
359 | 
360 | 
361 | 
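Before poking the live network, the numbers configured above can be sanity-checked offline. The following plain-Python replay of the figure's arithmetic is an addition to the original notebook; it contacts nothing and simply re-states the weights and thresholds just set:

```python
# Offline check that the configured weights / thresholds implement XOR.
def step(weighted_sum, threshold):
    # all-or-none activation: 1 if the weighted input sum reaches the threshold
    return 1 if weighted_sum >= threshold else 0

def xor_network(x, y):
    h1 = step(1 * x,         0.9)   # h1 listens to x only
    h2 = step(1 * x + 1 * y, 1.9)   # h2 only fires when x and y are both 1
    h3 = step(1 * y,         0.9)   # h3 listens to y only
    return step(1 * h1 - 2 * h2 + 1 * h3, 0.9)   # h2 inhibits z with weight -2

for x in (0, 1):
    for y in (0, 1):
        print('XOR(%d, %d) = %d' % (x, y, xor_network(x, y)))
# prints 0, 1, 1, 0 for the four input pairs, i.e. exactly XOR
```

Note that this static check ignores message timing; the live runs below show that timing matters.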
362 | ### Simulate sensor input, then check each neuron's output state
363 | After a neuron fires, if there is no sustained input to keep it in the firing state, its output is guaranteed to be 0 again 5 seconds later.
364 | 
365 | 
366 | ```python
367 | ### baseline: no sensor input is simulated yet, so no neuron should fire
368 | emptyLogs()                 # clear logs
369 | sleep(REFRACTORY_PERIOD)    # wait for the potentials to settle back to zero
370 | mergeLogs()                 # merge the logs
371 | ```
372 | 
373 | 
374 | 
375 | 
377 | 378 | 379 | 380 | 381 | 382 | 383 | 384 | 385 | 386 | 387 | 388 | 389 | 390 | 391 | 392 |
neuronmessage
time
393 |
394 | 395 | 396 | 397 | 398 | ```python 399 | ### 模擬 sensor input,強迫 neuron x 或 y ouput 1 400 | emptyLogs() # 清除 logs 401 | sleep(REFRACTORY_PERIOD) # 等電位歸零 402 | fire.apply_async(routing_key = 'neuron_x') # force neuron x output 1 and fire. 403 | mergeLogs() # 彙整 logs 404 | ``` 405 | 406 | 407 | 408 | 409 |
410 | 411 | 412 | 413 | 414 | 415 | 416 | 417 | 418 | 419 | 420 | 421 | 422 | 423 | 424 | 425 | 426 | 427 | 428 | 429 | 430 | 431 | 432 | 433 | 434 | 435 | 436 | 437 | 438 | 439 | 440 | 441 | 442 | 443 | 444 | 445 | 446 | 447 | 448 | 449 | 450 | 451 | 452 | 453 | 454 | 455 | 456 | 457 | 458 | 459 | 460 | 461 | 462 | 463 | 464 | 465 | 466 | 467 | 468 | 469 | 470 |
neuronmessage
time
2016-03-10 20:51:18.489509neuron_xneuron_x fires.
2016-03-10 20:51:18.493309neuron_xSetting output of neuron_x to ACTION_POTENTIAL.
2016-03-10 20:51:18.559939neuron_h1neuron_x is kicking neuron_h1.
2016-03-10 20:51:18.581558neuron_h2neuron_x is kicking neuron_h2.
2016-03-10 20:51:18.587120neuron_h1neuron_h1 fires.
2016-03-10 20:51:18.595436neuron_h1Setting output of neuron_h1 to ACTION_POTENTIAL.
2016-03-10 20:51:18.654499neuron_zneuron_h1 is kicking neuron_z.
2016-03-10 20:51:18.689138neuron_zneuron_z fires.
2016-03-10 20:51:18.692448neuron_zSetting output of neuron_z to ACTION_POTENTIAL.
471 |
472 | 473 | 474 | 475 | 476 | ```python 477 | ### 模擬 sensor input,強迫 neuron x 或 y ouput 1 478 | emptyLogs() # 清除 logs 479 | sleep(REFRACTORY_PERIOD) # 等電位歸零 480 | fire.apply_async(routing_key = 'neuron_y') # force neuron y output 1 and fire. 481 | mergeLogs() # 彙整 logs 482 | ``` 483 | 484 | 485 | 486 | 487 |
488 | 489 | 490 | 491 | 492 | 493 | 494 | 495 | 496 | 497 | 498 | 499 | 500 | 501 | 502 | 503 | 504 | 505 | 506 | 507 | 508 | 509 | 510 | 511 | 512 | 513 | 514 | 515 | 516 | 517 | 518 | 519 | 520 | 521 | 522 | 523 | 524 | 525 | 526 | 527 | 528 | 529 | 530 | 531 | 532 | 533 | 534 | 535 | 536 | 537 | 538 | 539 | 540 | 541 | 542 | 543 | 544 | 545 | 546 | 547 | 548 |
neuronmessage
time
2016-03-10 20:51:21.721563neuron_yneuron_y fires.
2016-03-10 20:51:21.726665neuron_ySetting output of neuron_y to ACTION_POTENTIAL.
2016-03-10 20:51:21.796071neuron_h3neuron_y is kicking neuron_h3.
2016-03-10 20:51:21.818246neuron_h3neuron_h3 fires.
2016-03-10 20:51:21.822734neuron_h3Setting output of neuron_h3 to ACTION_POTENTIAL.
2016-03-10 20:51:21.858226neuron_h2neuron_y is kicking neuron_h2.
2016-03-10 20:51:21.899541neuron_zneuron_h3 is kicking neuron_z.
2016-03-10 20:51:21.922727neuron_zneuron_z fires.
2016-03-10 20:51:21.927111neuron_zSetting output of neuron_z to ACTION_POTENTIAL.
549 |
550 | 551 | 552 | 553 | 554 | ```python 555 | ### 模擬 sensor input,強迫 neuron x 或 y ouput 1 556 | emptyLogs() # 清除 logs 557 | sleep(REFRACTORY_PERIOD) # 等電位歸零 558 | fire.apply_async(routing_key = 'neuron_x') # force neuron x output 1 and fire. 559 | fire.apply_async(routing_key = 'neuron_y') # force neuron y output 1 and fire. 560 | mergeLogs() # 彙整 logs 561 | ``` 562 | 563 | 564 | 565 | 566 |
567 | 568 | 569 | 570 | 571 | 572 | 573 | 574 | 575 | 576 | 577 | 578 | 579 | 580 | 581 | 582 | 583 | 584 | 585 | 586 | 587 | 588 | 589 | 590 | 591 | 592 | 593 | 594 | 595 | 596 | 597 | 598 | 599 | 600 | 601 | 602 | 603 | 604 | 605 | 606 | 607 | 608 | 609 | 610 | 611 | 612 | 613 | 614 | 615 | 616 | 617 | 618 | 619 | 620 | 621 | 622 | 623 | 624 | 625 | 626 | 627 | 628 | 629 | 630 | 631 | 632 | 633 | 634 | 635 | 636 | 637 | 638 | 639 | 640 | 641 | 642 | 643 | 644 | 645 | 646 | 647 | 648 | 649 | 650 | 651 | 652 | 653 | 654 | 655 | 656 | 657 | 658 | 659 | 660 | 661 | 662 | 663 | 664 | 665 | 666 | 667 | 668 | 669 | 670 | 671 | 672 | 673 | 674 | 675 | 676 | 677 | 678 | 679 | 680 | 681 | 682 | 683 | 684 | 685 | 686 | 687 |
neuronmessage
time
2016-03-10 20:51:25.524156neuron_xneuron_x fires.
2016-03-10 20:51:25.543150neuron_xSetting output of neuron_x to ACTION_POTENTIAL.
2016-03-10 20:51:25.557295neuron_yneuron_y fires.
2016-03-10 20:51:25.561307neuron_ySetting output of neuron_y to ACTION_POTENTIAL.
2016-03-10 20:51:25.620065neuron_h3neuron_y is kicking neuron_h3.
2016-03-10 20:51:25.632281neuron_h1neuron_x is kicking neuron_h1.
2016-03-10 20:51:25.649561neuron_h1neuron_h1 fires.
2016-03-10 20:51:25.649841neuron_h3neuron_h3 fires.
2016-03-10 20:51:25.653656neuron_h1Setting output of neuron_h1 to ACTION_POTENTIAL.
2016-03-10 20:51:25.657057neuron_h3Setting output of neuron_h3 to ACTION_POTENTIAL.
2016-03-10 20:51:25.689440neuron_h2neuron_x is kicking neuron_h2.
2016-03-10 20:51:25.759928neuron_zneuron_h1 is kicking neuron_z.
2016-03-10 20:51:25.787273neuron_zneuron_z fires.
2016-03-10 20:51:25.791270neuron_zSetting output of neuron_z to ACTION_POTENTIAL.
2016-03-10 20:51:25.813897neuron_h2neuron_y is kicking neuron_h2.
2016-03-10 20:51:25.835654neuron_h2neuron_h2 fires.
2016-03-10 20:51:25.842069neuron_h2Setting output of neuron_h2 to ACTION_POTENTIAL.
2016-03-10 20:51:25.869606neuron_zneuron_h3 is kicking neuron_z.
2016-03-10 20:51:25.892445neuron_zneuron_z is still in refractory-period.
2016-03-10 20:51:25.895733neuron_zneuron_z is still in refractory_period at action potential, then a neuron neuron_h3 kicks in, now sum_of_weighted_inputs >= threshold.
2016-03-10 20:51:25.943689neuron_zneuron_h2 is kicking neuron_z.
688 |
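A convenient way to digest these merged logs (a small addition on top of the mergeLogs() helper defined earlier, not part of the original notebook) is to look at the output neuron's rows only:

```python
# Filter the merged log down to neuron_z. In the (1,1) run above, z still
# fires once: h1's excitatory kick arrives before h2's inhibitory one, a
# timing issue the postscript below comes back to.
df = mergeLogs()
print(df[df['neuron'] == 'neuron_z'])
```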
689 | 
690 | 
691 | 
692 | ### [Flower](http://192.168.0.114:5555) shows how many messages each worker has processed:
693 | 
694 | ![activity counts of the neurons](https://github.com/Wei1234c/IOTasBrain/raw/master/celery_projects/jpgs/flower2.jpg "activity counts of the neurons")
695 | 
696 | ### The config state of each neuron:
697 | 
698 | 
699 | ```python
700 | for neuron in reversed(neurons): printConfig(neuron)
701 | ```
702 | 
703 |     _______________________________neuron_z config:_______________________________
704 |     {'inputs': {'neuron_h1': {'lasting': datetime.timedelta(0, 0, 500000), 'kick_time': datetime.datetime(2016, 3, 10, 20, 51, 25, 763889), 'value': 1}, 'neuron_h2': {'lasting': datetime.timedelta(0, 0, 500000), 'kick_time': datetime.datetime(2016, 3, 10, 20, 51, 25, 946926), 'value': 1}, 'neuron_h3': {'lasting': datetime.timedelta(0, 0, 500000), 'kick_time': datetime.datetime(2016, 3, 10, 20, 51, 25, 873372), 'value': 1}}, 'output': {'lasting': datetime.timedelta(0, 0, 100000), 'polarized_time': datetime.datetime(2016, 3, 10, 20, 51, 25, 803324), 'value': 1}, 'threshold': 0.9, 'weights': {'neuron_h1': 1, 'neuron_h3': 1, 'neuron_h2': -2}}
705 | 
706 |     ______________________________neuron_h3 config:_______________________________
707 |     {'inputs': {'neuron_y': {'lasting': datetime.timedelta(0, 0, 500000), 'kick_time': datetime.datetime(2016, 3, 10, 20, 51, 25, 628021), 'value': 1}}, 'threshold': 0.9, 'output': {'lasting': datetime.timedelta(0, 0, 100000), 'polarized_time': datetime.datetime(2016, 3, 10, 20, 51, 25, 660253), 'value': 1}, 'connections': {'neuron_z'}, 'weights': {'neuron_y': 1}}
708 | 
709 |     ______________________________neuron_h2 config:_______________________________
710 |     {'inputs': {'neuron_x': {'lasting': datetime.timedelta(0, 0, 500000), 'kick_time': datetime.datetime(2016, 3, 10, 20, 51, 25, 693412), 'value': 1}, 'neuron_y': {'lasting': datetime.timedelta(0, 0, 500000), 'kick_time': datetime.datetime(2016, 3, 10, 20, 51, 25, 817076), 'value': 1}}, 'threshold': 1.9, 'output': {'lasting': datetime.timedelta(0, 0, 100000), 'polarized_time': datetime.datetime(2016, 3, 10, 20, 51, 25, 845780), 'value': 1}, 'connections': {'neuron_z'}, 'weights': {'neuron_x': 1, 'neuron_y': 1}}
711 | 
712 |     ______________________________neuron_h1 config:_______________________________
713 |     {'inputs': {'neuron_x': {'lasting': datetime.timedelta(0, 0, 500000), 'kick_time': datetime.datetime(2016, 3, 10, 20, 51, 25, 635170), 'value': 1}}, 'output': {'lasting': datetime.timedelta(0, 0, 100000), 'polarized_time': datetime.datetime(2016, 3, 10, 20, 51, 25, 657518), 'value': 1}, 'weights': {'neuron_x': 1}, 'threshold': 0.9, 'connections': {'neuron_z'}}
714 | 
715 |     _______________________________neuron_y config:_______________________________
716 |     {'inputs': {}, 'output': {'lasting': datetime.timedelta(0, 0, 100000), 'polarized_time': datetime.datetime(2016, 3, 10, 20, 51, 25, 565365), 'value': 1}, 'connections': {'neuron_h2', 'neuron_h3'}, 'threshold': 0.9}
717 | 
718 |     _______________________________neuron_x config:_______________________________
719 |     {'inputs': {}, 'output': {'lasting': datetime.timedelta(0, 0, 100000), 'polarized_time': datetime.datetime(2016, 3, 10, 20, 51, 25, 547371), 'value': 1}, 'connections': {'neuron_h1', 'neuron_h2'}, 'threshold': 0.9}
720 | 
721 | 
722 | 
723 | ## Summary
724 | 
725 | The main goal of this experiment was to verify that Celery + Docker Swarm can be used to quickly build a private, Bluemix-like IoT platform, on which the devices jointly form a distributed collaborative computing system, treating the whole IoT (Internet of Things) as a single entity.
726 | 
727 | In the platform built here, 2 Raspberry Pis form a Docker Swarm that runs 6 containers; each container plays the role of one device and can also be regarded as a neuron. Configuring the connections between neurons is like configuring publisher / subscriber mappings, and those mappings can be many-to-many.
728 | 
729 | Once the connections / weights / thresholds are configured, these 6 neurons (devices / containers) form an XOR network: based on the environmental state sensed by the attached sensors, the network's pattern determines the final output.
730 | 
731 | ## Postscript (2016/01/31)
732 | 
733 | I later realized that an XOR network probably cannot exist in a real-world brain: to realize an XOR network, all signals are required to arrive at exactly the same time, which is unlikely to happen in the real world. And even if it did, such a mechanism demands "waiting", which costs the system a great deal of overall computing performance. I don't think it is a good computing mechanism, and nature presumably would not design a brain this way.
734 | 
735 | ### References
736 | [Action potential](https://en.wikipedia.org/wiki/Action_potential)
737 | [Neural coding](https://en.wikipedia.org/wiki/Neural_coding)
738 | [Artificial neuron](https://en.wikipedia.org/wiki/Artificial_neuron)
739 | ["All-or-none" principle](https://en.wikipedia.org/wiki/Action_potential#.22All-or-none.22_principle)
740 | [Refractory period](https://en.wikipedia.org/wiki/Action_potential#Refractory_period)
741 | - The absolute refractory period is largely responsible for the unidirectional propagation of action potentials along axons.[34] At any given moment, the patch of axon behind the actively spiking part is refractory, but the patch in front, not having been activated recently, is capable of being stimulated by the depolarization from the action potential.
742 | 
743 | 
744 | 
745 | 
--------------------------------------------------------------------------------
/celery_projects/IoT as Brain - en.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "---\n",
8 | "\n",
9 | "### config the network"
10 | ]
11 | },
12 | {
13 | "cell_type": "code",
14 | "execution_count": 1,
15 | "metadata": {
16 | "collapsed": false
17 | },
18 | "outputs": [],
19 | "source": [
20 | "from IoT.neuron import * \n",
21 | "from time import sleep\n",
22 | "import pandas as pd\n",
23 | "from pandas import DataFrame\n",
24 | "\n",
25 | "pd.options.display.max_colwidth = 400\n",
26 | "REFRACTORY_PERIOD = 0.1 # 0.1 seconds"
27 | ]
28 | },
29 | {
30 | "cell_type": "code",
31 | "execution_count": 2,
32 | "metadata": {
33 | "collapsed": false
34 | },
35 | "outputs": [],
36 | "source": [
37 | "# there are 6 neurons; each is a Docker container, deployed on a machine in the Docker Swarm.\n",
38 | "neurons = ['neuron_x', 'neuron_y', 'neuron_h1', 'neuron_h2', 'neuron_h3', 'neuron_z'] \n",
39 | "\n",
40 | "# print the config of a neuron\n",
41 | "def printConfig(neuron):\n",
42 | "    print('{0:_^78}\\n {1}\\n'.format(neuron + \" config:\", getConfig.apply_async(routing_key = neuron).get()))\n",
43 | "\n",
44 | "# clear the logs of all neurons\n",
45 | "def emptyLogs():\n",
46 | "    for neuron in neurons:\n",
47 | "        emptyLog.apply_async(routing_key = neuron)\n",
48 | "\n",
49 | "# collect logs: concatenate the logs from all neurons into a pandas DataFrame\n",
50 | "def mergeLogs():\n",
51 | "    logs = []\n",
52 | "    \n",
53 | "    for neuron in neurons:\n",
54 | "        currentLog = getLog.apply_async(routing_key = neuron).get()\n",
55 | "        logs += currentLog \n",
56 | "    \n",
57 | "    df = DataFrame(list(logs), columns = ['time', 'neuron', 'message']) \n",
58 | "    df.set_index('time', inplace = True)\n",
59 | "    df.sort_index(inplace = True)\n",
60 | "    \n",
61 | "    return df"
62 | ]
63 | },
64 | {
65 | "cell_type": "markdown",
66 | "metadata": {},
67 | "source": [
68 | "### clear log files"
69 | ]
70 | },
71 | {
72 | "cell_type": "code",
73 | "execution_count": 3,
74 | "metadata": {
75 | "collapsed": false
76 | },
77 | "outputs": [],
78 | "source": [
79 | "# clear logs from all neurons.\n",
80 | "emptyLogs()"
81 | ]
82 | 
}, 83 | { 84 | "cell_type": "markdown", 85 | "metadata": { 86 | "collapsed": true 87 | }, 88 | "source": [ 89 | "### setup connections" 90 | ] 91 | }, 92 | { 93 | "cell_type": "code", 94 | "execution_count": 4, 95 | "metadata": { 96 | "collapsed": false 97 | }, 98 | "outputs": [ 99 | { 100 | "data": { 101 | "text/plain": [ 102 | "" 103 | ] 104 | }, 105 | "execution_count": 4, 106 | "metadata": {}, 107 | "output_type": "execute_result" 108 | } 109 | ], 110 | "source": [ 111 | "# input layer fan out\n", 112 | "# neuron x\n", 113 | "addConnection.apply_async(['neuron_h1'], routing_key = 'neuron_x') # 增設 neuron_x -> neuron_h1 的 connection\n", 114 | "addConnection.apply_async(['neuron_h2'], routing_key = 'neuron_x')\n", 115 | "# neuron y\n", 116 | "addConnection.apply_async(['neuron_h2'], routing_key = 'neuron_y') # 增設 neuron_y -> neuron_h2 的 connection\n", 117 | "addConnection.apply_async(['neuron_h3'], routing_key = 'neuron_y')\n", 118 | "\n", 119 | "# hidden layer fan out\n", 120 | "addConnection.apply_async(['neuron_z'], routing_key = 'neuron_h1') # 增設 neuron_h1 -> neuron_z 的 connection\n", 121 | "addConnection.apply_async(['neuron_z'], routing_key = 'neuron_h2')\n", 122 | "addConnection.apply_async(['neuron_z'], routing_key = 'neuron_h3')" 123 | ] 124 | }, 125 | { 126 | "cell_type": "markdown", 127 | "metadata": {}, 128 | "source": [ 129 | "### setup weights" 130 | ] 131 | }, 132 | { 133 | "cell_type": "code", 134 | "execution_count": 5, 135 | "metadata": { 136 | "collapsed": false 137 | }, 138 | "outputs": [ 139 | { 140 | "data": { 141 | "text/plain": [ 142 | "" 143 | ] 144 | }, 145 | "execution_count": 5, 146 | "metadata": {}, 147 | "output_type": "execute_result" 148 | } 149 | ], 150 | "source": [ 151 | "# hidden layer\n", 152 | "setWeight.apply_async(['neuron_x', 1], routing_key = 'neuron_h1') # 設定 neuron_x -> neuron_h1 的 weight = 1\n", 153 | "setWeight.apply_async(['neuron_x', 1], routing_key = 'neuron_h2')\n", 154 | "setWeight.apply_async(['neuron_y', 1], routing_key = 'neuron_h2') # 設定 neuron_y -> neuron_h2 的 weight = 1\n", 155 | "setWeight.apply_async(['neuron_y', 1], routing_key = 'neuron_h3')\n", 156 | "\n", 157 | "# output layer\n", 158 | "setWeight.apply_async(['neuron_h1', 1], routing_key = 'neuron_z')\n", 159 | "setWeight.apply_async(['neuron_h2', -2], routing_key = 'neuron_z') # 設定 neuron_h2 -> neuron_z 的 weight = -2 (inhibitory)\n", 160 | "setWeight.apply_async(['neuron_h3', 1], routing_key = 'neuron_z') " 161 | ] 162 | }, 163 | { 164 | "cell_type": "markdown", 165 | "metadata": {}, 166 | "source": [ 167 | "### setup thresholds" 168 | ] 169 | }, 170 | { 171 | "cell_type": "code", 172 | "execution_count": 6, 173 | "metadata": { 174 | "collapsed": false 175 | }, 176 | "outputs": [ 177 | { 178 | "data": { 179 | "text/plain": [ 180 | "" 181 | ] 182 | }, 183 | "execution_count": 6, 184 | "metadata": {}, 185 | "output_type": "execute_result" 186 | } 187 | ], 188 | "source": [ 189 | "# input layer \n", 190 | "setThreshold.apply_async([0.9], routing_key = 'neuron_x') # 設定 neuron_x 的 threshold = 0.9\n", 191 | "setThreshold.apply_async([0.9], routing_key = 'neuron_y') \n", 192 | "\n", 193 | "# hidden layer\n", 194 | "setThreshold.apply_async([0.9], routing_key = 'neuron_h1') \n", 195 | "setThreshold.apply_async([1.9], routing_key = 'neuron_h2') # 設定 neuron_h2 的 threshold = 1.9\n", 196 | "setThreshold.apply_async([0.9], routing_key = 'neuron_h3')\n", 197 | "\n", 198 | "# output layer\n", 199 | "setThreshold.apply_async([0.9], routing_key = 'neuron_z') # 設定 neuron_z 的 threshold = 0.9" 
200 | ] 201 | }, 202 | { 203 | "cell_type": "markdown", 204 | "metadata": {}, 205 | "source": [ 206 | "### simulate sensor inputs,and check status of neurons." 207 | ] 208 | }, 209 | { 210 | "cell_type": "code", 211 | "execution_count": 7, 212 | "metadata": { 213 | "collapsed": false 214 | }, 215 | "outputs": [ 216 | { 217 | "data": { 218 | "text/html": [ 219 | "
\n", 220 | "\n", 221 | " \n", 222 | " \n", 223 | " \n", 224 | " \n", 225 | " \n", 226 | " \n", 227 | " \n", 228 | " \n", 229 | " \n", 230 | " \n", 231 | " \n", 232 | " \n", 233 | " \n", 234 | " \n", 235 | "
neuronmessage
time
\n", 236 | "
" 237 | ], 238 | "text/plain": [ 239 | "Empty DataFrame\n", 240 | "Columns: [neuron, message]\n", 241 | "Index: []" 242 | ] 243 | }, 244 | "execution_count": 7, 245 | "metadata": {}, 246 | "output_type": "execute_result" 247 | } 248 | ], 249 | "source": [ 250 | "emptyLogs() # clear logs\n", 251 | "sleep(REFRACTORY_PERIOD) # wait for stead state\n", 252 | "mergeLogs() # summarize logs" 253 | ] 254 | }, 255 | { 256 | "cell_type": "code", 257 | "execution_count": 8, 258 | "metadata": { 259 | "collapsed": false 260 | }, 261 | "outputs": [ 262 | { 263 | "data": { 264 | "text/html": [ 265 | "
\n", 266 | "\n", 267 | " \n", 268 | " \n", 269 | " \n", 270 | " \n", 271 | " \n", 272 | " \n", 273 | " \n", 274 | " \n", 275 | " \n", 276 | " \n", 277 | " \n", 278 | " \n", 279 | " \n", 280 | " \n", 281 | " \n", 282 | " \n", 283 | " \n", 284 | " \n", 285 | " \n", 286 | " \n", 287 | " \n", 288 | " \n", 289 | " \n", 290 | " \n", 291 | " \n", 292 | " \n", 293 | " \n", 294 | " \n", 295 | " \n", 296 | " \n", 297 | " \n", 298 | " \n", 299 | " \n", 300 | " \n", 301 | " \n", 302 | " \n", 303 | " \n", 304 | " \n", 305 | " \n", 306 | " \n", 307 | " \n", 308 | " \n", 309 | " \n", 310 | " \n", 311 | " \n", 312 | " \n", 313 | " \n", 314 | " \n", 315 | " \n", 316 | " \n", 317 | " \n", 318 | " \n", 319 | " \n", 320 | " \n", 321 | " \n", 322 | " \n", 323 | " \n", 324 | " \n", 325 | " \n", 326 | "
neuronmessage
time
2016-12-22 19:20:43.784441neuron_xneuron_x fires.
2016-12-22 19:20:43.788134neuron_xSetting output of neuron_x to ACTION_POTENTIAL.
2016-12-22 19:20:43.905049neuron_h2neuron_x is kicking neuron_h2.
2016-12-22 19:20:43.948279neuron_h1neuron_x is kicking neuron_h1.
2016-12-22 19:20:43.962901neuron_h1neuron_h1 fires.
2016-12-22 19:20:43.966272neuron_h1Setting output of neuron_h1 to ACTION_POTENTIAL.
2016-12-22 19:20:44.027375neuron_zneuron_h1 is kicking neuron_z.
2016-12-22 19:20:44.048686neuron_zneuron_z fires.
2016-12-22 19:20:44.052765neuron_zSetting output of neuron_z to ACTION_POTENTIAL.
\n", 327 | "
" 328 | ], 329 | "text/plain": [ 330 | " neuron \\\n", 331 | "time \n", 332 | "2016-12-22 19:20:43.784441 neuron_x \n", 333 | "2016-12-22 19:20:43.788134 neuron_x \n", 334 | "2016-12-22 19:20:43.905049 neuron_h2 \n", 335 | "2016-12-22 19:20:43.948279 neuron_h1 \n", 336 | "2016-12-22 19:20:43.962901 neuron_h1 \n", 337 | "2016-12-22 19:20:43.966272 neuron_h1 \n", 338 | "2016-12-22 19:20:44.027375 neuron_z \n", 339 | "2016-12-22 19:20:44.048686 neuron_z \n", 340 | "2016-12-22 19:20:44.052765 neuron_z \n", 341 | "\n", 342 | " message \n", 343 | "time \n", 344 | "2016-12-22 19:20:43.784441 neuron_x fires. \n", 345 | "2016-12-22 19:20:43.788134 Setting output of neuron_x to ACTION_POTENTIAL. \n", 346 | "2016-12-22 19:20:43.905049 neuron_x is kicking neuron_h2. \n", 347 | "2016-12-22 19:20:43.948279 neuron_x is kicking neuron_h1. \n", 348 | "2016-12-22 19:20:43.962901 neuron_h1 fires. \n", 349 | "2016-12-22 19:20:43.966272 Setting output of neuron_h1 to ACTION_POTENTIAL. \n", 350 | "2016-12-22 19:20:44.027375 neuron_h1 is kicking neuron_z. \n", 351 | "2016-12-22 19:20:44.048686 neuron_z fires. \n", 352 | "2016-12-22 19:20:44.052765 Setting output of neuron_z to ACTION_POTENTIAL. " 353 | ] 354 | }, 355 | "execution_count": 8, 356 | "metadata": {}, 357 | "output_type": "execute_result" 358 | } 359 | ], 360 | "source": [ 361 | "### force neuron x to ouput 1\n", 362 | "emptyLogs() # clear logs\n", 363 | "sleep(REFRACTORY_PERIOD) # wait for stead state\n", 364 | "fire.apply_async(routing_key = 'neuron_x') # force neuron x output 1 and fire.\n", 365 | "mergeLogs() # summarize logs" 366 | ] 367 | }, 368 | { 369 | "cell_type": "code", 370 | "execution_count": 9, 371 | "metadata": { 372 | "collapsed": false 373 | }, 374 | "outputs": [ 375 | { 376 | "data": { 377 | "text/html": [ 378 | "
\n", 379 | "\n", 380 | " \n", 381 | " \n", 382 | " \n", 383 | " \n", 384 | " \n", 385 | " \n", 386 | " \n", 387 | " \n", 388 | " \n", 389 | " \n", 390 | " \n", 391 | " \n", 392 | " \n", 393 | " \n", 394 | " \n", 395 | " \n", 396 | " \n", 397 | " \n", 398 | " \n", 399 | " \n", 400 | " \n", 401 | " \n", 402 | " \n", 403 | " \n", 404 | " \n", 405 | " \n", 406 | " \n", 407 | " \n", 408 | " \n", 409 | " \n", 410 | " \n", 411 | " \n", 412 | " \n", 413 | " \n", 414 | " \n", 415 | " \n", 416 | " \n", 417 | " \n", 418 | " \n", 419 | " \n", 420 | " \n", 421 | " \n", 422 | " \n", 423 | " \n", 424 | " \n", 425 | " \n", 426 | " \n", 427 | " \n", 428 | " \n", 429 | " \n", 430 | " \n", 431 | " \n", 432 | " \n", 433 | " \n", 434 | " \n", 435 | " \n", 436 | " \n", 437 | " \n", 438 | " \n", 439 | "
neuronmessage
time
2016-12-22 19:20:47.221569neuron_yneuron_y fires.
2016-12-22 19:20:47.225052neuron_ySetting output of neuron_y to ACTION_POTENTIAL.
2016-12-22 19:20:47.333917neuron_h2neuron_y is kicking neuron_h2.
2016-12-22 19:20:47.367095neuron_h3neuron_y is kicking neuron_h3.
2016-12-22 19:20:47.380941neuron_h3neuron_h3 fires.
2016-12-22 19:20:47.383612neuron_h3Setting output of neuron_h3 to ACTION_POTENTIAL.
2016-12-22 19:20:47.444634neuron_zneuron_h3 is kicking neuron_z.
2016-12-22 19:20:47.460059neuron_zneuron_z fires.
2016-12-22 19:20:47.463746neuron_zSetting output of neuron_z to ACTION_POTENTIAL.
\n", 440 | "
" 441 | ], 442 | "text/plain": [ 443 | " neuron \\\n", 444 | "time \n", 445 | "2016-12-22 19:20:47.221569 neuron_y \n", 446 | "2016-12-22 19:20:47.225052 neuron_y \n", 447 | "2016-12-22 19:20:47.333917 neuron_h2 \n", 448 | "2016-12-22 19:20:47.367095 neuron_h3 \n", 449 | "2016-12-22 19:20:47.380941 neuron_h3 \n", 450 | "2016-12-22 19:20:47.383612 neuron_h3 \n", 451 | "2016-12-22 19:20:47.444634 neuron_z \n", 452 | "2016-12-22 19:20:47.460059 neuron_z \n", 453 | "2016-12-22 19:20:47.463746 neuron_z \n", 454 | "\n", 455 | " message \n", 456 | "time \n", 457 | "2016-12-22 19:20:47.221569 neuron_y fires. \n", 458 | "2016-12-22 19:20:47.225052 Setting output of neuron_y to ACTION_POTENTIAL. \n", 459 | "2016-12-22 19:20:47.333917 neuron_y is kicking neuron_h2. \n", 460 | "2016-12-22 19:20:47.367095 neuron_y is kicking neuron_h3. \n", 461 | "2016-12-22 19:20:47.380941 neuron_h3 fires. \n", 462 | "2016-12-22 19:20:47.383612 Setting output of neuron_h3 to ACTION_POTENTIAL. \n", 463 | "2016-12-22 19:20:47.444634 neuron_h3 is kicking neuron_z. \n", 464 | "2016-12-22 19:20:47.460059 neuron_z fires. \n", 465 | "2016-12-22 19:20:47.463746 Setting output of neuron_z to ACTION_POTENTIAL. " 466 | ] 467 | }, 468 | "execution_count": 9, 469 | "metadata": {}, 470 | "output_type": "execute_result" 471 | } 472 | ], 473 | "source": [ 474 | "### force neuron y to ouput 1\n", 475 | "emptyLogs() # clear logs\n", 476 | "sleep(REFRACTORY_PERIOD) # wait for stead state.\n", 477 | "fire.apply_async(routing_key = 'neuron_y') # force neuron y output 1 and fire.\n", 478 | "mergeLogs() # summarize logs" 479 | ] 480 | }, 481 | { 482 | "cell_type": "code", 483 | "execution_count": 10, 484 | "metadata": { 485 | "collapsed": false 486 | }, 487 | "outputs": [ 488 | { 489 | "data": { 490 | "text/html": [ 491 | "
\n", 492 | "\n", 493 | " \n", 494 | " \n", 495 | " \n", 496 | " \n", 497 | " \n", 498 | " \n", 499 | " \n", 500 | " \n", 501 | " \n", 502 | " \n", 503 | " \n", 504 | " \n", 505 | " \n", 506 | " \n", 507 | " \n", 508 | " \n", 509 | " \n", 510 | " \n", 511 | " \n", 512 | " \n", 513 | " \n", 514 | " \n", 515 | " \n", 516 | " \n", 517 | " \n", 518 | " \n", 519 | " \n", 520 | " \n", 521 | " \n", 522 | " \n", 523 | " \n", 524 | " \n", 525 | " \n", 526 | " \n", 527 | " \n", 528 | " \n", 529 | " \n", 530 | " \n", 531 | " \n", 532 | " \n", 533 | " \n", 534 | " \n", 535 | " \n", 536 | " \n", 537 | " \n", 538 | " \n", 539 | " \n", 540 | " \n", 541 | " \n", 542 | " \n", 543 | " \n", 544 | " \n", 545 | " \n", 546 | " \n", 547 | " \n", 548 | " \n", 549 | " \n", 550 | " \n", 551 | " \n", 552 | " \n", 553 | " \n", 554 | " \n", 555 | " \n", 556 | " \n", 557 | " \n", 558 | " \n", 559 | " \n", 560 | " \n", 561 | " \n", 562 | " \n", 563 | " \n", 564 | " \n", 565 | " \n", 566 | " \n", 567 | " \n", 568 | " \n", 569 | " \n", 570 | " \n", 571 | " \n", 572 | " \n", 573 | " \n", 574 | " \n", 575 | " \n", 576 | " \n", 577 | " \n", 578 | " \n", 579 | " \n", 580 | " \n", 581 | " \n", 582 | " \n", 583 | " \n", 584 | " \n", 585 | " \n", 586 | " \n", 587 | " \n", 588 | " \n", 589 | " \n", 590 | " \n", 591 | " \n", 592 | " \n", 593 | " \n", 594 | " \n", 595 | " \n", 596 | " \n", 597 | " \n", 598 | " \n", 599 | " \n", 600 | " \n", 601 | " \n", 602 | " \n", 603 | " \n", 604 | " \n", 605 | " \n", 606 | " \n", 607 | " \n", 608 | " \n", 609 | " \n", 610 | " \n", 611 | " \n", 612 | "
neuronmessage
time
2016-12-22 19:20:50.764877neuron_xneuron_x fires.
2016-12-22 19:20:50.769051neuron_xSetting output of neuron_x to ACTION_POTENTIAL.
2016-12-22 19:20:50.786583neuron_yneuron_y fires.
2016-12-22 19:20:50.790631neuron_ySetting output of neuron_y to ACTION_POTENTIAL.
2016-12-22 19:20:50.887841neuron_h2neuron_x is kicking neuron_h2.
2016-12-22 19:20:50.907530neuron_h1neuron_x is kicking neuron_h1.
2016-12-22 19:20:50.930401neuron_h1neuron_h1 fires.
2016-12-22 19:20:50.933333neuron_h1Setting output of neuron_h1 to ACTION_POTENTIAL.
2016-12-22 19:20:50.956406neuron_h3neuron_y is kicking neuron_h3.
2016-12-22 19:20:50.984412neuron_h3neuron_h3 fires.
2016-12-22 19:20:50.991315neuron_h3Setting output of neuron_h3 to ACTION_POTENTIAL.
2016-12-22 19:20:50.995927neuron_h2neuron_y is kicking neuron_h2.
2016-12-22 19:20:50.998374neuron_zneuron_h1 is kicking neuron_z.
2016-12-22 19:20:51.019654neuron_zneuron_z fires.
2016-12-22 19:20:51.021244neuron_h2neuron_h2 fires.
2016-12-22 19:20:51.023920neuron_zSetting output of neuron_z to ACTION_POTENTIAL.
2016-12-22 19:20:51.028394neuron_h2Setting output of neuron_h2 to ACTION_POTENTIAL.
2016-12-22 19:20:51.070772neuron_zneuron_h3 is kicking neuron_z.
2016-12-22 19:20:51.090403neuron_zneuron_z is still in refractory-period.
2016-12-22 19:20:51.093631neuron_zneuron_z is still in refractory_period at action potential, then a neuron neuron_h3 kicks in, now sum_of_weighted_inputs >= threshold.
2016-12-22 19:20:51.120976neuron_zneuron_h2 is kicking neuron_z.
\n", 613 | "
" 614 | ], 615 | "text/plain": [ 616 | " neuron \\\n", 617 | "time \n", 618 | "2016-12-22 19:20:50.764877 neuron_x \n", 619 | "2016-12-22 19:20:50.769051 neuron_x \n", 620 | "2016-12-22 19:20:50.786583 neuron_y \n", 621 | "2016-12-22 19:20:50.790631 neuron_y \n", 622 | "2016-12-22 19:20:50.887841 neuron_h2 \n", 623 | "2016-12-22 19:20:50.907530 neuron_h1 \n", 624 | "2016-12-22 19:20:50.930401 neuron_h1 \n", 625 | "2016-12-22 19:20:50.933333 neuron_h1 \n", 626 | "2016-12-22 19:20:50.956406 neuron_h3 \n", 627 | "2016-12-22 19:20:50.984412 neuron_h3 \n", 628 | "2016-12-22 19:20:50.991315 neuron_h3 \n", 629 | "2016-12-22 19:20:50.995927 neuron_h2 \n", 630 | "2016-12-22 19:20:50.998374 neuron_z \n", 631 | "2016-12-22 19:20:51.019654 neuron_z \n", 632 | "2016-12-22 19:20:51.021244 neuron_h2 \n", 633 | "2016-12-22 19:20:51.023920 neuron_z \n", 634 | "2016-12-22 19:20:51.028394 neuron_h2 \n", 635 | "2016-12-22 19:20:51.070772 neuron_z \n", 636 | "2016-12-22 19:20:51.090403 neuron_z \n", 637 | "2016-12-22 19:20:51.093631 neuron_z \n", 638 | "2016-12-22 19:20:51.120976 neuron_z \n", 639 | "\n", 640 | " message \n", 641 | "time \n", 642 | "2016-12-22 19:20:50.764877 neuron_x fires. \n", 643 | "2016-12-22 19:20:50.769051 Setting output of neuron_x to ACTION_POTENTIAL. \n", 644 | "2016-12-22 19:20:50.786583 neuron_y fires. \n", 645 | "2016-12-22 19:20:50.790631 Setting output of neuron_y to ACTION_POTENTIAL. \n", 646 | "2016-12-22 19:20:50.887841 neuron_x is kicking neuron_h2. \n", 647 | "2016-12-22 19:20:50.907530 neuron_x is kicking neuron_h1. \n", 648 | "2016-12-22 19:20:50.930401 neuron_h1 fires. \n", 649 | "2016-12-22 19:20:50.933333 Setting output of neuron_h1 to ACTION_POTENTIAL. \n", 650 | "2016-12-22 19:20:50.956406 neuron_y is kicking neuron_h3. \n", 651 | "2016-12-22 19:20:50.984412 neuron_h3 fires. \n", 652 | "2016-12-22 19:20:50.991315 Setting output of neuron_h3 to ACTION_POTENTIAL. \n", 653 | "2016-12-22 19:20:50.995927 neuron_y is kicking neuron_h2. \n", 654 | "2016-12-22 19:20:50.998374 neuron_h1 is kicking neuron_z. \n", 655 | "2016-12-22 19:20:51.019654 neuron_z fires. \n", 656 | "2016-12-22 19:20:51.021244 neuron_h2 fires. \n", 657 | "2016-12-22 19:20:51.023920 Setting output of neuron_z to ACTION_POTENTIAL. \n", 658 | "2016-12-22 19:20:51.028394 Setting output of neuron_h2 to ACTION_POTENTIAL. \n", 659 | "2016-12-22 19:20:51.070772 neuron_h3 is kicking neuron_z. \n", 660 | "2016-12-22 19:20:51.090403 neuron_z is still in refractory-period. \n", 661 | "2016-12-22 19:20:51.093631 neuron_z is still in refractory_period at action potential, then a neuron neuron_h3 kicks in, now sum_of_weighted_inputs >= threshold. \n", 662 | "2016-12-22 19:20:51.120976 neuron_h2 is kicking neuron_z. 
" 663 | ] 664 | }, 665 | "execution_count": 10, 666 | "metadata": {}, 667 | "output_type": "execute_result" 668 | } 669 | ], 670 | "source": [ 671 | "### force neuron x and y to ouput 1\n", 672 | "emptyLogs() # clear logs\n", 673 | "sleep(REFRACTORY_PERIOD) # wait for stead state.\n", 674 | "fire.apply_async(routing_key = 'neuron_x') # force neuron x output 1 and fire.\n", 675 | "fire.apply_async(routing_key = 'neuron_y') # force neuron y output 1 and fire.\n", 676 | "mergeLogs() # summarize logs" 677 | ] 678 | }, 679 | { 680 | "cell_type": "markdown", 681 | "metadata": {}, 682 | "source": [ 683 | "### [Flower](http://192.168.0.114:5555) shows the status of each worker :\n", 684 | "\n", 685 | "![各 neuron 的活動次數](https://github.com/Wei1234c/IOTasBrain/raw/master/celery_projects/jpgs/flower2.jpg \"各 neuron 的活動次數\")" 686 | ] 687 | }, 688 | { 689 | "cell_type": "markdown", 690 | "metadata": {}, 691 | "source": [ 692 | "### content of config files in each neurons:" 693 | ] 694 | }, 695 | { 696 | "cell_type": "code", 697 | "execution_count": 11, 698 | "metadata": { 699 | "collapsed": false 700 | }, 701 | "outputs": [ 702 | { 703 | "name": "stdout", 704 | "output_type": "stream", 705 | "text": [ 706 | "_______________________________neuron_z config:_______________________________\n", 707 | " {'inputs': {'neuron_h2': {'kick_time': datetime.datetime(2016, 12, 22, 19, 20, 51, 125376), 'lasting': datetime.timedelta(0, 0, 500000), 'value': 1}, 'neuron_h3': {'kick_time': datetime.datetime(2016, 12, 22, 19, 20, 51, 74644), 'lasting': datetime.timedelta(0, 0, 500000), 'value': 1}, 'neuron_h1': {'kick_time': datetime.datetime(2016, 12, 22, 19, 20, 51, 2932), 'lasting': datetime.timedelta(0, 0, 500000), 'value': 1}}, 'weights': {'neuron_h2': -2, 'neuron_h3': 1, 'neuron_h1': 1}, 'output': {'value': 1, 'lasting': datetime.timedelta(0, 0, 100000), 'polarized_time': datetime.datetime(2016, 12, 22, 19, 20, 51, 27395)}, 'threshold': 0.9}\n", 708 | "\n", 709 | "______________________________neuron_h3 config:_______________________________\n", 710 | " {'inputs': {'neuron_y': {'kick_time': datetime.datetime(2016, 12, 22, 19, 20, 50, 966575), 'lasting': datetime.timedelta(0, 0, 500000), 'value': 1}}, 'weights': {'neuron_y': 1}, 'connections': {'neuron_z'}, 'threshold': 0.9, 'output': {'polarized_time': datetime.datetime(2016, 12, 22, 19, 20, 50, 995422), 'lasting': datetime.timedelta(0, 0, 100000), 'value': 1}}\n", 711 | "\n", 712 | "______________________________neuron_h2 config:_______________________________\n", 713 | " {'inputs': {'neuron_y': {'kick_time': datetime.datetime(2016, 12, 22, 19, 20, 50, 999478), 'lasting': datetime.timedelta(0, 0, 500000), 'value': 1}, 'neuron_x': {'kick_time': datetime.datetime(2016, 12, 22, 19, 20, 50, 891263), 'lasting': datetime.timedelta(0, 0, 500000), 'value': 1}}, 'connections': {'neuron_z'}, 'weights': {'neuron_y': 1, 'neuron_x': 1}, 'output': {'polarized_time': datetime.datetime(2016, 12, 22, 19, 20, 51, 31717), 'lasting': datetime.timedelta(0, 0, 100000), 'value': 1}, 'threshold': 1.9}\n", 714 | "\n", 715 | "______________________________neuron_h1 config:_______________________________\n", 716 | " {'inputs': {'neuron_x': {'kick_time': datetime.datetime(2016, 12, 22, 19, 20, 50, 911286), 'value': 1, 'lasting': datetime.timedelta(0, 0, 500000)}}, 'connections': {'neuron_z'}, 'weights': {'neuron_x': 1}, 'output': {'value': 1, 'lasting': datetime.timedelta(0, 0, 100000), 'polarized_time': datetime.datetime(2016, 12, 22, 19, 20, 50, 940549)}, 'threshold': 0.9}\n", 717 | "\n", 
718 | "_______________________________neuron_y config:_______________________________\n", 719 | " {'inputs': {}, 'connections': {'neuron_h2', 'neuron_h3'}, 'output': {'polarized_time': datetime.datetime(2016, 12, 22, 19, 20, 50, 800631), 'lasting': datetime.timedelta(0, 0, 100000), 'value': 1}, 'threshold': 0.9}\n", 720 | "\n", 721 | "_______________________________neuron_x config:_______________________________\n", 722 | " {'inputs': {}, 'connections': {'neuron_h2', 'neuron_h1'}, 'output': {'value': 1, 'lasting': datetime.timedelta(0, 0, 100000), 'polarized_time': datetime.datetime(2016, 12, 22, 19, 20, 50, 775531)}, 'threshold': 0.9}\n", 723 | "\n" 724 | ] 725 | } 726 | ], 727 | "source": [ 728 | "for neuron in reversed(neurons): printConfig(neuron)" 729 | ] 730 | } 731 | ], 732 | "metadata": { 733 | "anaconda-cloud": {}, 734 | "kernelspec": { 735 | "display_name": "Python [default]", 736 | "language": "python", 737 | "name": "python3" 738 | }, 739 | "language_info": { 740 | "codemirror_mode": { 741 | "name": "ipython", 742 | "version": 3 743 | }, 744 | "file_extension": ".py", 745 | "mimetype": "text/x-python", 746 | "name": "python", 747 | "nbconvert_exporter": "python", 748 | "pygments_lexer": "ipython3", 749 | "version": "3.5.1" 750 | } 751 | }, 752 | "nbformat": 4, 753 | "nbformat_minor": 0 754 | } 755 | -------------------------------------------------------------------------------- /celery_projects/IoT as Brain.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# 使用 Celery 於 Docker Swarm 之上 建構類似 Bluemix 的 IoT 平台\n", 8 | "## Part II: IoT as Brain\n", 9 | "\n", 10 | "Wei Lin \n", 11 | "20160128 " 12 | ] 13 | }, 14 | { 15 | "cell_type": "markdown", 16 | "metadata": {}, 17 | "source": [ 18 | "## 緣起 \n", 19 | " [上次](https://github.com/Wei1234c/CeleryOnDockerSwarm/blob/master/celery_projects/CeleryOnDockerSwarm.md) 在 2 台 Raspberry Pi 上面架了一個 Docker Swarm,然後在 Docker Swarm 裡面使用 Celery (distributed task queue) 的機制,利用 8 個 containers 跑 40 個 processes 執行 \"Word Count\" 程式,驗證 Celery + Docker Swarm 是可行的。 \n", 20 | "\n", 21 | " 做上述實驗的原因,是為了想利用 Celery + Docker Swarm 快速的建構私有的 類似 [Bluemix](https://console.ng.bluemix.net/) 的 IoT 平台,讓其上的 devices 共同組成一個分散式的協同運算系統,視整個 IoT(Internet of Things) 為一體。 \n", 22 | "\n", 23 | " Celery 所採用的機制,簡單來說就是: producer 發出要求運算的訊息到 queue 中排隊,眾多的 workers 紛紛到 queue 去撿出訊息來進行處理。類比於 Bluemix 和 [MQTT](http://cheng-min-i-taiwan.blogspot.tw/2015/03/raspberry-pimqtt-android.html): producer 就如同 publisher,queue 如同 topic 或 channel,consumer 如同 subscriber,我覺得兩者是十分類似的。 \n", 24 | "\n", 25 | " 在 Bluemix 的架構中,IBM 把 clients 區分為 device 和 application 兩種角色,devices 扮演 publisher,負責發送資料到平台上,然後扮演 subscriber 的 applications 會收到資料進行運算處理,資料的產生與運算分離,這樣的設計顯得理所當然,Bluemix 所提供的功能也很完整且強大。\n", 26 | " \n", 27 | " 然而還有另外一種可能,或許 **資料的儲存 與 運算,其實可以是同一件事情**,就如同我們的大腦,資料的儲存與運算都是由 神經**網路** 來完成的。\n", 28 | " \n", 29 | " 我們可以把一個 device 視為一個 neuron,讓 IoT 中眾多的 devices (neurons) 互相連結,經過訓練的 **IoT網路** 就可以自行對環境做出反應,並不需要 集中式的 \"邏輯 applications\"。但是,這樣的 IoT 平台要如何設計呢? 
\n", 30 | " \n", 31 | " 在**類神經網路**的發展歷史中,**[XOR網路](https://en.wikipedia.org/wiki/Feedforward_neural_network#Multi-layer_perceptron)** 是一個著名的案例,本次的實驗就來試試看用 IoT 做一個 XOR網路,如果這個可以做,應該也可以組成更複雜的東西。\n", 32 | " ![XOR 網路](https://upload.wikimedia.org/wikipedia/commons/thumb/7/7b/XOR_perceptron_net.png/250px-XOR_perceptron_net.png \"XOR 網路 (來源: WiKi)\")" 33 | ] 34 | }, 35 | { 36 | "cell_type": "markdown", 37 | "metadata": {}, 38 | "source": [ 39 | "## 實驗設計與原理:\n", 40 | " - 上圖中各 neuron 的代號:\n", 41 | " - input layer: x, y\n", 42 | " - hidden layer: h1, h2, h3\n", 43 | " - output layer: z\n", 44 | " - 使用 2 台 Raspberry Pi 組成一個 Docker Swarm。\n", 45 | " - Docker Swarm 中 run 6 個 containers,**每個 container 扮演一個 device (neuron)**。\n", 46 | " - 這 6 個 devices (neurons),可以分佈於不同的實體 host 上面,代表其可佈署在 Internet 上任何位置。\n", 47 | " - Neurons 之間的連結與權重如上圖所示。**設定 neurons 之間的連結,其實就是在設定 publisher / subscriber 的對應關係**。\n", 48 | " - **使用 message queue 來代表 MQTT 中的 \"topic\",每個 neuron 都有自己專屬的 message queue**,例如:\n", 49 | " - neruon h2 有自己專屬的 message queue \"neuron_h2\",input layer 的 **neuron x 如果想送訊息給 neuron h2,就必須發送到 message queue \"neuron_h2\"**,neruon h2 就會收到訊息。\n", 50 | " - 上例中,**neuron x 扮演 publisher,neuron h2 扮演 subscriber。neuron h1 也是 neuron x 的 subscriber**。\n", 51 | " - publisher / subscriber 的對應關係 可以是多對多。\n", 52 | " - 假設上圖的 x、y neurons 分別接到各自的 sensor,接收 0/1 的資料,每個 device (neuron) 都可以各自外接多個 sensors 感測外部的環境。\n", 53 | " - Neuron z 的 output 必須隨時等於 XOR(x, y)" 54 | ] 55 | }, 56 | { 57 | "cell_type": "markdown", 58 | "metadata": {}, 59 | "source": [ 60 | "## 實作步驟:" 61 | ] 62 | }, 63 | { 64 | "cell_type": "markdown", 65 | "metadata": {}, 66 | "source": [ 67 | "### 建立 Docker Swarm\n", 68 | "之前已經參考了這篇 [文章](https://www.facebook.com/groups/docker.taipei/permalink/1704032656498757) 使用兩台 RPi2 建立了一個 Docker Swarm。" 69 | ] 70 | }, 71 | { 72 | "cell_type": "markdown", 73 | "metadata": {}, 74 | "source": [ 75 | "#### Swarm 中有兩台 Docker machines:\n", 76 | "- host rpi202(192.168.0.114) 擔任 Swarm Manager,其 Docker machine name 為 master01\n", 77 | "- host rpi201(192.168.0.109) 擔任 Swarm Node,其 Docker machine name 為 node01" 78 | ] 79 | }, 80 | { 81 | "cell_type": "code", 82 | "execution_count": null, 83 | "metadata": { 84 | "collapsed": true 85 | }, 86 | "outputs": [], 87 | "source": [ 88 | "HypriotOS: pi@rpi202 in ~\n", 89 | "$ docker-machine ls\n", 90 | "NAME ACTIVE DRIVER STATE URL SWARM\n", 91 | "master01 hypriot Running tcp://192.168.0.114:2376 master01 (master)\n", 92 | "node01 hypriot Running tcp://192.168.0.109:2376 master01\n", 93 | "HypriotOS: pi@rpi202 in ~\n", 94 | "$\n", 95 | "\n", 96 | "\n", 97 | "# Swarm 中的 nodes:\n", 98 | "\n", 99 | "HypriotOS: pi@rpi202 in /data/celery_projects\n", 100 | "$ docker $(docker-machine config --swarm master01) info\n", 101 | "Containers: 4\n", 102 | "Images: 51\n", 103 | "Role: primary\n", 104 | "Strategy: spread\n", 105 | "Filters: health, port, dependency, affinity, constraint\n", 106 | "Nodes: 2\n", 107 | " master01: 192.168.0.114:2376\n", 108 | " └ Status: Healthy\n", 109 | " └ Containers: 3\n", 110 | " └ Reserved CPUs: 0 / 4\n", 111 | " └ Reserved Memory: 0 B / 972 MiB\n", 112 | " └ Labels: executiondriver=native-0.2, kernelversion=4.1.8-hypriotos-v7+, operatingsystem=Raspbian GNU/Linux 8 (jessie), provider=hypriot, storagedriver=overlay\n", 113 | " node01: 192.168.0.109:2376\n", 114 | " └ Status: Healthy\n", 115 | " └ Containers: 1\n", 116 | " └ Reserved CPUs: 0 / 4\n", 117 | " └ Reserved Memory: 0 B / 972 MiB\n", 118 | " └ Labels: executiondriver=native-0.2, kernelversion=4.1.8-hypriotos-v7+, 
operatingsystem=Raspbian GNU/Linux 8 (jessie), provider=hypriot, storagedriver=overlay\n", 119 | "CPUs: 8\n", 120 | "Total Memory: 1.899 GiB\n", 121 | "Name: fe30da0875d6\n", 122 | "HypriotOS: pi@rpi202 in /data/celery_projects\n", 123 | "$" 124 | ] 125 | }, 126 | { 127 | "cell_type": "markdown", 128 | "metadata": {}, 129 | "source": [ 130 | "### 將檔案 celeryconfig.py、start_workers.sh、資料夾 IoT 複製到 兩台 hosts 的 /data/celery_projects 資料夾之下\n", 131 | "\n", 132 | "可以使用 SCP 來達成,參考: http://www.hypexr.org/linux_scp_help.php \n", 133 | "\n", 134 | "例如: \n", 135 | "$ scp -r /data/celery_projects root@rpi201:/data/ " 136 | ] 137 | }, 138 | { 139 | "cell_type": "code", 140 | "execution_count": null, 141 | "metadata": { 142 | "collapsed": true 143 | }, 144 | "outputs": [], 145 | "source": [ 146 | "# Swarm manager\n", 147 | "\n", 148 | "HypriotOS: pi@rpi202 in /data/celery_projects\n", 149 | "$ ll\n", 150 | "total 20\n", 151 | "drwxr-xr-x 3 999 root 4096 Jan 28 10:08 ./\n", 152 | "drwxr-xr-x 3 999 root 4096 Jan 28 11:02 ../\n", 153 | "-rw-r--r-- 1 999 root 1469 Jan 28 10:48 celeryconfig.py\n", 154 | "drwxr-xr-x 3 999 root 4096 Jan 28 10:08 IoT/\n", 155 | "-rwxr-xr-x 1 999 root 963 Jan 28 10:28 start_workers.sh* <-- 用來啟動 containers 的 script,只在 Swarm Manager 上有需要\n", 156 | "HypriotOS: pi@rpi202 in /data/celery_projects\n", 157 | "$\n", 158 | "\n", 159 | "\n", 160 | "# Swarm node\n", 161 | "HypriotOS: pi@rpi201 in /data/celery_projects\n", 162 | "$ ll\n", 163 | "total 16\n", 164 | "drwxr-xr-x 3 root root 4096 Jan 28 12:54 ./\n", 165 | "drwxr-xr-x 3 999 root 4096 Jan 25 22:55 ../\n", 166 | "-rw-r--r-- 1 root root 1250 Jan 28 11:27 celeryconfig.py\n", 167 | "drwxr-xr-x 3 root root 4096 Jan 28 12:54 IoT/\n", 168 | "HypriotOS: pi@rpi201 in /data/celery_projects\n", 169 | "$" 170 | ] 171 | }, 172 | { 173 | "cell_type": "markdown", 174 | "metadata": {}, 175 | "source": [ 176 | "#### start_workers.sh 的 內容" 177 | ] 178 | }, 179 | { 180 | "cell_type": "code", 181 | "execution_count": null, 182 | "metadata": { 183 | "collapsed": true 184 | }, 185 | "outputs": [], 186 | "source": [ 187 | "# ./start_workers.sh\n", 188 | "\n", 189 | "PROJECT='IoT' # project 名稱\n", 190 | "CONCURRENCY=1 # 每個 worker 可以有幾個 subprocesses\n", 191 | "\n", 192 | "\n", 193 | "\n", 194 | "echo \"Starting Redis, Flower _________________________________________________\"\n", 195 | "eval $(docker-machine env master01)\n", 196 | "\n", 197 | "docker run -dit -p 6379:6379 --name=redis -v /data:/data hypriot/rpi-redis\n", 198 | "docker run -d -p 5555:5555 --name=flower --volume=/data/celery_projects:/celery_projects wei1234c/celery_armv7 /bin/sh -c \"cd /celery_projects && celery -A ${PROJECT} flower\"\n", 199 | "\n", 200 | "\n", 201 | "\n", 202 | "echo \"Starting Celery cluster containers _________________________________________________\"\n", 203 | "eval $(docker-machine env --swarm master01)\n", 204 | "\n", 205 | "for id in 'x' 'y' 'h1' 'h2' 'h3' 'z'\n", 206 | "do\n", 207 | " docker run -d --name=neuron_${id} --hostname=neuron_${id} --volume=/data/celery_projects:/celery_projects wei1234c/celery_armv7 /bin/sh -c \"cd /celery_projects && celery -A ${PROJECT} worker -n %h -Q neuron_${id} --concurrency=${CONCURRENCY} --loglevel=INFO\"\n", 208 | "done" 209 | ] 210 | }, 211 | { 212 | "cell_type": "markdown", 213 | "metadata": {}, 214 | "source": [ 215 | "### 執行 start_workers.sh,這會做以下幾件事情:\n", 216 | "- 建立 Celery 所需的 Broker,使用 Redis\n", 217 | "- 建立監控用的 Flower container\n", 218 | "- 透過 Swarm Manager 建立並佈署代表 neurons 的 Celery worker containers" 219 | ] 220 | }, 
221 | { 222 | "cell_type": "code", 223 | "execution_count": null, 224 | "metadata": { 225 | "collapsed": true 226 | }, 227 | "outputs": [], 228 | "source": [ 229 | "HypriotOS: pi@rpi202 in /data/celery_projects\n", 230 | "$ ./start_workers.sh\n", 231 | "Starting Redis, Flower _________________________________________________\n", 232 | "cb706da89689211601b931e88921df1564c939a2cdb3de7bda0f4fa878424553\n", 233 | "e136f3f443a46b1a1082b26d367c6c146327d54a3eb9f16aa907dd48bce38a47\n", 234 | "Starting Celery cluster containers _________________________________________________\n", 235 | "684e3d7b84bfa4713a972d434507473d33adcbaad092e32518a291f7e095c86f\n", 236 | "8608740a5a86977f82dc2943feb315575a2ca9e38ebb1a2c73842567c87a865d\n", 237 | "1b5180f0284c8c2aa723c63b4297e57fd35cc5a2c0b1ef5dded3bc3026202f61\n", 238 | "a6679a9bb651dc735725ecd7b4793f27d598efdd4179c008afaae1e4322b0a42\n", 239 | "3a59323ae8f61e76e80595a67e98293c0d42f96b1ac47205aff48d863b321aba\n", 240 | "6c0d6a8bb590961e9947bf1d15587f1c0017b114a466fc7061d2fe888740026e\n", 241 | "HypriotOS: pi@rpi202 in /data/celery_projects\n", 242 | "$" 243 | ] 244 | }, 245 | { 246 | "cell_type": "markdown", 247 | "metadata": {}, 248 | "source": [ 249 | "#### 共有 6 個 neurons\n", 250 | "neuron x, y, z, h1, h2 都被佈署到 Swarm node \"node01' 上面," 251 | ] 252 | }, 253 | { 254 | "cell_type": "code", 255 | "execution_count": null, 256 | "metadata": { 257 | "collapsed": true 258 | }, 259 | "outputs": [], 260 | "source": [ 261 | "HypriotOS: pi@rpi201 in ~\n", 262 | "$ docker ps\n", 263 | "CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n", 264 | "6c0d6a8bb590 wei1234c/celery_armv7 \"/bin/sh -c 'cd /cele\" About a minute ago Up About a minute 5555/tcp neuron_z\n", 265 | "a6679a9bb651 wei1234c/celery_armv7 \"/bin/sh -c 'cd /cele\" About a minute ago Up About a minute 5555/tcp neuron_h2\n", 266 | "1b5180f0284c wei1234c/celery_armv7 \"/bin/sh -c 'cd /cele\" About a minute ago Up About a minute 5555/tcp neuron_h1\n", 267 | "8608740a5a86 wei1234c/celery_armv7 \"/bin/sh -c 'cd /cele\" About a minute ago Up About a minute 5555/tcp neuron_y\n", 268 | "684e3d7b84bf wei1234c/celery_armv7 \"/bin/sh -c 'cd /cele\" 2 minutes ago Up 2 minutes 5555/tcp neuron_x\n", 269 | "ef0c519ae7da hypriot/rpi-swarm \"/swarm join --advert\" 4 minutes ago Up 4 minutes 2375/tcp swarm-agent\n", 270 | "HypriotOS: pi@rpi201 in ~\n", 271 | "$ " 272 | ] 273 | }, 274 | { 275 | "cell_type": "markdown", 276 | "metadata": {}, 277 | "source": [ 278 | "只有 h3 被安排在 Swarm manager 這台 \"master01\" machine 上。" 279 | ] 280 | }, 281 | { 282 | "cell_type": "code", 283 | "execution_count": null, 284 | "metadata": { 285 | "collapsed": true 286 | }, 287 | "outputs": [], 288 | "source": [ 289 | "HypriotOS: pi@rpi202 in /data/celery_projects\n", 290 | "$ docker ps\n", 291 | "CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n", 292 | "3a59323ae8f6 wei1234c/celery_armv7 \"/bin/sh -c 'cd /cele\" 35 seconds ago Up 32 seconds 5555/tcp neuron_h3\n", 293 | "e136f3f443a4 wei1234c/celery_armv7 \"/bin/sh -c 'cd /cele\" About a minute ago Up About a minute 0.0.0.0:5555->5555/tcp flower\n", 294 | "cb706da89689 hypriot/rpi-redis \"/entrypoint.sh redis\" About a minute ago Up About a minute 0.0.0.0:6379->6379/tcp redis\n", 295 | "966928d0a37c hypriot/rpi-swarm \"/swarm join --advert\" 3 minutes ago Up 3 minutes 2375/tcp swarm-agent\n", 296 | "b01b05cbe323 hypriot/rpi-swarm \"/swarm manage --tlsv\" 4 minutes ago Up 4 minutes 2375/tcp, 0.0.0.0:3376->3376/tcp swarm-agent-master\n", 297 | "ab78ab3e5476 nimblestratus/rpi-consul 
\"/bin/start -server -\" 4 minutes ago Up 4 minutes 53/udp, 8300-8302/tcp, 8400/tcp, 0.0.0.0:8500->8500/tcp, 8301-8302/udp consul\n", 298 | "HypriotOS: pi@rpi202 in /data/celery_projects\n", 299 | "$ " 300 | ] 301 | }, 302 | { 303 | "cell_type": "markdown", 304 | "metadata": {}, 305 | "source": [ 306 | "#### 從 Swarm Manager 的視角 綜觀全局" 307 | ] 308 | }, 309 | { 310 | "cell_type": "code", 311 | "execution_count": null, 312 | "metadata": { 313 | "collapsed": true 314 | }, 315 | "outputs": [], 316 | "source": [ 317 | "HypriotOS: pi@rpi202 in /data/celery_projects\n", 318 | "$ docker $(docker-machine config --swarm master01) ps\n", 319 | "CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n", 320 | "6c0d6a8bb590 wei1234c/celery_armv7 \"/bin/sh -c 'cd /cele\" 2 minutes ago Up 2 minutes 5555/tcp node01/neuron_z\n", 321 | "3a59323ae8f6 wei1234c/celery_armv7 \"/bin/sh -c 'cd /cele\" 2 minutes ago Up 2 minutes 5555/tcp master01/neuron_h3\n", 322 | "a6679a9bb651 wei1234c/celery_armv7 \"/bin/sh -c 'cd /cele\" 2 minutes ago Up 2 minutes 5555/tcp node01/neuron_h2\n", 323 | "1b5180f0284c wei1234c/celery_armv7 \"/bin/sh -c 'cd /cele\" 2 minutes ago Up 2 minutes 5555/tcp node01/neuron_h1\n", 324 | "8608740a5a86 wei1234c/celery_armv7 \"/bin/sh -c 'cd /cele\" 3 minutes ago Up 3 minutes 5555/tcp node01/neuron_y\n", 325 | "684e3d7b84bf wei1234c/celery_armv7 \"/bin/sh -c 'cd /cele\" 3 minutes ago Up 3 minutes 5555/tcp node01/neuron_x\n", 326 | "e136f3f443a4 wei1234c/celery_armv7 \"/bin/sh -c 'cd /cele\" 3 minutes ago Up 3 minutes 192.168.0.114:5555->5555/tcp master01/flower\n", 327 | "cb706da89689 hypriot/rpi-redis \"/entrypoint.sh redis\" 3 minutes ago Up 3 minutes 192.168.0.114:6379->6379/tcp master01/redis\n", 328 | "ef0c519ae7da hypriot/rpi-swarm \"/swarm join --advert\" 5 minutes ago Up 5 minutes 2375/tcp node01/swarm-agent\n", 329 | "966928d0a37c hypriot/rpi-swarm \"/swarm join --advert\" 5 minutes ago Up 5 minutes 2375/tcp master01/swarm-agent\n", 330 | "b01b05cbe323 hypriot/rpi-swarm \"/swarm manage --tlsv\" 5 minutes ago Up 5 minutes 2375/tcp, 192.168.0.114:3376->3376/tcp master01/swarm-agent-master\n", 331 | "ab78ab3e5476 nimblestratus/rpi-consul \"/bin/start -server -\" 6 minutes ago Up 6 minutes 53/udp, 8300-8302/tcp, 8301-8302/udp, 8400/tcp, 192.168.0.114:8500->8500/tcp master01/consul\n", 332 | "HypriotOS: pi@rpi202 in /data/celery_projects\n", 333 | "$" 334 | ] 335 | }, 336 | { 337 | "cell_type": "markdown", 338 | "metadata": {}, 339 | "source": [ 340 | "---\n", 341 | "\n", 342 | "### 開始下指令config網路 / config the network" 343 | ] 344 | }, 345 | { 346 | "cell_type": "code", 347 | "execution_count": 1, 348 | "metadata": { 349 | "collapsed": false 350 | }, 351 | "outputs": [], 352 | "source": [ 353 | "from IoT.neuron import * \n", 354 | "from time import sleep\n", 355 | "import pandas as pd\n", 356 | "from pandas import DataFrame\n", 357 | "\n", 358 | "pd.options.display.max_colwidth = 400\n", 359 | "REFRACTORY_PERIOD = 0.1 # 0.1 seconds" 360 | ] 361 | }, 362 | { 363 | "cell_type": "markdown", 364 | "metadata": {}, 365 | "source": [ 366 | "#### Note:\n", 367 | "定義一個 neuron 行為的 Python code 放在 celery_projects/IoT/neuron.py,Celery 的 worker 被啟動時 會將之載入。 \n", 368 | "neuron.py 中以一個 pickle 檔案紀錄一個 neuron 的組成與任一時刻的狀態,包括有 connections、weights、inputs、output..." 
371 | { 372 | "cell_type": "code", 373 | "execution_count": 2, 374 | "metadata": { 375 | "collapsed": false 376 | }, 377 | "outputs": [], 378 | "source": [ 379 | "# There are 6 neurons in total; each represents one Docker container, deployed onto an arbitrary machine in the Docker Swarm.\n", 380 | "neurons = ['neuron_x', 'neuron_y', 'neuron_h1', 'neuron_h2', 'neuron_h3', 'neuron_z'] \n", 381 | "\n", 382 | "# Print the config of one neuron\n", 383 | "def printConfig(neuron):\n", 384 | " print('{0:_^78}\\n {1}\\n'.format(neuron + \" config:\", getConfig.apply_async(routing_key = neuron).get()))\n", 385 | "\n", 386 | "# Clear the logs of all neurons \n", 387 | "def emptyLogs():\n", 388 | " for neuron in neurons:\n", 389 | " emptyLog.apply_async(routing_key = neuron)\n", 390 | "\n", 391 | "# Consolidate the logs: merge the logs of all neurons into one pandas DataFrame\n", 392 | "def mergeLogs():\n", 393 | " logs = []\n", 394 | " \n", 395 | " for neuron in neurons:\n", 396 | " currentLog = getLog.apply_async(routing_key = neuron).get()\n", 397 | " logs += currentLog \n", 398 | " \n", 399 | " df = DataFrame(list(logs), columns = ['time', 'neuron', 'message']) \n", 400 | " df.set_index('time', inplace = True)\n", 401 | " df.sort_index(inplace = True)\n", 402 | " \n", 403 | " return df" 404 | ] 405 | }, 406 | { 407 | "cell_type": "markdown", 408 | "metadata": {}, 409 | "source": [ 410 | "### Clear the log files" 411 | ] 412 | }, 413 | { 414 | "cell_type": "code", 415 | "execution_count": 3, 416 | "metadata": { 417 | "collapsed": false 418 | }, 419 | "outputs": [], 420 | "source": [ 421 | "# Clear the logs of all neurons \n", 422 | "emptyLogs()" 423 | ] 424 | }, 425 | { 426 | "cell_type": "markdown", 427 | "metadata": { 428 | "collapsed": true 429 | }, 430 | "source": [ 431 | "### Set up the connections\n", 432 | "#### Wiring neurons together really just means setting up publisher / subscriber mappings; a sketch of the receiving side follows below" 433 | ] 434 | }, 435 | { 436 | "cell_type": "code", 437 | "execution_count": 4, 438 | "metadata": { 439 | "collapsed": false 440 | }, 441 | "outputs": [ 442 | { 443 | "data": { 444 | "text/plain": [ 445 | "" 446 | ] 447 | }, 448 | "execution_count": 4, 449 | "metadata": {}, 450 | "output_type": "execute_result" 451 | } 452 | ], 453 | "source": [ 454 | "# input layer fan out\n", 455 | "# neuron x\n", 456 | "addConnection.apply_async(['neuron_h1'], routing_key = 'neuron_x') # add the connection neuron_x -> neuron_h1\n", 457 | "addConnection.apply_async(['neuron_h2'], routing_key = 'neuron_x')\n", 458 | "# neuron y\n", 459 | "addConnection.apply_async(['neuron_h2'], routing_key = 'neuron_y') # add the connection neuron_y -> neuron_h2\n", 460 | "addConnection.apply_async(['neuron_h3'], routing_key = 'neuron_y')\n", 461 | "\n", 462 | "# hidden layer fan out\n", 463 | "addConnection.apply_async(['neuron_z'], routing_key = 'neuron_h1') # add the connection neuron_h1 -> neuron_z\n", 464 | "addConnection.apply_async(['neuron_z'], routing_key = 'neuron_h2')\n", 465 | "addConnection.apply_async(['neuron_z'], routing_key = 'neuron_h3')" 466 | ] 467 | },
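{ "cell_type": "markdown", "metadata": {}, "source": [ "The logs further down show messages such as \"neuron_x is kicking neuron_h1.\": when a neuron fires, it publishes a task onto the queue of every neuron it is connected to. Continuing the hypothetical sketch above (the task names fire and kick mirror the log messages; the real neuron.py also handles the refractory period, which is omitted here):\n", "\n", "```python\n", "# Continuation of the hypothetical sketch; not the actual IoT/neuron.py.\n", "ACTION_POTENTIAL = 1  # assumption: outputs are 0 or 1 (\"all-or-none\")\n", "\n", "@app.task\n", "def fire():\n", "    state = _load()\n", "    state['output'] = ACTION_POTENTIAL\n", "    _log(state, '{0} fires.'.format(NEURON))\n", "    _save(state)\n", "    for target in state['connections']:\n", "        # publisher/subscriber hop: enqueue a 'kick' on the target neuron's queue\n", "        app.send_task('IoT.neuron.kick', args=[NEURON], routing_key=target)\n", "\n", "@app.task\n", "def kick(source):\n", "    state = _load()\n", "    state['inputs'][source] = ACTION_POTENTIAL\n", "    _log(state, '{0} is kicking {1}.'.format(source, NEURON))\n", "    _save(state)\n", "    weighted_sum = sum(state['weights'].get(s, 0) * v for s, v in state['inputs'].items())\n", "    if weighted_sum >= state['threshold']:\n", "        fire.apply_async(routing_key = NEURON)  # integrate-and-fire\n", "```" ] },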
468 | { 469 | "cell_type": "markdown", 470 | "metadata": {}, 471 | "source": [ 472 | "### Set the weights" 473 | ] 474 | }, 475 | { 476 | "cell_type": "code", 477 | "execution_count": 5, 478 | "metadata": { 479 | "collapsed": false 480 | }, 481 | "outputs": [ 482 | { 483 | "data": { 484 | "text/plain": [ 485 | "" 486 | ] 487 | }, 488 | "execution_count": 5, 489 | "metadata": {}, 490 | "output_type": "execute_result" 491 | } 492 | ], 493 | "source": [ 494 | "# hidden layer\n", 495 | "setWeight.apply_async(['neuron_x', 1], routing_key = 'neuron_h1') # set the weight of neuron_x -> neuron_h1 to 1\n", 496 | "setWeight.apply_async(['neuron_x', 1], routing_key = 'neuron_h2')\n", 497 | "setWeight.apply_async(['neuron_y', 1], routing_key = 'neuron_h2') # set the weight of neuron_y -> neuron_h2 to 1\n", 498 | "setWeight.apply_async(['neuron_y', 1], routing_key = 'neuron_h3')\n", 499 | "\n", 500 | "# output layer\n", 501 | "setWeight.apply_async(['neuron_h1', 1], routing_key = 'neuron_z')\n", 502 | "setWeight.apply_async(['neuron_h2', -2], routing_key = 'neuron_z') # set the weight of neuron_h2 -> neuron_z to -2 (inhibitory)\n", 503 | "setWeight.apply_async(['neuron_h3', 1], routing_key = 'neuron_z') " 504 | ] 505 | }, 506 | { 507 | "cell_type": "markdown", 508 | "metadata": {}, 509 | "source": [ 510 | "### Set the thresholds" 511 | ] 512 | }, 513 | { 514 | "cell_type": "code", 515 | "execution_count": 6, 516 | "metadata": { 517 | "collapsed": false 518 | }, 519 | "outputs": [ 520 | { 521 | "data": { 522 | "text/plain": [ 523 | "" 524 | ] 525 | }, 526 | "execution_count": 6, 527 | "metadata": {}, 528 | "output_type": "execute_result" 529 | } 530 | ], 531 | "source": [ 532 | "# input layer \n", 533 | "setThreshold.apply_async([0.9], routing_key = 'neuron_x') # set the threshold of neuron_x to 0.9\n", 534 | "setThreshold.apply_async([0.9], routing_key = 'neuron_y') \n", 535 | "\n", 536 | "# hidden layer\n", 537 | "setThreshold.apply_async([0.9], routing_key = 'neuron_h1') \n", 538 | "setThreshold.apply_async([1.9], routing_key = 'neuron_h2') # set the threshold of neuron_h2 to 1.9\n", 539 | "setThreshold.apply_async([0.9], routing_key = 'neuron_h3')\n", 540 | "\n", 541 | "# output layer\n", 542 | "setThreshold.apply_async([0.9], routing_key = 'neuron_z') # set the threshold of neuron_z to 0.9" 543 | ] 544 | },
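{ "cell_type": "markdown", "metadata": {}, "source": [ "With the wiring x -> {h1, h2}, y -> {h2, h3}, {h1, h2, h3} -> z and the weights/thresholds above, the network computes XOR: h1 fires iff x fires, h3 fires iff y fires, h2 (threshold 1.9) fires only when both x and y fire, and h2 inhibits z with weight -2. A plain-Python sanity check of the truth table, modelling each neuron as a step activation over the same numbers (no Celery involved):\n", "\n", "```python\n", "# Check that the configured weights/thresholds implement XOR.\n", "def step(weighted_sum, threshold):\n", "    return 1 if weighted_sum >= threshold else 0\n", "\n", "for x in (0, 1):\n", "    for y in (0, 1):\n", "        h1 = step(1*x, 0.9)                 # fires iff x\n", "        h2 = step(1*x + 1*y, 1.9)           # fires iff x AND y\n", "        h3 = step(1*y, 0.9)                 # fires iff y\n", "        z = step(1*h1 - 2*h2 + 1*h3, 0.9)   # h2 is inhibitory\n", "        print(x, y, '->', z)                # 0 0 -> 0, 1 0 -> 1, 0 1 -> 1, 1 1 -> 0\n", "```" ] },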
\n", 564 | "\n", 565 | " \n", 566 | " \n", 567 | " \n", 568 | " \n", 569 | " \n", 570 | " \n", 571 | " \n", 572 | " \n", 573 | " \n", 574 | " \n", 575 | " \n", 576 | " \n", 577 | " \n", 578 | " \n", 579 | "
neuronmessage
time
\n", 580 | "
" 581 | ], 582 | "text/plain": [ 583 | "Empty DataFrame\n", 584 | "Columns: [neuron, message]\n", 585 | "Index: []" 586 | ] 587 | }, 588 | "execution_count": 7, 589 | "metadata": {}, 590 | "output_type": "execute_result" 591 | } 592 | ], 593 | "source": [ 594 | "### 模擬 sensor input,強迫 neuron x 或 y ouput 1\n", 595 | "emptyLogs() # 清除 logs\n", 596 | "sleep(REFRACTORY_PERIOD) # 等電位歸零 \n", 597 | "mergeLogs() # 彙整 logs" 598 | ] 599 | }, 600 | { 601 | "cell_type": "code", 602 | "execution_count": 8, 603 | "metadata": { 604 | "collapsed": false 605 | }, 606 | "outputs": [ 607 | { 608 | "data": { 609 | "text/html": [ 610 | "
\n", 611 | "\n", 612 | " \n", 613 | " \n", 614 | " \n", 615 | " \n", 616 | " \n", 617 | " \n", 618 | " \n", 619 | " \n", 620 | " \n", 621 | " \n", 622 | " \n", 623 | " \n", 624 | " \n", 625 | " \n", 626 | " \n", 627 | " \n", 628 | " \n", 629 | " \n", 630 | " \n", 631 | " \n", 632 | " \n", 633 | " \n", 634 | " \n", 635 | " \n", 636 | " \n", 637 | " \n", 638 | " \n", 639 | " \n", 640 | " \n", 641 | " \n", 642 | " \n", 643 | " \n", 644 | " \n", 645 | " \n", 646 | " \n", 647 | " \n", 648 | " \n", 649 | " \n", 650 | " \n", 651 | " \n", 652 | " \n", 653 | " \n", 654 | " \n", 655 | " \n", 656 | " \n", 657 | " \n", 658 | " \n", 659 | " \n", 660 | " \n", 661 | " \n", 662 | " \n", 663 | " \n", 664 | " \n", 665 | " \n", 666 | " \n", 667 | " \n", 668 | " \n", 669 | " \n", 670 | " \n", 671 | "
neuronmessage
time
2016-07-30 16:07:54.898294neuron_xneuron_x fires.
2016-07-30 16:07:54.902727neuron_xSetting output of neuron_x to ACTION_POTENTIAL.
2016-07-30 16:07:55.288702neuron_h1neuron_x is kicking neuron_h1.
2016-07-30 16:07:55.305675neuron_h1neuron_h1 fires.
2016-07-30 16:07:55.309558neuron_h1Setting output of neuron_h1 to ACTION_POTENTIAL.
2016-07-30 16:07:55.316428neuron_h2neuron_x is kicking neuron_h2.
2016-07-30 16:07:55.708307neuron_zneuron_h1 is kicking neuron_z.
2016-07-30 16:07:55.721137neuron_zneuron_z fires.
2016-07-30 16:07:55.724202neuron_zSetting output of neuron_z to ACTION_POTENTIAL.
\n", 672 | "
" 673 | ], 674 | "text/plain": [ 675 | " neuron \\\n", 676 | "time \n", 677 | "2016-07-30 16:07:54.898294 neuron_x \n", 678 | "2016-07-30 16:07:54.902727 neuron_x \n", 679 | "2016-07-30 16:07:55.288702 neuron_h1 \n", 680 | "2016-07-30 16:07:55.305675 neuron_h1 \n", 681 | "2016-07-30 16:07:55.309558 neuron_h1 \n", 682 | "2016-07-30 16:07:55.316428 neuron_h2 \n", 683 | "2016-07-30 16:07:55.708307 neuron_z \n", 684 | "2016-07-30 16:07:55.721137 neuron_z \n", 685 | "2016-07-30 16:07:55.724202 neuron_z \n", 686 | "\n", 687 | " message \n", 688 | "time \n", 689 | "2016-07-30 16:07:54.898294 neuron_x fires. \n", 690 | "2016-07-30 16:07:54.902727 Setting output of neuron_x to ACTION_POTENTIAL. \n", 691 | "2016-07-30 16:07:55.288702 neuron_x is kicking neuron_h1. \n", 692 | "2016-07-30 16:07:55.305675 neuron_h1 fires. \n", 693 | "2016-07-30 16:07:55.309558 Setting output of neuron_h1 to ACTION_POTENTIAL. \n", 694 | "2016-07-30 16:07:55.316428 neuron_x is kicking neuron_h2. \n", 695 | "2016-07-30 16:07:55.708307 neuron_h1 is kicking neuron_z. \n", 696 | "2016-07-30 16:07:55.721137 neuron_z fires. \n", 697 | "2016-07-30 16:07:55.724202 Setting output of neuron_z to ACTION_POTENTIAL. " 698 | ] 699 | }, 700 | "execution_count": 8, 701 | "metadata": {}, 702 | "output_type": "execute_result" 703 | } 704 | ], 705 | "source": [ 706 | "### 模擬 sensor input,強迫 neuron x 或 y ouput 1\n", 707 | "emptyLogs() # 清除 logs\n", 708 | "sleep(REFRACTORY_PERIOD) # 等電位歸零\n", 709 | "fire.apply_async(routing_key = 'neuron_x') # force neuron x output 1 and fire.\n", 710 | "mergeLogs() # 彙整 logs" 711 | ] 712 | }, 713 | { 714 | "cell_type": "code", 715 | "execution_count": 9, 716 | "metadata": { 717 | "collapsed": false 718 | }, 719 | "outputs": [ 720 | { 721 | "data": { 722 | "text/html": [ 723 | "
\n", 724 | "\n", 725 | " \n", 726 | " \n", 727 | " \n", 728 | " \n", 729 | " \n", 730 | " \n", 731 | " \n", 732 | " \n", 733 | " \n", 734 | " \n", 735 | " \n", 736 | " \n", 737 | " \n", 738 | " \n", 739 | " \n", 740 | " \n", 741 | " \n", 742 | " \n", 743 | " \n", 744 | " \n", 745 | " \n", 746 | " \n", 747 | " \n", 748 | " \n", 749 | " \n", 750 | " \n", 751 | " \n", 752 | " \n", 753 | " \n", 754 | " \n", 755 | " \n", 756 | " \n", 757 | " \n", 758 | " \n", 759 | " \n", 760 | " \n", 761 | " \n", 762 | " \n", 763 | " \n", 764 | " \n", 765 | " \n", 766 | " \n", 767 | " \n", 768 | " \n", 769 | " \n", 770 | " \n", 771 | " \n", 772 | " \n", 773 | " \n", 774 | " \n", 775 | " \n", 776 | " \n", 777 | " \n", 778 | " \n", 779 | " \n", 780 | " \n", 781 | " \n", 782 | " \n", 783 | " \n", 784 | "
neuronmessage
time
2016-07-29 00:38:16.531622neuron_yneuron_y fires.
2016-07-29 00:38:16.534594neuron_ySetting output of neuron_y to ACTION_POTENTIAL.
2016-07-29 00:38:16.618006neuron_h2neuron_y is kicking neuron_h2.
2016-07-29 00:38:16.644615neuron_h3neuron_y is kicking neuron_h3.
2016-07-29 00:38:16.659396neuron_h3neuron_h3 fires.
2016-07-29 00:38:16.663007neuron_h3Setting output of neuron_h3 to ACTION_POTENTIAL.
2016-07-29 00:38:16.744374neuron_zneuron_h3 is kicking neuron_z.
2016-07-29 00:38:16.761774neuron_zneuron_z fires.
2016-07-29 00:38:16.765062neuron_zSetting output of neuron_z to ACTION_POTENTIAL.
\n", 785 | "
" 786 | ], 787 | "text/plain": [ 788 | " neuron \\\n", 789 | "time \n", 790 | "2016-07-29 00:38:16.531622 neuron_y \n", 791 | "2016-07-29 00:38:16.534594 neuron_y \n", 792 | "2016-07-29 00:38:16.618006 neuron_h2 \n", 793 | "2016-07-29 00:38:16.644615 neuron_h3 \n", 794 | "2016-07-29 00:38:16.659396 neuron_h3 \n", 795 | "2016-07-29 00:38:16.663007 neuron_h3 \n", 796 | "2016-07-29 00:38:16.744374 neuron_z \n", 797 | "2016-07-29 00:38:16.761774 neuron_z \n", 798 | "2016-07-29 00:38:16.765062 neuron_z \n", 799 | "\n", 800 | " message \n", 801 | "time \n", 802 | "2016-07-29 00:38:16.531622 neuron_y fires. \n", 803 | "2016-07-29 00:38:16.534594 Setting output of neuron_y to ACTION_POTENTIAL. \n", 804 | "2016-07-29 00:38:16.618006 neuron_y is kicking neuron_h2. \n", 805 | "2016-07-29 00:38:16.644615 neuron_y is kicking neuron_h3. \n", 806 | "2016-07-29 00:38:16.659396 neuron_h3 fires. \n", 807 | "2016-07-29 00:38:16.663007 Setting output of neuron_h3 to ACTION_POTENTIAL. \n", 808 | "2016-07-29 00:38:16.744374 neuron_h3 is kicking neuron_z. \n", 809 | "2016-07-29 00:38:16.761774 neuron_z fires. \n", 810 | "2016-07-29 00:38:16.765062 Setting output of neuron_z to ACTION_POTENTIAL. " 811 | ] 812 | }, 813 | "execution_count": 9, 814 | "metadata": {}, 815 | "output_type": "execute_result" 816 | } 817 | ], 818 | "source": [ 819 | "### 模擬 sensor input,強迫 neuron x 或 y ouput 1\n", 820 | "emptyLogs() # 清除 logs\n", 821 | "sleep(REFRACTORY_PERIOD) # 等電位歸零\n", 822 | "fire.apply_async(routing_key = 'neuron_y') # force neuron y output 1 and fire.\n", 823 | "mergeLogs() # 彙整 logs" 824 | ] 825 | }, 826 | { 827 | "cell_type": "code", 828 | "execution_count": 10, 829 | "metadata": { 830 | "collapsed": false 831 | }, 832 | "outputs": [ 833 | { 834 | "data": { 835 | "text/html": [ 836 | "
\n", 837 | "\n", 838 | " \n", 839 | " \n", 840 | " \n", 841 | " \n", 842 | " \n", 843 | " \n", 844 | " \n", 845 | " \n", 846 | " \n", 847 | " \n", 848 | " \n", 849 | " \n", 850 | " \n", 851 | " \n", 852 | " \n", 853 | " \n", 854 | " \n", 855 | " \n", 856 | " \n", 857 | " \n", 858 | " \n", 859 | " \n", 860 | " \n", 861 | " \n", 862 | " \n", 863 | " \n", 864 | " \n", 865 | " \n", 866 | " \n", 867 | " \n", 868 | " \n", 869 | " \n", 870 | " \n", 871 | " \n", 872 | " \n", 873 | " \n", 874 | " \n", 875 | " \n", 876 | " \n", 877 | " \n", 878 | " \n", 879 | " \n", 880 | " \n", 881 | " \n", 882 | " \n", 883 | " \n", 884 | " \n", 885 | " \n", 886 | " \n", 887 | " \n", 888 | " \n", 889 | " \n", 890 | " \n", 891 | " \n", 892 | " \n", 893 | " \n", 894 | " \n", 895 | " \n", 896 | " \n", 897 | " \n", 898 | " \n", 899 | " \n", 900 | " \n", 901 | " \n", 902 | " \n", 903 | " \n", 904 | " \n", 905 | " \n", 906 | " \n", 907 | " \n", 908 | " \n", 909 | " \n", 910 | " \n", 911 | " \n", 912 | " \n", 913 | " \n", 914 | " \n", 915 | " \n", 916 | " \n", 917 | " \n", 918 | " \n", 919 | " \n", 920 | " \n", 921 | " \n", 922 | " \n", 923 | " \n", 924 | " \n", 925 | " \n", 926 | " \n", 927 | " \n", 928 | " \n", 929 | " \n", 930 | " \n", 931 | " \n", 932 | " \n", 933 | " \n", 934 | " \n", 935 | " \n", 936 | " \n", 937 | " \n", 938 | " \n", 939 | " \n", 940 | " \n", 941 | " \n", 942 | " \n", 943 | " \n", 944 | " \n", 945 | " \n", 946 | " \n", 947 | " \n", 948 | " \n", 949 | " \n", 950 | " \n", 951 | " \n", 952 | " \n", 953 | " \n", 954 | " \n", 955 | " \n", 956 | " \n", 957 | "
neuronmessage
time
2016-07-29 00:38:20.262963neuron_xneuron_x fires.
2016-07-29 00:38:20.266762neuron_xSetting output of neuron_x to ACTION_POTENTIAL.
2016-07-29 00:38:20.307171neuron_yneuron_y fires.
2016-07-29 00:38:20.310272neuron_ySetting output of neuron_y to ACTION_POTENTIAL.
2016-07-29 00:38:20.358455neuron_h1neuron_x is kicking neuron_h1.
2016-07-29 00:38:20.383688neuron_h1neuron_h1 fires.
2016-07-29 00:38:20.392683neuron_h1Setting output of neuron_h1 to ACTION_POTENTIAL.
2016-07-29 00:38:20.393893neuron_h2neuron_x is kicking neuron_h2.
2016-07-29 00:38:20.417944neuron_h3neuron_y is kicking neuron_h3.
2016-07-29 00:38:20.431711neuron_h3neuron_h3 fires.
2016-07-29 00:38:20.435328neuron_h3Setting output of neuron_h3 to ACTION_POTENTIAL.
2016-07-29 00:38:20.478526neuron_zneuron_h1 is kicking neuron_z.
2016-07-29 00:38:20.501411neuron_h2neuron_y is kicking neuron_h2.
2016-07-29 00:38:20.510015neuron_zneuron_z fires.
2016-07-29 00:38:20.513233neuron_zSetting output of neuron_z to ACTION_POTENTIAL.
2016-07-29 00:38:20.527649neuron_h2neuron_h2 fires.
2016-07-29 00:38:20.537002neuron_h2Setting output of neuron_h2 to ACTION_POTENTIAL.
2016-07-29 00:38:20.575899neuron_zneuron_h3 is kicking neuron_z.
2016-07-29 00:38:20.600624neuron_zneuron_z is still in refractory-period.
2016-07-29 00:38:20.603210neuron_zneuron_z is still in refractory_period at action potential, then a neuron neuron_h3 kicks in, now sum_of_weighted_inputs >= threshold.
2016-07-29 00:38:20.671668neuron_zneuron_h2 is kicking neuron_z.
\n", 958 | "
" 959 | ], 960 | "text/plain": [ 961 | " neuron \\\n", 962 | "time \n", 963 | "2016-07-29 00:38:20.262963 neuron_x \n", 964 | "2016-07-29 00:38:20.266762 neuron_x \n", 965 | "2016-07-29 00:38:20.307171 neuron_y \n", 966 | "2016-07-29 00:38:20.310272 neuron_y \n", 967 | "2016-07-29 00:38:20.358455 neuron_h1 \n", 968 | "2016-07-29 00:38:20.383688 neuron_h1 \n", 969 | "2016-07-29 00:38:20.392683 neuron_h1 \n", 970 | "2016-07-29 00:38:20.393893 neuron_h2 \n", 971 | "2016-07-29 00:38:20.417944 neuron_h3 \n", 972 | "2016-07-29 00:38:20.431711 neuron_h3 \n", 973 | "2016-07-29 00:38:20.435328 neuron_h3 \n", 974 | "2016-07-29 00:38:20.478526 neuron_z \n", 975 | "2016-07-29 00:38:20.501411 neuron_h2 \n", 976 | "2016-07-29 00:38:20.510015 neuron_z \n", 977 | "2016-07-29 00:38:20.513233 neuron_z \n", 978 | "2016-07-29 00:38:20.527649 neuron_h2 \n", 979 | "2016-07-29 00:38:20.537002 neuron_h2 \n", 980 | "2016-07-29 00:38:20.575899 neuron_z \n", 981 | "2016-07-29 00:38:20.600624 neuron_z \n", 982 | "2016-07-29 00:38:20.603210 neuron_z \n", 983 | "2016-07-29 00:38:20.671668 neuron_z \n", 984 | "\n", 985 | " message \n", 986 | "time \n", 987 | "2016-07-29 00:38:20.262963 neuron_x fires. \n", 988 | "2016-07-29 00:38:20.266762 Setting output of neuron_x to ACTION_POTENTIAL. \n", 989 | "2016-07-29 00:38:20.307171 neuron_y fires. \n", 990 | "2016-07-29 00:38:20.310272 Setting output of neuron_y to ACTION_POTENTIAL. \n", 991 | "2016-07-29 00:38:20.358455 neuron_x is kicking neuron_h1. \n", 992 | "2016-07-29 00:38:20.383688 neuron_h1 fires. \n", 993 | "2016-07-29 00:38:20.392683 Setting output of neuron_h1 to ACTION_POTENTIAL. \n", 994 | "2016-07-29 00:38:20.393893 neuron_x is kicking neuron_h2. \n", 995 | "2016-07-29 00:38:20.417944 neuron_y is kicking neuron_h3. \n", 996 | "2016-07-29 00:38:20.431711 neuron_h3 fires. \n", 997 | "2016-07-29 00:38:20.435328 Setting output of neuron_h3 to ACTION_POTENTIAL. \n", 998 | "2016-07-29 00:38:20.478526 neuron_h1 is kicking neuron_z. \n", 999 | "2016-07-29 00:38:20.501411 neuron_y is kicking neuron_h2. \n", 1000 | "2016-07-29 00:38:20.510015 neuron_z fires. \n", 1001 | "2016-07-29 00:38:20.513233 Setting output of neuron_z to ACTION_POTENTIAL. \n", 1002 | "2016-07-29 00:38:20.527649 neuron_h2 fires. \n", 1003 | "2016-07-29 00:38:20.537002 Setting output of neuron_h2 to ACTION_POTENTIAL. \n", 1004 | "2016-07-29 00:38:20.575899 neuron_h3 is kicking neuron_z. \n", 1005 | "2016-07-29 00:38:20.600624 neuron_z is still in refractory-period. \n", 1006 | "2016-07-29 00:38:20.603210 neuron_z is still in refractory_period at action potential, then a neuron neuron_h3 kicks in, now sum_of_weighted_inputs >= threshold. \n", 1007 | "2016-07-29 00:38:20.671668 neuron_h2 is kicking neuron_z. 
" 1008 | ] 1009 | }, 1010 | "execution_count": 10, 1011 | "metadata": {}, 1012 | "output_type": "execute_result" 1013 | } 1014 | ], 1015 | "source": [ 1016 | "### 模擬 sensor input,強迫 neuron x 或 y ouput 1\n", 1017 | "emptyLogs() # 清除 logs\n", 1018 | "sleep(REFRACTORY_PERIOD) # 等電位歸零\n", 1019 | "fire.apply_async(routing_key = 'neuron_x') # force neuron x output 1 and fire.\n", 1020 | "fire.apply_async(routing_key = 'neuron_y') # force neuron y output 1 and fire.\n", 1021 | "mergeLogs() # 彙整 logs" 1022 | ] 1023 | }, 1024 | { 1025 | "cell_type": "markdown", 1026 | "metadata": {}, 1027 | "source": [ 1028 | "### [Flower](http://192.168.0.114:5555) 中顯示各 worker 處理的 messages 數量:\n", 1029 | "\n", 1030 | "![各 neuron 的活動次數](https://github.com/Wei1234c/IOTasBrain/raw/master/celery_projects/jpgs/flower2.jpg \"各 neuron 的活動次數\")" 1031 | ] 1032 | }, 1033 | { 1034 | "cell_type": "markdown", 1035 | "metadata": {}, 1036 | "source": [ 1037 | "### 各 neurons 的 config 狀態:" 1038 | ] 1039 | }, 1040 | { 1041 | "cell_type": "code", 1042 | "execution_count": null, 1043 | "metadata": { 1044 | "collapsed": false 1045 | }, 1046 | "outputs": [], 1047 | "source": [ 1048 | "for neuron in reversed(neurons): printConfig(neuron)" 1049 | ] 1050 | }, 1051 | { 1052 | "cell_type": "markdown", 1053 | "metadata": {}, 1054 | "source": [ 1055 | "## Summary\n", 1056 | "\n", 1057 | " 這次實驗,主要是想驗證 可以使用 Celery + Docker Swarm 快速地建構私有的 類似 Bluemix 的 IoT 平台,讓其上的 devices 共同組成一個分散式的協同運算系統,視整個 IoT(Internet of Things) 為一體。 \n", 1058 | " \n", 1059 | " 本次實作的平台中,使用 2 台 Raspberry Pi 組成一個 Docker Swarm,run 6 個 containers,每個 container 扮演一個 device,也可視為一個 neuron。設定 neurons 之間的連結,好比是在設定 publisher / subscriber 的對應關係,其對應關係 可以是多對多。 \n", 1060 | " \n", 1061 | " 可以使用這 6 個 neurons (devices / containers),在設定好 connections / weights / thresholds 之後,組成一個 XOR網路,針對外接的 sensors 所感測到的環境狀態,依據 網路的pattern 決定最終的 output。" 1062 | ] 1063 | }, 1064 | { 1065 | "cell_type": "markdown", 1066 | "metadata": {}, 1067 | "source": [ 1068 | "## 後記 (2016/01/31)\n", 1069 | "\n", 1070 | " 後來發現 XOR網路 可能不會存在於現實世界的大腦中,因為要實現XOR網路 就必須要求 各訊號都同時到達,這在真實的世界中不大可能發生,而且就算發生了,因為這種機制會要求 \"等待\",對系統整體的運算效能會造成很大的損失,我不認為是一個好的運算機制,大自然應該不會這樣設計大腦。" 1071 | ] 1072 | }, 1073 | { 1074 | "cell_type": "markdown", 1075 | "metadata": {}, 1076 | "source": [ 1077 | "### 參考資料\n", 1078 | "[Action potential](https://en.wikipedia.org/wiki/Action_potential) \n", 1079 | "[Neural coding](https://en.wikipedia.org/wiki/Neural_coding) \n", 1080 | "[Artificial neuron](https://en.wikipedia.org/wiki/Artificial_neuron) \n", 1081 | "[\"All-or-none\" principle](https://en.wikipedia.org/wiki/Action_potential#.22All-or-none.22_principle) \n", 1082 | "[Refractory period](https://en.wikipedia.org/wiki/Action_potential#Refractory_period) \n", 1083 | "- The absolute refractory period is largely responsible for the unidirectional propagation of action potentials along axons.[34] At any given moment, the patch of axon behind the actively spiking part is refractory, but the patch in front, not having been activated recently, is capable of being stimulated by the depolarization from the action potential. 
\n", 1084 | "\n", 1085 | "\n" 1086 | ] 1087 | } 1088 | ], 1089 | "metadata": { 1090 | "anaconda-cloud": {}, 1091 | "kernelspec": { 1092 | "display_name": "Python [default]", 1093 | "language": "python", 1094 | "name": "python3" 1095 | }, 1096 | "language_info": { 1097 | "codemirror_mode": { 1098 | "name": "ipython", 1099 | "version": 3 1100 | }, 1101 | "file_extension": ".py", 1102 | "mimetype": "text/x-python", 1103 | "name": "python", 1104 | "nbconvert_exporter": "python", 1105 | "pygments_lexer": "ipython3", 1106 | "version": "3.5.1" 1107 | } 1108 | }, 1109 | "nbformat": 4, 1110 | "nbformat_minor": 0 1111 | } 1112 | --------------------------------------------------------------------------------