├── moleculer
│   ├── __init__.py
│   ├── service.py
│   ├── topics.py
│   ├── consumer.py
│   ├── client.py
│   └── node.py
├── requirements.txt
├── Readme.md
├── node_example.py
├── setup.py
├── client_example.py
└── .gitignore
-------------------------------------------------------------------------------- /moleculer/__init__.py: --------------------------------------------------------------------------------
-------------------------------------------------------------------------------- /requirements.txt: --------------------------------------------------------------------------------
psutil
pika
-------------------------------------------------------------------------------- /Readme.md: --------------------------------------------------------------------------------
### Usage:

Each node should run an instance of the `MoleculerNode` class. The node id can be specified on initialization.

To configure a node, edit the `moleculer/service.py` file.

The service configuration lives in `INFO_PACKET_TEMPLATE`.

To handle requests from other nodes (`call`, `dcall`), you must implement the `request_handler` function.

To handle events (`emit`, `broadcast`), you must implement the `event_handler` function.
12 | 13 | -------------------------------------------------------------------------------- /node_example.py: -------------------------------------------------------------------------------- 1 | from moleculer.node import MoleculerNode, LOG_FORMAT 2 | import logging 3 | 4 | 5 | def main(): 6 | logging.basicConfig(level=logging.INFO, format=LOG_FORMAT) 7 | 8 | # Connect to localhost:5672 as guest with the password guest and virtual host "/" (%2F) 9 | service = MoleculerNode('amqp://guest:guest@localhost:5672/%2F?connection_attempts=3&heartbeat_interval=3600') 10 | service.run() 11 | 12 | 13 | if __name__ == '__main__': 14 | main() 15 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from distutils.core import setup 2 | 3 | setup( 4 | name = 'moleculer', 5 | packages = ['moleculer'], # this must be the same as the name above 6 | version='0.2', 7 | description = 'Moleculer node and client for amqp with json serializer', 8 | author = 'Andrei Khaliaukin', 9 | author_email = 'endpoo@gmail.com', 10 | url = 'https://github.com/ToGoBananas/moleculer-python', # use the URL to the github repo 11 | download_url='https://github.com/ToGoBananas/moleculer-python/archive/0.2.tar.gz', # I'll explain this in a second 12 | keywords = ['moleculer', 'microservices'], # arbitrary keywords 13 | classifiers = [], 14 | ) -------------------------------------------------------------------------------- /client_example.py: -------------------------------------------------------------------------------- 1 | from moleculer.client import MoleculerClient 2 | import threading 3 | import time 4 | 5 | 6 | def main(): 7 | client = MoleculerClient('amqp://guest:guest@localhost:5672/%2F?connection_attempts=3&heartbeat_interval=3600', 8 | namespace='EEE') 9 | t1 = threading.Thread(target=client.run) # run loop in separate thread 10 | t1.start() 11 | 12 | # wait for 
initialization 13 | time.sleep(5) # TODO: extract 'ready' event from thread 14 | client.emit('event_test') 15 | client.broadcast('event_test') 16 | print('EEE') 17 | 18 | 19 | if __name__ == '__main__': 20 | main() 21 | -------------------------------------------------------------------------------- /moleculer/service.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | 4 | def service_builder(name, cache=False, params=None): 5 | """ 6 | 7 | :param name: 8 | :param cache: 9 | :param params: dict. Example: {'name': {'optional': bool, 'type': 'typeof'}} 10 | :return: 11 | """ 12 | 13 | result = { 14 | 'cache': cache, 15 | 'name': name, 16 | } 17 | if params is not None: 18 | result['params'] = params 19 | return result 20 | 21 | 22 | def request_handler(action: str, params: dict) -> bool: 23 | with open('1.txt', 'w') as f: 24 | f.write(action) 25 | f.write('\n') 26 | f.write(json.dumps(params)) 27 | return True 28 | 29 | 30 | def event_handler(sender_node_id: str, event: str, payload: dict): 31 | pass 32 | 33 | 34 | INFO_PACKET_TEMPLATE = { 35 | 'ver': '2', 36 | 'sender': None, 37 | 'services': [{ 38 | 'actions': {'test': service_builder('test')}, # DO NOT name like service_name.action name. 
Just action name 39 | 'events': {'event_test': {'name': 'event_test'}}, 40 | 'metadata': {}, 41 | 'name': '$python', 42 | 'nodeID': None, 43 | 'settings': {} 44 | }], 45 | 'config': {}, 46 | 'ipList': ['127.0.0.1', ], 47 | 'port': None, 48 | 'client': {'langVersion': '3.6.3', 'type': 'python', 'version': '0.0.1'}, 49 | } 50 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | .idea 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | 49 | # Translations 50 | *.mo 51 | *.pot 52 | 53 | # Django stuff: 54 | *.log 55 | .static_storage/ 56 | .media/ 57 | local_settings.py 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # SageMath parsed files 82 | *.sage.py 83 | 84 | # Environments 85 | .env 86 | .venv 87 | env/ 88 | venv/ 89 | ENV/ 90 | env.bak/ 91 | venv.bak/ 92 | 93 | # Spyder project settings 94 | .spyderproject 95 | .spyproject 96 | 97 | # Rope project settings 98 | .ropeproject 99 | 100 | # mkdocs documentation 101 | /site 102 | 103 | # mypy 104 | .mypy_cache/ 105 | -------------------------------------------------------------------------------- /moleculer/topics.py: -------------------------------------------------------------------------------- 1 | from .service import INFO_PACKET_TEMPLATE 2 | 3 | 4 | EXCHANGES = { 5 | 'DISCOVER': 'MOL.DISCOVER', 6 | 'INFO': 'MOL.INFO', 7 | 'HEARTBEAT': 'MOL.HEARTBEAT', 8 | 'PING': 'MOL.PING', 9 | 'DISCONNECT': 'MOL.DISCONNECT' 10 | } 11 | 12 | REQB = 'MOL.REQB.{service_name}.{action}' 13 | REQB_NAMESPACE = 'MOL-{namespace}.REQB.{service_name}.{action}' 14 | EVENTB = 'MOL.EVENTB.{service_name}.{event}' 15 | EVENTB_NAMESPACE = 'MOL-{namespace}.EVENTB.{service_name}.{event}' 16 | 17 | 18 | class MoleculerTopics: 19 | EVENT_QUEUE = 'MOL.EVENT.{node_id}' 20 | REQUEST_QUEUE = 'MOL.REQ.{node_id}' 21 | RESPONSE_QUEUE = 'MOL.RES.{node_id}' 22 | PONG_QUEUE = 'MOL.PONG.{node_id}' 23 | 
INFO_QUEUE = 'MOL.INFO.{node_id}' 24 | PING_QUEUE = 'MOL.PING.{node_id}' 25 | DISCONNECT_QUEUE = 'MOL.DISCONNECT.{node_id}' 26 | DISCOVER_QUEUE = 'MOL.DISCOVER.{node_id}' 27 | HEARTBEAT_QUEUE = 'MOL.HEARTBEAT.{node_id}' 28 | 29 | @property 30 | def queue_attrs(self): 31 | return [attr for attr in MoleculerTopics.__dict__ if attr.endswith('_QUEUE')] 32 | 33 | @property 34 | def queues(self): 35 | result = {} 36 | for attr in self.queue_attrs: 37 | result[attr.replace('_QUEUE', '')] = getattr(self, attr) 38 | return result 39 | 40 | def __init__(self, node_id, namespace=None): 41 | for queue_name in self.queue_attrs: 42 | if namespace is None: 43 | setattr(self, queue_name, getattr(MoleculerTopics, queue_name).format(node_id=node_id)) 44 | else: 45 | queue_string = getattr(MoleculerTopics, queue_name).format(node_id=node_id) 46 | queue_string = queue_string.replace('MOL', 'MOL-' + namespace) 47 | setattr(self, queue_name, queue_string) 48 | self.namespace = namespace 49 | self.exchanges = self.generate_exchanges() 50 | 51 | @property 52 | def bindings(self): 53 | result = {} 54 | for queue_type, queue_name in self.queues.items(): 55 | exchanges = self.exchanges 56 | if queue_type in exchanges: 57 | result[queue_name] = exchanges[queue_type] 58 | return result 59 | 60 | @property 61 | def action_queues(self): 62 | if self.namespace is None: 63 | template = REQB 64 | else: 65 | template = REQB_NAMESPACE 66 | result = [] 67 | for service in INFO_PACKET_TEMPLATE['services']: 68 | service_name = service['name'] 69 | for action in service['actions'].keys(): 70 | result.append(template.format(service_name=service_name, action=action, namespace=self.namespace)) 71 | return result 72 | 73 | @property 74 | def event_queues(self): 75 | if self.namespace is None: 76 | template = EVENTB 77 | else: 78 | template = EVENTB_NAMESPACE 79 | result = [] 80 | for service in INFO_PACKET_TEMPLATE['services']: 81 | service_name = service['name'] 82 | for event in service['events'].keys(): 
83 | result.append(template.format(service_name=service_name, event=event, namespace=self.namespace)) 84 | return result 85 | 86 | def generate_exchanges(self): 87 | if self.namespace is None: 88 | return EXCHANGES 89 | else: 90 | return {key: val.replace('MOL', 'MOL-' + self.namespace) for key, val in EXCHANGES.items()} 91 | -------------------------------------------------------------------------------- /moleculer/consumer.py: -------------------------------------------------------------------------------- 1 | import json 2 | import datetime 3 | from .service import request_handler, INFO_PACKET_TEMPLATE, event_handler 4 | 5 | 6 | class MoleculerConsumer: 7 | 8 | def __init__(self, node_id, moleculer_topics, namespace=None): 9 | self.namespace = namespace 10 | self.moleculer_topics = moleculer_topics 11 | self.node_id = node_id 12 | self.is_node_discovered = False 13 | if self.namespace is None: 14 | self.info_template = 'MOL.INFO.{node_id}' 15 | else: 16 | self.info_template = 'MOL-{namespace}.INFO.{node_id}' 17 | if self.namespace is None: 18 | self.res_template = 'MOL.RES.{node_id}' 19 | else: 20 | self.res_template = 'MOL-{namespace}.RES.{node_id}' 21 | if self.namespace is None: 22 | self.pong_template = 'MOL.PONG.{node_id}' 23 | else: 24 | self.pong_template = 'MOL-{namespace}.PONG.{node_id}' 25 | 26 | def build_info_package(self): 27 | info_packet = INFO_PACKET_TEMPLATE 28 | info_packet['sender'] = self.node_id 29 | info_packet['services'][0]['nodeID'] = self.node_id 30 | return info_packet 31 | 32 | def discover(self, channel, basic_deliver, properties, body): 33 | discover_packet = json.loads(body) 34 | sender = discover_packet['sender'] 35 | sender_queue = self.info_template.format(node_id=sender, namespace=self.namespace) 36 | info_packet = self.build_info_package() # TODO: reuse same package 37 | channel.basic_publish('', sender_queue, json.dumps(info_packet)) 38 | 39 | def info(self, channel, basic_deliver, properties, body): 40 | if not 
self.is_node_discovered: 41 | info_packet = json.loads(body) 42 | sender = info_packet['sender'] 43 | if sender != self.node_id: # TODO: send info package anyway 44 | info_packet = INFO_PACKET_TEMPLATE 45 | info_packet['sender'] = self.node_id 46 | info_packet['services'][0]['nodeID'] = self.node_id 47 | channel.basic_publish(self.moleculer_topics.exchanges['INFO'], '', json.dumps(info_packet)) 48 | self.is_node_discovered = True 49 | else: 50 | pass # TODO: save discovered services 51 | 52 | def heartbeat(self, channel, basic_deliver, properties, body): 53 | pass 54 | 55 | def ping(self, channel, basic_deliver, properties, body): 56 | ping_packet = json.loads(body) 57 | sender_node_id, time = ping_packet['sender'], ping_packet['time'] 58 | if sender_node_id != self.node_id: 59 | sender_exchange = self.pong_template.format(node_id=sender_node_id, namespace=self.namespace) 60 | pong_packet = { 61 | 'ver': '2', 62 | 'sender': self.node_id, 63 | 'time': time, 64 | 'arrived': datetime.datetime.utcnow().timestamp(), 65 | } 66 | channel.basic_publish(sender_exchange, '', json.dumps(pong_packet)) 67 | 68 | def pong(self, channel, basic_deliver, properties, body): 69 | pass 70 | 71 | def request(self, channel, basic_deliver, properties, body): 72 | channel.basic_ack(basic_deliver.delivery_tag) 73 | request_packet = json.loads(body) 74 | action, params, request_id, sender = request_packet['action'], request_packet['params'], \ 75 | request_packet['id'], request_packet['sender'] 76 | 77 | result = request_handler(action, params) 78 | response_packet = { 79 | 'ver': '2', 80 | 'sender': self.node_id, 81 | 'id': request_id, 82 | 'success': True, 83 | 'data': {'result': 'Response from python node: ' + self.node_id} 84 | } 85 | sender_exchange = self.res_template.format(node_id=sender, namespace=self.namespace) 86 | channel.basic_publish('', sender_exchange, json.dumps(response_packet)) 87 | 88 | def disconnect(self, channel, basic_deliver, properties, body): 89 | # TODO: remove 
disconnected services DISCOVERED list 90 | pass 91 | 92 | def response(self, channel, basic_deliver, properties, body): 93 | # channel.basic_ack(basic_deliver.delivery_tag) 94 | pass 95 | # TODO: handle responses from other services 96 | 97 | def event(self, channel, basic_deliver, properties, body): 98 | # print('EVENT!!!!') 99 | event_packet = json.loads(body) 100 | sender, event, data = event_packet['sender'], event_packet['event'], event_packet['data'] 101 | event_handler(sender, event, data) 102 | -------------------------------------------------------------------------------- /moleculer/client.py: -------------------------------------------------------------------------------- 1 | from .node import MoleculerNode, LOGGER 2 | from pika.channel import Channel 3 | import json 4 | 5 | 6 | class MoleculerClient(MoleculerNode): 7 | 8 | def __init__(self, amqp_url, namespace=None): 9 | self.namespace = namespace 10 | super().__init__(amqp_url, node_id='PYTHON-CLIENT', namespace=self.namespace) 11 | self.network = NetworkInfo() 12 | if self.namespace is None: 13 | self.info_template = 'MOL.INFO.{node_id}' 14 | self.disconnect_template = 'MOL.DISCONNECT.{node_id}' 15 | self.eventb_template = 'MOL.EVENTB.{service}.{event}' 16 | self.event_template = 'MOL.EVENT.{node_id}' 17 | else: 18 | self.info_template = 'MOL-{namespace}.INFO.{node_id}' 19 | self.disconnect_template = 'MOL-{namespace}.DISCONNECT.{node_id}' 20 | self.eventb_template = 'MOL-{namespace}.EVENTB.{service}.{event}' 21 | self.event_template = 'MOL-{namespace}.EVENT.{node_id}' 22 | 23 | def on_channel_open(self, channel): 24 | LOGGER.info('Channel opened') 25 | self.channel: Channel = channel 26 | self.add_on_channel_close_callback() 27 | info_queue = self.info_template.format(node_id=self.NODE_ID, namespace=self.namespace) 28 | disconnect_queue = self.disconnect_template.format(node_id=self.NODE_ID, namespace=self.namespace) 29 | self.setup_queue(info_queue) 30 | self.setup_queue(disconnect_queue) 31 | 
self.channel.queue_bind(self.on_bindok, info_queue, self.moleculer_topics.exchanges['INFO']) 32 | self.channel.queue_bind(self.on_bindok, disconnect_queue, self.moleculer_topics.exchanges['DISCONNECT']) 33 | self.channel.basic_consume(self.process_info_packages, info_queue) 34 | self.channel.basic_consume(self.on_node_disconnect, disconnect_queue) 35 | self.discover_packet() 36 | 37 | def process_info_packages(self, unused_channel, basic_deliver, properties, body): 38 | info_packet = json.loads(body) 39 | self.network.add_node(info_packet) 40 | 41 | def on_node_disconnect(self, unused_channel, basic_deliver, properties, body): 42 | disconnect_package = json.loads(body) 43 | self.network.disconnect_node(disconnect_package['sender']) 44 | 45 | def emit(self, event_name, data=None): 46 | candidates = self.get_emit_candidates(event_name) 47 | if len(candidates) == 0: 48 | return {'error': 'This event not registered.'} 49 | else: 50 | if data is None: 51 | data = {} 52 | event_package = MoleculerClient.build_event('PYTHON-CLIENT', event_name, data) 53 | for service_name in candidates: 54 | queue_name = self.eventb_template.format(service=service_name, event=event_name, 55 | namespace=self.namespace) 56 | # print(queue_name) 57 | self.channel.basic_publish('', queue_name, event_package) 58 | 59 | def broadcast(self, event_name, data=None): 60 | candidates = self.get_broadcast_candidates(event_name) 61 | if len(candidates) == 0: 62 | return {'error': 'This event not registered.'} 63 | else: 64 | if data is None: 65 | data = {} 66 | event_package = MoleculerClient.build_event('PYTHON-CLIENT', event_name, data) 67 | for node_id in candidates: 68 | queue_name = self.event_template.format(node_id=node_id, namespace=self.namespace) 69 | self.channel.basic_publish('', queue_name, event_package) 70 | 71 | def call(self): 72 | pass 73 | 74 | def dcall(self): 75 | pass 76 | 77 | def get_emit_candidates(self, event_name): 78 | service_names = set() 79 | for node_id, node_info in 
self.network.NODES.items(): 80 | if event_name in node_info['events']: 81 | service_name = node_info['service_name'] 82 | service_names.add(service_name) 83 | return service_names 84 | 85 | def get_broadcast_candidates(self, event_name): 86 | candidates = [] 87 | for node_id, node_info in self.network.NODES.items(): 88 | if event_name in node_info['events']: 89 | candidates.append(node_id) 90 | return candidates 91 | 92 | @staticmethod 93 | def build_event(sender_node_id, event_name, payload): 94 | event = { 95 | 'ver': '2', 96 | 'sender': sender_node_id, 97 | 'event': event_name, 98 | 'data': payload, 99 | 'groups': [] 100 | } 101 | return json.dumps(event) 102 | 103 | 104 | class NetworkInfo: 105 | NODES = {} 106 | 107 | def __init__(self, namespace=None): 108 | self.namespace = namespace 109 | if self.namespace is None: 110 | self.reqb_template = 'MOL.REQB.{action}' 111 | self.service_reqb_template = 'MOL.REQB.{service_name}.{action}' 112 | else: 113 | self.reqb_template = 'MOL.REQB.{action}'.replace('MOL', 'MOL-' + self.namespace) 114 | self.service_reqb_template = 'MOL.REQB.{service_name}.{action}'.replace('MOL', 'MOL-' + self.namespace) 115 | 116 | def add_node(self, info_packet: dict): 117 | node_id = info_packet['sender'] 118 | if node_id not in self.NODES.keys(): 119 | self.NODES[node_id] = { 120 | 'actions': {}, 121 | 'events': [] 122 | } 123 | node = self.NODES[node_id] 124 | for service in info_packet['services']: 125 | service_name = service['name'] 126 | is_service_node = bool(service_name == '$node') 127 | for action_name, action_spec in service['actions'].items(): 128 | if is_service_node: 129 | queue_name = self.reqb_template.format(action=action_name, namespace=self.namespace) 130 | else: 131 | queue_name = self.service_reqb_template.format(service_name=service_name, action=action_name, 132 | namespace=self.namespace) 133 | node['actions'][action_name] = queue_name 134 | 135 | for event_name in service['events'].keys(): 136 | 
node['events'].append(event_name) 137 | else: 138 | node['service_name'] = service_name 139 | 140 | def disconnect_node(self, node_id): 141 | del self.NODES[node_id] 142 | -------------------------------------------------------------------------------- /moleculer/node.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import logging 4 | import pika 5 | import json 6 | import psutil 7 | 8 | from .topics import MoleculerTopics 9 | from pika.channel import Channel 10 | from pika.adapters.select_connection import SelectConnection 11 | from .consumer import MoleculerConsumer 12 | 13 | LOG_FORMAT = ('%(levelname) -10s %(asctime)s %(name) -30s %(funcName) ' 14 | '-35s %(lineno) -5d: %(message)s') 15 | LOGGER = logging.getLogger(__name__) 16 | 17 | 18 | class MoleculerNode(object): 19 | """ 20 | 21 | If RabbitMQ closes the connection, it will reopen it. You should 22 | look at the output, as there are limited reasons why the connection may 23 | be closed, which usually are tied to permission related issues or 24 | socket timeouts. 25 | 26 | """ 27 | 28 | EXCHANGE_TYPE = 'fanout' 29 | NODE_ID = 'python-node-1' 30 | HEARTBEAT_INTERVAL = 5 31 | 32 | def __init__(self, amqp_url, node_id=None, namespace=None): 33 | """Setup the example publisher object, passing in the URL we will use 34 | to connect to RabbitMQ. 
35 | 36 | :param str amqp_url: The URL for connecting to RabbitMQ 37 | 38 | """ 39 | self._connection = None 40 | self.channel: Channel = None 41 | 42 | self._deliveries = None 43 | self._acked = None 44 | self._nacked = None 45 | self._message_number = None 46 | 47 | self._stopping = False 48 | self._url = amqp_url 49 | self.ready_topics = [] 50 | self.expect_topics_count = None 51 | self.ready_bindings = [] 52 | self.expect_bindings_count = None 53 | 54 | if node_id is not None: 55 | self.NODE_ID = node_id 56 | 57 | self.namespace = namespace 58 | 59 | self.moleculer_topics = MoleculerTopics(self.NODE_ID, namespace=self.namespace) 60 | self.consumer = MoleculerConsumer(self.NODE_ID, moleculer_topics=self.moleculer_topics, 61 | namespace=self.namespace) 62 | 63 | def connect(self): 64 | """This method connects to RabbitMQ, returning the connection handle. 65 | When the connection is established, the on_connection_open method 66 | will be invoked by pika. If you want the reconnection to work, make 67 | sure you set stop_ioloop_on_close to False, which is not the default 68 | behavior of this adapter. 69 | 70 | :rtype: pika.SelectConnection 71 | 72 | """ 73 | LOGGER.info('Connecting to %s', self._url) 74 | return pika.SelectConnection(pika.URLParameters(self._url), 75 | on_open_callback=self.on_connection_open, 76 | on_close_callback=self.on_connection_closed, 77 | stop_ioloop_on_close=False) 78 | 79 | def on_connection_open(self, unused_connection): 80 | LOGGER.info('Connection opened') 81 | self.open_channel() 82 | 83 | def on_connection_closed(self, connection, reply_code, reply_text): 84 | """This method is invoked by pika when the connection to RabbitMQ is 85 | closed unexpectedly. Since it is unexpected, we will reconnect to 86 | RabbitMQ if it disconnects. 
87 | 88 | :param pika.connection.Connection connection: The closed connection obj 89 | :param int reply_code: The server provided reply_code if given 90 | :param str reply_text: The server provided reply_text if given 91 | 92 | """ 93 | self.channel = None 94 | if self._stopping: 95 | self._connection.ioloop.stop() 96 | else: 97 | LOGGER.warning('Connection closed, reopening in 5 seconds: (%s) %s', 98 | reply_code, reply_text) 99 | self._connection.add_timeout(5, self._connection.ioloop.stop) 100 | 101 | def open_channel(self): 102 | LOGGER.info('Creating a new channel') 103 | self._connection.channel(on_open_callback=self.on_channel_open) 104 | 105 | def on_channel_open(self, channel): 106 | LOGGER.info('Channel opened') 107 | self.channel: Channel = channel 108 | # self._channel.basic_qos(prefetch_count=1) # TODO: figure out why prefetch must be disabled to make it work 109 | # self._channel.confirm_delivery() # Enabled delivery confirmations 110 | self.add_on_channel_close_callback() 111 | self.create_topics() 112 | self._connection.add_timeout(0.2, self.subscribe_to_topics) 113 | self._connection.add_timeout(1, self.start_heartbeating) 114 | 115 | def start_heartbeating(self): 116 | heartbeat_packet = { 117 | 'ver': '2', 118 | 'sender': self.NODE_ID, 119 | 'cpu': psutil.cpu_percent(interval=None) 120 | } 121 | self.channel.basic_publish(self.moleculer_topics.exchanges['HEARTBEAT'], '', 122 | json.dumps(heartbeat_packet)) 123 | self._connection.add_timeout(self.HEARTBEAT_INTERVAL, self.start_heartbeating) 124 | 125 | def subscribe_to_topics(self): 126 | if self.is_bindings_ready: 127 | self.add_on_cancel_callback() 128 | # for queue in self.moleculer_topics.queues.values(): 129 | self.channel.basic_consume(self.consumer.discover, self.moleculer_topics.queues['DISCOVER']) 130 | self.channel.basic_consume(self.consumer.info, self.moleculer_topics.queues['INFO']) 131 | self.channel.basic_consume(self.consumer.ping, self.moleculer_topics.queues['PING']) 132 | 
self.channel.basic_consume(self.consumer.request, self.moleculer_topics.queues['REQUEST']) 133 | self.channel.basic_consume(self.consumer.response, self.moleculer_topics.queues['RESPONSE']) 134 | self.channel.basic_consume(self.consumer.event, self.moleculer_topics.queues['EVENT'], no_ack=True) 135 | 136 | for queue_name in self.moleculer_topics.action_queues: 137 | self.channel.basic_consume(self.consumer.request, queue_name) 138 | for queue_name in self.moleculer_topics.event_queues: 139 | self.channel.basic_consume(self.consumer.event, queue_name, no_ack=True) 140 | 141 | self._connection.add_timeout(0.5, self.discover_packet) 142 | else: 143 | self._connection.add_timeout(0.1, self.subscribe_to_topics) 144 | 145 | def create_topics(self): 146 | queues = self.moleculer_topics.queues.items() 147 | action_queues, events_queues = self.moleculer_topics.action_queues, self.moleculer_topics.event_queues 148 | self.expect_topics_count = len(queues) + len(self.moleculer_topics.exchanges) + len(action_queues) + len( 149 | events_queues) 150 | 151 | for queue_type, queue_name in queues: 152 | if queue_type in ('REQUEST', 'RESPONSE'): 153 | self.setup_queue(queue_name, ttl=False, exclusive=False) 154 | elif queue_type == 'HEARTBEAT': 155 | self.setup_queue(queue_name, ttl=False, exclusive=True) 156 | else: 157 | self.setup_queue(queue_name, ttl=True, exclusive=False) 158 | 159 | for queue_name in action_queues: 160 | self.setup_queue(queue_name, ttl=False, exclusive=False, durable=False) 161 | 162 | for queue_name in events_queues: 163 | self.setup_queue(queue_name, ttl=True, exclusive=False) 164 | 165 | for exchange_type, exchange_name in self.moleculer_topics.exchanges.items(): 166 | self.setup_exchange(exchange_name) 167 | 168 | self._connection.add_timeout(0.1, self.check_topics_status) 169 | 170 | def check_topics_status(self): 171 | if len(self.ready_topics) == self.expect_topics_count: 172 | LOGGER.info('All topics successfully declared') 173 | 
self.bind_queues_to_exchanges() 174 | else: 175 | self._connection.add_timeout(0.1, self.check_topics_status) 176 | 177 | @property 178 | def is_bindings_ready(self): 179 | if len(self.ready_bindings) == self.expect_bindings_count: 180 | LOGGER.info('All bindings successfully declared.') 181 | return True 182 | else: 183 | return False 184 | 185 | def bind_queues_to_exchanges(self): 186 | self.expect_bindings_count = len(self.moleculer_topics.bindings) 187 | for queue_name, fanout_name in self.moleculer_topics.bindings.items(): 188 | self.channel.queue_bind(self.on_bindok, queue_name, fanout_name) 189 | 190 | def add_on_channel_close_callback(self): 191 | """This method tells pika to call the on_channel_closed method if 192 | RabbitMQ unexpectedly closes the channel. 193 | 194 | """ 195 | LOGGER.info('Adding channel close callback') 196 | self.channel.add_on_close_callback(self.on_channel_closed) 197 | 198 | def discover_packet(self): 199 | req = { 200 | 'ver': '2', 201 | 'sender': self.NODE_ID 202 | } 203 | self.channel.basic_publish(self.moleculer_topics.exchanges['DISCOVER'], '', json.dumps(req)) 204 | 205 | def on_channel_closed(self, channel, reply_code, reply_text): 206 | """Invoked by pika when RabbitMQ unexpectedly closes the channel. 207 | Channels are usually closed if you attempt to do something that 208 | violates the protocol, such as re-declare an exchange or queue with 209 | different parameters. In this case, we'll close the connection 210 | to shutdown the object. 
211 | 212 | :param pika.channel.Channel channel: The closed channel 213 | :param int reply_code: The numeric reason the channel was closed 214 | :param str reply_text: The text reason the channel was closed 215 | 216 | """ 217 | LOGGER.warning('Channel was closed: (%s) %s', reply_code, reply_text) 218 | self.channel = None 219 | if not self._stopping: 220 | self._connection.close() 221 | 222 | def setup_exchange(self, exchange_name): 223 | """Setup the exchange on RabbitMQ by invoking the Exchange.Declare RPC 224 | command. When it is complete, the on_exchange_declareok method will 225 | be invoked by pika. 226 | 227 | :param str|unicode exchange_name: The name of the exchange to declare 228 | 229 | """ 230 | LOGGER.info('Declaring exchange %s', exchange_name) 231 | self.channel.exchange_declare(self.on_exchange_declareok, 232 | exchange_name, 233 | self.EXCHANGE_TYPE, durable=True) 234 | 235 | def on_exchange_declareok(self, unused_frame): 236 | """Invoked by pika when RabbitMQ has finished the Exchange.Declare RPC 237 | command. 238 | 239 | :param pika.Frame.Method unused_frame: Exchange.DeclareOk response frame 240 | 241 | """ 242 | LOGGER.info('Exchange declared') 243 | self.ready_topics.append(None) 244 | 245 | def setup_queue(self, queue_name, ttl=True, exclusive=False, durable=False): 246 | """Setup the queue on RabbitMQ by invoking the Queue.Declare RPC 247 | command. When it is complete, the on_queue_declareok method will 248 | be invoked by pika. 249 | 250 | :param auto_delete: 251 | :param ttl: 252 | :param str|unicode queue_name: The name of the queue to declare. 
253 | 254 | """ 255 | LOGGER.info('Declaring queue %s', queue_name) 256 | arguments = {} 257 | if ttl: 258 | arguments['x-message-ttl'] = 5000 # eventTimeToLive: https://github.com/ice-services/moleculer/pull/72 259 | self.channel.queue_declare(self.on_queue_declareok, queue_name, 260 | exclusive=exclusive, durable=durable, arguments=arguments) 261 | 262 | def on_queue_declareok(self, method_frame): 263 | """Method invoked by pika when the Queue.Declare RPC call made in 264 | setup_queue has completed. In this method we will bind the queue 265 | and exchange together with the routing key by issuing the Queue.Bind 266 | RPC command. When this command is complete, the on_bindok method will 267 | be invoked by pika. 268 | 269 | :param pika.frame.Method method_frame: The Queue.DeclareOk frame 270 | 271 | """ 272 | LOGGER.info('Queue for moleculer declared') 273 | self.ready_topics.append(None) 274 | 275 | def on_bindok(self, unused_frame): 276 | """This method is invoked by pika when it receives the Queue.BindOk 277 | response from RabbitMQ. Since we know we're now setup and bound, it's 278 | time to start publishing.""" 279 | LOGGER.info('Queue bound to exchange.') 280 | self.ready_bindings.append(None) 281 | 282 | def add_on_cancel_callback(self): 283 | """Add a callback that will be invoked if RabbitMQ cancels the consumer 284 | for some reason. If RabbitMQ does cancel the consumer, 285 | on_consumer_cancelled will be invoked by pika. 286 | 287 | """ 288 | LOGGER.info('Adding consumer cancellation callback') 289 | self.channel.add_on_cancel_callback(self.on_consumer_cancelled) 290 | 291 | def on_consumer_cancelled(self, method_frame): 292 | """Invoked by pika when RabbitMQ sends a Basic.Cancel for a consumer 293 | receiving messages. 
294 | 295 | :param pika.frame.Method method_frame: The Basic.Cancel frame 296 | 297 | """ 298 | LOGGER.info('Consumer was cancelled remotely, shutting down: %r', 299 | method_frame) 300 | if self.channel: 301 | self.channel.close() 302 | 303 | def run(self): 304 | """Run the service code by connecting and then starting the IOLoop. 305 | 306 | """ 307 | while not self._stopping: 308 | self._connection = None 309 | self._deliveries = [] 310 | self._acked = 0 311 | self._nacked = 0 312 | self._message_number = 0 313 | 314 | try: 315 | self._connection: SelectConnection = self.connect() 316 | self._connection.ioloop.start() 317 | except KeyboardInterrupt: 318 | self.stop() 319 | if (self._connection is not None and 320 | not self._connection.is_closed): 321 | self._connection.ioloop.start() 322 | 323 | LOGGER.info('Stopped') 324 | 325 | def stop(self): 326 | """Stop the example by closing the channel and connection. We 327 | set a flag here so that we stop scheduling new messages to be 328 | published. The IOLoop is started because this method is 329 | invoked by the Try/Catch below when KeyboardInterrupt is caught. 330 | Starting the IOLoop again will allow the publisher to cleanly 331 | disconnect from RabbitMQ. 332 | 333 | """ 334 | LOGGER.info('Stopping') 335 | disconnect_packet = { 336 | 'ver': '2', 337 | 'sender': self.NODE_ID 338 | } 339 | self.channel.basic_publish(self.moleculer_topics.exchanges['DISCONNECT'], '', 340 | json.dumps(disconnect_packet)) 341 | self._stopping = True 342 | self.close_channel() 343 | self.close_connection() 344 | 345 | def close_channel(self): 346 | """Invoke this command to close the channel with RabbitMQ by sending 347 | the Channel.Close RPC command. 
348 | 349 | """ 350 | if self.channel is not None: 351 | LOGGER.info('Closing the channel') 352 | self.channel.close() 353 | 354 | def close_connection(self): 355 | """This method closes the connection to RabbitMQ.""" 356 | if self._connection is not None: 357 | LOGGER.info('Closing connection') 358 | self._connection.close() 359 | --------------------------------------------------------------------------------