├── .gitignore
├── LICENSE
├── README.md
├── _config.yml
├── aioconnectors
│   ├── __init__.py
│   ├── __main__.py
│   ├── api.py
│   ├── applications.py
│   ├── connection.py
│   ├── core.py
│   ├── helpers.py
│   └── ssl_helper.py
├── aioconnectors_test.py
├── setup.py
└── standalone_api.py

/.gitignore:
--------------------------------------------------------------------------------
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
#  Usually these files are written by a python script from a template
#  before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
#   However, in case of collaboration, if having platform-specific dependencies or dependencies
#   having no cross-platform support, pipenv may install dependencies that don't work, or not
#   install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.
      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.
   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!)  The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright [yyyy] [name of copyright owner]

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

--------------------------------------------------------------------------------
/_config.yml:
--------------------------------------------------------------------------------
theme: jekyll-theme-hacker

--------------------------------------------------------------------------------
/aioconnectors/__init__.py:
--------------------------------------------------------------------------------
'''
Copyright 2021 Mori Benech

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
'''
https://github.com/mori-b/aioconnectors

aioconnectors is an easy-to-set-up broker that currently works on Unix-like systems.
Requirements are : Python >= 3.6, and openssl installed.
It makes it easy to securely send and receive large volumes of messages and files between remote applications.
- Easy and fast installation with pip, zero dependencies
- A nice trade-off between ease of use and efficiency
- Supports several use cases : transfer of messages and files, authentication, encryption, persistence, point-to-point, publish/subscribe. All configurable by simply modifying a configuration file
- User-friendly and intuitive API, with simple Python asynchronous functions to send and receive messages, easy to integrate into an existing asyncio code base
- Bidirectional : client and server can push messages to each other
- Embeds a command line interface to manage the broker
- Embeds an encrypted chat/file transfer tool, easily callable through the command line

The command line tool can be called with "python3 -m aioconnectors --help"
Usage examples can be found in applications.py
'''

__version__ = '1.6.3'
__author__ = 'Mori Benech'

from .api import ConnectorManager, ConnectorAPI, ConnectorRemoteTool
from .connection import MessageFields
from .helpers import get_logger, iface_to_ip, get_tmp_dir
from . import applications
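
# A minimal usage sketch of the API imported above, assuming a connector was
# already created and started (with ConnectorManager or "python3 -m aioconnectors
# create_connector"), and assuming 'api_config.json' (a hypothetical path) is a
# ConnectorAPI config file whose message types include 'any' :
#
#   import asyncio
#   import aioconnectors
#
#   async def main():
#       connector_api = aioconnectors.ConnectorAPI(config_file_path='api_config.json')
#
#       async def message_received_cb(logger, transport_json, data, binary):
#           print('received', transport_json, data)
#
#       #receive messages of type 'any' in the background
#       asyncio.get_event_loop().create_task(connector_api.start_waiting_for_messages(
#           message_type='any', message_received_cb=message_received_cb))
#       #send one message of type 'any' (a client can leave destination_id=None)
#       await connector_api.send_message(data='hello', data_is_json=False,
#                                        message_type='any', destination_id=None)
#
#   asyncio.get_event_loop().run_until_complete(main())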

--------------------------------------------------------------------------------
/aioconnectors/__main__.py:
--------------------------------------------------------------------------------
import sys
import logging
import json
import argparse

import aioconnectors

logger = logging.getLogger('aioconnectors_main')
logger.setLevel(logging.DEBUG)
handler = logging.StreamHandler()
logger.addHandler(handler)


HELP = '''
aioconnectors supported commands :

- print_config_templates
- create_certificates [--ca] [optional dirpath] [--help]
- create_connector <config_json_path>
- cli (start, stop, restart, show_connected_peers, ignore_peer_traffic, peek_queues, delete_client_certificate, delete_client_token)
- replace_server_certificate <new_pem_path> [optional dirpath] [--revert] [--help]
- ping <config_json_path>
- chat [--target <server_ip>] [--upload <path>] [--help]
- test_receive_messages <config_json_path>
- test_send_messages <config_json_path>
- test_publish_messages <config_json_path>
- --help
- --version
'''


if len(sys.argv) > 1:
    if sys.argv[1] == 'create_certificates':
        no_ca = True
        if '--ca' in sys.argv:
            sys.argv.remove('--ca')
            no_ca = False
        if len(sys.argv) == 3:
            if sys.argv[2] == '--help':
                print('create_certificates without argument will create client and server certificates directories '
                      f'under {aioconnectors.core.Connector.CONNECTOR_FILES_DIRPATH}.\n'
                      'You can specify a target directory as an optional argument.\n'
                      '(Use "create_certificates ." to create your target directory in your current working directory.)\n'
                      'Use --ca to create certificates for a server having server_ca=True (default is server_ca=False)')
                sys.exit(0)
            certificates_directory_path = aioconnectors.helpers.full_path(sys.argv[2])
        else:
            certificates_directory_path = None
        logger.info('Starting create_certificates')
        res = aioconnectors.ssl_helper.create_certificates(logger, certificates_directory_path=certificates_directory_path,
                                                           no_ca=no_ca)
        if res is False:
            sys.exit(1)

    elif sys.argv[1] == 'replace_server_certificate':
        help_text = 'For server only, replace_server_certificate puts your custom server certificate in the server certificates directories.\n' \
                    'First argument (mandatory) : the path of your server pem (the server key should be there too)\n' \
                    f'Second argument (optional) : the certificates directory path. If not provided it is : {aioconnectors.core.Connector.CONNECTOR_FILES_DIRPATH}.\n' \
                    '(Use "." to create your target directory in your current working directory).\n' \
                    'Use "--revert" to put back the original certificates and delete the custom certificate copy.'

        if len(sys.argv) == 3:
            if sys.argv[2] == '--help':
                print(help_text)
                sys.exit(0)
            if sys.argv[2] == '--revert':
                res = aioconnectors.ssl_helper.replace_server_certificate(logger, revert=True)
                sys.exit(0)
            server_certificate_path = aioconnectors.helpers.full_path(sys.argv[2])
            certificates_directory_path = None
        elif len(sys.argv) == 4:
            server_certificate_path = aioconnectors.helpers.full_path(sys.argv[2])
            certificates_directory_path = aioconnectors.helpers.full_path(sys.argv[3])
        else:
            print(help_text)
            sys.exit(0)
        logger.info('Starting replace_server_certificate')
        res = aioconnectors.ssl_helper.replace_server_certificate(logger, server_certificate_path=server_certificate_path,
                                                                  certificates_directory_path=certificates_directory_path)
        if res is False:
            sys.exit(1)
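
    # A hypothetical shell session for the two commands above (all paths are
    # examples only) : create the certificates directories, then install a custom
    # server pem, or revert to the generated one.
    #   python3 -m aioconnectors create_certificates .
    #   python3 -m aioconnectors create_certificates --ca /opt/aioconnectors_certs
    #   python3 -m aioconnectors replace_server_certificate /opt/aioconnectors_certs/my_server.pem
    #   python3 -m aioconnectors replace_server_certificate --revert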

    elif sys.argv[1] == 'print_config_templates':
        Connector = aioconnectors.core.Connector

        manager_config_template = dict(
            default_logger_log_level='INFO', default_logger_rotate=True,
            default_logger_dirpath=Connector.CONNECTOR_FILES_DIRPATH,
            default_logger_bk_count=aioconnectors.helpers.LOG_BK_COUNT,
            connector_files_dirpath=Connector.CONNECTOR_FILES_DIRPATH,
            is_server=True, server_sockaddr=Connector.SERVER_ADDR, reuse_server_sockaddr=False,
            reuse_uds_path_commander_server=False, reuse_uds_path_send_to_connector=False,
            use_ssl=Connector.USE_SSL, ssl_allow_all=False, use_token=Connector.USE_TOKEN,
            server_ca=Connector.SERVER_CA, server_ca_certs_not_stored=True, server_secure_tls=True,
            certificates_directory_path=Connector.CONNECTOR_FILES_DIRPATH,
            tokens_directory_path=Connector.CONNECTOR_FILES_DIRPATH,
            client_name=None, client_bind_ip=None,
            send_message_types=Connector.DEFAULT_MESSAGE_TYPES,
            recv_message_types=Connector.DEFAULT_MESSAGE_TYPES,
            subscribe_message_types=[], pubsub_central_broker=False,
            disk_persistence_send=Connector.DISK_PERSISTENCE_SEND,
            disk_persistence_recv=Connector.DISK_PERSISTENCE_RECV,
            max_size_persistence_path=Connector.MAX_SIZE_PERSISTENCE_PATH,
            file_recv_config={}, debug_msg_counts=Connector.DEBUG_MSG_COUNTS, silent=Connector.SILENT,
            uds_path_receive_preserve_socket=Connector.UDS_PATH_RECEIVE_PRESERVE_SOCKET,
            uds_path_send_preserve_socket=Connector.UDS_PATH_SEND_PRESERVE_SOCKET,
            enable_client_try_reconnect=True, keep_alive_period=None, keep_alive_timeout=Connector.KEEP_ALIVE_TIMEOUT,
            max_number_of_unanswered_keep_alive=Connector.MAX_NUMBER_OF_UNANSWERED_KEEP_ALIVE,
            connect_timeout=Connector.CONNECT_TIMEOUT,
            send_timeout=Connector.SEND_TIMEOUT, max_size_file_upload_send=Connector.MAX_SIZE_FILE_UPLOAD_SEND,
            max_size_file_upload_recv=Connector.MAX_SIZE_FILE_UPLOAD_RECV, max_certs=Connector.MAX_CERTS,
            everybody_can_send_messages=Connector.EVERYBODY_CAN_SEND_MESSAGES, send_message_types_priorities={},
            proxy={}, alternate_client_default_cert=Connector.ALTERNATE_CLIENT_DEFAULT_CERT,
            blacklisted_clients_id=None, blacklisted_clients_ip=None, blacklisted_clients_subnet=None,
            whitelisted_clients_id=None, whitelisted_clients_ip=None, whitelisted_clients_subnet=None,
            ignore_peer_traffic=False, client_cafile_verify_server=None,
            token_verify_peer_cert=Connector.TOKEN_VERIFY_PEER_CERT,
            token_client_send_cert=Connector.TOKEN_CLIENT_SEND_CERT,
            token_client_verify_server_hostname=Connector.TOKEN_CLIENT_VERIFY_SERVER_HOSTNAME,
            token_server_allow_authorized_non_default_cert=False)

        print('\n- MANAGER TEMPLATE, used to create a connector')
        print(json.dumps(manager_config_template, indent=4, sort_keys=True))
        file_recv_config = {'any': {'target_directory': '/var/tmp/aioconnectors/{message_type}/{source_id}/',
                                    'owner': 'user:user', 'override_existing': False}}
        print('\n- file_recv_config example, used inside MANAGER TEMPLATE')
        print(json.dumps(file_recv_config, indent=4, sort_keys=True))

        api_config_template = dict(
            default_logger_log_level='INFO', default_logger_rotate=True,
            default_logger_dirpath=Connector.CONNECTOR_FILES_DIRPATH,
            default_logger_bk_count=aioconnectors.helpers.LOG_BK_COUNT,
            connector_files_dirpath=Connector.CONNECTOR_FILES_DIRPATH,
            is_server=True, server_sockaddr=Connector.SERVER_ADDR, client_name=None,
            uds_path_receive_preserve_socket=Connector.UDS_PATH_RECEIVE_PRESERVE_SOCKET,
            uds_path_send_preserve_socket=Connector.UDS_PATH_SEND_PRESERVE_SOCKET,
            send_message_types=Connector.DEFAULT_MESSAGE_TYPES,
            recv_message_types=Connector.DEFAULT_MESSAGE_TYPES,
            receive_from_any_connector_owner=True, pubsub_central_broker=False,
            max_size_chunk_upload=209715200)
        print('\n- API TEMPLATE, used to send/receive messages')
        print(json.dumps(api_config_template, indent=4, sort_keys=True))
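
    # A pared-down, hypothetical client-side config JSON for create_connector,
    # keeping only a few keys of the manager template printed above (all values
    # are examples only) :
    #   {
    #       "is_server": false,
    #       "client_name": "client1",
    #       "server_sockaddr": ["10.0.0.1", 12345],
    #       "use_ssl": true,
    #       "send_message_types": ["any"],
    #       "recv_message_types": ["any"]
    #   }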

    elif sys.argv[1] == 'create_connector':
        if len(sys.argv) != 3:
            print('Usage : create_connector <config_json_path>')
            sys.exit(1)
        if sys.argv[2] == '--help':
            print('Usage : create_connector <config_json_path>')
            sys.exit(0)
        config_file_path = sys.argv[2]
        aioconnectors.applications.create_connector(config_file_path, logger)

    elif sys.argv[1] == 'cli':
        aioconnectors.applications.cli(logger)

    elif sys.argv[1] == 'test_receive_messages':
        if len(sys.argv) != 3:
            print('Usage : test_receive_messages <config_json_path>')
            sys.exit(1)
        if sys.argv[2] == '--help':
            print('Usage : test_receive_messages <config_json_path>')
            sys.exit(0)
        config_file_path = sys.argv[2]
        aioconnectors.applications.test_receive_messages(config_file_path, logger)

    elif sys.argv[1] == 'test_send_messages':
        if len(sys.argv) != 3:
            print('Usage : test_send_messages <config_json_path>')
            sys.exit(1)
        if sys.argv[2] == '--help':
            print('Usage : test_send_messages <config_json_path>')
            sys.exit(0)
        config_file_path = sys.argv[2]
        aioconnectors.applications.test_send_messages(config_file_path, logger)

    elif sys.argv[1] == 'test_publish_messages':
        if len(sys.argv) != 3:
            print('Usage : test_publish_messages <config_json_path>')
            sys.exit(1)
        if sys.argv[2] == '--help':
            print('Usage : test_publish_messages <config_json_path>')
            sys.exit(0)
        config_file_path = sys.argv[2]
        aioconnectors.applications.test_publish_messages(config_file_path, logger)

    elif sys.argv[1] == 'ping':
        if len(sys.argv) != 3:
            print('Usage : ping <config_json_path>')
            sys.exit(1)
        if sys.argv[2] == '--help':
            print('Usage : ping <config_json_path>')
            sys.exit(0)
        config_file_path = sys.argv[2]
        aioconnectors.applications.ping(config_file_path, logger)

    elif sys.argv[1] == 'chat':
        #usage
        #python3 -m aioconnectors chat
        #python3 -m aioconnectors chat --target 127.0.0.1 [--upload <path>]
        #inside chat, prepend "!" to call a local shell command, "!exit" to exit, "!upload <path>" to upload <path> to cwd,
        #"!dezip <file>" to unzip an uploaded file.
        print('\nWelcome to aioconnectors chat !')
        print('Usage :\n- Type messages, or !exit to exit, or any shell command preceded by a ! to execute locally\n'
              '- !upload <path> to upload <path> to peer\'s current working directory\n'
              '- !dezip <file> to unzip a file\n')
        parser = argparse.ArgumentParser()
        parser.add_argument('chat')
        parser.add_argument('--target', nargs='?', default=None, help="server ip, mandatory for client")
        parser.add_argument('--accept', action='store_true', help="accept all clients if specified, optional for server")
        parser.add_argument('--port', nargs='?', default=None, help="server port, optional for server and client")
        parser.add_argument('--bind_server_ip', nargs='?', default=None, help="bind to ip, optional for server")
        parser.add_argument('--upload', nargs='?', default=False, help="path of directory or file to upload")
        parser.add_argument('--nowrap', action='store_true', help="disable tab completion")
        parser.add_argument('--exec', nargs='?', default=None, help="executable path like /bin/bash, optional for server and client")

        args = parser.parse_args()
        aioconnectors.applications.chat(args)

    elif sys.argv[1] == '--help':
        print(HELP)
    elif sys.argv[1] == '--version':
        print(aioconnectors.__version__)
    else:
        print('Unknown command : ' + str(sys.argv[1]))
else:
    print(HELP)

--------------------------------------------------------------------------------
/aioconnectors/applications.py:
--------------------------------------------------------------------------------
import asyncio
import os
import random
import signal
import shutil
import zipfile
import subprocess
import sys
import json
import re

import aioconnectors

REGEX_IP = re.compile(r'^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}')

def create_connector(config_file_path, logger=None):
    if not logger:
        logger = aioconnectors.get_logger(logger_name='create_connector', first_run=True)
    logger.info('Creating connector with config file ' + config_file_path)
    connector_manager = aioconnectors.ConnectorManager(config_file_path=config_file_path)
    loop = asyncio.get_event_loop()
    #task = loop.create_task(connector_manager.delete_previous_persistence_remains())
    #loop.run_until_complete(task)
    task_manager = loop.create_task(connector_manager.start_connector())
    #run_until_complete now, in order to exit in case of exception
    #for example because of an already existing socket
    loop.run_until_complete(task_manager)

    try:
        loop.run_forever()
    except:
        task_stop = loop.create_task(connector_manager.stop_connector(delay=None, hard=False, shutdown=True))
        loop.run_until_complete(task_stop)
        del task_stop
        del task_manager
        del connector_manager
        print('Connector stopped !')
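
# The same startup can also be done programmatically without a config file, by
# passing keyword arguments directly ; a minimal server-side sketch (all values
# are examples, see the manager template printed by __main__.py for the full list) :
#   connector_manager = aioconnectors.ConnectorManager(
#       is_server=True, server_sockaddr=('0.0.0.0', 12345),
#       use_ssl=True, ssl_allow_all=True,
#       send_message_types=['any'], recv_message_types=['any'])
#   loop = asyncio.get_event_loop()
#   loop.run_until_complete(loop.create_task(connector_manager.start_connector()))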

def cli(logger=None):
    def clearscreen():
        subprocess.call('clear', shell=True)

    def display_dict(the_json, connector=None):
        if connector:
            print('\n', 'Connector : ' + str(connector))
        #print('\n', json.dumps(the_json, indent=4, sort_keys=True), '')
        for the_key, value in sorted(the_json.items(), key=lambda x: int(x[0])):
            print(str(the_key) + ') ' + str(value))

    if not logger:
        logger = aioconnectors.get_logger(logger_name='cli', first_run=True)
    running_with_tab_completion = True
    try:
        import readline
        readline.set_completer_delims('\t\n= ')
        readline.parse_and_bind('tab:complete')
    except Exception:
        running_with_tab_completion = False
        logger.info('Running without tab completion')

    print('\nWelcome to aioconnectors CLI')
    Connector = aioconnectors.core.Connector
    the_path = input('\nPlease type your connector_files_dirpath, or Enter if it is '
                     f'{Connector.CONNECTOR_FILES_DIRPATH}\n')
    if the_path:
        the_path = os.path.abspath(os.path.normpath(os.path.expandvars(os.path.expanduser(the_path))))
    else:
        the_path = Connector.CONNECTOR_FILES_DIRPATH

    while True:
        active_connectors_path = os.path.join(the_path, Connector.DEFAULT_ACTIVE_CONNECTORS_NAME)
        dict_connector_names = {}
        try:
            if os.path.exists(active_connectors_path):
                with open(active_connectors_path, 'r') as fd:
                    set_active_connectors = json.load(fd)
                dict_connector_names = {str(index): connector_name for index, connector_name in
                                        enumerate(sorted(set_active_connectors))}
                display_dict(dict_connector_names)
                print('\nPlease type the connector number you would like to run, or')
        except Exception as exc:
            print(exc)

        name_input = input(f'\nTo check your server, please type your server <ip> <port> '
                           f'(default port is {Connector.SERVER_ADDR[1]}).\nTo check your client, please type your '
                           'client name.\nType "q" to quit.\n')
        if name_input == 'q':
            sys.exit(0)
        name = dict_connector_names.get(name_input, name_input)
        names = name.split(maxsplit=1)  #assume that client name has no spaces
        server_sockaddr = client_name = None
        if len(names) == 2:
            server_sockaddr = (names[0], int(names[1]))
        else:
            client_name = name

        loop = asyncio.get_event_loop()
        is_server = (server_sockaddr is not None)
        connector_remote_tool = aioconnectors.ConnectorRemoteTool(
            use_default_logger=False, is_server=is_server,
            server_sockaddr=server_sockaddr, client_name=client_name,
            connector_files_dirpath=the_path)
        if not os.path.exists(connector_remote_tool.connector.uds_path_commander):
            clearscreen()
            print(f'The connector {name} does not exist')
            if name_input in dict_connector_names:
                #deleting invalid name_input from dict_connector_names
                set_active_connectors.remove(name)
                with open(active_connectors_path, 'w') as fd:
                    json.dump(set_active_connectors, fd)
            continue
        clearscreen()
        list_cmds = ['start', 'stop gracefully', 'stop hard', 'restart', 'show_connected_peers',
                     'ignore_peer_traffic', 'peek_queues', 'delete_client_certificate', 'delete_client_token', 'disconnect_client',
                     'show_log_level', 'set_log_level', 'show_subscribe_message_types', 'set_subscribe_message_types',
                     'add_blacklist_client', 'remove_blacklist_client', 'add_whitelist_client', 'remove_whitelist_client']
        dict_cmds = {str(index): cmd for index, cmd in enumerate(list_cmds)}
        display_dict(dict_cmds, connector=server_sockaddr or client_name)
        res = input('\nPlease type the command number you would like to run, or q to quit\n')

        def show_connected_peers(return_peers=False):
            task = loop.create_task(connector_remote_tool.show_connected_peers())
            loop.run_until_complete(task)
            peers_dict = task.result().decode()
            print(f'\nConnected peers : {peers_dict}')
            if return_peers:
                return json.loads(peers_dict)

        def show_attribute(attribute):
            task = loop.create_task(connector_remote_tool.show_attribute(attribute))
            loop.run_until_complete(task)
            value = task.result().decode()
            print(f'\n{attribute.capitalize()} : {value}')

        while True:
            clearscreen()
            if res == 'q':
                break
            if res not in dict_cmds:
                print('Invalid number : ' + str(res))
            else:
                the_cmd = dict_cmds[res]

                if the_cmd == 'start':
                    task = loop.create_task(connector_remote_tool.start_connector())
                    loop.run_until_complete(task)
                    print(task.result().decode())

                elif the_cmd == 'stop gracefully':
                    task = loop.create_task(connector_remote_tool.stop_connector(client_wait_for_reconnect=False, hard=False))
                    loop.run_until_complete(task)
                    print(task.result().decode())

                elif the_cmd == 'stop hard':
                    task = loop.create_task(connector_remote_tool.stop_connector(client_wait_for_reconnect=False, hard=True))
                    loop.run_until_complete(task)
                    print(task.result().decode())

                elif the_cmd == 'restart':
                    task = loop.create_task(connector_remote_tool.restart_connector(sleep_between=2, hard=False))
                    loop.run_until_complete(task)
                    print(task.result().decode())

                elif the_cmd == 'show_connected_peers':
                    show_connected_peers()

                elif the_cmd == 'ignore_peer_traffic':
                    while True:
                        task = loop.create_task(connector_remote_tool.ignore_peer_traffic_show())
                        loop.run_until_complete(task)
                        status = task.result().decode()
                        print('\nignore_peer_traffic current status : ', status)
                        if status == 'False':
                            peers_dict = show_connected_peers(return_peers=running_with_tab_completion)
                            if running_with_tab_completion:
                                def complete(text, state):
                                    results = [peer for peer in peers_dict if peer.startswith(text)] + [None]
                                    return results[state]
                                readline.set_completer(complete)

                            res = input('\nType "y" to ignore peer traffic, or <peer name> to ignore a unique peer '
                                        'traffic, or q to quit\n')

                            if running_with_tab_completion:
                                readline.set_completer(None)

                            if res == 'y':
                                task = loop.create_task(connector_remote_tool.ignore_peer_traffic_enable())
                                loop.run_until_complete(task)
                                continue
                            elif res == 'q':
                                break
                            else:
                                task = loop.create_task(connector_remote_tool.ignore_peer_traffic_enable_unique(res))
                                loop.run_until_complete(task)
                                continue
                        else:
                            res = input('\nType "y" to stop ignoring peer traffic, or Enter to quit\n')
                            if res == 'y':
                                task = loop.create_task(connector_remote_tool.ignore_peer_traffic_disable())
                                loop.run_until_complete(task)
                                continue
                            else:
                                break

                elif the_cmd == 'peek_queues':
                    task = loop.create_task(connector_remote_tool.peek_queues())
                    loop.run_until_complete(task)
                    print(json.dumps(json.loads(task.result().decode()), indent=4, sort_keys=True))

                elif the_cmd == 'delete_client_certificate':
                    if is_server:
                        peers_dict = show_connected_peers(return_peers=running_with_tab_completion)
                        if running_with_tab_completion:
                            def complete(text, state):
                                results = [peer for peer in peers_dict if peer.startswith(text)] + [None]
                                return results[state]
                            readline.set_completer(complete)

                        client_name = input('\nPlease type the client name whose certificate you would '
                                            'like to delete, or q to quit\n')
                        if running_with_tab_completion:
                            readline.set_completer(None)

                        if client_name != 'q':
                            res = input('\nAre you sure you want to delete ' + client_name + ' \'s certificate ? y/n\n')
                            if res == 'y':
                                task = loop.create_task(connector_remote_tool.delete_client_certificate(
                                    client_id=client_name, remove_only_symlink=False))
                                loop.run_until_complete(task)
                                print(task.result().decode())
                                task = loop.create_task(connector_remote_tool.disconnect_client(client_id=client_name))
                                loop.run_until_complete(task)
                                print(task.result().decode())
                    else:
                        res = input('\nAre you sure you want to delete ' + client_name + ' \'s certificate ? y/n\n')
                        if res == 'y':
                            task = loop.create_task(connector_remote_tool.delete_client_certificate())
                            loop.run_until_complete(task)
                            print(task.result().decode())

                elif the_cmd == 'delete_client_token':
                    if is_server:
                        peers_dict = show_connected_peers(return_peers=running_with_tab_completion)
                        if running_with_tab_completion:
                            def complete(text, state):
                                results = [peer for peer in peers_dict if peer.startswith(text)] + [None]
                                return results[state]
                            readline.set_completer(complete)

                        client_name = input('\nPlease type the client name whose token you would '
                                            'like to delete, or q to quit\n')
                        if running_with_tab_completion:
                            readline.set_completer(None)

                        if client_name != 'q':
                            res = input('\nAre you sure you want to delete ' + client_name + ' \'s token ? y/n\n')
                            if res == 'y':
                                task = loop.create_task(connector_remote_tool.delete_client_token(client_id=client_name))
                                loop.run_until_complete(task)
                                print(task.result().decode())
                                task = loop.create_task(connector_remote_tool.disconnect_client(client_id=client_name))
                                loop.run_until_complete(task)
                                print(task.result().decode())
                    else:
                        res = input('\nAre you sure you want to delete ' + client_name + ' \'s token ? y/n\n')
                        if res == 'y':
                            task = loop.create_task(connector_remote_tool.delete_client_token())
                            loop.run_until_complete(task)
                            print(task.result().decode())

                elif the_cmd == 'disconnect_client':
                    if is_server:
                        peers_dict = show_connected_peers(return_peers=running_with_tab_completion)
                        if running_with_tab_completion:
                            def complete(text, state):
                                results = [peer for peer in peers_dict if peer.startswith(text)] + [None]
                                return results[state]
                            readline.set_completer(complete)

                        client_name = input('\nPlease type the client name you would '
                                            'like to disconnect, or q to quit\n')
                        if running_with_tab_completion:
                            readline.set_completer(None)

                        if client_name != 'q':
                            res = input('\nAre you sure you want to disconnect ' + client_name + ' ? y/n\n')
                            if res == 'y':
                                task = loop.create_task(connector_remote_tool.disconnect_client(client_id=client_name))
                                loop.run_until_complete(task)
                                print(task.result().decode())
                    else:
                        print('A client cannot use this functionality')

                elif the_cmd == 'add_blacklist_client':
                    if is_server:
                        show_attribute('blacklisted_clients_id')
                        show_attribute('blacklisted_clients_ip')
                        show_attribute('blacklisted_clients_subnet')
                        peers_dict = show_connected_peers(return_peers=running_with_tab_completion)
                        if running_with_tab_completion:
                            def complete(text, state):
                                results = [peer for peer in peers_dict if peer.startswith(text)] + [None]
                                return results[state]
                            readline.set_completer(complete)

                        the_client = input('\nPlease type the client name (or regex) you would like to blacklist (and disconnect),'
                                           ' or the client IP address/subnet you would like to blacklist, or q to quit\n')
                        if running_with_tab_completion:
                            readline.set_completer(None)

                        if the_client != 'q':
                            res = input('\nAre you sure you want to blacklist ' + the_client + ' ? y/n\n')
                            if res == 'y':
                                #As opposed to ip, the id follows SOURCE_ID_REGEX, which excludes dots
                                if REGEX_IP.match(the_client):
                                    task = loop.create_task(connector_remote_tool.add_blacklist_client(client_ip=the_client))
                                else:
                                    task = loop.create_task(connector_remote_tool.add_blacklist_client(client_id=the_client))
                                loop.run_until_complete(task)
                                print(task.result().decode())
                    else:
                        print('A client cannot use this functionality')

                elif the_cmd == 'remove_blacklist_client':
                    if is_server:
                        show_attribute('blacklisted_clients_id')
                        show_attribute('blacklisted_clients_ip')
                        show_attribute('blacklisted_clients_subnet')
                        peers_dict = show_connected_peers(return_peers=running_with_tab_completion)
                        if running_with_tab_completion:
                            def complete(text, state):
                                results = [peer for peer in peers_dict if peer.startswith(text)] + [None]
                                return results[state]
                            readline.set_completer(complete)

                        the_client = input('\nPlease type the client name (or regex) you would like to remove from blacklist,'
                                           ' or the client IP address/subnet you would like to remove from blacklist, or q to quit\n')
                        if running_with_tab_completion:
                            readline.set_completer(None)

                        if the_client != 'q':
                            res = input('\nAre you sure you want to remove from blacklist ' + the_client + ' ? y/n\n')
                            if res == 'y':
                                if REGEX_IP.match(the_client):
                                    task = loop.create_task(connector_remote_tool.remove_blacklist_client(client_ip=the_client))
                                else:
                                    task = loop.create_task(connector_remote_tool.remove_blacklist_client(client_id=the_client))
                                loop.run_until_complete(task)
                                print(task.result().decode())
                    else:
                        print('A client cannot use this functionality')

                elif the_cmd == 'add_whitelist_client':
                    if is_server:
                        show_attribute('whitelisted_clients_id')
                        show_attribute('whitelisted_clients_ip')
                        show_attribute('whitelisted_clients_subnet')

                        peers_dict = show_connected_peers(return_peers=running_with_tab_completion)
                        if running_with_tab_completion:
                            def complete(text, state):
                                results = [peer for peer in peers_dict if peer.startswith(text)] + [None]
                                return results[state]
                            readline.set_completer(complete)

                        the_client = input('\nPlease type the client name (or regex) you would like to whitelist,'
                                           ' or the client IP address/subnet you would like to whitelist, or q to quit\n')
                        if running_with_tab_completion:
                            readline.set_completer(None)

                        if the_client != 'q':
                            res = input('\nAre you sure you want to whitelist ' + the_client + ' ? y/n\n')
                            if res == 'y':
                                if REGEX_IP.match(the_client):
                                    task = loop.create_task(connector_remote_tool.add_whitelist_client(client_ip=the_client))
                                else:
                                    task = loop.create_task(connector_remote_tool.add_whitelist_client(client_id=the_client))
                                loop.run_until_complete(task)
                                print(task.result().decode())
                    else:
                        print('A client cannot use this functionality')

                elif the_cmd == 'remove_whitelist_client':
                    if is_server:
                        show_attribute('whitelisted_clients_id')
                        show_attribute('whitelisted_clients_ip')
                        show_attribute('whitelisted_clients_subnet')

                        peers_dict = show_connected_peers(return_peers=running_with_tab_completion)
                        if running_with_tab_completion:
                            def complete(text, state):
                                results = [peer for peer in peers_dict if peer.startswith(text)] + [None]
                                return results[state]
                            readline.set_completer(complete)

                        the_client = input('\nPlease type the client name (or regex) you would like to remove from whitelist,'
                                           ' or the client IP address/subnet you would like to remove from whitelist, or q to quit\n')
                        if running_with_tab_completion:
                            readline.set_completer(None)

                        if the_client != 'q':
                            res = input('\nAre you sure you want to remove from whitelist ' + the_client + ' ? y/n\n')
                            if res == 'y':
                                if REGEX_IP.match(the_client):
                                    task = loop.create_task(connector_remote_tool.remove_whitelist_client(client_ip=the_client))
                                else:
                                    task = loop.create_task(connector_remote_tool.remove_whitelist_client(client_id=the_client))
                                loop.run_until_complete(task)
                                print(task.result().decode())
                    else:
                        print('A client cannot use this functionality')

                elif the_cmd == 'show_log_level':
                    task = loop.create_task(connector_remote_tool.show_log_level())
                    loop.run_until_complete(task)
                    print(task.result().decode())

                elif the_cmd == 'set_log_level':
                    list_levels = ['ERROR', 'WARNING', 'INFO', 'DEBUG']
                    dict_levels = {str(index): level for index, level in enumerate(list_levels)}
                    display_dict(dict_levels)
                    res = input('\nPlease type the log level you would like to set, or q to quit\n')
                    clearscreen()
                    if res == 'q':
                        break
                    if res not in dict_levels:
                        print('Invalid number : ' + str(res))
                        break
                    new_level = dict_levels[res]
                    task = loop.create_task(connector_remote_tool.set_log_level(new_level))
                    loop.run_until_complete(task)
                    print(task.result().decode())

                elif the_cmd == 'show_subscribe_message_types':
                    if is_server:
                        print('Only available for clients')
                    else:
                        task = loop.create_task(connector_remote_tool.show_subscribe_message_types())
                        loop.run_until_complete(task)
                        print(task.result().decode())

                elif the_cmd == 'set_subscribe_message_types':
                    if is_server:
                        print('Only available for clients')
                    else:
                        print('Current subscribed message types are :')
                        task = loop.create_task(connector_remote_tool.show_subscribe_message_types())
                        loop.run_until_complete(task)
                        print(task.result().decode())
                        res = input('\nPlease type the list of all message types you would like to subscribe, or q to quit\n')
                        clearscreen()
                        if res == 'q':
                            break
                        new_message_types = res.split()
                        res2 = input(f'\nAre you sure you want to subscribe to these message types : {new_message_types} ? y/n\n')
                        if res2.lower() != 'y':
                            break
                        task = loop.create_task(connector_remote_tool.set_subscribe_message_types(*new_message_types))
                        loop.run_until_complete(task)
                        print(task.result().decode())

                display_dict(dict_cmds, connector=server_sockaddr or client_name)
                res = input('\nPlease type the command number you would like to run, or q to quit\n')


def test_receive_messages(config_file_path, logger=None):
    if not logger:
        logger = aioconnectors.get_logger(logger_name='test_receive_messages', first_run=True)
    print('Warning : No other application should be receiving events from this connector')
    logger.info('Creating connector api with config file ' + config_file_path)
    connector_api = aioconnectors.ConnectorAPI(config_file_path=config_file_path)
    loop = asyncio.get_event_loop()
    tasks_waiting_for_messages = []

    async def message_received_cb(logger, transport_json, data, binary):
        logger.info(f'RECEIVED MESSAGE {transport_json}')
        print(f'RECEIVED MESSAGE {transport_json}')
        if data:
            print(f'\tWith data {data.decode()}')
        if binary:
            print(f'\tWith binary {binary}')

    for message_type in connector_api.recv_message_types:
        task_api = loop.create_task(connector_api.start_waiting_for_messages(message_type=message_type,
                                                                             message_received_cb=message_received_cb))
        tasks_waiting_for_messages.append(task_api)
    try:
        loop.run_forever()
    except:
        for message_type in connector_api.recv_message_types:
            connector_api.stop_waiting_for_messages(message_type=message_type)
        #for task_api in tasks_waiting_for_messages:
        #    task_api.cancel()
        print('test_receive_messages stopped !')


def test_send_messages(config_file_path, logger=None):
    if not logger:
        logger = aioconnectors.get_logger(logger_name='test_send_messages', first_run=True)
    logger.info('Creating connector api with config file ' + config_file_path)
    connector_api = aioconnectors.ConnectorAPI(config_file_path=config_file_path)
    tag = 'tag1'
    destination_id = None
    if connector_api.is_server:
        destination_id = input('\nPlease type the name of your remote client\n')

    loop = asyncio.get_event_loop()

    async def send_messages(destination_id):
        index = 0
        while True:
            index += 1
            for message_type in connector_api.send_message_types:
                print(f'SENDING MESSAGE to peer {destination_id or connector_api.server_sockaddr} of type '
                      f'{message_type} and index {index} and tag {tag}')
                response = await connector_api.send_message(data=f'"TEST_MESSAGE {str(index)*5}"', data_is_json=False,
                                                            destination_id=destination_id, message_type=message_type,
                                                            await_response=False, request_id=index, tag=tag)
                #response_id=None, binary=b'\x01\x02\x03\x04\x05', with_file=None, wait_for_ack=False)
            await asyncio.sleep(2)

    task_send = loop.create_task(send_messages(destination_id))
    try:
        loop.run_forever()
    except:
        task_send.cancel()
        print('test_send_messages stopped !')

def test_publish_messages(config_file_path, logger=None):
    if not logger:
        logger = aioconnectors.get_logger(logger_name='test_publish_messages', first_run=True)
    logger.info('Creating connector api with config file ' + config_file_path)
    connector_api = aioconnectors.ConnectorAPI(config_file_path=config_file_path)
    destination_id = None
    if connector_api.is_server:
        destination_id = input('\nPlease type the name of your remote client\n')

    loop = asyncio.get_event_loop()

    async def send_messages(destination_id):
        index = 0
        while True:
            index += 1
            for message_type in connector_api.send_message_types:
                if message_type == '_pubsub':
                    continue
                print(f'PUBLISHING MESSAGE to peer {destination_id or connector_api.server_sockaddr} of type '
                      f'{message_type} and index {index}')
                response = await connector_api.publish_message(data=f'"TEST_MESSAGE {str(index)*5}"', data_is_json=False,
                                                               destination_id=destination_id, message_type=message_type,
                                                               await_response=False, request_id=index)
                #response_id=None, binary=b'\x01\x02\x03\x04\x05', with_file=None, wait_for_ack=False)
            await asyncio.sleep(2)

    task_send = loop.create_task(send_messages(destination_id))
    try:
        loop.run_forever()
    except:
        task_send.cancel()
        print('test_publish_messages stopped !')


def ping(config_file_path, logger=None):
    #lets a connector ping a remote connector peer
    if not logger:
        logger = aioconnectors.get_logger(logger_name='ping', first_run=True)
    logger.info('Creating connector api with config file ' + config_file_path)
    connector_api = aioconnectors.ConnectorAPI(config_file_path=config_file_path)
    destination_id = None
    if connector_api.is_server:
        destination_id = input('\nPlease type the name of your remote client\n')

    loop = asyncio.get_event_loop()

    async def send_messages(destination_id):
        index = 0
        while True:
            index += 1
            print(f'\nSENDING PING to peer {destination_id or connector_api.server_sockaddr} with index {index}')
            response = await connector_api.send_message_await_response(data=f'PING {str(index)*5}', data_is_json=False,
                                                                       destination_id=destination_id,
                                                                       message_type='_ping', request_id=index)
            #response_id=None, binary=b'\x01\x02\x03\x04\x05', with_file=None, wait_for_ack=False)
            if response:
                try:
                    transport_json, data, binary = response
                except Exception as exc:
                    print(exc)
                    return
                print(f'RECEIVING REPLY from peer {destination_id or connector_api.server_sockaddr} with data {data}')
            await asyncio.sleep(2)

    task_send = loop.create_task(send_messages(destination_id))
    try:
        loop.run_forever()
    except:
        task_send.cancel()
        print('ping stopped !')


def chat(args, logger=None):
    #chat supports sending messages and files/directories between 2 connectors
    if not logger:
        logger = aioconnectors.get_logger(logger_name='chat', first_run=True)

    if not args.nowrap and not args.upload:
        try:
            import readline
            readline.set_completer_delims('\t\n= ')
            readline.parse_and_bind('tab:complete')
        except Exception:
            logger.info('Running without tab completion')
        else:
            #readline is available : respawn the chat as a child process with --nowrap,
            #and forward user input lines to it
            proc = subprocess.Popen(
                [sys.executable, '-m', 'aioconnectors'] + sys.argv[1:] + ['--nowrap'],
                bufsize=0,
                stdin=subprocess.PIPE, stdout=None, stderr=None)
            while True:
                try:
                    user_input = input('')
                    proc.stdin.write((user_input + os.linesep).encode())
                    if user_input == '!exit':
                        return
                except KeyboardInterrupt:
                    proc.kill()
                    return

    custom_prompt = 'aioconnectors>> '
    chat_client_name = 'chat_client'
    CONNECTOR_FILES_DIRPATH = aioconnectors.get_tmp_dir()
    if os.path.exists(CONNECTOR_FILES_DIRPATH):
        res = input(f'May I delete the content of {CONNECTOR_FILES_DIRPATH} ? y/n\n')
        if res == 'y':
            shutil.rmtree(CONNECTOR_FILES_DIRPATH)

    delete_connector_dirpath_later = not os.path.exists(CONNECTOR_FILES_DIRPATH)
    is_server = not args.target
    accept_all_clients = args.accept
    loop = asyncio.get_event_loop()
    cwd = os.getcwd()
    proc_exec = None
    transport_json_cb = None

    class AuthClient:
        #helper for client authentication on server connector
        perform_client_authentication = False
        authenticate = asyncio.Event()
        allow = False

        @staticmethod
        def update_allow(status):
            #User chooses the value of "allow", which is sent back to server connector
            AuthClient.allow = status
            AuthClient.perform_client_authentication = False
            AuthClient.authenticate.set()
            if args.exec:
                task_exec = loop.create_task(run_proc_exe(args.exec))

        @staticmethod
        async def authenticate_client(client_name):
            #called as a hook by server when receiving new connection
            #waits for user input
            AuthClient.perform_client_authentication = True
            print(f'Accept client {client_name} ? y/n')
            await AuthClient.authenticate.wait()
            AuthClient.authenticate.clear()
            return AuthClient.allow

    if is_server:
        listening_ip = args.bind_server_ip or '0.0.0.0'
        if '.' not in listening_ip:
            listening_ip = aioconnectors.iface_to_ip(listening_ip)
        server_sockaddr = (listening_ip, int(args.port or 0) or aioconnectors.core.Connector.SERVER_ADDR[1])
        if listening_ip == '0.0.0.0':
            listening_addresses = show_up_ips()
        else:
            listening_addresses = [listening_ip]

        print(f'Chat Server listening on addresses {listening_addresses[:5]}, and port {server_sockaddr[1]}\n')
        connector_files_dirpath = CONNECTOR_FILES_DIRPATH
        aioconnectors.ssl_helper.create_certificates(logger, certificates_directory_path=connector_files_dirpath)

        def hook_target_directory_any(transport_json):
            #this is just for testing the hook feature
            #this hook is simple and not really needed, we could have used instead :
            #{'target_directory':os.path.join(cwd,'{source_id}')}
            source_id = transport_json['source_id']
            return source_id

        connector_manager = aioconnectors.ConnectorManager(
            is_server=True, server_sockaddr=server_sockaddr,
            use_ssl=True, ssl_allow_all=True,
            connector_files_dirpath=connector_files_dirpath,
            certificates_directory_path=connector_files_dirpath,
            send_message_types=['any'], recv_message_types=['any'],
            file_recv_config={'any': {'target_directory': cwd}},
            #file_recv_config={'any': {'target_directory':os.path.join(cwd,'{source_id}')}},
            hook_server_auth_client=None if accept_all_clients else AuthClient.authenticate_client,
            hook_target_directory={'any': hook_target_directory_any})

        connector_api = aioconnectors.ConnectorAPI(
            is_server=True, server_sockaddr=server_sockaddr,
            connector_files_dirpath=connector_files_dirpath,
            send_message_types=['any'], recv_message_types=['any'],
            default_logger_log_level='INFO',
            default_logger_rotate=True)
        destination_id = chat_client_name
    else:
aioconnectors.core.Connector.SERVER_ADDR[1]) 710 | connector_files_dirpath = CONNECTOR_FILES_DIRPATH 711 | aioconnectors.ssl_helper.create_certificates(logger, certificates_directory_path=connector_files_dirpath) 712 | connector_manager = aioconnectors.ConnectorManager(is_server=False, server_sockaddr=server_sockaddr, 713 | use_ssl=True, ssl_allow_all=True, 714 | connector_files_dirpath=connector_files_dirpath, 715 | certificates_directory_path=connector_files_dirpath, 716 | send_message_types=['any'], recv_message_types=['any'], 717 | file_recv_config={'any': {'target_directory':cwd}}, 718 | client_name=chat_client_name, enable_client_try_reconnect=False) 719 | 720 | connector_api = aioconnectors.ConnectorAPI(is_server=False, server_sockaddr=server_sockaddr, 721 | connector_files_dirpath=connector_files_dirpath, 722 | client_name=chat_client_name, 723 | send_message_types=['any'], recv_message_types=['any'], 724 | default_logger_log_level='INFO', 725 | default_logger_rotate=True) 726 | destination_id = None 727 | 728 | 729 | task_manager = loop.create_task(connector_manager.start_connector()) 730 | #run_until_complete now, in order to exit in case of exception 731 | #for example because of already existing socket 732 | loop.run_until_complete(task_manager) 733 | 734 | task_recv = task_console = task_send_file = task_exec = None 735 | task_exec_stdout = task_exec_stderr = None 736 | 737 | 738 | async def message_received_cb(logger, transport_json , data, binary): 739 | nonlocal transport_json_cb 740 | transport_json_cb = transport_json 741 | #callback when a message is received from peer 742 | if transport_json.get('await_response'): 743 | #this response is necessary in args.upload mode, to know when to exit 744 | #it is in fact used also in chat mode by send_file, even if not mandatory 745 | loop.create_task(connector_api.send_message(data=data, data_is_json=False, message_type='any', 746 | response_id=transport_json['request_id'], 747 | destination_id=transport_json['source_id'])) 748 | if data: 749 | if args.exec: 750 | if proc_exec.returncode is not None: 751 | print('Shell has exited') 752 | return 753 | proc_exec.stdin.write(data+b'\r\n') 754 | await proc_exec.stdin.drain() 755 | else: 756 | #display message received from peer 757 | print(data.decode()) 758 | print(custom_prompt,end='', flush=True) 759 | 760 | if not args.upload: 761 | task_recv = loop.create_task(connector_api.start_waiting_for_messages(message_type='any', 762 | message_received_cb=message_received_cb)) 763 | 764 | async def send_file(data, destination_id, with_file, delete_after_upload): 765 | #upload file to peer. uses await_response always, mandatory for upload mode 766 | await connector_api.send_message(data=data, data_is_json=False, destination_id=destination_id, 767 | await_response=True, request_id=random.randint(1,1000), 768 | message_type='any', with_file=with_file) 769 | if delete_after_upload: 770 | os.remove(delete_after_upload) 771 | 772 | class InputProtocolFactory(asyncio.Protocol): 773 | #hook user input : sends message to peer, and support special cases (!) 
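        #editor's aside : this Protocol is wired to stdin further below via
        #loop.connect_read_pipe(InputProtocolFactory, sys.stdin), so each line the user
        #types arrives in data_received(). A minimal standalone sketch of the same
        #technique on a Unix event loop (names are illustrative only) :
        #
        #    import asyncio, sys
        #    class Echo(asyncio.Protocol):
        #        def data_received(self, data):
        #            print('got :', data.decode().strip())
        #    async def main():
        #        loop = asyncio.get_running_loop()
        #        await loop.connect_read_pipe(Echo, sys.stdin)
        #        await asyncio.sleep(60)
        #    asyncio.run(main())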
774 | 775 | def connection_made(self, *args, **kwargs): 776 | super().connection_made(*args, **kwargs) 777 | print(custom_prompt,end='', flush=True) 778 | 779 | def data_received(self, data): 780 | data = data.decode().strip() 781 | if AuthClient.perform_client_authentication: 782 | if data == 'y': 783 | AuthClient.update_allow(True) 784 | else:# data == 'n': 785 | AuthClient.update_allow(False) 786 | print(custom_prompt,end='', flush=True) 787 | return 788 | 789 | if data == '!exit': 790 | os.kill(os.getpid(), signal.SIGINT) 791 | return 792 | 793 | if data.startswith('!upload '): 794 | try: 795 | with_file = None 796 | delete_after_upload = False 797 | upload_path = data[len('!upload '):] 798 | 799 | if not os.path.exists(upload_path): 800 | raise Exception(upload_path + ' does not exist') 801 | if os.path.isdir(upload_path): 802 | upload_path_zip = f'{upload_path}.zip' 803 | if not os.path.exists(upload_path_zip): 804 | shutil.make_archive(upload_path, 'zip', upload_path) 805 | delete_after_upload = upload_path_zip 806 | upload_path = upload_path_zip 807 | #if zip already exists, don't override it, just send it (even if it may not be the correct zip) 808 | 809 | data = f'Receiving {upload_path}' 810 | with_file={'src_path':upload_path,'dst_type':'any', 'dst_name':os.path.basename(upload_path), 811 | 'delete':False} 812 | loop.create_task(send_file(data, destination_id, with_file, delete_after_upload)) 813 | except Exception as exc: 814 | res = str(exc) 815 | print(custom_prompt,end='', flush=True) 816 | print(res) 817 | print(custom_prompt,end='', flush=True) 818 | return 819 | 820 | if data.startswith('!dezip '): 821 | try: 822 | target = data.split('!dezip ')[1] 823 | #copy target to cwd 824 | #shutil.copy(os.path.join(CONNECTOR_FILES_DIRPATH, target), target) 825 | target_dir = target.split('.zip')[0] 826 | with zipfile.ZipFile(target) as zf: 827 | zf.extractall(path=target_dir) 828 | except Exception as exc: 829 | res = str(exc) 830 | print(custom_prompt,end='', flush=True) 831 | print(res) 832 | print(custom_prompt,end='', flush=True) 833 | return 834 | 835 | elif data.startswith('!'): 836 | data_shell = data[1:] 837 | if data_shell: 838 | try: 839 | res = subprocess.check_output(data_shell, stderr=subprocess.PIPE, shell=True) 840 | res = res.decode().strip() 841 | except subprocess.CalledProcessError as exc: 842 | res = str(exc) 843 | print(custom_prompt,end='', flush=True) 844 | print(res) 845 | print(custom_prompt,end='', flush=True) 846 | return 847 | 848 | loop.create_task(connector_api.send_message(data=data, data_is_json=False, destination_id=destination_id, 849 | message_type='any')) 850 | print(custom_prompt,end='', flush=True) 851 | 852 | async def connect_pipe_to_stdin(loop, connector_manager): 853 | #hook user input 854 | if not is_server: 855 | print('Connector waiting to connect ... 
(Ctrl+C to quit)') 856 | 857 | while True: 858 | await asyncio.sleep(1) 859 | if connector_manager.show_connected_peers(): 860 | print('Connected !') 861 | break 862 | 863 | transport, protocol = await loop.connect_read_pipe(InputProtocolFactory, sys.stdin) 864 | 865 | async def upload_file(args, destination_id): 866 | #called when client uses the upload mode, which uploads and disconnects, without opening a chat 867 | await asyncio.sleep(3) #wait for connection 868 | upload_path = args.upload 869 | delete_after_upload = False 870 | if os.path.isdir(upload_path): 871 | upload_path_zip = f'{upload_path}.zip' 872 | if not os.path.exists(upload_path_zip): 873 | shutil.make_archive(upload_path, 'zip', upload_path) 874 | delete_after_upload = upload_path_zip 875 | upload_path = upload_path_zip 876 | #if zip already exists, don't override it, just send it (even if it may not be the correct zip) 877 | 878 | with_file={'src_path':upload_path,'dst_type':'any', 'dst_name':os.path.basename(upload_path), 'delete':False} 879 | await send_file('', destination_id, with_file, delete_after_upload) 880 | 881 | 882 | async def stdout_proc_exe(): 883 | while True: 884 | res_stdout = await proc_exec.stdout.read(4096) 885 | if not res_stdout: 886 | return 887 | res_stdout = res_stdout.decode() 888 | if transport_json_cb: 889 | await connector_api.send_message(data=res_stdout, data_is_json=False, destination_id=transport_json_cb['source_id'], 890 | message_type='any') 891 | 892 | async def stderr_proc_exe(): 893 | while True: 894 | res_stderr = await proc_exec.stderr.read(4096) 895 | if not res_stderr: 896 | return 897 | res_stderr = res_stderr.decode() 898 | if 'cannot set terminal process group' in res_stderr: 899 | continue 900 | if transport_json_cb: 901 | await connector_api.send_message(data=res_stderr, data_is_json=False, destination_id=transport_json_cb['source_id'], 902 | message_type='any') 903 | 904 | async def run_proc_exe(shell_path): 905 | nonlocal proc_exec, task_exec_stdout, task_exec_stderr 906 | # python3 -m aioconnectors chat --bind_server_ip 127.0.0.1 --port 1234 907 | # python3 -m aioconnectors chat --target 127.0.0.1 --port 1234 --exec /bin/bash 908 | proc_exec = await asyncio.create_subprocess_exec(shell_path, '-i', stdin=asyncio.subprocess.PIPE, 909 | stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE, start_new_session=True) 910 | proc_exec.stdin.write(f"{sys.executable} -c 'import pty;pty.spawn(\"{shell_path}\")'\r\n".encode()) 911 | await proc_exec.stdin.drain() 912 | 913 | task_exec_stdout = loop.create_task(stdout_proc_exe()) 914 | task_exec_stderr = loop.create_task(stderr_proc_exe()) 915 | 916 | 917 | if not args.upload: 918 | if not is_server and args.exec: 919 | task_console = loop.create_task(connect_pipe_to_stdin(loop, connector_manager)) 920 | task_exec = loop.create_task(run_proc_exe(args.exec)) 921 | else: 922 | #chat mode, hook stdin 923 | task_console = loop.create_task(connect_pipe_to_stdin(loop, connector_manager)) 924 | else: 925 | #upload mode, upload and exit 926 | task_send_file = loop.create_task(upload_file(args, destination_id)) 927 | task_send_file.add_done_callback(lambda inp:os.kill(os.getpid(), signal.SIGINT)) 928 | 929 | try: 930 | loop.run_forever() 931 | except: 932 | print('Connector stopped !') 933 | 934 | task_stop = loop.create_task(connector_manager.stop_connector(delay=None, hard=False, shutdown=True)) 935 | loop.run_until_complete(task_stop) 936 | if task_console: 937 | del task_console 938 | if task_recv: 939 | 
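        #editor's aside : each start_waiting_for_messages(message_type=...) call is
        #matched here by a stop_waiting_for_messages for the same message_type, so the
        #api can release the receive channel it was listening on (presumably a unix
        #socket under connector_files_dirpath)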
        connector_api.stop_waiting_for_messages(message_type='any')
940 |         del task_recv
941 |     if task_exec:
942 |         del task_exec
943 |     if task_exec_stdout:
944 |         del task_exec_stdout
945 |     if task_exec_stderr:
946 |         del task_exec_stderr
947 |     if proc_exec:
948 |         del proc_exec
949 |     del task_stop
950 |     del task_manager
951 |     del connector_manager
952 |     if delete_connector_dirpath_later and os.path.exists(connector_files_dirpath):
953 |         shutil.rmtree(connector_files_dirpath)
954 | 
955 | def show_up_ips():
956 |     #tries to return list of up ipv4 ips (just for display)
957 |     try:
958 |         IPADDR_REGEX = re.compile(r'(?P<ipaddr>[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)')
959 |         cmd = ['ip', '-4', '-br', '-f', 'inet', 'addr', 'show']
960 |         ifconfig_lines = subprocess.check_output(cmd, encoding='utf8', timeout=5).splitlines()
961 |         addresses = []
962 |         for line in ifconfig_lines:
963 |             if 'UP' in line:
964 |                 res = IPADDR_REGEX.search(line)
965 |                 if res:
966 |                     addresses.append(res.group('ipaddr'))
967 |         return addresses
968 |     except Exception:
969 |         return []
970 | 
-------------------------------------------------------------------------------- /aioconnectors/helpers.py: --------------------------------------------------------------------------------
1 | import os
2 | import pwd,grp
3 | import time
4 | import logging
5 | from logging.handlers import RotatingFileHandler
6 | import sys
7 | import stat
8 | import subprocess
9 | import re
10 | import gzip
11 | 
12 | 
13 | PYTHON_VERSION = (sys.version_info.major,sys.version_info.minor)
14 | if PYTHON_VERSION < (3,6):
15 |     print('aioconnectors minimum requirement : Python 3.6')
16 |     sys.exit(1)
17 | PYTHON_GREATER_37 = (PYTHON_VERSION >= (3,7))
18 | 
19 | DEFAULT_LOGGER_NAME = 'aioconnector'
20 | LOGFILE_DEFAULT_NAME = 'aioconnectors.log'
21 | LOG_LEVEL = 'INFO'
22 | LOG_ROTATE = True
23 | LOG_FORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
24 | LOG_FORMAT_SHORT = '%(asctime)s - %(levelname)s - %(message)s'
25 | LOG_BK_COUNT = 5
26 | LOG_MAX_SIZE = 67108864 # 2**26 = 64 MB
27 | 
28 | SOURCE_ID_REGEX = re.compile('^[0-9a-zA-Z-_:]+$')
29 | SOURCE_ID_DEFAULT_REGEX = re.compile('^default[0-9]*$')
30 | SOURCE_ID_MAX_LENGTH = 128
31 | TAG_REGEX = re.compile('^[0-9a-zA-Z-_:]+$')
32 | TAG_MAX_LENGTH = 128
33 | 
34 | def full_path(the_path):
35 |     if the_path is not None:
36 |         return os.path.abspath(os.path.normpath(os.path.expandvars(os.path.expanduser(the_path))))
37 | 
38 | def get_tmp_dir():
39 |     if os.path.exists('/var/tmp'):
40 |         return '/var/tmp/aioconnectors'
41 |     else:
42 |         candidate1 = full_path('~/aioconnectors_tmp')
43 |         candidate2 = full_path('aioconnectors_tmp')
44 |         return min(candidate1, candidate2, key=lambda x:len(x))
45 | 
46 | def get_logger(logfile_path=LOGFILE_DEFAULT_NAME, first_run=False, silent=True, logger_name=DEFAULT_LOGGER_NAME,
47 |                log_format=LOG_FORMAT, level=LOG_LEVEL, rotate=LOG_ROTATE, bk_count=LOG_BK_COUNT):
48 | 
49 |     def namer(name):
50 |         return name + '.gz'
51 | 
52 |     def rotator(source, dest):
53 |         with open(source, 'rb') as sf:
54 |             data = sf.read()
55 |             compressed = gzip.compress(data)
56 |             with open(dest, 'wb') as df:
57 |                 df.write(compressed)
58 |         os.truncate(source, 0)
59 | 
60 |     logger = logging.getLogger(logger_name)
61 |     logger.handlers = []
62 |     if not first_run:
63 |         handlers = []
64 |         if logfile_path: #could be '' if no config file provided
65 |             if rotate:
66 |                 if rotate is True:
67 |                     rotate = LOG_MAX_SIZE
68 |                 else:
69 |                     #use user defined value
70 |                     try:
71 |                         rotate = int(rotate)
72 |                     except Exception:
73 |                         rotate = LOG_MAX_SIZE
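                #editor's aside : the namer/rotator pair defined above makes
                #RotatingFileHandler gzip every rotated file (aioconnectors.log.1.gz, ...)
                #instead of keeping it as plain text. A minimal usage sketch (the path is
                #an example, not a requirement) :
                #
                #    logger = get_logger(logfile_path='/var/tmp/aioconnectors/test.log',
                #                        silent=False, rotate=True)
                #    logger.info('hello')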
74 |                 fh = RotatingFileHandler(logfile_path, maxBytes=rotate, backupCount=bk_count)
75 |                 fh.rotator = rotator
76 |                 fh.namer = namer
77 |                 handlers.append(fh)
78 |             else:
79 |                 handlers.append(logging.FileHandler(logfile_path))
80 |         if not silent:
81 |             handlers.append(logging.StreamHandler(sys.stdout))
82 |         if not handlers:
83 |             logger.addHandler(logging.NullHandler())
84 |             return logger
85 | 
86 |         log_level = getattr(logging, level, logging.INFO)
87 |         logger.setLevel(log_level)
88 | 
89 |         formatter = logging.Formatter(log_format)
90 |         formatter.converter = time.gmtime
91 |         for fh in handlers:
92 |             fh.setFormatter(formatter)
93 |             fh.setLevel(logging.DEBUG)
94 |             logger.addHandler(fh)
95 |     else:
96 |         logger.addHandler(logging.NullHandler())
97 | 
98 |     return logger
99 | 
100 | def chown_file(filepath, username, groupname, logger=None):
101 |     try:
102 |         uid = pwd.getpwnam(username).pw_uid
103 |         gid = grp.getgrnam(groupname).gr_gid
104 |         os.chown(filepath, uid, gid, follow_symlinks = False)
105 |     except Exception:
106 |         if logger:
107 |             logger.exception('chown_file')
108 | 
109 | def chown_nobody_permissions(directory_path, logger=None):
110 |     try:
111 |         UID_NOBODY = pwd.getpwnam("nobody").pw_uid
112 |         try:
113 |             GID_NOGROUP = grp.getgrnam("nogroup").gr_gid
114 |         except Exception:
115 |             GID_NOGROUP = grp.getgrnam("nobody").gr_gid
116 |         os.chown(directory_path, UID_NOBODY, GID_NOGROUP, follow_symlinks = False)
117 |         os.chmod(directory_path, stat.S_IRWXU | stat.S_IRWXG)# | stat.S_IRWXO)
118 |     except Exception as exc:
119 |         if logger:
120 |             logger.info('chown_nobody_permissions : '+str(exc))
121 | 
122 | def iface_to_ip(iface, logger=None):
123 |     try:
124 |         ifconfig_output = subprocess.check_output(['ip', 'addr', 'show', iface], encoding='utf8', timeout=5)
125 |         return re.search(rf'inet (?P<ipaddr>[\d\.]+).*{iface}$', ifconfig_output, re.MULTILINE).group('ipaddr')
126 |     except Exception:
127 |         if logger:
128 |             logger.exception('iface_to_ip')
129 |         return iface
130 | 
131 | def validate_source_id(source_id):
132 |     if not SOURCE_ID_REGEX.match(source_id):
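        #editor's aside : e.g. 'client-1' or 'site:host_3' match the regex, while
        #'a/b', '../x' or 'a.b' are rejected, which is what protects the per-client
        #certificate paths from path traversal
133 |         #if '.' 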
in source_id or '/' in source_id: 134 | #protect against path traversal 135 | raise Exception(f'Invalid source_id : {source_id} - please use only {SOURCE_ID_REGEX.pattern}') 136 | if len(source_id) > SOURCE_ID_MAX_LENGTH: 137 | raise Exception(f'Invalid source_id : {source_id} - of length {len(source_id)}') 138 | if SOURCE_ID_DEFAULT_REGEX.match(source_id): 139 | raise Exception(f'Invalid source_id : {source_id} - cannot match {SOURCE_ID_DEFAULT_REGEX.pattern}') 140 | 141 | def validate_tag(tag): 142 | if not TAG_REGEX.match(tag): 143 | raise Exception(f'Invalid tag : {tag} - please use only {TAG_REGEX.pattern}') 144 | if len(tag) > TAG_MAX_LENGTH: 145 | raise Exception(f'Invalid tag : {tag} - of length {len(tag)}') 146 | 147 | class CustomException(Exception): 148 | pass 149 | -------------------------------------------------------------------------------- /aioconnectors/ssl_helper.py: -------------------------------------------------------------------------------- 1 | import os 2 | import asyncio 3 | import json 4 | import subprocess 5 | import uuid 6 | import shutil 7 | import re 8 | 9 | from .helpers import get_tmp_dir, validate_source_id, SOURCE_ID_DEFAULT_REGEX 10 | 11 | CA_CREATE_CNF =''' 12 | [ ca ] 13 | default_ca = CA_default # The default ca section 14 | 15 | [ CA_default ] 16 | 17 | default_days = 3650 # How long to certify for 18 | default_crl_days = 3650 # How long before next CRL 19 | default_md = sha256 # Use public key default MD 20 | preserve = no # Keep passed DN ordering 21 | 22 | x509_extensions = ca_extensions # The extensions to add to the cert 23 | 24 | email_in_dn = no # Don't concat the email in the DN 25 | copy_extensions = copy # Required to copy SANs from CSR to cert 26 | 27 | base_dir = {base_dir} 28 | certificate = $base_dir/{ca_pem} # The CA certificate 29 | private_key = $base_dir/{ca_key} # The CA private key 30 | new_certs_dir = {ca_generated} # Location for new certs after signing 31 | database = $base_dir/index.txt # Database index file 32 | serial = $base_dir/serial.txt # The current serial number 33 | 34 | unique_subject = no # Set to 'no' to allow creation of 35 | # several certificates with same subject. 
36 | 37 | [ req ] 38 | prompt = no 39 | default_bits = 4096 40 | default_keyfile = server_ca.pem 41 | distinguished_name = ca_distinguished_name 42 | x509_extensions = ca_extensions 43 | string_mask = utf8only 44 | 45 | [ ca_distinguished_name ] 46 | 47 | countryName = US 48 | #countryName_default = US 49 | 50 | 51 | [ ca_extensions ] 52 | 53 | subjectKeyIdentifier = hash 54 | authorityKeyIdentifier = keyid:always, issuer 55 | basicConstraints = critical, CA:true 56 | keyUsage = keyCertSign, cRLSign 57 | 58 | [ signing_policy ] 59 | countryName = optional 60 | stateOrProvinceName = optional 61 | localityName = optional 62 | organizationName = supplied 63 | organizationalUnitName = optional 64 | commonName = optional 65 | emailAddress = optional 66 | 67 | [ signing_req ] 68 | subjectKeyIdentifier = hash 69 | authorityKeyIdentifier = keyid,issuer 70 | basicConstraints = CA:FALSE 71 | keyUsage = digitalSignature, keyEncipherment 72 | ''' 73 | 74 | CA_CSR_CREATE_CNF = ''' 75 | [ req ] 76 | prompt = no 77 | default_bits = 2048 78 | default_keyfile = serverkey.pem 79 | distinguished_name = server_distinguished_name 80 | req_extensions = server_req_extensions 81 | string_mask = utf8only 82 | 83 | [ server_distinguished_name ] 84 | countryName = US 85 | organizationName = {org} 86 | 87 | [ server_req_extensions ] 88 | subjectKeyIdentifier = hash 89 | basicConstraints = CA:FALSE 90 | keyUsage = digitalSignature, keyEncipherment 91 | ''' 92 | 93 | def update_conf(conf_template, conf, replacement_dict): 94 | shutil.copy(conf_template, conf) 95 | with open(conf, 'a') as fd: 96 | for key,value in replacement_dict.items(): 97 | fd.write(str(key)+' = '+str(value)+'\n') 98 | 99 | class SSL_helper: 100 | DEFAULT_BASE_PATH = get_tmp_dir() #os.getcwd() 101 | CLIENT_DEFAULT_CERT_NAME = 'default' 102 | SOURCE_ID_2_CERT = 'source_id_2_cert.json' 103 | CERT_NAME_EXTENSION = "pem" 104 | KEY_NAME_EXTENSION = "key" 105 | 106 | def __init__(self, logger, is_server, certificates_directory_path=None, max_certs=None, server_ca=False, 107 | server_ca_certs_not_stored=True, tool_only=False): 108 | self.logger = logger.getChild('ssl') 109 | try: 110 | self.is_server, self.certificates_directory_path, self.server_ca = is_server, certificates_directory_path, server_ca 111 | if not self.certificates_directory_path: 112 | self.certificates_directory_path = self.DEFAULT_BASE_PATH 113 | self.BASE_PATH = self.certificates_directory_path 114 | self.certificates_base_path = os.path.join(self.BASE_PATH, 'certificates') 115 | #server 116 | self.SERVER_BASE_PATH = os.path.join(self.certificates_base_path, 'server') 117 | self.DEFAULT_CLIENT_CERTIFICATE_COMMON_NAME = "default.cn.com" 118 | self.DEFAULT_CLIENT_CERTIFICATE_ORGANIZATION_NAME = "default" 119 | self.SERVER_CERTS_PATH = os.path.join(self.SERVER_BASE_PATH, 'client-certs') 120 | self.SERVER_CERTS_PEM_PATH = os.path.join(self.SERVER_CERTS_PATH, '{}.'+self.CERT_NAME_EXTENSION) 121 | self.SERVER_CERTS_KEY_PATH = os.path.join(self.SERVER_CERTS_PATH, '{}.'+self.KEY_NAME_EXTENSION) 122 | self.SERVER_SYMLINKS_PATH = os.path.join(self.SERVER_BASE_PATH, 'client-certs/symlinks') 123 | self.SERVER_PEM_PATH = os.path.join(self.SERVER_BASE_PATH, 'server-cert/server.'+self.CERT_NAME_EXTENSION) 124 | self.SERVER_KEY_PATH = os.path.join(self.SERVER_BASE_PATH, 'server-cert/server.'+self.KEY_NAME_EXTENSION) 125 | self.CSR_CONF = os.path.join(self.SERVER_BASE_PATH, 'csr_details.conf') 126 | self.SERVER_CA_DETAILS_CONF = os.path.join(self.SERVER_BASE_PATH, 'server_ca_details.conf') 127 | 
self.CSR_TEMPLATE_CONF = os.path.join(self.SERVER_BASE_PATH, 'csr_details_template.conf') 128 | self.SERVER_CA_PEM_PATH = os.path.join(self.SERVER_BASE_PATH, 'server-cert/server_ca.'+self.CERT_NAME_EXTENSION) 129 | self.SERVER_CA_KEY_PATH = os.path.join(self.SERVER_BASE_PATH, 'server-cert/server_ca.'+self.KEY_NAME_EXTENSION) 130 | self.SERVER_CA_CSR_CONF = os.path.join(self.SERVER_BASE_PATH, 'server_ca_csr_details.conf') 131 | self.SERVER_CA_CSR_PEM_PATH = os.path.join(self.SERVER_BASE_PATH, 'server-cert/server_ca_csr.'+self.CERT_NAME_EXTENSION) 132 | self.CA_GENERATED = os.path.join(self.SERVER_CERTS_PATH, 'ca-generated') 133 | 134 | #client 135 | self.CLIENT_BASE_PATH = os.path.join(self.certificates_base_path, 'client') 136 | self.CLIENT_PEM_PATH = os.path.join(self.CLIENT_BASE_PATH, 'client-certs/{}.'+self.CERT_NAME_EXTENSION) 137 | self.CLIENT_KEY_PATH = os.path.join(self.CLIENT_BASE_PATH, 'client-certs/{}.'+self.KEY_NAME_EXTENSION) 138 | #self.CLIENT_DEFAULT_ORGANIZATION = '9d2f849c877b4e50b6fccb54d6cd1818' #'Internet Widgits Pty Ltd' #'company' 139 | self.CLIENT_SERVER_CRT_PATH = os.path.join(self.CLIENT_BASE_PATH, 'server-cert/server.'+self.CERT_NAME_EXTENSION) 140 | #we might want to chain multiple certificates in CLIENT_SERVER_CRT_PATH, to support multiple server certificates 141 | self.tool_only = tool_only 142 | if self.tool_only: 143 | return 144 | 145 | if self.is_server: 146 | self.source_id_2_cert_path = os.path.join(self.SERVER_CERTS_PATH, self.SOURCE_ID_2_CERT) 147 | if os.path.exists(self.source_id_2_cert_path): 148 | #load existing source_id_2_cert.json 149 | with open(self.source_id_2_cert_path, 'r') as fd: 150 | self.source_id_2_cert = json.load(fd) 151 | else: 152 | self.source_id_2_cert = {'source_id_2_cert':{}, 'cert_2_source_id':{}} 153 | #load default_client_cert_id, and default_client_cert_ids_list 154 | self.default_client_cert_ids_list = [] 155 | for cert in (file_name for file_name in os.listdir(self.SERVER_CERTS_PATH) if \ 156 | file_name.endswith(f'.{self.CERT_NAME_EXTENSION}')): 157 | if cert.startswith(self.CLIENT_DEFAULT_CERT_NAME): 158 | cert_name = cert[:-1-len(self.CERT_NAME_EXTENSION)] 159 | if SOURCE_ID_DEFAULT_REGEX.match(cert_name): 160 | stdout = subprocess.check_output('openssl x509 -hash -serial -noout -in '+\ 161 | str(os.path.join(self.SERVER_CERTS_PATH, 162 | # self.CLIENT_DEFAULT_CERT_NAME+'.'+self.CERT_NAME_EXTENSION)), shell=True) 163 | cert)), shell=True) 164 | 165 | hash_name, serial = stdout.decode().splitlines() 166 | serial = serial.split('=')[1] 167 | if cert_name == self.CLIENT_DEFAULT_CERT_NAME: 168 | self.default_client_cert_id = serial 169 | self.logger.info(f'Server adding default certificate : {cert_name}') 170 | self.default_client_cert_ids_list.append(serial) 171 | self.logger.info(f'Server using default_client_cert_ids_list : {self.default_client_cert_ids_list}') 172 | self.max_certs = max_certs 173 | self.server_ca_certs_not_stored = server_ca_certs_not_stored 174 | 175 | except Exception: 176 | self.logger.exception('init') 177 | raise 178 | 179 | async def run_cmd(self, cmd): 180 | proc = await asyncio.create_subprocess_shell( 181 | cmd, 182 | stdout=asyncio.subprocess.PIPE, 183 | stderr=asyncio.subprocess.PIPE) 184 | 185 | stdout, stderr = await proc.communicate() 186 | return proc, stdout.decode().strip(), stderr 187 | 188 | ''' 189 | def load_certificate(self, certificate_path): 190 | from cryptography import x509 191 | from cryptography.hazmat.backends import default_backend 192 | with open(certificate_path, 'rb') as 
fd: 193 | content = fd.read() 194 | certificate = x509.load_pem_x509_certificate(content, default_backend()) 195 | ''' 196 | 197 | async def create_client_certificate(self, source_id=None, common_name=None, hook_allow_certificate_creation=None, 198 | server_ca=False): 199 | #Only called by server 200 | #Generates self signed certificate for a client_id 201 | #returns paths of cert and key 202 | try: 203 | validate_source_id(source_id) 204 | crt_path = f'{self.SERVER_CERTS_PATH}/{source_id}.{self.CERT_NAME_EXTENSION}' 205 | key_path = f'{self.SERVER_CERTS_PATH}/{source_id}.{self.KEY_NAME_EXTENSION}' 206 | if os.path.exists(crt_path): 207 | raise Exception(f'A certificate already exists for client {source_id}. ' 208 | f'Use delete_client_certificate to delete it') 209 | 210 | if source_id in self.source_id_2_cert['source_id_2_cert']: 211 | raise Exception(f'A source_id_2_cert already exists for client {source_id}. ' 212 | f'Use delete_client_certificate to delete it') 213 | 214 | if hook_allow_certificate_creation: 215 | allow_certificate_creation = await hook_allow_certificate_creation(source_id) 216 | if not allow_certificate_creation: 217 | raise Exception(f'Not allowing certificate creation for {source_id}') 218 | 219 | if len(self.source_id_2_cert['source_id_2_cert']) >= self.max_certs: 220 | raise Exception(f'Too many certificates : {self.max_certs}, failed creating certificate for {source_id}') 221 | 222 | if common_name: 223 | update_conf(self.CSR_TEMPLATE_CONF, self.CSR_CONF, {'O':common_name, 'CN':common_name}) 224 | create_certificate_cmd = f"openssl req -new -newkey rsa -nodes -x509 -days 3650 -keyout "\ 225 | f"{key_path} -out {crt_path} -config {self.CSR_CONF}" 226 | raise Exception('Need to implement create_client_certificate with common_name') 227 | else: 228 | #necessary to set a unique field (like organization), so that each certificate has a unique hash, 229 | #which is better for fast authentication 230 | organization = uuid.uuid4().hex 231 | update_conf(self.CSR_TEMPLATE_CONF, self.CSR_CONF, {'O':organization}) 232 | if not server_ca: 233 | create_certificate_cmd = f"openssl req -new -newkey rsa -nodes -x509 -days 3650 -keyout "\ 234 | f"{key_path} -out {crt_path} -config {self.CSR_CONF}" 235 | proc, stdout, stderr = await self.run_cmd(create_certificate_cmd) 236 | if proc.returncode != 0: 237 | raise Exception('Error while Generating self signed certificate : '+stderr.decode()) 238 | 239 | else: 240 | #create csr and sign it with server_ca 241 | with open(self.SERVER_CA_CSR_CONF, 'w') as fd: 242 | fd.write(CA_CSR_CREATE_CNF.format(org=organization)) 243 | #update_conf(SERVER_CA_CSR_CONF, csr_details_conf, {'O':organization}) 244 | csr_path = f'{self.SERVER_CERTS_PATH}/{source_id}-csr.{self.CERT_NAME_EXTENSION}' 245 | 246 | create_csr_cmd = f"openssl req -config {self.SERVER_CA_CSR_CONF} -newkey rsa:2048 -sha256 -nodes -keyout "\ 247 | f"{key_path} -out {csr_path} -outform PEM" 248 | proc, stdout, stderr = await self.run_cmd(create_csr_cmd) 249 | if proc.returncode != 0: 250 | raise Exception('Error while Generating csr : '+stderr.decode()) 251 | 252 | self.logger.info('Sign client default certificate CSR') 253 | pem_path = f'{self.SERVER_CERTS_PATH}/{source_id}.{self.CERT_NAME_EXTENSION}' 254 | 255 | # Create the index file 256 | index_file_path = os.path.join(self.SERVER_BASE_PATH, 'index.txt') 257 | if not os.path.exists(index_file_path): 258 | open(index_file_path, 'w').close() 259 | 260 | create_certificate_cmd = f"openssl ca -rand_serial -batch -policy 
signing_policy -config {self.SERVER_CA_DETAILS_CONF} "\
261 |                                              f"-extensions signing_req -out {pem_path} -infiles {csr_path}"
262 |                 #stdout = subprocess.check_output(create_certificate_cmd, shell=True)
263 |                 proc, stdout, stderr = await self.run_cmd(create_certificate_cmd)
264 |                 if proc.returncode != 0:
265 |                     raise Exception('Error while Generating CA signed certificate : '+stderr.decode())
266 | 
267 |                 if self.server_ca_certs_not_stored:
268 |                     try:
269 |                         for cert in os.listdir(self.CA_GENERATED):
270 |                             path_to_delete = os.path.join(self.CA_GENERATED, cert)
271 |                             self.logger.info(f'Deleting {path_to_delete}')
272 |                             os.remove(path_to_delete)
273 |                     except Exception:
274 |                         self.logger.exception(f'Deleting generated certificates under {self.CA_GENERATED}')
275 | 
276 |                 try:
277 |                     if os.path.exists(csr_path):
278 |                         #deleting client csr
279 |                         os.remove(csr_path)
280 |                 except Exception:
281 |                     self.logger.exception(f'Deleting {csr_path}')
282 | 
283 |             #if stderr:
284 |             #    self.logger.warning('create_certificate_cmd : '+stderr.decode())
285 | 
286 |             #create symlink for the client certificate named by their fingerprints
287 |             #so they will be detected by context.load_verify_locations(capath=
288 |             #first, calculate hash for symlink name
289 |             proc, stdout, stderr = await self.run_cmd(f'openssl x509 -hash -serial -noout -in {crt_path}')
290 |             if stderr:
291 |                 self.logger.warning('hash : '+stderr.decode())
292 |             hash_name, serial = stdout.splitlines()
293 |             serial = serial.split('=')[1]
294 |             cert_id = serial
295 | 
296 |             if not server_ca:
297 |                 #create a symlink called <hash_name>.<index>,
298 |                 #pointing to f'../{source_id}.{self.CERT_NAME_EXTENSION}'
299 |                 index = 0
300 |                 while True:
301 |                     candidate = os.path.join(self.SERVER_SYMLINKS_PATH, hash_name + '.' + str(index))
302 |                     if not os.path.exists(candidate):
303 |                         break
304 |                     index += 1
305 |                 os.symlink(f'../{source_id}.{self.CERT_NAME_EXTENSION}', candidate)
306 | 
307 |             #backup self.source_id_2_cert in file source_id_2_cert.json
308 |             self.source_id_2_cert['source_id_2_cert'][source_id] = cert_id
309 |             self.source_id_2_cert['cert_2_source_id'][cert_id] = source_id
310 |             with open(self.source_id_2_cert_path, 'w') as fd:
311 |                 json.dump(self.source_id_2_cert, fd)
312 | 
313 |             self.logger.info('Generated certificate : '+str(crt_path)+' for : '+source_id)
314 |             return (crt_path, key_path)
315 |         except Exception:
316 |             self.logger.exception('create_client_certificate')
317 |             raise
318 | 
319 |     async def remove_client_cert_on_client(self, source_id):
320 |         try:
321 |             validate_source_id(source_id)
322 |             cert_path = self.CLIENT_PEM_PATH.format(source_id)
323 |             key_path = self.CLIENT_KEY_PATH.format(source_id)
324 |             if not os.path.exists(cert_path):
325 |                 self.logger.warning(f'remove_client_cert : {source_id} has no certificate to remove at {cert_path}')
326 |             else:
327 |                 self.logger.info(f'remove_client_cert deleting {cert_path}')
328 |                 os.remove(cert_path)
329 |             if os.path.exists(key_path):
330 |                 self.logger.info(f'remove_client_cert deleting {key_path}')
331 |                 os.remove(key_path)
332 |             return json.dumps({'status':True, 'msg':''})
333 |         except Exception as exc:
334 |             self.logger.exception('remove_client_cert_on_client')
335 |             return json.dumps({'status':False, 'msg':str(exc)})
336 | 
337 | 
338 |     async def remove_client_cert_on_server(self, source_id, remove_only_symlink=False):
339 |         #also remove from self.source_id_2_cert
340 |         #symlink pointing to f'../{source_id}.{self.CERT_NAME_EXTENSION}' should be removed
341 |         #example : <hash>.2 -> source_id.pem : we need to remove <hash>.2, and then rename all <hash>.i where i>2, to <hash>.i-1
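        #editor's aside, a short worked example : with symlinks ab12cd34.0, ab12cd34.1,
        #ab12cd34.2 where ab12cd34.1 -> source_id.pem, we delete ab12cd34.1 and rename
        #ab12cd34.2 to ab12cd34.1, keeping the <hash>.<index> sequence gap-free, since
        #openssl capath lookups iterate over consecutive indexes (ab12cd34 is a
        #made-up hash value)
342 | 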
try: 343 | validate_source_id(source_id) 344 | cert_path = self.SERVER_CERTS_PEM_PATH.format(source_id) 345 | key_path = self.SERVER_CERTS_KEY_PATH.format(source_id) 346 | 347 | #first, remove symlink pointing to link_to_find 348 | link_to_find = f'../{source_id}.{self.CERT_NAME_EXTENSION}' 349 | 350 | listdir = os.listdir(self.SERVER_SYMLINKS_PATH) 351 | for the_file in listdir: 352 | the_path = os.path.join(self.SERVER_SYMLINKS_PATH, the_file) 353 | if os.readlink(the_path) == link_to_find: 354 | break 355 | else: 356 | the_file = None 357 | self.logger.warning(f'remove_client_cert : could not find a symlink to {link_to_find}') 358 | 359 | if the_file: 360 | self.logger.info(f'remove_client_cert deleting {the_path}') 361 | os.remove(the_path) 362 | the_file_name, the_file_extension = the_file.split('.') 363 | the_index = int(the_file_extension) 364 | #if the_index == 0, nothing more to do 365 | if the_index: 366 | #find and rename others. build "others"= ordered list of indexes of same name as the_file_name 367 | others = [] 368 | for test_file in listdir: 369 | test_name, test_index = test_file.split('.') 370 | if test_name == the_file_name: 371 | if int(test_index) > the_index: 372 | others.append(int(test_index)) 373 | if others: 374 | for test_index in sorted(others): 375 | os.rename(os.path.join(self.SERVER_SYMLINKS_PATH, the_file_name+'.'+str(test_index)), 376 | os.path.join(self.SERVER_SYMLINKS_PATH, the_file_name+'.'+str(test_index-1))) 377 | 378 | #then remove paths 379 | if not remove_only_symlink: 380 | if not os.path.exists(cert_path): 381 | self.logger.warning(f'remove_client_cert : {source_id} has no certificate to remove at {cert_path}') 382 | else: 383 | self.logger.info(f'remove_client_cert deleting {cert_path}') 384 | os.remove(cert_path) 385 | if os.path.exists(key_path): 386 | self.logger.info(f'remove_client_cert deleting {key_path}') 387 | os.remove(key_path) 388 | 389 | #also remove from self.source_id_2_cert 390 | cert = self.source_id_2_cert['source_id_2_cert'].pop(source_id, None) 391 | if cert: 392 | self.logger.info(f'remove_client_cert removing {source_id} from source_id_2_cert') 393 | self.source_id_2_cert['cert_2_source_id'].pop(cert, None) 394 | with open(self.source_id_2_cert_path, 'w') as fd: 395 | json.dump(self.source_id_2_cert, fd) 396 | return json.dumps({'status':True, 'msg':''}) 397 | else: 398 | msg = 'remove_client_cert non existing client in source_id_2_cert : '+source_id 399 | self.logger.error(msg) 400 | return json.dumps({'status':False, 'msg':msg}) 401 | 402 | except Exception as exc: 403 | self.logger.exception('remove_client_cert_on_server') 404 | return json.dumps({'status':False, 'msg':str(exc)}) 405 | 406 | def create_certificates(logger, certificates_directory_path, no_ca=True): 407 | ''' 408 | 409 | 1) Create Server certificate 410 | openssl req -new -newkey rsa -nodes -x509 -days 3650 -keyout server.key -out server.pem -config csr_details.conf 411 | put in SERVER_PEM_PATH, SERVER_KEY_PATH, CLIENT_SERVER_CRT_PATH (pem renamed to crt) 412 | 413 | 2) Create client default certificate 414 | openssl req -new -newkey rsa -nodes -x509 -days 3650 -keyout default.key -out default.pem -config csr_details.conf 415 | ########openssl req -new -newkey rsa -nodes -x509 -days 3650 -subj '/O=9d2f849c877b4e50b6fccb54d6cd1818' -keyout default.key -out default.pem -config csr_details.conf #'/O=default/CN=default.cn.com' 416 | put in CLIENT_PEM_PATH, CLIENT_KEY_PATH, SERVER_CERTS_PATH (only pem) 417 | 418 | 3) Calculate hash of client default certificate 
419 | openssl x509 -hash -noout -in default.pem #add '.0' as an extension 420 | 421 | 4) Create symlink of client default certificate in server directory 422 | ln -s ../default.pem 423 | 424 | 5) Create Server CA certificate 425 | 426 | ''' 427 | 428 | ssl_helper = SSL_helper(logger, is_server=False, certificates_directory_path=certificates_directory_path) 429 | 430 | #certificates_path = os.path.join(ssl_helper.BASE_PATH, 'certificates') 431 | certificates_path = ssl_helper.certificates_base_path 432 | logger.info('Certificates will be created under directory : '+certificates_path) 433 | 434 | certificates_path_server = os.path.join(certificates_path, 'server') 435 | certificates_path_server_client = os.path.join(certificates_path_server, 'client-certs') 436 | certificates_path_server_client_gen = os.path.join(certificates_path_server_client, 'ca-generated') 437 | certificates_path_server_client_sym = os.path.join(certificates_path_server_client, 'symlinks') 438 | certificates_path_server_server = os.path.join(certificates_path_server, 'server-cert') 439 | certificates_path_client = os.path.join(certificates_path, 'client') 440 | certificates_path_client_client = os.path.join(certificates_path_client, 'client-certs') 441 | certificates_path_client_server = os.path.join(certificates_path_client, 'server-cert') 442 | 443 | if os.path.exists(certificates_path_server_server) and os.listdir(certificates_path_server_server): 444 | logger.error(certificates_path_server_server+' should be empty before starting this process') 445 | return False 446 | if os.path.exists(certificates_path_server_client) and os.listdir(certificates_path_server_client): 447 | logger.error(certificates_path_server_client+' should be empty before starting this process') 448 | return False 449 | if os.path.exists(certificates_path_client) and os.listdir(certificates_path_client): 450 | logger.error(certificates_path_client+' should be empty before starting this process') 451 | return False 452 | 453 | if not os.path.exists(certificates_path_server_server): 454 | os.makedirs(certificates_path_server_server) 455 | if not os.path.exists(certificates_path_server_client_sym): 456 | os.makedirs(certificates_path_server_client_sym) 457 | if not os.path.exists(certificates_path_client_server): 458 | os.makedirs(certificates_path_client_server) 459 | if not os.path.exists(certificates_path_client_client): 460 | os.makedirs(certificates_path_client_client) 461 | if not no_ca: 462 | if not os.path.exists(certificates_path_server_client_gen): 463 | os.makedirs(certificates_path_server_client_gen) 464 | 465 | CERT_NAME_EXTENSION = "pem" 466 | KEY_NAME_EXTENSION = "key" 467 | 468 | #server 469 | SERVER_CERTS_PEM_PATH = os.path.join(certificates_path_server_client, '{}.'+CERT_NAME_EXTENSION) 470 | SERVER_PEM_PATH = os.path.join(certificates_path_server_server, 'server.'+CERT_NAME_EXTENSION) 471 | SERVER_KEY_PATH = os.path.join(certificates_path_server_server, 'server.'+KEY_NAME_EXTENSION) 472 | SERVER_CA_PEM = 'server_ca.pem' 473 | SERVER_CA_KEY = 'server_ca.key' 474 | SERVER_CA_PEM_PATH = os.path.join(certificates_path_server_server, SERVER_CA_PEM) 475 | SERVER_CA_KEY_PATH = os.path.join(certificates_path_server_server, SERVER_CA_KEY) 476 | SERVER_CA_CSR_CONF = os.path.join(certificates_path_server, 'server_ca_csr_details.conf') 477 | SERVER_CA_CSR_PEM_PATH = os.path.join(certificates_path_server, 'server-cert/server_ca_csr.'+CERT_NAME_EXTENSION) 478 | 479 | #client 480 | CLIENT_SERVER_CRT_PATH = 
os.path.join(certificates_path_client_server, 'server.'+CERT_NAME_EXTENSION) 481 | CLIENT_PEM_PATH = os.path.join(certificates_path_client_client, '{}.'+CERT_NAME_EXTENSION) 482 | CLIENT_KEY_PATH = os.path.join(certificates_path_client_client, '{}.'+KEY_NAME_EXTENSION) 483 | 484 | SERVER_CA_DETAILS_CONF = os.path.join(certificates_path_server, 'server_ca_details.conf') 485 | CSR_CONF = os.path.join(certificates_path_server, 'csr_details.conf') 486 | CSR_TEMPLATE_CONF = os.path.join(certificates_path_server, 'csr_details_template.conf') 487 | 488 | #this if lets the user tweak the base CSR_TEMPLATE_CONF 489 | if not os.path.exists(CSR_TEMPLATE_CONF): 490 | with open(CSR_TEMPLATE_CONF, 'w') as fd: 491 | fd.write( 492 | ''' 493 | [req] 494 | prompt = no 495 | default_bits = 2048 496 | default_md = sha256 497 | distinguished_name = dn 498 | 499 | [ dn ] 500 | C = US 501 | ''') 502 | else: 503 | logger.info(f'Using preexisting {CSR_TEMPLATE_CONF}') 504 | 505 | logger.info('Create Server certificate') 506 | # 1) Create Server certificate 507 | update_conf(CSR_TEMPLATE_CONF, CSR_CONF, {'O':'company'}) 508 | cmd = f'openssl req -new -newkey rsa -nodes -x509 -days 3650 -keyout {SERVER_KEY_PATH} -out {SERVER_PEM_PATH} -config {CSR_CONF} 2>/dev/null' 509 | stdout = subprocess.check_output(cmd, shell=True) 510 | shutil.copy(SERVER_PEM_PATH, CLIENT_SERVER_CRT_PATH) 511 | #we might want to append to an existing CLIENT_SERVER_CRT_PATH, to support multiple server certificates 512 | 513 | if not no_ca: 514 | logger.info('Generate CA') 515 | # 2)generate ca pem and key 516 | if not os.path.exists(SERVER_CA_DETAILS_CONF): 517 | with open(SERVER_CA_DETAILS_CONF, 'w') as fd: 518 | fd.write(CA_CREATE_CNF.format(base_dir=certificates_path_server_server, ca_generated=certificates_path_server_client_gen, 519 | ca_pem=SERVER_CA_PEM, ca_key=SERVER_CA_KEY)) 520 | 521 | cmd = f"openssl req -new -newkey rsa:4096 -sha256 -nodes -x509 -days 3650 -keyout {SERVER_CA_KEY_PATH}" \ 522 | f" -out {SERVER_CA_PEM_PATH} -config {SERVER_CA_DETAILS_CONF} -outform PEM" 523 | 524 | stdout = subprocess.check_output(cmd, shell=True) 525 | 526 | logger.info('Create server ca symlink') 527 | stdout = subprocess.check_output(f'openssl x509 -hash -serial -noout -in {SERVER_CA_PEM_PATH}', shell=True) 528 | hash_name, serial = stdout.decode().splitlines() 529 | #serial = serial.split('=')[1] 530 | 531 | index = 0 532 | while True: 533 | candidate = os.path.join(certificates_path_server_client_sym, hash_name + '.' 
+ str(index)) 534 | if not os.path.exists(candidate): 535 | break 536 | index += 1 537 | os.symlink(f'../../server-cert/{SERVER_CA_PEM}' , candidate) 538 | 539 | logger.info('Generate client default certificate CSR') 540 | 541 | # 2) Create client default certificate 542 | client_default_key = CLIENT_KEY_PATH.format(ssl_helper.CLIENT_DEFAULT_CERT_NAME) 543 | client_default_pem = CLIENT_PEM_PATH.format(ssl_helper.CLIENT_DEFAULT_CERT_NAME) 544 | #organization = ssl_helper.CLIENT_DEFAULT_CERT_NAME 545 | #we might want to obfuscate organization 546 | #organization = str(abs(hash(organization)) % (10 ** 8)) 547 | organization = uuid.uuid4().hex 548 | 549 | if no_ca: 550 | update_conf(CSR_TEMPLATE_CONF, CSR_CONF, {'O':organization}) 551 | cmd = f"openssl req -new -newkey rsa -nodes -x509 -days 3650 -keyout {client_default_key} -out {client_default_pem} -config {CSR_CONF} 2>/dev/null" 552 | stdout = subprocess.check_output(cmd, shell=True) 553 | shutil.copy(client_default_pem, SERVER_CERTS_PEM_PATH.format(ssl_helper.CLIENT_DEFAULT_CERT_NAME)) 554 | else: 555 | #create csr and sign it with server_ca 556 | if not os.path.exists(SERVER_CA_CSR_CONF): 557 | with open(SERVER_CA_CSR_CONF, 'w') as fd: 558 | fd.write(CA_CSR_CREATE_CNF.format(org=organization)) 559 | else: 560 | logger.info(f'Using preexisting {SERVER_CA_CSR_CONF}') 561 | 562 | create_csr_cmd = f"openssl req -config {SERVER_CA_CSR_CONF} -newkey rsa:2048 -sha256 -nodes -keyout "\ 563 | f"{client_default_key} -out {SERVER_CA_CSR_PEM_PATH} -outform PEM" 564 | stdout = subprocess.check_output(create_csr_cmd, shell=True) 565 | 566 | logger.info('Sign client default certificate CSR') 567 | 568 | # Create the index file 569 | index_file_path = os.path.join(certificates_path_server_server, 'index.txt') 570 | if not os.path.exists(index_file_path): 571 | open(index_file_path, 'w').close() 572 | 573 | create_certificate_cmd = f"openssl ca -rand_serial -batch -policy signing_policy -config {SERVER_CA_DETAILS_CONF} "\ 574 | f"-extensions signing_req -out {client_default_pem} -infiles {SERVER_CA_CSR_PEM_PATH}" 575 | stdout = subprocess.check_output(create_certificate_cmd, shell=True) 576 | 577 | shutil.copy(client_default_pem, SERVER_CERTS_PEM_PATH.format(ssl_helper.CLIENT_DEFAULT_CERT_NAME)) 578 | 579 | 580 | # 3) Calculate hash of client default certificate 581 | cmd = f'openssl x509 -hash -noout -in {client_default_pem}' 582 | stdout = subprocess.check_output(cmd, shell=True) 583 | the_hash_name = stdout.decode().strip()+'.0' 584 | 585 | # 4) Create symlink of client default certificate in server directory 586 | dst = os.path.join(certificates_path_server_client_sym, the_hash_name) 587 | if os.path.exists(dst): 588 | os.remove(dst) 589 | 590 | cmd = f'ln -s ../{ssl_helper.CLIENT_DEFAULT_CERT_NAME}.pem '+dst 591 | stdout = subprocess.check_output(cmd, shell=True) 592 | 593 | logger.info('Finished create_certificates') 594 | return True 595 | 596 | def replace_server_certificate(logger, server_certificate_path=None, certificates_directory_path=None, revert=False): 597 | '''server_certificate_path should be the path to server.pem, where server.key also exists''' 598 | if not server_certificate_path and not revert: 599 | logger.warning('replace_server_certificate bad arguments') 600 | return False 601 | 602 | ssl_helper = SSL_helper(logger, is_server=True, certificates_directory_path=certificates_directory_path, tool_only=True) 603 | backup_server_pem = ssl_helper.SERVER_PEM_PATH+'.org' 604 | backup_server_key = ssl_helper.SERVER_KEY_PATH+'.org' 605 | 
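    #editor's aside : typical usage (the paths are examples only) -
    #    replace_server_certificate(logger, server_certificate_path='/etc/pki/new/server.pem')
    #expects /etc/pki/new/server.key to exist next to the pem ; later,
    #    replace_server_certificate(logger, revert=True)
    #restores the original pem/key from the .org backups created below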
606 | if revert: 607 | logger.info('Reverting server certificate to original') 608 | shutil.move(backup_server_pem, ssl_helper.SERVER_PEM_PATH) 609 | shutil.move(backup_server_key, ssl_helper.SERVER_KEY_PATH) 610 | return True 611 | 612 | logger.info(f'Setting new server certificate from {server_certificate_path}') 613 | shutil.copy(ssl_helper.SERVER_PEM_PATH, backup_server_pem) 614 | shutil.copy(ssl_helper.SERVER_KEY_PATH, backup_server_key) 615 | shutil.copy(server_certificate_path, ssl_helper.SERVER_PEM_PATH) 616 | shutil.copy(server_certificate_path.replace('.'+ssl_helper.CERT_NAME_EXTENSION, '.'+ssl_helper.KEY_NAME_EXTENSION), 617 | ssl_helper.SERVER_KEY_PATH) 618 | #set owner (current user) and permissions (taken from original server pem/key) 619 | current_uid, current_gid = os.getuid(), os.getgid() 620 | shutil.copystat(backup_server_pem, ssl_helper.SERVER_PEM_PATH) 621 | shutil.copystat(backup_server_key, ssl_helper.SERVER_KEY_PATH) 622 | shutil.chown(ssl_helper.SERVER_PEM_PATH, user=current_uid, group=current_gid) 623 | shutil.chown(ssl_helper.SERVER_KEY_PATH, user=current_uid, group=current_gid) 624 | return True 625 | 626 | 627 | 628 | 629 | 630 | -------------------------------------------------------------------------------- /aioconnectors_test.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import sys 3 | from datetime import datetime 4 | import os 5 | from copy import deepcopy 6 | 7 | import aioconnectors 8 | 9 | ''' 10 | BEWARE : This file contains only non official test examples that require tweaks to run correctly. 11 | 12 | usage : 13 | First you can create your certificates with the command : 14 | python3 -m aioconnectors create_certificates 15 | 16 | Then, open 4 different shells in this order : 17 | python3 aioconnectors_test.py server 18 | python3 aioconnectors_test.py client 19 | python3 aioconnectors_test.py send2server 20 | python3 aioconnectors_test.py send2client 21 | ''' 22 | 23 | SERVER_SOCKADDR = ('127.0.0.1',10673) 24 | CERTIFICATES_DIRECTORY_PATH = None#'/tmp/mo' #None #default is cwd/connectors/certificates 25 | 26 | 27 | #Here we assume that send_message_types and recv_message_types are based on ['type1','type2'] 28 | results = {} 29 | 30 | ########################### TEST FLAGS ############## 31 | TEST_WITH_SSL = True 32 | TEST_TRAFFIC_CLIENT = True 33 | TEST_TRAFFIC_SERVER = False 34 | TEST_PERSISTENCE_CLIENT = False 35 | TEST_PERSISTENCE_SERVER = False 36 | TEST_SERVER_AWAITS_REPLY = False 37 | TEST_CLIENT_AWAITS_REPLY = False 38 | TEST_PERSISTENCE_CLIENT_AWAIT_REPLY = False 39 | TEST_MULTIPLE_CLIENTS = True 40 | TEST_UPLOAD_FILE = False 41 | TEST_UPLOAD_FILE_WITH_PERSISTENCE = False 42 | TEST_COMMANDER_SERVER = False #delete_client_certificate 43 | TEST_COMMANDER_CLIENT = False #delete_client_certificate 44 | TEST_WITH_ACK = False 45 | TEST_WITH_SSL_ALLOW_ALL = True 46 | TEST_WITH_TOKEN = False 47 | TEST_WITH_CLIENT_KEEP_ALIVE = True 48 | TEST_SERVER_WITH_CA = False 49 | PUBSUB = False #client1 uses publish, client2 uses subscribe_message_types 50 | 51 | 52 | 53 | ########################### TEST VALUES ############## 54 | UDS_PATH_RECEIVE_PRESERVE_SOCKET = True 55 | UDS_PATH_SEND_PRESERVE_SOCKET = True 56 | DEFAULT_LOGGER_LOG_LEVEL = 'INFO' #'DEBUG' 57 | SILENT=False 58 | TEST_DEBUG_MSG_COUNTS = True 59 | CLIENT_MESSAGE_TYPES = ['type2', 'type1'] 60 | SERVER_MESSAGE_TYPES = ['type1','type2'] 61 | PERSISTENCE_CLIENT = ['type1','type2'] if (TEST_PERSISTENCE_CLIENT or 
TEST_UPLOAD_FILE_WITH_PERSISTENCE or TEST_PERSISTENCE_CLIENT_AWAIT_REPLY) else False #True means persistence for both 'type1' and 'type2' 62 | PERSISTENCE_SERVER = True if TEST_PERSISTENCE_SERVER else False 63 | PERSISTENCE_CLIENT_DELETE_PREVIOUS_PERSISTENCE_FILE = True 64 | PERSISTENCE_SERVER_DELETE_PREVIOUS_PERSISTENCE_FILE = True 65 | CLIENT_NAMES = ['client1','client2'] if TEST_MULTIPLE_CLIENTS else ['client1'] 66 | CLIENT_KEEP_ALIVE_PERIOD = 4 if TEST_WITH_CLIENT_KEEP_ALIVE else 0 67 | if TEST_UPLOAD_FILE or TEST_UPLOAD_FILE_WITH_PERSISTENCE: 68 | FILE_RECV_CONFIG = {'file1':{'target_directory':'/tmp/file1'}, 'file2':{'target_directory':'/tmp/file2'}} #{} 69 | FILE_SRC_PATH = '/tmp/file_src' #'' 70 | 71 | if FILE_RECV_CONFIG: 72 | for dir_path in FILE_RECV_CONFIG.values(): 73 | dir_path = dir_path['target_directory'] 74 | if not os.path.exists(dir_path): 75 | print('Creating directory '+dir_path+' for FILE_RECV_CONFIG') 76 | os.makedirs(dir_path) 77 | if FILE_SRC_PATH: 78 | if not os.path.exists(FILE_SRC_PATH): 79 | with open(FILE_SRC_PATH, 'w') as fd: 80 | fd.write('Test upload file') 81 | else: 82 | FILE_RECV_CONFIG = None 83 | 84 | ########################### END TEST ############## 85 | 86 | async def print_results(interval=2): 87 | #shows source_id:message_type:peer_id:sendORrecv 88 | # start = time.time() 89 | while True: 90 | await asyncio.sleep(interval) 91 | print(str(datetime.now())) 92 | print(results) 93 | #if str(results).count('300')>=2: 94 | # end = time.now() 95 | # print('Duration in seconds : '+str(end-start)) 96 | 97 | 98 | def increment_result(source_id, peer_id, message_type, sendORrecv): 99 | if source_id not in results: 100 | results[source_id] = {} 101 | if message_type not in results[source_id]: 102 | results[source_id][message_type] = {} 103 | if peer_id not in results[source_id][message_type]: 104 | results[source_id][message_type][peer_id] = {'send':0, 'recv':0} 105 | #if sendORrecv not in results[name][event_type][source_id]: 106 | # results[source_id][event_type][peer_id][sendORrecv] = 0 107 | results[source_id][message_type][peer_id][sendORrecv] +=1 108 | 109 | 110 | if __name__ == '__main__': 111 | if len(sys.argv) > 1: 112 | print(sys.argv) 113 | local_name = None 114 | if len(sys.argv) > 2: 115 | local_name = sys.argv[2] #currently only for client 116 | 117 | if sys.argv[1] == 'server': 118 | print('Started server') 119 | 120 | connector_manager = aioconnectors.ConnectorManager(config_file_path=None, default_logger_log_level=DEFAULT_LOGGER_LOG_LEVEL, 121 | is_server=True, server_sockaddr=SERVER_SOCKADDR, use_ssl=TEST_WITH_SSL, 122 | certificates_directory_path=CERTIFICATES_DIRECTORY_PATH, 123 | disk_persistence_send=PERSISTENCE_SERVER, disk_persistence_recv=PERSISTENCE_SERVER, 124 | debug_msg_counts=TEST_DEBUG_MSG_COUNTS, silent=SILENT, #use_ack=TEST_WITH_ACK, 125 | send_message_types=SERVER_MESSAGE_TYPES, recv_message_types=CLIENT_MESSAGE_TYPES, 126 | file_recv_config=FILE_RECV_CONFIG, reuse_server_sockaddr=True, 127 | uds_path_receive_preserve_socket=UDS_PATH_RECEIVE_PRESERVE_SOCKET, 128 | uds_path_send_preserve_socket=UDS_PATH_SEND_PRESERVE_SOCKET, 129 | ssl_allow_all=TEST_WITH_SSL_ALLOW_ALL, use_token=TEST_WITH_TOKEN, 130 | server_ca=TEST_SERVER_WITH_CA, pubsub_central_broker=PUBSUB) 131 | loop = asyncio.get_event_loop() 132 | 133 | if PERSISTENCE_CLIENT_DELETE_PREVIOUS_PERSISTENCE_FILE: 134 | connector_manager.delete_previous_persistence_remains() 135 | 136 | loop.create_task(connector_manager.start_connector()) 137 | 
#loop.create_task(connector_manager.stop_connector(delay=10, hard=True)) 138 | 139 | if TEST_PERSISTENCE_CLIENT: 140 | loop.create_task(connector_manager.restart_connector(delay=15, sleep_between=3, hard=True)) 141 | if TEST_UPLOAD_FILE_WITH_PERSISTENCE: 142 | loop.create_task(connector_manager.restart_connector(delay=15, sleep_between=4, hard=True)) 143 | if TEST_PERSISTENCE_CLIENT_AWAIT_REPLY: 144 | loop.create_task(connector_manager.restart_connector(delay=12, sleep_between=2, hard=True)) 145 | 146 | try: 147 | loop.run_forever() 148 | except: 149 | task_stop = loop.create_task(connector_manager.stop_connector(shutdown=True)) 150 | loop.run_until_complete(task_stop) 151 | del connector_manager 152 | print('Server stopped !') 153 | 154 | elif sys.argv[1] == 'client': 155 | print('Started client') 156 | if TEST_PERSISTENCE_CLIENT or TEST_PERSISTENCE_CLIENT_AWAIT_REPLY: 157 | disk_persistence = ['type1','type2'] 158 | if PUBSUB and (local_name == 'client2'): 159 | subscribe_message_types = SERVER_MESSAGE_TYPES 160 | else: 161 | subscribe_message_types = None 162 | connector_manager = aioconnectors.ConnectorManager(default_logger_log_level=DEFAULT_LOGGER_LOG_LEVEL, 163 | is_server=False, server_sockaddr=SERVER_SOCKADDR, use_ssl=TEST_WITH_SSL, 164 | certificates_directory_path=CERTIFICATES_DIRECTORY_PATH, 165 | client_name=local_name, disk_persistence_send=PERSISTENCE_CLIENT, 166 | disk_persistence_recv=PERSISTENCE_CLIENT, debug_msg_counts=TEST_DEBUG_MSG_COUNTS, 167 | file_recv_config=FILE_RECV_CONFIG, silent=SILENT, #use_ack=TEST_WITH_ACK, 168 | send_message_types=CLIENT_MESSAGE_TYPES, recv_message_types=SERVER_MESSAGE_TYPES, 169 | uds_path_receive_preserve_socket=UDS_PATH_RECEIVE_PRESERVE_SOCKET, 170 | uds_path_send_preserve_socket=UDS_PATH_SEND_PRESERVE_SOCKET, 171 | ssl_allow_all=TEST_WITH_SSL_ALLOW_ALL, keep_alive_period=CLIENT_KEEP_ALIVE_PERIOD, 172 | send_message_types_priorities={'type1':1,'type2':2}, use_token=TEST_WITH_TOKEN, 173 | subscribe_message_types=subscribe_message_types) 174 | loop = asyncio.get_event_loop() 175 | 176 | if PERSISTENCE_CLIENT_DELETE_PREVIOUS_PERSISTENCE_FILE: 177 | connector_manager.delete_previous_persistence_remains() 178 | 179 | loop.create_task(connector_manager.start_connector()) 180 | #loop.create_task(connector_manager.stop_connector(delay=7, hard=True)) 181 | 182 | #loop.create_task(connector_manager.restart_connector(delay=20, sleep_between=3)) 183 | if TEST_PERSISTENCE_SERVER: 184 | #loop.create_task(connector_manager.restart_connector(delay=7, sleep_between=5)) 185 | loop.create_task(connector_manager.restart_connector(delay=16, sleep_between=2, hard=True)) 186 | 187 | 188 | try: 189 | loop.run_forever() 190 | except: 191 | task_stop = loop.create_task(connector_manager.stop_connector(shutdown=True)) 192 | loop.run_until_complete(task_stop) 193 | del connector_manager 194 | print('Client stopped !') 195 | 196 | 197 | elif sys.argv[1] == 'send2client': 198 | print('Started send2client') 199 | own_source_id = local_name or CLIENT_NAMES[0] 200 | 201 | connector_api = aioconnectors.ConnectorAPI(default_logger_log_level=DEFAULT_LOGGER_LOG_LEVEL, server_sockaddr=SERVER_SOCKADDR, client_name=own_source_id, is_server=False, 202 | send_message_types=CLIENT_MESSAGE_TYPES, recv_message_types=SERVER_MESSAGE_TYPES, 203 | uds_path_send_preserve_socket=UDS_PATH_SEND_PRESERVE_SOCKET)#, uds_path_receive_preserve_socket=UDS_PATH_RECEIVE_PRESERVE_SOCKET) 204 | loop = asyncio.get_event_loop() 205 | 206 | if TEST_COMMANDER_CLIENT: 207 | 
loop.create_task(connector_api.delete_client_certificate()) 208 | 209 | async def client_cb_type1(logger, transport_json , data, binary): 210 | peer_id = transport_json['source_id'] 211 | increment_result(own_source_id, peer_id, 'type1', 'recv') 212 | 213 | 214 | async def client_cb_type2(logger, transport_json , data, binary): 215 | peer_id = transport_json['source_id'] 216 | increment_result(own_source_id, peer_id, 'type2', 'recv') 217 | 218 | 219 | loop.create_task(print_results()) 220 | #wait for messages from server (call once only) 221 | if True: #TEST_PERSISTENCE_CLIENT or TEST_PERSISTENCE_SERVER or TEST_SERVER_AWAITS_REPLY or TEST_CLIENT_AWAITS_REPLY or TEST_UPLOAD_FILE: 222 | loop.create_task(connector_api.start_waiting_for_messages(message_type='type2', message_received_cb=client_cb_type2)) 223 | loop.create_task(connector_api.start_waiting_for_messages(message_type='type1', message_received_cb=client_cb_type1)) 224 | 225 | if PUBSUB: 226 | if local_name == 'client1': 227 | message_method = connector_api.publish_message 228 | else: 229 | message_method = None 230 | else: 231 | message_method = connector_api.send_message 232 | 233 | if message_method: 234 | 235 | async def send_stress(message_type, peer_id, delay): 236 | await asyncio.sleep(delay) 237 | 238 | index = 0 239 | await_response = False 240 | with_file_template = False 241 | with_file = None 242 | 243 | if TEST_PERSISTENCE_CLIENT: 244 | duration_test = 20 #seconds 245 | messages_per_second = 1000 246 | if TEST_WITH_ACK: 247 | messages_per_second = 10 248 | elif (TEST_SERVER_AWAITS_REPLY or TEST_CLIENT_AWAITS_REPLY): 249 | duration_test = 10 #seconds 250 | messages_per_second = 10 251 | if TEST_CLIENT_AWAITS_REPLY: 252 | await_response = True 253 | elif TEST_UPLOAD_FILE or TEST_UPLOAD_FILE_WITH_PERSISTENCE: 254 | duration_test = 10 #seconds 255 | if TEST_UPLOAD_FILE_WITH_PERSISTENCE: 256 | duration_test = 20 257 | messages_per_second = 1 258 | with_file_template={'src_path':FILE_SRC_PATH,'dst_type':'file1', 'dst_name':os.path.basename(FILE_SRC_PATH)+'_from_client_'+own_source_id+'_index_{}', 'delete':False} #default is delete=True 259 | elif TEST_TRAFFIC_CLIENT: 260 | duration_test = 15 #seconds 261 | messages_per_second = 1000 #10000 262 | if TEST_WITH_ACK: 263 | messages_per_second = 10 264 | elif TEST_PERSISTENCE_CLIENT_AWAIT_REPLY: 265 | duration_test = 20 #seconds 266 | messages_per_second = 2 267 | await_response = True 268 | 269 | max_index = duration_test * messages_per_second 270 | sleep_time = 1/messages_per_second 271 | request_id = response_id = None 272 | while index < max_index: 273 | index += 1 274 | data = 'טסט(% ;)'+str(index)*200 275 | #increment_result(own_source_id, peer_id, message_type, 'send') 276 | #while results[own_source_id][message_type][peer_id]['recv'] != index: 277 | # await asyncio.sleep(0.1) 278 | if TEST_SERVER_AWAITS_REPLY: 279 | response_id = index 280 | else: 281 | request_id = index 282 | if with_file_template: 283 | with_file = deepcopy(with_file_template) 284 | with_file['dst_name'] = with_file['dst_name'].format(index) 285 | 286 | response = await message_method(data=data, data_is_json=False, 287 | message_type=message_type, await_response=await_response, response_id=response_id, request_id=request_id, 288 | binary=b'\x01\x02\x03\x04\x05', with_file=with_file, wait_for_ack=TEST_WITH_ACK) 289 | increment_result(own_source_id, peer_id, message_type, 'send') 290 | #await asyncio.sleep(sleep_time) 291 | 292 | if TEST_CLIENT_AWAITS_REPLY or TEST_PERSISTENCE_CLIENT_AWAIT_REPLY: 293 | 
increment_result(own_source_id, peer_id, message_type, 'recv')
294 |                     else:
295 |                         await asyncio.sleep(sleep_time)
296 | 
297 |                 print('Finished')
298 | 
299 | 
300 |             if TEST_PERSISTENCE_CLIENT or TEST_PERSISTENCE_CLIENT_AWAIT_REPLY:
301 |                 loop.create_task(send_stress(message_type='type1', peer_id=str(SERVER_SOCKADDR), delay=2))
302 |                 loop.create_task(send_stress(message_type='type2', peer_id=str(SERVER_SOCKADDR), delay=2))
303 |             elif TEST_SERVER_AWAITS_REPLY:
304 |                 loop.create_task(send_stress(message_type='type2', peer_id=str(SERVER_SOCKADDR), delay=7))
305 |             elif TEST_CLIENT_AWAITS_REPLY:
306 |                 loop.create_task(send_stress(message_type='type2', peer_id=str(SERVER_SOCKADDR), delay=3))
307 |             elif TEST_UPLOAD_FILE or TEST_UPLOAD_FILE_WITH_PERSISTENCE:
308 |                 loop.create_task(send_stress(message_type='type1', peer_id=str(SERVER_SOCKADDR), delay=3))
309 |             elif TEST_TRAFFIC_CLIENT:
310 |                 loop.create_task(send_stress(message_type='type1', peer_id=str(SERVER_SOCKADDR), delay=2))
311 |                 loop.create_task(send_stress(message_type='type2', peer_id=str(SERVER_SOCKADDR), delay=2))
312 | 
313 |         try:
314 |             loop.run_forever()
315 |         except KeyboardInterrupt:
316 |             print('send2client stopped!')
317 |             connector_api.stop_waiting_for_messages(message_type='type2')
318 |             connector_api.stop_waiting_for_messages(message_type='type1')
319 |             #for task in tasks:
320 |             #    task.cancel()
321 | 
322 |     elif sys.argv[1] == 'send2server':
323 |         print('Started send2server')
324 |         own_source_id = str(SERVER_SOCKADDR)
325 |         #name = local_name or str(SERVER_SOCKADDR)
326 |         #server_source_id = str(SERVER_SOCKADDR)
327 | 
328 |         connector_api = aioconnectors.ConnectorAPI(default_logger_log_level=DEFAULT_LOGGER_LOG_LEVEL, is_server=True, server_sockaddr=SERVER_SOCKADDR,
329 |                                 send_message_types=SERVER_MESSAGE_TYPES, recv_message_types=CLIENT_MESSAGE_TYPES,
330 |                                 uds_path_send_preserve_socket=UDS_PATH_SEND_PRESERVE_SOCKET)#, uds_path_receive_preserve_socket=UDS_PATH_RECEIVE_PRESERVE_SOCKET)
331 |         # connector_api = aioconnectors.ConnectorAPI(is_server=True, server_sockaddr=own_source_id, use_ssl=TEST_WITH_SSL, certificates_directory_path=CERTIFICATES_DIRECTORY_PATH,
332 | 
333 | 
334 |         loop = asyncio.get_event_loop()
335 | 
336 |         if TEST_COMMANDER_SERVER:
337 |             loop.create_task(connector_api.delete_client_certificate(client_id='client2', remove_only_symlink=False))
338 |         '''
339 |         async def print_queues(period):
340 |             while True:
341 |                 res = await connector_api.peek_queues()
342 |                 await asyncio.sleep(period)
343 |                 print(res)
344 |         loop.create_task(print_queues(3))
345 |         '''
346 | 
347 |         async def server_cb_type1(logger, transport_json, data, binary):
348 |             peer_id = transport_json['source_id']
349 |             increment_result(own_source_id, peer_id, 'type1', 'recv')
350 | 
351 | 
352 |         async def server_cb_type2(logger, transport_json, data, binary):
353 |             peer_id = transport_json['source_id']
354 |             increment_result(own_source_id, peer_id, 'type2', 'recv')
355 | 
356 | 
357 |         loop.create_task(print_results())
358 |         #wait for messages from client (call once only)
359 |         if True: #TEST_PERSISTENCE_CLIENT or TEST_PERSISTENCE_SERVER or TEST_CLIENT_AWAITS_REPLY or TEST_UPLOAD_FILE:
360 |             loop.create_task(connector_api.start_waiting_for_messages(message_type='type1', message_received_cb=server_cb_type1))
361 |             loop.create_task(connector_api.start_waiting_for_messages(message_type='type2', message_received_cb=server_cb_type2))
362 | 
363 |         async def send_stress(message_type, peer_id, delay=0):
364 |             index = 0
365 |             await_response = False
366 |             with_file_template = False
367 |             with_file = None
368 | 
369 |             if TEST_PERSISTENCE_SERVER:
370 |                 duration_test = 20 #seconds
371 |                 messages_per_second = 1000 #1000
372 |                 if TEST_WITH_ACK:
373 |                     messages_per_second = 10
374 |             elif TEST_SERVER_AWAITS_REPLY or TEST_CLIENT_AWAITS_REPLY:
375 |                 duration_test = 10 #seconds
376 |                 messages_per_second = 10 #1000
377 |                 if TEST_SERVER_AWAITS_REPLY:
378 |                     await_response = True
379 |             elif TEST_UPLOAD_FILE:
380 |                 duration_test = 10 #seconds
381 |                 messages_per_second = 1
382 |                 with_file_template = {'src_path':FILE_SRC_PATH,'dst_type':'file2', 'dst_name':os.path.basename(FILE_SRC_PATH)+'_to_client_'+peer_id+'_index_{}', 'delete':False} #default is delete=True
383 |             elif TEST_TRAFFIC_SERVER:
384 |                 duration_test = 20 #seconds
385 |                 messages_per_second = 1000 #1000
386 |                 if TEST_WITH_ACK:
387 |                     messages_per_second = 10
388 |             elif TEST_PERSISTENCE_CLIENT_AWAIT_REPLY:
389 |                 duration_test = 20 #seconds
390 |                 messages_per_second = 2
391 | 
392 |             max_index = duration_test * messages_per_second
393 |             sleep_time = 1/messages_per_second
394 | 
395 |             await asyncio.sleep(delay) #let the server start, then clients
396 |             request_id = response_id = None
397 |             while index < max_index:
398 |                 index += 1
399 |                 data = str(index)*200
400 |                 if TEST_CLIENT_AWAITS_REPLY or TEST_PERSISTENCE_CLIENT_AWAIT_REPLY:
401 |                     response_id = index
402 |                 else:
403 |                     request_id = index
404 |                 if with_file_template:
405 |                     with_file = deepcopy(with_file_template)
406 |                     with_file['dst_name'] = with_file['dst_name'].format(index)
407 | 
408 |                 response = await connector_api.send_message(data=data, data_is_json=False, destination_id=peer_id,
409 |                                 message_type=message_type, await_response=await_response, request_id=request_id,
410 |                                 response_id=response_id, binary=b'\x01\x02\x03\x04\x05', with_file=with_file, wait_for_ack=TEST_WITH_ACK)
411 | 
412 |                 increment_result(own_source_id, peer_id, message_type, 'send')
413 | 
414 |                 if TEST_SERVER_AWAITS_REPLY:
415 |                     increment_result(own_source_id, peer_id, message_type, 'recv')
416 |                 else:
417 |                     await asyncio.sleep(sleep_time)
418 | 
419 |             print('Finished')
420 | 
421 |         if TEST_PERSISTENCE_SERVER:
422 |             for client_id in CLIENT_NAMES:
423 |                 loop.create_task(send_stress(message_type='type2', peer_id=client_id, delay=5))
424 |         elif TEST_SERVER_AWAITS_REPLY:
425 |             for client_id in CLIENT_NAMES:
426 |                 loop.create_task(send_stress(message_type='type2', peer_id=client_id, delay=4))
427 |         elif TEST_CLIENT_AWAITS_REPLY:
428 |             for client_id in CLIENT_NAMES:
429 |                 loop.create_task(send_stress(message_type='type2', peer_id=client_id, delay=8))
430 |         elif TEST_UPLOAD_FILE:
431 |             for client_id in CLIENT_NAMES:
432 |                 loop.create_task(send_stress(message_type='type2', peer_id=client_id, delay=5))
433 |         elif TEST_TRAFFIC_SERVER:
434 |             for client_id in CLIENT_NAMES:
435 |                 loop.create_task(send_stress(message_type='type2', peer_id=client_id, delay=5))
436 |         elif TEST_PERSISTENCE_CLIENT_AWAIT_REPLY:
437 |             for client_id in CLIENT_NAMES:
438 |                 loop.create_task(send_stress(message_type='type1', peer_id=client_id, delay=6))
439 | 
440 | 
441 | 
442 | 
443 |         try:
444 |             loop.run_forever()
445 |         except KeyboardInterrupt:
446 |             print('send2server stopped!')
447 |             connector_api.stop_waiting_for_messages(message_type='type1')
448 |             connector_api.stop_waiting_for_messages(message_type='type2')
449 | 
450 |             # for task in tasks:
451 |             #     task.cancel()
452 | 
453 | 
454 |     else:
455 |         print('Bad input', sys.argv)
456 | else:
457 |     print('Bad Parameters', sys.argv)
458 | 
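Usage sketch (illustrative, not part of the test script): the minimal start/stop pattern exercised by the 'client' branch above, with placeholder values standing in for the script's module-level constants (SERVER_SOCKADDR, local_name, the message-type lists):

    # Minimal sketch of the client start/stop pattern used above.
    # The sockaddr and client_name below are assumed placeholder values.
    import asyncio
    import aioconnectors

    connector_manager = aioconnectors.ConnectorManager(
        is_server=False,
        server_sockaddr=('127.0.0.1', 10673),  # assumed test address
        client_name='client1',                 # hypothetical client name
        send_message_types=['type1', 'type2'],
        recv_message_types=['type1', 'type2'])

    loop = asyncio.get_event_loop()
    loop.create_task(connector_manager.start_connector())
    try:
        loop.run_forever()
    except KeyboardInterrupt:
        loop.run_until_complete(connector_manager.stop_connector(shutdown=True))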
-------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | """A setuptools based setup module. 2 | 3 | See: 4 | https://packaging.python.org/en/latest/distributing.html 5 | https://github.com/pypa/sampleproject 6 | python3 setup.py bdist_wheel 7 | python3 setup.py sdist bdist_wheel 8 | python3 -m twine upload dist/* 9 | 10 | package dependencies offline : dep.txt contains dependencies freeze 11 | python3 -m pip wheel --wheel-dir= -r dep.txt 12 | install dependencies offline : 13 | python3 -m pip install --no-index --find-links= -r dep.txt 14 | """ 15 | 16 | # Always prefer setuptools over distutils 17 | from setuptools import setup, find_packages 18 | # To use a consistent encoding 19 | from codecs import open 20 | from os import path 21 | 22 | VERSION = '1.6.3' 23 | 24 | here = path.abspath(path.dirname(__file__)) 25 | # Get the long description from the README file 26 | with open(path.join(here, 'README.md'), encoding='utf-8') as fd: 27 | long_description = fd.read() 28 | 29 | # Arguments marked as "Required" below must be included for upload to PyPI. 30 | # Fields marked as "Optional" may be commented out. 31 | 32 | setup( 33 | # This is the name of your project. The first time you publish this 34 | # package, this name will be registered for you. It will determine how 35 | # users can install this project, e.g.: 36 | # 37 | # $ pip install sampleproject 38 | # 39 | # And where it will live on PyPI: https://pypi.org/project/sampleproject/ 40 | # 41 | # There are some restrictions on what makes a valid project name 42 | # specification here: 43 | # https://packaging.python.org/specifications/core-metadata/#name 44 | name='aioconnectors', # Required 45 | 46 | # Versions should comply with PEP 440: 47 | # https://www.python.org/dev/peps/pep-0440/ 48 | # 49 | # For a discussion on single-sourcing the version across setup.py and the 50 | # project code, see 51 | # https://packaging.python.org/en/latest/single_source_version.html 52 | version=VERSION, # Required 53 | 54 | # This is a one-line description or tagline of what your project does. This 55 | # corresponds to the "Summary" metadata field: 56 | # https://packaging.python.org/specifications/core-metadata/#summary 57 | description='Simple secure asynchronous message queue', # Required 58 | 59 | # This is an optional longer description of your project that represents 60 | # the body of text which users will see when they visit PyPI. 61 | # 62 | # Often, this is the same as your README, so you can just read it in from 63 | # that file directly (as we have already done above) 64 | # 65 | # This field corresponds to the "Description" metadata field: 66 | # https://packaging.python.org/specifications/core-metadata/#description-optional 67 | long_description=long_description, # Optional 68 | long_description_content_type='text/markdown', # Optional 69 | 70 | # This should be a valid link to your project's main homepage. 71 | # 72 | # This field corresponds to the "Home-Page" metadata field: 73 | # https://packaging.python.org/specifications/core-metadata/#home-page-optional 74 | url='https://github.com/mori-b/aioconnectors', # Optional 75 | 76 | # This should be your name or the name of the organization which owns the 77 | # project. 78 | author='Mori Benech', # Optional 79 | 80 | # This should be a valid email address corresponding to the author listed 81 | # above. 
82 | author_email='moribirom@gmail.com', # Optional 83 | 84 | # Classifiers help users find your project by categorizing it. 85 | # 86 | # For a list of valid classifiers, see 87 | # https://pypi.python.org/pypi?%3Aaction=list_classifiers 88 | classifiers=[ # Optional 89 | # How mature is this project? Common values are 90 | # 3 - Alpha 91 | # 4 - Beta 92 | # 5 - Production/Stable 93 | 'Development Status :: 5 - Production/Stable', 94 | 95 | # Indicate who your project is intended for 96 | 'Intended Audience :: Developers', 97 | 'Topic :: Software Development :: Libraries', 98 | 99 | # Pick your license as you wish 100 | 'License :: OSI Approved :: Apache Software License', 101 | 102 | # Specify the Python versions you support here. In particular, ensure 103 | # that you indicate whether you support Python 2, Python 3 or both. 104 | 'Programming Language :: Python :: 3', 105 | ], 106 | 107 | python_requires='>=3.6', 108 | 109 | # This field adds keywords for your project which will appear on the 110 | # project page. What does your project relate to? 111 | # 112 | keywords=['message queue', 'broker', 'asyncio', 'simple', 'easy'], # Optional 113 | 114 | # You can just specify package directories manually here if your project is 115 | # simple. Or you can use find_packages(). 116 | # 117 | # Alternatively, if you just want to distribute a single Python file, use 118 | # the `py_modules` argument instead as follows, which will expect a file 119 | # called `my_module.py` to exist: 120 | # 121 | # py_modules=["my_module"], 122 | # 123 | # packages=find_packages(exclude=['contrib', 'docs', 'tests']), # Required 124 | packages=['aioconnectors',], 125 | # This field lists other packages that your project depends on to run. 126 | # Any package you put here will be installed by pip when your project is 127 | # installed, so they must be valid existing projects. 128 | # 129 | # For an analysis of "install_requires" vs pip's requirements files see: 130 | # https://packaging.python.org/en/latest/requirements.html 131 | install_requires=[], # Optional 132 | 133 | # List additional groups of dependencies here (e.g. development 134 | # dependencies). Users will be able to install these using the "extras" 135 | # syntax, for example: 136 | # 137 | # $ pip install sampleproject[dev] 138 | # 139 | # Similar to `install_requires` above, these must be valid existing 140 | # projects. 141 | # extras_require={ # Optional 142 | # 'dev': ['check-manifest'], 143 | # 'test': ['coverage'], 144 | # }, 145 | 146 | # If there are data files included in your packages that need to be 147 | # installed, specify them here. 148 | # 149 | # If using Python 2.6 or earlier, then these have to be included in 150 | # MANIFEST.in as well. 151 | package_data={ # Optional 152 | }, 153 | 154 | # Although 'package_data' is the preferred approach, in some case you may 155 | # need to place data files outside of your packages. See: 156 | # http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files 157 | # 158 | # In this case, 'data_file' will be installed into '/my_data' 159 | # data_files=[('my_data', ['data/data_file'])], # Optional 160 | include_package_data=False 161 | # To provide executable scripts, use entry points in preference to the 162 | # "scripts" keyword. Entry points provide cross-platform support and allow 163 | # `pip` to create the appropriate form of executable for the target 164 | # platform. 
165 |     #
166 |     # For example, the following would provide a command called `sample` which
167 |     # executes the function `main` from this package when invoked:
168 |     # entry_points={  # Optional
169 |     #     'console_scripts': [
170 |     #         'sample=sample:main',
171 |     #     ],
172 |     # },
173 | )
174 | 
--------------------------------------------------------------------------------
/standalone_api.py:
--------------------------------------------------------------------------------
1 | '''This file is a simplified standalone version of api.ConnectorAPI
2 | which could be translated to other languages like javascript'''
3 | 
4 | import os
5 | import asyncio
6 | import json
7 | from struct import Struct #stdlib struct; the original 'structs' import was a typo
8 | from functools import partial
9 | import uuid
10 | import sys, hashlib, logging #needed by PYTHON_GREATER_37, limit_length_for_uds and self.logger below
11 | class MessageFields:
12 |     MESSAGE_TYPE = 'message_type' #'_ssl', '_ack', '_ping', ...
13 |     SOURCE_ID = 'source_id' #str
14 |     DESTINATION_ID = 'destination_id' #str
15 |     REQUEST_ID = 'request_id' #int
16 |     RESPONSE_ID = 'response_id' #int
17 |     WITH_BINARY = 'with_binary' #boolean
18 |     AWAIT_RESPONSE = 'await_response' #boolean
19 |     WITH_FILE = 'with_file' #dict {'src_path':, 'dst_name':, 'dst_type':, 'binary_offset':, 'delete':}
20 |     TRANSPORT_ID = 'transport_id' #int
21 |     WAIT_FOR_ACK = 'wait_for_ack' #boolean
22 |     MESSAGE_TYPE_PUBLISH = 'message_type_publish' #used by pack_message below; value assumed to mirror api.py
23 | class Structures:
24 |     MSG_4_STRUCT = Struct('I') #4
25 |     MSG_2_STRUCT = Struct('H') #2
26 | 
27 | class Misc:
28 |     CHUNK_INDICATOR = '__aioconnectors_chunk'
29 | 
30 | PYTHON_GREATER_37 = sys.version_info >= (3, 7) #guards the writer.wait_closed() calls below
31 | class ConnectorAPI:
32 | 
33 |     ASYNC_TIMEOUT = 10
34 |     MAX_SOCKET_BUFFER_SIZE = 2 ** 16
35 |     UDS_PATH_RECEIVE_FROM_CONNECTOR_SERVER = 'uds_path_receive_from_connector_server_{}_{}'
36 |     UDS_PATH_RECEIVE_FROM_CONNECTOR_CLIENT = 'uds_path_receive_from_connector_client_{}_{}'
37 |     UDS_PATH_SEND_TO_CONNECTOR_SERVER = 'uds_path_send_to_connector_server_{}'
38 |     UDS_PATH_SEND_TO_CONNECTOR_CLIENT = 'uds_path_send_to_connector_client_{}'
39 |     MAX_LENGTH_UDS_PATH = 104
40 |     RECEIVE_FROM_ANY_CONNECTOR_OWNER = True
41 |     MAX_SIZE_CHUNK_UPLOAD = 209_715_200 #200mb #104_857_600 #100mb
42 |     READ_CHUNK_SIZE = 104_857_600 #100mb
43 | 
44 |     def __init__(self, config_file_path=None, connector_files_dirpath='/var/tmp/aioconnectors',
45 |                  is_server=False, server_sockaddr=('127.0.0.1',10673), client_name=None,
46 |                  send_message_types=("event","command"), recv_message_types=("event","command"),
47 |                  uds_path_receive_preserve_socket=True, uds_path_send_preserve_socket=True,
48 |                  receive_from_any_connector_owner=RECEIVE_FROM_ANY_CONNECTOR_OWNER,
49 |                  pubsub_central_broker=False):
50 |         self.logger = logging.getLogger('standalone_api') #the full api.py wires in a configured logger instead
51 |         self.connector_files_dirpath = connector_files_dirpath
52 |         if not os.path.isdir(self.connector_files_dirpath):
53 |             os.makedirs(self.connector_files_dirpath)
54 | 
55 | 
56 |         self.is_server, self.server_sockaddr, self.client_name = is_server, server_sockaddr, client_name
57 |         self.send_message_types, self.recv_message_types = send_message_types, recv_message_types
58 |         self.pubsub_central_broker = pubsub_central_broker
59 |         self.uds_path_send_preserve_socket = uds_path_send_preserve_socket
60 |         self.uds_path_receive_preserve_socket = uds_path_receive_preserve_socket
61 |         self.receive_from_any_connector_owner = receive_from_any_connector_owner
62 | 
63 |         if config_file_path:
64 |             self.config_file_path = str(config_file_path)
65 |             if os.path.exists(self.config_file_path):
66 |                 try:
67 |                     with open(self.config_file_path, 'r') as fd:
68 |                         config_json = json.load(fd)
69 |                     self.logger.info(f'Overriding {type(self).__name__} attributes {list(config_json.keys())} '
70 |                                      f'from config file {self.config_file_path}')
71 |                     for key,val in config_json.items():
72 |                         setattr(self, key, val)
73 |                 except Exception:
74 |                     self.logger.exception(f'{type(self).__name__} init config_file_path')
75 |             else:
76 |                 self.logger.warning(f'{type(self).__name__} init could not find config file at path {self.config_file_path}')
77 |         else:
78 |             self.config_file_path = config_file_path
79 | 
80 |         if self.server_sockaddr:
81 |             self.server_sockaddr = tuple(self.server_sockaddr)
82 | 
83 |         #source_id is used by send_message, will be overridden by queue_send_to_connector_put if invalid
84 |         if self.is_server:
85 |             self.source_id = str(self.server_sockaddr)
86 |         else:
87 |             if not self.client_name:
88 |                 #raise Exception('Client must have a client_name')
89 |                 self.client_name = uuid.uuid4().hex[:8]
90 |                 self.logger.warning(f'No client_name provided, using {self.client_name} instead')
91 |             self.source_id = self.client_name
92 |         self.reader_writer_uds_path_send = None
93 |         self.message_waiters = {}
94 | 
95 |         if self.pubsub_central_broker:
96 |             if self.recv_message_types is None:
97 |                 self.recv_message_types = []
98 |             self.recv_message_types.append('_pubsub')
99 |             if self.send_message_types is None:
100 |                 self.send_message_types = []
101 |             #self.send_message_types.append('_pubsub')
102 | 
103 |         self.uds_path_receive_from_connector = {}
104 |         self.send_message_lock = asyncio.Lock()
105 | 
106 |         if self.is_server:
107 |             self.alnum_source_id = '_'.join([self.alnum_name(el) for el in self.source_id.split()])
108 |             self.alnum_source_id_for_uds = self.limit_length_for_uds(self.alnum_source_id)
109 |             self.uds_path_send_to_connector = os.path.join(self.connector_files_dirpath,
110 |                                 self.UDS_PATH_SEND_TO_CONNECTOR_SERVER.format(self.alnum_source_id_for_uds))
111 |             for recv_message_type in self.recv_message_types:
112 |                 self.uds_path_receive_from_connector[recv_message_type] = os.path.join(self.connector_files_dirpath,
113 |                                 self.UDS_PATH_RECEIVE_FROM_CONNECTOR_SERVER.format(recv_message_type, self.alnum_source_id_for_uds))
114 |                 if len(self.uds_path_receive_from_connector[recv_message_type]) > self.MAX_LENGTH_UDS_PATH:
115 |                     raise Exception(f'{self.uds_path_receive_from_connector[recv_message_type]} is longer '
116 |                                     f'than {self.MAX_LENGTH_UDS_PATH}')
117 |         else:
118 |             self.alnum_source_id = self.alnum_name(self.source_id)
119 |             self.alnum_source_id_for_uds = self.limit_length_for_uds(self.alnum_source_id)
120 |             self.uds_path_send_to_connector = os.path.join(self.connector_files_dirpath,
121 |                                 self.UDS_PATH_SEND_TO_CONNECTOR_CLIENT.format(self.alnum_source_id_for_uds))
122 |             for recv_message_type in self.recv_message_types:
123 |                 self.uds_path_receive_from_connector[recv_message_type] = os.path.join(self.connector_files_dirpath,
124 |                                 self.UDS_PATH_RECEIVE_FROM_CONNECTOR_CLIENT.format(recv_message_type, self.alnum_source_id_for_uds))
125 |                 if len(self.uds_path_receive_from_connector[recv_message_type]) > self.MAX_LENGTH_UDS_PATH:
126 |                     raise Exception(f'{self.uds_path_receive_from_connector[recv_message_type]} is longer '
127 |                                     f'than {self.MAX_LENGTH_UDS_PATH}')
128 | 
129 |     def alnum_name(self, name):
130 |         return ''.join([str(letter) for letter in name if str(letter).isalnum()])
131 | 
132 |     def limit_length_for_uds(self, name):
133 |         if len(name) > 32:
134 |             name = hashlib.md5(name.encode()).hexdigest()
135 |         return name
136 | 
137 |     #4|2|json|4|data|4|binary
138 |     def pack_message(self, transport_json=None, message_type=None, source_id=None, destination_id=None,
139 |                      request_id=None, response_id=None, binary=None, await_response=False,
140 | 
with_file=None, data=None, wait_for_ack=False, message_type_publish=None): 141 | if transport_json is None: 142 | transport_json = {MessageFields.MESSAGE_TYPE : message_type or self.send_message_types[0]} 143 | if source_id is not None: 144 | transport_json[MessageFields.SOURCE_ID] = source_id 145 | if destination_id is not None: 146 | transport_json[MessageFields.DESTINATION_ID] = destination_id 147 | if request_id is not None: 148 | transport_json[MessageFields.REQUEST_ID] = request_id 149 | if response_id is not None: 150 | transport_json[MessageFields.RESPONSE_ID] = response_id 151 | if binary: 152 | transport_json[MessageFields.WITH_BINARY] = True 153 | if await_response: 154 | transport_json[MessageFields.AWAIT_RESPONSE] = True 155 | if with_file: 156 | transport_json[MessageFields.WITH_FILE] = with_file 157 | if wait_for_ack: 158 | transport_json[MessageFields.WAIT_FOR_ACK] = wait_for_ack 159 | if message_type_publish: 160 | transport_json[MessageFields.MESSAGE_TYPE_PUBLISH] = message_type_publish 161 | 162 | #pack message 163 | json_field = json.dumps(transport_json).encode() 164 | if isinstance(data, str): 165 | data = data.encode() 166 | if data is None: 167 | data = b'' 168 | message = Structures.MSG_2_STRUCT.pack(len(json_field)) + json_field + Structures.MSG_4_STRUCT.pack(len(data)) + data 169 | if binary: 170 | message += (Structures.MSG_4_STRUCT.pack(len(binary)) + binary) 171 | message = Structures.MSG_4_STRUCT.pack(len(message)) + message 172 | return message 173 | 174 | #4|2|json|4|data|4|binary 175 | def unpack_message(self, message): 176 | #receives full message in bytes 177 | #next_length_4 = Structures.MSG_4_STRUCT.unpack(message[:Structures.MSG_4_STRUCT.size])[0] 178 | next_pointer = Structures.MSG_4_STRUCT.size+Structures.MSG_2_STRUCT.size 179 | next_length_2 = Structures.MSG_2_STRUCT.unpack(message[Structures.MSG_4_STRUCT.size:next_pointer])[0] 180 | transport_json = json.loads(message[next_pointer:next_pointer+next_length_2]) 181 | next_pointer += next_length_2 182 | length_data = Structures.MSG_4_STRUCT.unpack(message[next_pointer:next_pointer+Structures.MSG_4_STRUCT.size])[0] 183 | next_pointer += Structures.MSG_4_STRUCT.size 184 | data = message[next_pointer:next_pointer+length_data] 185 | binary = None 186 | if transport_json.get(MessageFields.WITH_BINARY): 187 | next_pointer += length_data 188 | length_binary = Structures.MSG_4_STRUCT.unpack(message[next_pointer:next_pointer+Structures.MSG_4_STRUCT.size])[0] 189 | next_pointer += Structures.MSG_4_STRUCT.size 190 | binary = message[next_pointer:next_pointer+length_binary] 191 | return transport_json, data, binary #json, bytes, bytes 192 | 193 | async def send_message_await_response(self, message_type=None, destination_id=None, request_id=None, response_id=None, 194 | data=None, data_is_json=True, binary=None, await_response=False, with_file=None, 195 | wait_for_ack=False, message_type_publish=None, await_response_timeout=None): 196 | res = await self.send_message(await_response=True, message_type=message_type, destination_id=destination_id, 197 | request_id=request_id, response_id=response_id, data=data, 198 | data_is_json=data_is_json, binary=binary, with_file=with_file, 199 | wait_for_ack=wait_for_ack, message_type_publish=message_type_publish, 200 | await_response_timeout=await_response_timeout) 201 | return res 202 | 203 | def send_message_sync(self, message_type=None, destination_id=None, request_id=None, response_id=None, 204 | data=None, data_is_json=True, binary=None, await_response=False, 
with_file=None,
205 |                           wait_for_ack=False, message_type_publish=None, await_response_timeout=None, loop=None):
206 |         self.logger.debug(f'send_message_sync of type {message_type}, destination_id {destination_id}, '
207 |                           f'request_id {request_id}, response_id {response_id}')
208 | 
209 |         loop = loop or asyncio.get_event_loop()
210 |         send_task = self.send_message(message_type=message_type, destination_id=destination_id,
211 |                                       request_id=request_id, response_id=response_id, data=data,
212 |                                       data_is_json=data_is_json, binary=binary, await_response=await_response,
213 |                                       with_file=with_file, wait_for_ack=wait_for_ack,
214 |                                       message_type_publish=message_type_publish, await_response_timeout=await_response_timeout)
215 |         if loop.is_running():
216 |             return loop.create_task(send_task)
217 |         else:
218 |             return loop.run_until_complete(send_task)
219 | 
220 |     async def send_message(self, message_type=None, destination_id=None, request_id=None, response_id=None,
221 |                            data=None, data_is_json=True, binary=None, await_response=False, with_file=None,
222 |                            wait_for_ack=False, message_type_publish=None, await_response_timeout=None, check_chunk_file=True): #, reuse_uds_connection=True):
223 | 
224 |         if with_file:
225 |             src_path = with_file.get('src_path')
226 |             if src_path and check_chunk_file:
227 |                 file_size = os.path.getsize(src_path)
228 |                 number_of_chunks, last_bytes_size = divmod(file_size, self.MAX_SIZE_CHUNK_UPLOAD)
229 |                 if number_of_chunks:
230 |                     #divide file in chunks of max size MAX_SIZE_CHUNK_UPLOAD, and send each chunk one after the other
231 |                     dst_name = with_file.get('dst_name')
232 |                     chunk_basepath = self.connector_files_dirpath #os.path.dirname(src_path)
233 |                     chunk_basename = f'{dst_name}{Misc.CHUNK_INDICATOR}' #f'{dst_name}__aioconnectors_chunk'
234 |                     try:
235 |                         override_src_file_sizes = number_of_chunks * [self.MAX_SIZE_CHUNK_UPLOAD]
236 |                         if last_bytes_size:
237 |                             override_src_file_sizes += [last_bytes_size]
238 |                         len_override_src_file_sizes = len(override_src_file_sizes)
239 |                         chunk_names = []
240 |                         with open(src_path, 'rb') as fd: #close the source file when done (the original never closed fd)
241 |                             for index, chunk_size in enumerate(override_src_file_sizes):
242 |                                 chunk_name = f'{chunk_basename}_{index+1}_{len_override_src_file_sizes}'
243 |                                 with open(os.path.join(chunk_basepath, chunk_name), 'wb') as fw:
244 |                                     number_of_read_chunks, last_size = divmod(chunk_size, self.READ_CHUNK_SIZE)
245 |                                     while number_of_read_chunks:
246 |                                         number_of_read_chunks -= 1
247 |                                         chunk_file = fd.read(self.READ_CHUNK_SIZE)
248 |                                         fw.write(chunk_file)
249 |                                     chunk_file = fd.read(last_size)
250 |                                     fw.write(chunk_file)
251 | 
252 |                                 self.logger.info(f'send_message of type {message_type}, destination_id {destination_id}, '
253 |                                                  f'request_id {request_id}, response_id {response_id} creating chunk {chunk_name}')
254 |                                 chunk_names.append(chunk_name)
255 |                                 await asyncio.sleep(0)
256 |                         chunk_file = None
257 |                     except Exception:
258 |                         self.logger.exception('send_message chunks')
259 |                         return False
260 |                     for index, chunk_name in enumerate(chunk_names):
261 |                         chunk_name_path = os.path.join(chunk_basepath, chunk_name)
262 |                         with_file['src_path'] = chunk_name_path
263 |                         with_file['dst_name'] = chunk_name
264 |                         with_file['chunked'] = [chunk_basename, index+1, len_override_src_file_sizes]
265 |                         res = await self.send_message(message_type=message_type, destination_id=destination_id,
266 |                                                       request_id=request_id, response_id=response_id,
267 |                                                       data=data, data_is_json=data_is_json, binary=binary,
268 |                                                       await_response=await_response, with_file=with_file,
269 |                                                       wait_for_ack=wait_for_ack, 
message_type_publish=message_type_publish, 270 | await_response_timeout=await_response_timeout, check_chunk_file=False) 271 | if os.path.exists(chunk_name_path): 272 | if await_response or wait_for_ack: 273 | try: 274 | self.logger.info(f'send_message of type {message_type}, destination_id {destination_id}, ' 275 | f'request_id {request_id}, response_id {response_id} deleting chunk {chunk_name_path}') 276 | 277 | os.remove(chunk_name_path) 278 | except Exception: 279 | self.logger.exception(f'send_message of type {message_type}, destination_id {destination_id}, ' 280 | f'request_id {request_id}, response_id {response_id} deleting chunk {chunk_name_path}') 281 | else: 282 | self.logger.warning(f'send_message of type {message_type}, destination_id {destination_id}, ' 283 | f'request_id {request_id}, response_id {response_id} leaving undeleted chunk {chunk_name_path}') 284 | 285 | if not res: 286 | return res 287 | return res 288 | try: 289 | 290 | if data_is_json: 291 | data = json.dumps(data) #, ensure_ascii=False) 292 | if not self.is_server and not destination_id: 293 | destination_id = str(self.server_sockaddr) 294 | self.logger.debug(f'send_message of type {message_type}, destination_id {destination_id}, ' 295 | f'request_id {request_id}, response_id {response_id}') 296 | 297 | message_bytes = self.pack_message(data=data, message_type=message_type, source_id=self.source_id, 298 | destination_id=destination_id, request_id=request_id, response_id=response_id, binary=binary, 299 | await_response=await_response, with_file=with_file, wait_for_ack=wait_for_ack, 300 | message_type_publish=message_type_publish) 301 | 302 | send_message_lock_internally_acquired = False 303 | if self.uds_path_send_preserve_socket and not await_response: 304 | #try to reuse connection to uds 305 | if not self.reader_writer_uds_path_send: 306 | #either there is really no reader_writer_uds_path_send, or the send_message_lock is currently 307 | #locked by another send_message which is 308 | #in the process of creating a reader_writer_uds_path_send. 309 | #In such a case, we wait for send_message_lock, and check again if reader_writer_uds_path_send exists. 310 | try: 311 | await asyncio.wait_for(self.send_message_lock.acquire(), self.ASYNC_TIMEOUT) 312 | except asyncio.CancelledError: 313 | raise 314 | except asyncio.TimeoutError: 315 | self.logger.warning('send_message could not acquire send_message_lock') 316 | return False 317 | else: 318 | #reader_writer_uds_path_send may have changed during wait_for(self.send_message_lock.acquire()) : 319 | #checking again if reader_writer_uds_path_send exists 320 | if self.reader_writer_uds_path_send: 321 | #a new check reader_writer_uds_path_send has just been created by another send_message task : use it ! 
322 | try: 323 | self.send_message_lock.release() 324 | except Exception: 325 | self.logger.exception('send_message_lock release') 326 | else: 327 | #we acquired send_message_lock, and there is no reader_writer_uds_path_send : 328 | #we set send_message_lock_internally_acquired 329 | #to prevent waiting a second time for send_message_lock in the following 330 | send_message_lock_internally_acquired = True 331 | 332 | if self.reader_writer_uds_path_send: 333 | try: 334 | reader, writer = self.reader_writer_uds_path_send 335 | writer.write(message_bytes[:Structures.MSG_4_STRUCT.size]) 336 | writer.write(message_bytes[Structures.MSG_4_STRUCT.size:]) 337 | await writer.drain() 338 | self.logger.debug('send_message reusing existing connection') 339 | return True 340 | except Exception: 341 | #now we need to create a new connection 342 | self.reader_writer_uds_path_send = None 343 | self.logger.exception('send_message uds_path_send_preserve_socket') 344 | try: 345 | writer.close() 346 | if PYTHON_GREATER_37: 347 | try: 348 | await writer.wait_closed() #python 3.7 349 | except Exception as exc: 350 | self.logger.warning('send_message1 wait_closed : '+str(exc)) 351 | 352 | except Exception: 353 | pass 354 | 355 | self.logger.debug('send_message creating new connection') 356 | try: 357 | #in case send_message is called as a task, we need the send_message_lock when 358 | #creating a new connection to uds_path_send_to_connector 359 | #otherwise the order of messages can be messed up. 360 | #And also the shared reader_writer_uds_path_send mechanism can be messed up 361 | if not send_message_lock_internally_acquired: 362 | await asyncio.wait_for(self.send_message_lock.acquire(), self.ASYNC_TIMEOUT) 363 | 364 | reader, writer = await asyncio.wait_for(asyncio.open_unix_connection(path=self.uds_path_send_to_connector, 365 | limit=self.MAX_SOCKET_BUFFER_SIZE), timeout=self.ASYNC_TIMEOUT) 366 | writer.transport.set_write_buffer_limits(0,0) 367 | if self.uds_path_send_preserve_socket and not await_response: 368 | self.reader_writer_uds_path_send = reader, writer 369 | except asyncio.CancelledError: 370 | raise 371 | except Exception as exc: #ConnectionRefusedError: or TimeoutError 372 | self.logger.warning(f'send_message could not connect to {self.uds_path_send_to_connector} : {exc}') 373 | return False 374 | finally: 375 | try: 376 | if self.send_message_lock.locked(): 377 | self.send_message_lock.release() 378 | except Exception: 379 | self.logger.exception('send_message_lock release') 380 | 381 | writer.write(message_bytes[:Structures.MSG_4_STRUCT.size]) 382 | writer.write(message_bytes[Structures.MSG_4_STRUCT.size:]) 383 | try: 384 | await asyncio.wait_for(writer.drain(), timeout=self.ASYNC_TIMEOUT) 385 | except asyncio.CancelledError: 386 | raise 387 | except Exception: 388 | self.logger.exception('send_message writer drain') 389 | #beware to not lock the await_response recv_message with send_message_lock 390 | if await_response: 391 | if await_response_timeout is not None: 392 | try: 393 | the_response = await asyncio.wait_for(self.recv_message(reader, writer), timeout=await_response_timeout) 394 | except asyncio.TimeoutError: 395 | self.logger.warning(f'send_message : await_response_timeout error ({await_response_timeout} s)') 396 | writer.close() 397 | if PYTHON_GREATER_37: 398 | try: 399 | await writer.wait_closed() #python 3.7 400 | except Exception as exc: 401 | self.logger.warning('send_message2 wait_closed : '+str(exc)) 402 | 403 | return False 404 | else: 405 | the_response = await 
self.recv_message(reader, writer)
406 |             self.logger.debug('send_message finished sending')
407 |             if await_response:
408 |                 writer.close()
409 |                 if PYTHON_GREATER_37:
410 |                     try:
411 |                         await writer.wait_closed() #python 3.7
412 |                     except Exception as exc:
413 |                         self.logger.warning('send_message3 wait_closed : '+str(exc))
414 | 
415 |                 return the_response
416 |             return True
417 | 
418 |         except asyncio.CancelledError:
419 |             self.logger.warning('send_message : CancelledError')
420 |             raise
421 |         except asyncio.IncompleteReadError:
422 |             self.logger.warning('send_message : peer disconnected')
423 |             return False
424 |         except ConnectionResetError as exc:
425 |             self.logger.warning('ConnectionResetError : '+str(exc))
426 |             return False
427 |         except Exception:
428 |             self.logger.exception('send_message')
429 |             return False
430 |     async def publish_message(self, message_type=None, destination_id=None, request_id=None, response_id=None,
431 |                               data=None, data_is_json=True, binary=None, await_response=False, with_file=None,
432 |                               wait_for_ack=False):
433 |         res = await self.send_message(message_type='_pubsub', message_type_publish=message_type, destination_id=destination_id,
434 |                                       request_id=request_id, response_id=response_id, data=data,
435 |                                       data_is_json=data_is_json, binary=binary, await_response=await_response,
436 |                                       with_file=with_file, wait_for_ack=wait_for_ack)
437 |         return res
438 | 
439 |     def publish_message_sync(self, message_type=None, destination_id=None, request_id=None, response_id=None,
440 |                              data=None, data_is_json=True, binary=None, await_response=False, with_file=None, wait_for_ack=False):
441 |         res = self.send_message_sync(message_type='_pubsub', message_type_publish=message_type,
442 |                                      destination_id=destination_id, request_id=request_id, response_id=response_id,
443 |                                      data=data, data_is_json=data_is_json, binary=binary, await_response=await_response,
444 |                                      with_file=with_file, wait_for_ack=wait_for_ack)
445 |         return res
446 | 
447 |     async def recv_message(self, reader, writer):
448 |         try:
449 |             self.logger.debug('recv_message')
450 |             next_length_bytes = await reader.readexactly(Structures.MSG_4_STRUCT.size)
451 |             next_length = Structures.MSG_4_STRUCT.unpack(next_length_bytes)[0]
452 |             #self.logger.info('Received data from application with length: ' + str(next_length))
453 |             #payload = 2|json|4|data|4|binary
454 |             payload = await asyncio.wait_for(reader.readexactly(next_length), timeout=self.ASYNC_TIMEOUT)
455 |             message = next_length_bytes + payload
456 |             response = transport_json, data, binary = self.unpack_message(message)
457 |             self.logger.debug('recv_message : '+str(transport_json))
458 |             return response
459 |         except asyncio.CancelledError:
460 |             raise
461 |         except asyncio.IncompleteReadError:
462 |             self.logger.warning('recv_message : peer disconnected')
463 |             return None, None, None
464 |         except ConnectionResetError as exc:
465 |             self.logger.warning('recv_message : peer disconnected '+str(exc))
466 |             return None, None, None
467 |         except Exception:
468 |             self.logger.exception('recv_message')
469 |             raise
470 | 
471 |     async def client_connected_cb(self, message_received_cb, reader, writer):
472 |         while True:
473 |             transport_json, data, binary = await self.recv_message(reader, writer)
474 |             if transport_json:
475 |                 await message_received_cb(self.logger, transport_json, data, binary)
476 |             else:
477 |                 return
478 |             if not self.uds_path_receive_preserve_socket:
479 |                 return
480 | 
481 |     async def start_waiting_for_messages(self, message_type=None, message_received_cb=None, reuse_uds_path=False):
482 |         #message_received_cb must receive arguments transport_json, data, binary
483 |         try:
484 |             uds_path_receive_from_connector = self.uds_path_receive_from_connector.get(message_type)
485 |             if os.path.exists(uds_path_receive_from_connector) and not reuse_uds_path:
486 |                 raise Exception(f'{uds_path_receive_from_connector} already in use. Cannot start_waiting_for_messages')
487 |             self.logger.info('start_waiting_for_messages of type {} on socket {}'.format(message_type,
488 |                              uds_path_receive_from_connector))
489 | 
490 |             if message_type in self.message_waiters:
491 |                 raise Exception('Already waiting for messages of type {} on socket {}'.format(message_type,
492 |                                 uds_path_receive_from_connector))
493 |             client_connected_cb = partial(self.client_connected_cb, message_received_cb)
494 |             server = await asyncio.start_unix_server(client_connected_cb, path=uds_path_receive_from_connector,
495 |                                                      limit=self.MAX_SOCKET_BUFFER_SIZE)
496 |             self.message_waiters[message_type] = server
497 |             if self.receive_from_any_connector_owner:
498 |                 chown_nobody_permissions(uds_path_receive_from_connector) #must be implemented, for example call linux chown
499 |             return server
500 |         except asyncio.CancelledError:
501 |             raise
502 |         except Exception:
503 |             self.logger.exception('start_waiting_for_messages')
504 |             raise
505 | 
506 |     def stop_waiting_for_messages(self, message_type=None):
507 |         if message_type not in self.message_waiters:
508 |             self.logger.warning('stop_waiting_for_messages has no {} waiter to stop'.format(message_type))
509 |             return
510 |         self.logger.info('stop_waiting_for_messages of type {} on socket {}'.format(message_type,
511 |                          self.uds_path_receive_from_connector.get(message_type)))
512 |         server = self.message_waiters.pop(message_type)
513 |         server.close()
514 |         try:
515 |             uds_path_receive_from_connector = self.uds_path_receive_from_connector.get(message_type, '')
516 |             if os.path.exists(uds_path_receive_from_connector):
517 |                 self.logger.info('Deleting file '+ uds_path_receive_from_connector)
518 |                 os.remove(uds_path_receive_from_connector)
519 |         except Exception:
520 |             self.logger.exception('stop_waiting_for_messages')
521 |             raise
522 | 
523 | class ConnectorRemoteTool(ConnectorAPI):
524 |     def __init__(self, *args, **kwargs):
525 |         super().__init__(*args, **kwargs)
526 | 
527 |     async def send_command(self, cmd=None, kwargs=None):
528 |         try:
529 |             if kwargs is None:
530 |                 kwargs = {}
531 |             self.logger.info(f'send_command {cmd} with kwargs {kwargs}')
532 |             message = json.dumps({'cmd':cmd, 'kwargs':kwargs}).encode()
533 |             message = Structures.MSG_4_STRUCT.pack(len(message)) + message
534 |             reader, writer = await asyncio.wait_for(asyncio.open_unix_connection(path=self.connector.uds_path_commander), #self.connector is provided by the full api.py, not by this standalone file
535 |                                                     timeout=self.ASYNC_TIMEOUT)
536 |             writer.transport.set_write_buffer_limits(0,0)
537 |             writer.write(message)
538 |             try:
539 |                 await asyncio.wait_for(writer.drain(), timeout=self.ASYNC_TIMEOUT)
540 |             except Exception:
541 |                 self.logger.exception('send_command writer drain')
542 |             next_length_bytes = await reader.readexactly(Structures.MSG_4_STRUCT.size)
543 |             next_length = Structures.MSG_4_STRUCT.unpack(next_length_bytes)[0]
544 |             response = await asyncio.wait_for(reader.readexactly(next_length), timeout=self.ASYNC_TIMEOUT)
545 |             writer.close()
546 |             if PYTHON_GREATER_37:
547 |                 try:
548 |                     await writer.wait_closed() #python 3.7
549 |                 except Exception as exc:
550 |                     self.logger.warning('send_command wait_closed : '+str(exc))
551 | 
552 |             self.logger.info(f'send_command got response {response}')
553 |         except Exception as exc:
554 |             self.logger.exception('send_command')
555 |             response = str(exc).encode()
556 |         return response
557 | 
558 | 
559 |     async def start_connector(self, delay=None):
560 |         if delay:
561 |             self.logger.info(f'Waiting {delay} seconds before starting connector : {self.source_id}')
562 |             await asyncio.sleep(delay)
563 |         self.logger.info('start_connector : '+str(self.source_id))
564 |         response = await self.send_command(cmd='start', kwargs={})
565 |         return response
566 | 
567 |     async def stop_connector(self, delay=None, hard=False, shutdown=False, enable_delete_files=True,
568 |                              client_wait_for_reconnect=False):
569 |         if delay:
570 |             self.logger.info(f'Waiting {delay} seconds before stopping connector : {self.source_id}')
571 |             await asyncio.sleep(delay)
572 |         self.logger.info('stop_connector : '+str(self.source_id))
573 |         response = await self.send_command(cmd='stop', kwargs={'hard':hard, 'shutdown':shutdown,
574 |                                            'enable_delete_files':enable_delete_files,
575 |                                            'client_wait_for_reconnect':client_wait_for_reconnect})
576 |         return response
577 | 
578 |     async def restart_connector(self, delay=None, sleep_between=0, connector_socket_only=False, hard=False):
579 |         if delay:
580 |             self.logger.info(f'Waiting {delay} seconds before restarting connector : {self.source_id}')
581 |             await asyncio.sleep(delay)
582 |         self.logger.info('restart_connector : '+str(self.source_id))
583 |         response = await self.send_command(cmd='restart', kwargs={'hard':hard, 'sleep_between':sleep_between})
584 |         return response
585 | 
586 |     async def delete_client_certificate(self, client_id=None, remove_only_symlink=False, restart_client=True):
587 |         self.logger.info(f'{self.source_id} delete_client_certificate {client_id}')
588 |         if self.is_server:
589 |             response = await self.send_command(cmd='delete_client_certificate_on_server',
590 |                                                kwargs={'client_id':client_id, 'remove_only_symlink':remove_only_symlink})
591 |             return response
592 |         else:
593 |             response = await self.send_command(cmd='delete_client_certificate_on_client', kwargs={'restart_client':restart_client})
594 |             return response
595 | 
596 |     async def disconnect_client(self, client_id=None):
597 |         self.logger.info(f'{self.source_id} disconnect_client {client_id}')
598 |         if self.is_server:
599 |             response = await self.send_command(cmd='disconnect_client', kwargs={'client_id':client_id})
600 |             return response
601 |         else:
602 |             return False
603 | 
604 |     async def add_blacklist_client(self, client_ip=None, client_id=None):
605 |         self.logger.info(f'{self.source_id} add_blacklist_client ip : {client_ip}, id : {client_id}')
606 |         if self.is_server:
607 |             response = await self.send_command(cmd='add_blacklist_client', kwargs={'client_ip':client_ip, 'client_id':client_id})
608 |             return response
609 |         else:
610 |             return False
611 | 
612 |     async def remove_blacklist_client(self, client_ip=None, client_id=None):
613 |         self.logger.info(f'{self.source_id} remove_blacklist_client ip : {client_ip}, id : {client_id}')
614 |         if self.is_server:
615 |             response = await self.send_command(cmd='remove_blacklist_client', kwargs={'client_ip':client_ip, 'client_id':client_id})
616 |             return response
617 |         else:
618 |             return False
619 | 
620 |     async def add_whitelist_client(self, client_ip=None, client_id=None):
621 |         self.logger.info(f'{self.source_id} add_whitelist_client ip : {client_ip}, id : {client_id}')
622 |         if self.is_server:
623 |             response = await self.send_command(cmd='add_whitelist_client', kwargs={'client_ip':client_ip, 'client_id':client_id})
624 |             return response
625 |         else:
626 |             return False
627 | 
628 |     async def remove_whitelist_client(self, client_ip=None, client_id=None):
629 |         self.logger.info(f'{self.source_id} remove_whitelist_client ip : {client_ip}, id : {client_id}')
630 |         if self.is_server:
631 |             response = await self.send_command(cmd='remove_whitelist_client', kwargs={'client_ip':client_ip, 'client_id':client_id})
632 |             return response
633 |         else:
634 |             return False
635 | 
636 |     async def delete_previous_persistence_remains(self):
637 |         self.logger.info(f'{self.source_id} delete_previous_persistence_remains')
638 |         response = await self.send_command(cmd='delete_previous_persistence_remains__sync', kwargs={})
639 |         return response
640 | 
641 |     async def show_subscribe_message_types(self):
642 |         self.logger.info(f'{self.source_id} show_subscribe_message_types')
643 |         response = await self.send_command(cmd='show_subscribe_message_types__sync', kwargs={})
644 |         return response
645 | 
646 |     async def set_subscribe_message_types(self, *message_types):
647 |         self.logger.info(f'{self.source_id} set_subscribe_message_types {message_types}')
648 |         response = await self.send_command(cmd='set_subscribe_message_types', kwargs={'message_types':message_types})
649 |         return response
650 | 
651 |     async def show_connected_peers(self, dump_result=True):
652 |         self.logger.info(f'{self.source_id} show_connected_peers')
653 |         response = await self.send_command(cmd='show_connected_peers__sync', kwargs={'dump_result':dump_result})
654 |         return response
655 | 
656 |     async def peek_queues(self):
657 |         self.logger.info(f'{self.source_id} peek_queues')
658 |         response = await self.send_command(cmd='peek_queues__sync', kwargs={'dump_result':True})
659 |         return response
660 | 
661 |     async def ignore_peer_traffic_show(self):
662 |         self.logger.info(f'{self.source_id} ignore_peer_traffic_show')
663 |         response = await self.send_command(cmd='manage_ignore_peer_traffic__sync', kwargs={'show':True})
664 |         return response
665 | 
666 |     async def ignore_peer_traffic_enable(self):
667 |         self.logger.info(f'{self.source_id} ignore_peer_traffic_enable')
668 |         response = await self.send_command(cmd='manage_ignore_peer_traffic__sync', kwargs={'enable':True})
669 |         return response
670 | 
671 |     async def ignore_peer_traffic_enable_unique(self, peername):
672 |         self.logger.info(f'{self.source_id} ignore_peer_traffic_enable_unique')
673 |         response = await self.send_command(cmd='manage_ignore_peer_traffic__sync', kwargs={'unique_peer':peername})
674 |         return response
675 | 
676 |     async def ignore_peer_traffic_disable(self):
677 |         self.logger.info(f'{self.source_id} ignore_peer_traffic_disable')
678 |         response = await self.send_command(cmd='manage_ignore_peer_traffic__sync', kwargs={'disable':True})
679 |         return response
680 | 
681 |     async def show_log_level(self):
682 |         self.logger.info(f'{self.source_id} show_log_level')
683 |         response = await self.send_command(cmd='show_log_level__sync', kwargs={})
684 |         return response
685 | 
686 |     async def set_log_level(self, level):
687 |         self.logger.info(f'{self.source_id} set_log_level {level}')
688 |         response = await self.send_command(cmd='set_log_level__sync', kwargs={'level':level})
689 |         return response
690 | 
691 |     async def show_attribute(self, attribute):
692 |         self.logger.info(f'{self.source_id} show_attribute {attribute}')
693 |         response = await self.send_command(cmd='show_attribute__sync', kwargs={'attribute':attribute})
694 |         return response
695 | 
696 | 
697 | 
--------------------------------------------------------------------------------
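Appendix sketch (illustrative): a self-contained round trip of the 4|2|json|4|data|4|binary framing implemented by pack_message/unpack_message in standalone_api.py, using only the standard library. The helper names pack and unpack are ad-hoc, not part of the package API:

    # Round-trip sketch of the 4|2|json|4|data|4|binary frame described above.
    import json, struct

    MSG_4 = struct.Struct('I')  # 4-byte lengths (total, data, binary)
    MSG_2 = struct.Struct('H')  # 2-byte transport_json length

    def pack(transport_json, data=b'', binary=b''):
        json_field = json.dumps(transport_json).encode()
        payload = MSG_2.pack(len(json_field)) + json_field + MSG_4.pack(len(data)) + data
        if binary:
            payload += MSG_4.pack(len(binary)) + binary
        return MSG_4.pack(len(payload)) + payload  # total-length prefix

    def unpack(message):
        offset = MSG_4.size + MSG_2.size
        json_len = MSG_2.unpack(message[MSG_4.size:offset])[0]
        transport_json = json.loads(message[offset:offset + json_len])
        offset += json_len
        data_len = MSG_4.unpack(message[offset:offset + MSG_4.size])[0]
        offset += MSG_4.size
        data = message[offset:offset + data_len]
        binary = None
        if transport_json.get('with_binary'):
            offset += data_len
            bin_len = MSG_4.unpack(message[offset:offset + MSG_4.size])[0]
            offset += MSG_4.size
            binary = message[offset:offset + bin_len]
        return transport_json, data, binary

    frame = pack({'message_type': 'event', 'with_binary': True}, b'payload', b'\x01\x02')
    assert unpack(frame) == ({'message_type': 'event', 'with_binary': True}, b'payload', b'\x01\x02')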