├── .dockerignore
├── .github
│   └── workflows
│       ├── lint.yml
│       ├── release.yml
│       └── test.yml
├── .gitignore
├── .pre-commit-config.yaml
├── .readthedocs.yaml
├── AUTHORS.rst
├── CONTRIBUTING.rst
├── Dockerfile
├── HISTORY.rst
├── LICENSE
├── MANIFEST.in
├── README.rst
├── asyncio_rpc
│   ├── __init__.py
│   ├── client.py
│   ├── commlayers
│   │   ├── __init__.py
│   │   ├── base.py
│   │   └── redis.py
│   ├── exceptions.py
│   ├── models.py
│   ├── pubsub.py
│   ├── serialization
│   │   ├── __init__.py
│   │   ├── base.py
│   │   ├── msgpack.py
│   │   └── shapely_models.py
│   └── server.py
├── docker-compose.yml
├── docs
│   ├── conf.py
│   ├── index.rst
│   ├── installation.rst
│   ├── readme.rst
│   └── requirements.txt
├── examples
│   ├── basic
│   │   ├── README.rst
│   │   ├── client.py
│   │   └── server.py
│   ├── dataclass
│   │   ├── README.rst
│   │   ├── client.py
│   │   ├── models.py
│   │   └── server.py
│   ├── decorators
│   │   ├── README.rst
│   │   ├── client.py
│   │   └── server.py
│   └── stacked
│       ├── README.rst
│       ├── client.py
│       └── server.py
├── pyproject.toml
├── pytest.ini
├── requirements.in
├── requirements.txt
└── tests
    ├── __init__.py
    ├── conftest.py
    ├── test_msgpack_serialization.py
    ├── test_pubsub.py
    ├── test_rpc_client.py
    ├── test_rpc_server.py
    ├── test_shapely_serialization.py
    ├── test_simple_rpc_calls.py
    └── utils.py
/.dockerignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | **/__pycache__ 3 | **/Dockerfile 4 | -------------------------------------------------------------------------------- /.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | name: Lint 2 | 3 | on: [pull_request] 4 | 5 | jobs: 6 | lint: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - uses: actions/checkout@v3 10 | 11 | - name: Set up Python 12 | uses: actions/setup-python@v4 13 | with: 14 | python-version: "3.10" 15 | 16 | - name: Get changed files 17 | id: changed_files 18 | uses: tj-actions/changed-files@bab30c2299617f6615ec02a68b9a40d10bd21366 # v45.0.5 19 | with: 20 | files_ignore: "**/migrations/*" 21 | 22 | - name: Run pre-commit checks 23 | uses: pre-commit/action@576ff52938d158a24ac7e009dfa94b1455e7df99 # v3.0.1 24 | with: 25 | extra_args: --files ${{ steps.changed_files.outputs.all_changed_files }} 26 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Publish to PyPI 2 | 3 | on: 4 | push: 5 | tags: 6 | - '*' 7 | 8 | jobs: 9 | deploy: 10 | runs-on: ubuntu-latest 11 | 12 | steps: 13 | - uses: actions/checkout@v3 14 | with: 15 | fetch-depth: 0 16 | 17 | - name: Set up Python 18 | uses: actions/setup-python@v4 19 | with: 20 | python-version: '3.10' 21 | 22 | - name: Install dependencies 23 | run: | 24 | python -m pip install --upgrade pip 25 | pip install build 26 | 27 | - name: Build package 28 | run: | 29 | python -m build 30 | 31 | - name: Upload Github release 32 | uses: softprops/action-gh-release@36833a1c712e139c96b443e3af070e95d9c0f193 33 | with: 34 | files: dist/* 35 | body: "Release for ${{ github.ref }}" 36 | 37 | - name: Test install built package 38 | run: | 39 | pip install dist/*.whl 40 | 41 | - name: Publish package to PyPI 42 | uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc 43 | with: 44 | password: ${{ secrets.PYPI_UPLOAD_TOKEN }} -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Linux 2 | 3 |
# Run on PR requests. And on master itself. 4 | on: 5 | push: 6 | branches: 7 | - master 8 | pull_request: 9 | 10 | jobs: 11 | tests: 12 | name: Python ${{ matrix.python }} ${{ matrix.display_name }} 13 | runs-on: ${{ matrix.os }} 14 | # Service containers to run with `container-job` 15 | services: 16 | # Label used to access the service container 17 | redis: 18 | # Docker Hub image 19 | image: redis:6-alpine 20 | ports: 21 | - 6379:6379 22 | # Set health checks to wait until redis has started 23 | options: >- 24 | --health-cmd "redis-cli ping" 25 | --health-interval 10s 26 | --health-timeout 5s 27 | --health-retries 5 28 | strategy: 29 | fail-fast: false 30 | matrix: 31 | include: 32 | - display_name: "2021" 33 | python: '3.10' 34 | os: ubuntu-22.04 35 | pins: "" 36 | - display_name: "2022" 37 | python: '3.11' 38 | os: ubuntu-22.04 39 | pins: "" 40 | - display_name: "2023" 41 | python: '3.12' 42 | os: ubuntu-22.04 43 | pins: "" 44 | - display_name: "latest" 45 | python: '3.12' 46 | os: ubuntu-latest 47 | pins: "" 48 | 49 | 50 | steps: 51 | - uses: actions/checkout@v4 52 | with: 53 | lfs: true 54 | 55 | - name: Set up Python ${{ matrix.python }} 56 | uses: actions/setup-python@v5 57 | with: 58 | python-version: ${{ matrix.python }} 59 | 60 | - name: Install python dependencies 61 | shell: bash 62 | run: | 63 | pip install --disable-pip-version-check --upgrade pip 64 | pip install -e .[numpy,shapely,test] ${{ matrix.pins }} 65 | pip list 66 | 67 | - name: Run tests 68 | shell: bash 69 | run: | 70 | pytest 71 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | .vscode 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .coverage 43 | .coverage.* 44 | .cache 45 | nosetests.xml 46 | coverage.xml 47 | *.cover 48 | .hypothesis/ 49 | .pytest_cache/ 50 | 51 | # Translations 52 | *.mo 53 | *.pot 54 | 55 | # Django stuff: 56 | *.log 57 | local_settings.py 58 | db.sqlite3 59 | 60 | # Flask stuff: 61 | instance/ 62 | .webassets-cache 63 | 64 | # Scrapy stuff: 65 | .scrapy 66 | 67 | # Sphinx documentation 68 | docs/_build/ 69 | 70 | # PyBuilder 71 | target/ 72 | 73 | # Jupyter Notebook 74 | .ipynb_checkpoints 75 | 76 | # pyenv 77 | .python-version 78 | 79 | # celery beat schedule file 80 | celerybeat-schedule 81 | 82 | # SageMath parsed files 83 | *.sage.py 84 | 85 | # Environments 86 | .env 87 | .venv 88 | env/ 89 | venv/ 90 | ENV/ 91 | env.bak/ 92 | venv.bak/ 93 | 94 | # Spyder project settings 95 | .spyderproject 96 | .spyproject 97 | 98 | # Rope project settings 99 | .ropeproject 100 | 101 | # mkdocs documentation 102 | /site 103 | 104 | # mypy 105 | .mypy_cache/ 106 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | default_language_version: 2 | python: python3 3 | repos: 4 | - repo: https://github.com/astral-sh/ruff-pre-commit 5 | rev: v0.7.3 6 | hooks: 7 | - id: ruff 8 | args: [ --fix ] 9 | - id: ruff # isort 10 | args: ["check", "--select", "I", "--fix"] 11 | - id: ruff-format 12 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # Read the Docs configuration file for Sphinx projects 2 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 3 | 4 | # Required 5 | version: 2 6 | 7 | # Set the OS, Python version and other tools you might need 8 | build: 9 | os: ubuntu-22.04 10 | tools: 11 | python: "3.10" 12 | 13 | # Build documentation in the "docs/" directory with Sphinx 14 | sphinx: 15 | configuration: docs/conf.py 16 | 17 | formats: 18 | - pdf 19 | - epub 20 | 21 | python: 22 | install: 23 | - requirements: docs/requirements.txt -------------------------------------------------------------------------------- /AUTHORS.rst: -------------------------------------------------------------------------------- 1 | ======= 2 | Credits 3 | ======= 4 | 5 | Development Lead 6 | ---------------- 7 | 8 | * Jelle Prins 9 | 10 | Contributors 11 | ------------ 12 | 13 | * Jelle Prins 14 | * Lars Claussen 15 | 16 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | .. highlight:: shell 2 | 3 | ============ 4 | Contributing 5 | ============ 6 | 7 | Contributions are welcome, and they are greatly appreciated! Every little bit 8 | helps, and credit will always be given. 9 | 10 | You can contribute in many ways: 11 | 12 | Types of Contributions 13 | ---------------------- 14 | 15 | Report Bugs 16 | ~~~~~~~~~~~ 17 | 18 | Report bugs at https://github.com/nens/asyncio-rpc/issues. 19 | 20 | If you are reporting a bug, please include: 21 | 22 | * Your operating system name and version. 23 | * Any details about your local setup that might be helpful in troubleshooting. 24 | * Detailed steps to reproduce the bug. 
25 | 26 | Fix Bugs 27 | ~~~~~~~~ 28 | 29 | Look through the GitHub issues for bugs. Anything tagged with "bug" and "help 30 | wanted" is open to whoever wants to implement it. 31 | 32 | Implement Features 33 | ~~~~~~~~~~~~~~~~~~ 34 | 35 | Look through the GitHub issues for features. Anything tagged with "enhancement" 36 | and "help wanted" is open to whoever wants to implement it. 37 | 38 | Write Documentation 39 | ~~~~~~~~~~~~~~~~~~~ 40 | 41 | asyncio-rpc could always use more documentation, whether as part of the 42 | official asyncio-rpc docs, in docstrings, or even on the web in blog posts, 43 | articles, and such. 44 | 45 | Submit Feedback 46 | ~~~~~~~~~~~~~~~ 47 | 48 | The best way to send feedback is to file an issue at https://github.com/nens/asyncio-rpc/issues. 49 | 50 | If you are proposing a feature: 51 | 52 | * Explain in detail how it would work. 53 | * Keep the scope as narrow as possible, to make it easier to implement. 54 | * Remember that this is a volunteer-driven project, and that contributions 55 | are welcome :) 56 | 57 | Get Started! 58 | ------------ 59 | 60 | Ready to contribute? Here's how to set up `asyncio-rpc` for local development. 61 | 62 | 1. Fork the `asyncio-rpc` repo on GitHub. 63 | 2. Clone your fork locally:: 64 | 65 | $ git clone git@github.com:nens/asyncio-rpc.git 66 | 67 | 3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development:: 68 | 69 | $ mkvirtualenv asyncio-rpc 70 | $ cd asyncio-rpc/ 71 | $ python setup.py develop 72 | 73 | 4. Create a branch for local development:: 74 | 75 | $ git checkout -b name-of-your-bugfix-or-feature 76 | 77 | Now you can make your changes locally. 78 | 79 | 5. When you're done making changes, check that your changes pass flake8 and the 80 | tests. Tests can be run best in a docker container:: 81 | 82 | $ cd 83 | $ docker build -t asyncio-rpc . 84 | $ docker run --rm -v `pwd`:/code asyncio-rpc pytest --cov=asyncio-rpc --flake8 85 | 86 | 87 | 6. Commit your changes and push your branch to GitHub:: 88 | 89 | $ git add . 90 | $ git commit -m "Your detailed description of your changes." 91 | $ git push origin name-of-your-bugfix-or-feature 92 | 93 | 7. Submit a pull request through the GitHub website. 94 | 95 | Pull Request Guidelines 96 | ----------------------- 97 | 98 | Before you submit a pull request, check that it meets these guidelines: 99 | 100 | 1. The pull request should include tests. 101 | 2. If the pull request adds functionality, the docs should be updated. Put 102 | your new functionality into a function with a docstring, and add the 103 | feature to the list in README.rst. 104 | 3. The pull request should work for Python 3.7 and for PyPy. Check 105 | https://travis-ci.org/nens/asyncio-rpc/pull_requests 106 | and make sure that the tests pass for all supported Python versions. 107 | 108 | Tips 109 | ---- 110 | 111 | 112 | 113 | Deploying 114 | --------- 115 | 116 | A reminder for the maintainers on how to deploy. 117 | Make sure all your changes are committed (including an entry in HISTORY.rst). 118 | Then run:: 119 | 120 | $ bumpversion patch # possible: major / minor / patch 121 | $ git push 122 | $ git push --tags 123 | 124 | Travis will then deploy to PyPI if tests pass. 
125 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.10 2 | 3 | LABEL maintainer='jelle.prins ' 4 | LABEL py_version='3.10' 5 | 6 | # Change the date to force rebuilding the whole image. 7 | ENV REFRESHED_AT 2025-04-11 8 | 9 | WORKDIR /code 10 | COPY requirements.txt /code/ 11 | RUN pip install --no-cache-dir -r requirements.txt 12 | -------------------------------------------------------------------------------- /HISTORY.rst: -------------------------------------------------------------------------------- 1 | 0.3.3 (unreleased) 2 | ------------------ 3 | 4 | - Nothing changed yet. 5 | 6 | 7 | 0.3.2 (2025-04-30) 8 | ------------------ 9 | 10 | - Set Redis socket keepalive and retry on timeout for RPCRedisCommLayer. 11 | 12 | 13 | 0.3.1 (2025-04-14) 14 | ------------------ 15 | 16 | - Improve version discovery. 17 | 18 | 19 | 0.3.0 (2025-04-14) 20 | ------------------ 21 | 22 | - Add Shapely Geometry support. 23 | 24 | 25 | 0.2.0 (2024-01-02) 26 | ------------------ 27 | 28 | - Dropped `aioredis` package, use `redis` package instead. 29 | 30 | - Github actions for unit-tests & linting. 31 | 32 | - Numpy is optional. 33 | 34 | 0.1.11 (2023-07-06) 35 | ------------------- 36 | 37 | - Add support for RPC client graceful shutdown. 38 | 39 | 40 | 0.1.10 (2021-02-26) 41 | ------------------- 42 | 43 | - Disabled logging errors for missing Asyncio futures 44 | for subscriptions. 45 | 46 | 47 | 0.1.9 (2021-02-22) 48 | ------------------ 49 | 50 | - Bugfix: asyncio future that waits for return RPC message needs 51 | to be created before sending RPC message to RPC server. 52 | 53 | - Added debug logging statements. 54 | 55 | 0.1.8 (2021-02-05) 56 | ------------------ 57 | 58 | - Add numpy int32 and int64 serializer. 59 | 60 | 61 | 0.1.7 (2020-01-10) 62 | ------------------ 63 | 64 | - When a message from the client has not been received by 65 | a server it raises a NotReceived exception instead of 66 | an assert error. 67 | 68 | 69 | 0.1.6 (2019-12-30) 70 | ------------------ 71 | 72 | - Added pub/sub support to allow sending continuous updates 73 | from the server for a client subscription 74 | 75 | - Add slice serialization/deserialization support 76 | 77 | 78 | 0.1.5 (2019-12-23) 79 | ------------------ 80 | 81 | - Server.serve() methode respawns on internal tasks errors 82 | 83 | - Better error handling in server. 84 | 85 | 86 | 0.1.4 (2019-10-03) 87 | ------------------ 88 | 89 | - Client now raises RPCTimeoutError if the result of a RPC call took to long to 90 | be received. 91 | 92 | - Client.serve() method respawns internal tasks on errors. 93 | 94 | 95 | 0.1.3 (2019-08-21) 96 | ------------------ 97 | 98 | - Verbose feedback on assertion error while trying to unpack dataclasses. 99 | 100 | 101 | 0.1.2 (2019-07-04) 102 | ------------------ 103 | 104 | - Fixed bug with bytes/str serialization/deserialization 105 | 106 | 107 | 0.1.1 (2019-04-29) 108 | ------------------ 109 | 110 | - Added channel override option in client.rpc_call 111 | 112 | 113 | 0.1.0 (2019-03-20) 114 | ------------------ 115 | 116 | - first pypi release 117 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD License 2 | 3 | Copyright (c) 2019, Nelen & Schuurmans 4 | All rights reserved. 
5 | 6 | Redistribution and use in source and binary forms, with or without modification, 7 | are permitted provided that the following conditions are met: 8 | 9 | * Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | * Redistributions in binary form must reproduce the above copyright notice, this 13 | list of conditions and the following disclaimer in the documentation and/or 14 | other materials provided with the distribution. 15 | 16 | * Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from this 18 | software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 21 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 22 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 23 | IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, 24 | INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, 25 | BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 26 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY 27 | OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE 28 | OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED 29 | OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include AUTHORS.rst 2 | include CONTRIBUTING.rst 3 | include HISTORY.rst 4 | include LICENSE 5 | include README.rst 6 | 7 | recursive-include tests * 8 | recursive-exclude * __pycache__ 9 | recursive-exclude * *.py[co] 10 | 11 | recursive-include docs *.rst conf.py Makefile make.bat *.jpg *.png *.gif 12 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | Asyncio-rpc: Remote procedure calling framework 2 | =============================================== 3 | 4 | The Python package for the asyncio remote procedure calling 5 | 6 | .. image:: https://github.com/nens/asyncio-rpc/actions/workflows/test.yml/badge.svg?branch=master 7 | :target: https://github.com/nens/asyncio-rpc/actions/workflows/test.yml 8 | 9 | .. image:: https://readthedocs.org/projects/asyncio-rpc/badge/?version=latest 10 | :target: https://asyncio-rpc.readthedocs.io/en/latest/?badge=latest 11 | :alt: Documentation Status 12 | 13 | 14 | 15 | * Free software: BSD license 16 | * Documentation: https://asyncio-rpc.readthedocs.io. 
17 | 18 | 19 | Overview 20 | ======== 21 | 22 | 23 | Features 24 | -------- 25 | - Asyncio RPC client/server 26 | - Msgpack serialization with option to use own dataclasses (Python 3.10) 27 | - Redis communication layer 28 | - Other serialization methods and communication layers can be added 29 | 30 | 31 | Examples 32 | -------- 33 | 34 | The examples can be run from this directory, for the dataclass example 35 | (using localhost as redis host): 36 | 37 | >>> python3.8 -m examples.dataclass.server localhost 38 | >>> python3.8 -m examples.dataclass.client localhost 39 | 40 | 41 | Testing 42 | ------- 43 | >>> docker compose run asyncio_rpc pytest --cov=asyncio_rpc --cov-report=html 44 | -------------------------------------------------------------------------------- /asyncio_rpc/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = "0.3.3.dev0" 2 | -------------------------------------------------------------------------------- /asyncio_rpc/client.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import builtins 3 | import logging 4 | from typing import List, Union 5 | 6 | from asyncio_rpc.commlayers.base import AbstractRPCCommLayer 7 | from asyncio_rpc.exceptions import NotReceived, RPCTimeoutError, WrappedException 8 | from asyncio_rpc.pubsub import Subscription 9 | 10 | from .models import ( 11 | RPCBase, 12 | RPCException, 13 | RPCMessage, 14 | RPCPubResult, 15 | RPCResult, 16 | RPCStack, 17 | RPCSubStack, 18 | ) 19 | 20 | logger = logging.getLogger("asyncio-rpc-client") 21 | 22 | 23 | class RPCClient(object): 24 | """ 25 | Remote procedure client class. Allows to send rpc_call 26 | to a RPCServer via a rpc_commlayer. 27 | """ 28 | 29 | def __init__(self, rpc_commlayer: AbstractRPCCommLayer): 30 | """ 31 | Initialize a new RPCClient by providing an implementation of 32 | AbstractRPCCommlayer 33 | """ 34 | assert isinstance(rpc_commlayer, AbstractRPCCommLayer) 35 | self.rpc_commlayer = rpc_commlayer 36 | self.futures = {} 37 | self.queue = asyncio.Queue() 38 | self.processing = False 39 | self.on_rpc_message = None 40 | self.subscriptions = {} 41 | logger.debug("Initialized RPC client") 42 | 43 | def register_models(self, models: List): 44 | """ 45 | Register all given models to the rpc_commlayer serialization 46 | 47 | Intended usage is to register dataclasses 48 | """ 49 | for model in models: 50 | self.rpc_commlayer.serialization.register(model) 51 | 52 | async def _wait_for_result(self, uid: bytes) -> Union[RPCResult, RPCException]: 53 | """ 54 | Internal helper function for stopping the rpc_commlayer subscription 55 | upon receiving a result from the RPC_server 56 | """ 57 | logger.debug("Start waiting for result with uid: %s", uid) 58 | result = None 59 | while True: 60 | event, channel = await self.queue.get() 61 | 62 | assert isinstance(event, RPCResult) or isinstance(event, RPCException) 63 | 64 | # Discard everything that we don't need... 
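# (any event with a different uid is simply dropped here)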
65 | if event.uid == uid: 66 | logger.debug("Received result for uid: %s, %s", uid, event) 67 | result = event 68 | break 69 | 70 | # Stop subscription, automatically stops 71 | # queue processing as well 72 | await self.rpc_commlayer.unsubscribe() 73 | 74 | return result 75 | 76 | async def subscribe_call( 77 | self, rpc_sub_stack: RPCSubStack, channel=None 78 | ) -> Subscription: 79 | assert isinstance(rpc_sub_stack, RPCStack) 80 | 81 | if not self.processing: 82 | pass 83 | 84 | # Make sure to be subscribed before publishing 85 | await self.rpc_commlayer.do_subscribe() 86 | 87 | subscription = Subscription(self, rpc_sub_stack) 88 | 89 | self.subscriptions[rpc_sub_stack.uid] = subscription 90 | 91 | # Publish RPCStack to RPCServer 92 | count = await self.rpc_commlayer.publish(rpc_sub_stack, channel=channel) 93 | 94 | if count == 0: 95 | raise NotReceived( 96 | f"subscribe_call was not " f"received by any server: {rpc_sub_stack}" 97 | ) 98 | 99 | return subscription 100 | 101 | async def rpc_call(self, rpc_func_stack: RPCStack, channel=None) -> RPCResult: 102 | """ 103 | Execute the given rpc_func_stack (RPCStack) and either 104 | return a RPCResult or raise an exception based on the returned 105 | RPCException. 106 | 107 | This function can both be called with awaiting client.serve() or 108 | without. The difference is, that in the first case client.serve() 109 | starts while loops for performing background processing. 110 | 111 | The channel (optional) argument can be used to override 112 | the default publish channel 113 | """ 114 | assert isinstance(rpc_func_stack, RPCStack) 115 | 116 | # Make sure to be subscribed before publishing 117 | await self.rpc_commlayer.do_subscribe() 118 | 119 | # Always create a future before sending the rpc_func_stack 120 | # else the result can come back before the future is created 121 | future = asyncio.get_event_loop().create_future() 122 | self.futures[rpc_func_stack.uid] = future 123 | logger.debug("Added future for rpc_func_stack: %s", rpc_func_stack.uid) 124 | 125 | # Publish RPCStack to RPCServer 126 | count = await self.rpc_commlayer.publish(rpc_func_stack, channel=channel) 127 | 128 | logger.debug( 129 | "RPC call rpc_func_stack: %s, %s (received=%s, channel=%s)", 130 | rpc_func_stack.uid, 131 | rpc_func_stack, 132 | count, 133 | channel, 134 | ) 135 | 136 | if count == 0: 137 | self.futures.pop(rpc_func_stack.uid) 138 | future.set_result(None) 139 | raise NotReceived( 140 | f"rpc_call was not received " f"by any subscriber {rpc_func_stack}" 141 | ) 142 | 143 | if self.processing: 144 | # client.serve() has been awaited, so 145 | # background processing is taken care of. 146 | 147 | # Create a future that should be resolved within 148 | # the given timeout. The background processing initialized 149 | # by client.serve() resolves the future when a result 150 | # is returned. 151 | try: 152 | result = await asyncio.wait_for(future, timeout=rpc_func_stack.timeout) 153 | logger.debug( 154 | "Retrieved result for rpc_func_stack %s, %s", 155 | rpc_func_stack.uid, 156 | result, 157 | ) 158 | except asyncio.TimeoutError: 159 | logger.debug("TimeoutError rpc_func_stack: %s", rpc_func_stack.uid) 160 | raise RPCTimeoutError(f"rpc_func_stack: {rpc_func_stack}") 161 | else: 162 | # No background processing, so start the subscription 163 | # and wait for result via asyncio.gather 164 | # _wait_for_result is a helper function to unsubscribe 165 | # on a result. 
166 | 167 | # Don't need future here anymore 168 | self.futures.pop(rpc_func_stack.uid) 169 | future.set_result(None) 170 | try: 171 | _, result = await asyncio.gather( 172 | self.rpc_commlayer.subscribe(self._on_rpc_event), 173 | self._wait_for_result(rpc_func_stack.uid), 174 | ) 175 | logger.debug( 176 | "Retrieved result for rpc_func_stack %s, %s", 177 | rpc_func_stack.uid, 178 | result, 179 | ) 180 | except asyncio.TimeoutError: 181 | logger.debug("TimeoutError rpc_func_stack: %s", rpc_func_stack.uid) 182 | raise RPCTimeoutError(f"rpc_func_stack: {rpc_func_stack}") 183 | 184 | if isinstance(result, RPCException): 185 | logger.debug("RPC exception %s, %s", rpc_func_stack.uid, result) 186 | # Try to resolve builtin errors 187 | try: 188 | exception_class = getattr(builtins, result.classname) 189 | except AttributeError: 190 | # Default to WrappedException if 191 | # returned exception is not a builtin error 192 | exception_class = WrappedException 193 | 194 | raise exception_class(*result.exc_args) 195 | 196 | return result.data 197 | 198 | async def _on_rpc_event(self, rpc_instance: RPCBase, channel: bytes = None): 199 | """ 200 | Callback function sent to rpc_commlayer, is called 201 | when a message is received by the subscription. 202 | """ 203 | logger.debug("New RPC event %s", rpc_instance) 204 | assert isinstance(rpc_instance, RPCBase) 205 | # Put everything in a queue and process 206 | # it afterwards 207 | await self.queue.put((rpc_instance, channel)) 208 | 209 | async def _process_queue(self, on_rpc_message: callable = None): 210 | """ 211 | Background queue processing function, processes 212 | the internal self.queue until b'END' is received 213 | 214 | 215 | if on_rpc_message has been set, it will be called 216 | whenever a RPCMessage is popped from the queue. 217 | """ 218 | self.processing = True 219 | 220 | while True: 221 | item = await self.queue.get() 222 | if item == b"END": 223 | break 224 | 225 | event, channel = item 226 | # The event can either be a: 227 | # 1) RPCResult or RPCException as a result from the RPCServer 228 | # 3) RPCMessage as a message from the RPCServer 229 | assert ( 230 | isinstance(event, RPCResult) 231 | or isinstance(event, RPCException) 232 | or isinstance(event, RPCMessage) 233 | ) 234 | 235 | if isinstance(event, RPCResult) or isinstance(event, RPCException): 236 | logger.debug("Received new queue item %s, %s", event.uid, event) 237 | if event.uid in self.futures: 238 | logger.debug("Found event in futures %s", event.uid) 239 | # A future is created & awaited in self.rpc_call 240 | # resolve this future to proceed in the rpc_call function 241 | # and return a result 242 | future = self.futures.pop(event.uid) 243 | if future is not None: 244 | future.set_result(event) 245 | elif event.uid in self.subscriptions: 246 | logger.debug("Found event in subscriptions %s", event.uid) 247 | subscription = self.subscriptions[event.uid] 248 | await subscription.enqueue(event) 249 | else: 250 | # FUTURE NOT FOUND FOR EVENT 251 | if not isinstance(event, RPCPubResult): 252 | logger.exception( 253 | "Future not found for %s, %s", event.uid, event 254 | ) 255 | elif isinstance(event, RPCMessage) and on_rpc_message: 256 | logger.debug("RPCMessage received: %s", event) 257 | await on_rpc_message(event, channel) 258 | 259 | self.processing = False 260 | 261 | async def serve(self, on_rpc_message=None): 262 | """ 263 | Start RPCClient background processing, blocks 264 | until self.rpc_commlayer.unsubscribe() is called. 
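A minimal usage sketch (illustrative only; rpc_commlayer and rpc_func_stack are assumed
to already exist, e.g. an RPCRedisCommLayer created via RPCRedisCommLayer.create()
and an RPCStack)::

    client = RPCClient(rpc_commlayer)
    serve_task = asyncio.ensure_future(client.serve())
    result = await client.rpc_call(rpc_func_stack)
    await client.close()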
265 | 266 | Use this method in an async context to enable 267 | background processing and to allow multiple 268 | rpc_calls to run asynchronously. 269 | """ 270 | 271 | task_args_map = { 272 | self.rpc_commlayer.subscribe: [self._on_rpc_event], 273 | self._process_queue: [on_rpc_message], 274 | } 275 | # create the main tasks 276 | main_tasks = { 277 | asyncio.ensure_future(coro(*args)): (coro, args) 278 | for coro, args in task_args_map.items() 279 | } 280 | 281 | running = set(main_tasks.keys()) 282 | 283 | except_cnt = -1 284 | 285 | while running: 286 | except_cnt += 1 287 | finished, running = await asyncio.wait( 288 | running, return_when=asyncio.FIRST_EXCEPTION 289 | ) 290 | for task in finished: 291 | if task.exception(): 292 | logger.exception(task.exception()) 293 | task.print_stack() 294 | coro, args = main_tasks[task] 295 | new_task = asyncio.ensure_future(coro(*args)) 296 | main_tasks[new_task] = (coro, args) 297 | running.add(new_task) 298 | 299 | async def close(self): 300 | """ 301 | Gracefully shut down the client and rpc_commlayer 302 | """ 303 | await self.queue.put(b"END") 304 | await self.rpc_commlayer.close() 305 | -------------------------------------------------------------------------------- /asyncio_rpc/commlayers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nens/asyncio-rpc/c86257df1af3f9c129444c046557169ab4fd1548/asyncio_rpc/commlayers/__init__.py -------------------------------------------------------------------------------- /asyncio_rpc/commlayers/base.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | 3 | from asyncio_rpc.models import RPCBase 4 | 5 | 6 | class AbstractRPCCommLayer(ABC): 7 | """ 8 | Abstract base class for RPC communication layers 9 | """ 10 | 11 | @abstractmethod 12 | async def publish(self, rpc_instance: RPCBase): 13 | """ 14 | Publish a RPCBase subclass to the other end, 15 | either a RPCServer or RPCClient. 16 | """ 17 | 18 | @abstractmethod 19 | async def do_subscribe(self): 20 | """ 21 | Initialize subscription for receiving messages. 22 | This is needed for Redis to make sure all 23 | messages are received; this might not be the case 24 | for other communication layers 25 | """ 26 | 27 | @abstractmethod 28 | async def subscribe(self, on_rpc_event_callback: callable): 29 | """ 30 | Subscribe and listen for messages. The on_rpc_event_callback 31 | function is called on every received RPCBase message 32 | """ 33 | 34 | @abstractmethod 35 | async def unsubscribe(self): 36 | """ 37 | Stop subscription for receiving messages. 38 | This is needed for Redis and might not be needed 39 | for other communication layers 40 | """ 41 | 42 | @abstractmethod 43 | async def close(self): 44 | """ 45 | Close the commlayer, including all open connections, async tasks, etc.
46 | """ 47 | -------------------------------------------------------------------------------- /asyncio_rpc/commlayers/redis.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | from uuid import uuid4 3 | 4 | import redis.asyncio as async_redis 5 | 6 | from ..models import SERIALIZABLE_MODELS, RPCBase, RPCResult, RPCStack 7 | from .base import AbstractRPCCommLayer 8 | 9 | RESULT_EXPIRE_TIME = 300 # seconds 10 | 11 | 12 | class RPCRedisCommLayer(AbstractRPCCommLayer): 13 | """ 14 | Redis remote procedure call communication layer 15 | """ 16 | 17 | @classmethod 18 | async def create( 19 | cls, 20 | subchannel=b"subchannel", 21 | pubchannel=b"pubchannel", 22 | host="localhost", 23 | port=6379, 24 | serialization=None, 25 | ): 26 | """ 27 | Use a static create method to allow async context, 28 | __init__ cannot be async. 29 | """ 30 | 31 | self = RPCRedisCommLayer(subchannel, pubchannel) 32 | 33 | # Create communicationLayer 34 | self.host = host 35 | self.port = port 36 | self.serialization = serialization 37 | 38 | # Redis for publishing 39 | self.redis = async_redis.from_url( 40 | f"redis://{host}", 41 | socket_keepalive=True, 42 | retry_on_timeout=True, 43 | health_check_interval=None, # no health checks for Redis pub/sub 44 | ) 45 | 46 | # By default register all RPC models 47 | for model in SERIALIZABLE_MODELS: 48 | # Register models to serialization 49 | serialization.register(model) 50 | 51 | self.subscribed = False 52 | 53 | # Subscription has own redis 54 | self.sub_redis = None 55 | self.sub_channel = None 56 | 57 | # By default subscribe 58 | await self.do_subscribe() 59 | 60 | return self 61 | 62 | def __init__(self, subchannel, pubchannel): 63 | """ 64 | Initialize and set the sub/pub channels 65 | """ 66 | self.subchannel = subchannel 67 | self.pubchannel = pubchannel 68 | self.redis: async_redis.Redis 69 | self.pub_sub = None 70 | 71 | async def do_subscribe(self): 72 | if not self.subscribed: 73 | # By default subscribe 74 | self.sub_redis = async_redis.from_url(f"redis://{self.host}") 75 | self.pub_sub = self.sub_redis.pubsub(ignore_subscribe_messages=True) 76 | await self.pub_sub.subscribe(self.subchannel) 77 | self.subscribed = True 78 | 79 | async def publish(self, rpc_instance: RPCBase, channel=None): 80 | """ 81 | Publish redis implementation, publishes RPCBase instances. 
82 | 83 | :return: the number of receivers 84 | """ 85 | # rpc_instance should be a subclass of RPCBase 86 | # For now just check if instance of RPCBase 87 | assert isinstance(rpc_instance, RPCBase) 88 | 89 | if isinstance(rpc_instance, RPCStack): 90 | # Add subchannel to RPCStack as respond_to 91 | rpc_instance.respond_to = self.subchannel 92 | elif isinstance(rpc_instance, RPCResult) and rpc_instance.data is not None: 93 | # Customized: 94 | # result data via redis.set 95 | # result without data via redis.publish 96 | redis_key = uuid4().hex 97 | 98 | # Store the result data via key/value in redis 99 | await self.redis.set( 100 | redis_key, 101 | self.serialization.dumpb(rpc_instance.data), 102 | ex=RESULT_EXPIRE_TIME, 103 | ) 104 | 105 | # Set redis_key and remove data, since 106 | # this is stored in redis now 107 | rpc_instance.data = {"redis_key": redis_key} 108 | 109 | # Override the pub_channel with channel, if set 110 | pub_channel = channel if channel is not None else self.pubchannel 111 | 112 | # Publish rpc_instance and return number of listeners 113 | return await self.redis.publish( 114 | pub_channel, self.serialization.dumpb(rpc_instance) 115 | ) 116 | 117 | async def get_data(self, redis_key, delete=True): 118 | """ 119 | Helper function to get data by redis_key, by default 120 | delete the data after retrieval. 121 | """ 122 | data = self.serialization.loadb(await self.redis.get(redis_key)) 123 | if delete: 124 | await self.redis.delete(redis_key) 125 | return data 126 | 127 | async def _process_msg(self, msg, on_rpc_event_callback, channel_name): 128 | """ 129 | Interal message processing, is called on every received 130 | message via the subscription. 131 | """ 132 | event = self.serialization.loadb(msg) 133 | 134 | # rpc_instance should be a subclass of RPCBase 135 | # For now just check if instance of RPCBase 136 | assert isinstance(event, RPCBase) 137 | 138 | if on_rpc_event_callback: 139 | if isinstance(event, RPCResult): 140 | # Customized: 141 | # result data via redis.set 142 | # result without data via redis.publish 143 | 144 | # Get data from redis and put it on the event 145 | if isinstance(event.data, dict) and "redis_key" in event.data: 146 | event.data = await self.get_data(event.data["redis_key"]) 147 | 148 | await on_rpc_event_callback(event, channel=channel_name) 149 | 150 | async def subscribe( 151 | self, 152 | on_rpc_event_callback, 153 | channel: Optional[str] = None, 154 | redis: Optional[async_redis.Redis] = None, 155 | ): 156 | """ 157 | Redis implementation for subscribe method, receives messages from 158 | subscription channel. 159 | 160 | Note: does block in while loop until .unsubscribe() is called. 161 | """ 162 | pub_sub = self.pub_sub 163 | if redis is not None: 164 | pub_sub = redis.pubsub(ignore_subscribe_messages=True) 165 | 166 | if channel is not None: 167 | await pub_sub.subscribe(channel) 168 | async with pub_sub as ps: 169 | # Inside a while loop, wait for incoming events. 170 | async for message in ps.listen(): 171 | if message is not None: 172 | await self._process_msg( 173 | message["data"], on_rpc_event_callback, message["channel"] 174 | ) 175 | self.subscribed = False 176 | 177 | async def unsubscribe(self): 178 | """ 179 | Redis implementation for unsubscribe. 
Stops subscription and breaks 180 | out of the while loop in .subscribe() 181 | """ 182 | if self.subscribed: 183 | await self.pub_sub.unsubscribe() 184 | self.subscribed = False 185 | await self.sub_redis.close() 186 | 187 | async def close(self): 188 | """ 189 | Stop subscription & close everything 190 | """ 191 | await self.unsubscribe() 192 | await self.redis.close() 193 | -------------------------------------------------------------------------------- /asyncio_rpc/exceptions.py: -------------------------------------------------------------------------------- 1 | class NotReceived(Exception): 2 | """ 3 | Message has not been received by anyone 4 | """ 5 | 6 | 7 | class WrappedException(Exception): 8 | """ 9 | Exception raised when an exception raised 10 | by an RPC call could not be resolved; the inner message 11 | shows the exception raised on the RPC server. 12 | """ 13 | 14 | 15 | class RPCTimeoutError(Exception): 16 | """ 17 | Timeout error raised by the RPC client if the result 18 | took too long to arrive. 19 | """ 20 | 21 | 22 | class SubscriptionClosed(Exception): 23 | """ 24 | Raised when the subscription has already been closed 25 | """ 26 | -------------------------------------------------------------------------------- /asyncio_rpc/models.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from typing import Any, Dict, List 3 | 4 | 5 | class RPCBase: 6 | """ 7 | Base class to identify all RPC dataclasses 8 | """ 9 | 10 | 11 | @dataclass 12 | class RPCMessage(RPCBase): 13 | """ 14 | Message definition. Can be used to publish some data 15 | 16 | :param uid: the unique id for this RPC message 17 | :param namespace: the namespace 18 | :param data: the data to send (should not be too much data!) 19 | """ 20 | 21 | uid: str 22 | namespace: str 23 | data: Any 24 | 25 | 26 | @dataclass 27 | class RPCCall(RPCBase): 28 | """ 29 | Remote procedure call definition 30 | 31 | :param func_name: the name of the function to execute 32 | :param func_args: args for the function 33 | :param func_kwargs: kwargs for the function 34 | """ 35 | 36 | func_name: str 37 | func_args: List 38 | func_kwargs: Dict 39 | 40 | 41 | @dataclass 42 | class RPCStack(RPCBase): 43 | """ 44 | Represents a remote procedure function call stack, for example the 45 | call: 46 | gr.nodes.subset('2D_open_water').filter(id__lt=100).reproject_to('4326').coordinates 47 | 48 | Would produce the following stack: 49 | [ 50 | RPCCall('nodes', [], {}), 51 | RPCCall('subset', ['2D_open_water'], {}), 52 | RPCCall('filter', [], {'id__lt': 100}), 53 | RPCCall('reproject_to', ['4326'], {}), 54 | RPCCall('coordinates', [], {}), 55 | ] 56 | 57 | Note: properties are also encoded as function calls.
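A complete request for the stack above could be constructed like this (illustrative
sketch; the namespace 'gr', the 300 second timeout and the uuid4-based uid are
example values, not requirements)::

    RPCStack(
        uid=uuid4().hex,
        namespace='gr',
        timeout=300,
        stack=[
            RPCCall('nodes', [], {}),
            RPCCall('subset', ['2D_open_water'], {}),
            RPCCall('filter', [], {'id__lt': 100}),
            RPCCall('reproject_to', ['4326'], {}),
            RPCCall('coordinates', [], {}),
        ],
    )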
58 | """ 59 | 60 | uid: str 61 | namespace: str 62 | timeout: float 63 | stack: List[RPCCall] 64 | respond_to: str = None 65 | 66 | 67 | @dataclass 68 | class RPCSubStack(RPCStack): 69 | """ 70 | Same as RPCStack, but no for subscribing to a generator 71 | """ 72 | 73 | 74 | @dataclass 75 | class RPCUnSubStack(RPCStack): 76 | """ 77 | Unsubscribe call 78 | """ 79 | 80 | 81 | @dataclass 82 | class RPCResult(RPCBase): 83 | """ 84 | Represents the result of a remote procedure call (RPCStack) 85 | 86 | :param uid: is set to RPCStack.uid 87 | :param namespace: is set to RPCStack.namespace 88 | :param data: is the result of the RPCStack call 89 | """ 90 | 91 | uid: str 92 | namespace: str 93 | data: Any 94 | 95 | 96 | @dataclass 97 | class RPCPubResult(RPCResult): 98 | """ 99 | Same as RPCResult, but now for publications 100 | """ 101 | 102 | 103 | @dataclass 104 | class RPCException(RPCBase): 105 | """ 106 | Represents an exception raised during executing the remote procedure 107 | call server-side 108 | 109 | :param uid: is set to RPCStack.uid 110 | :param namespace: is set to RPCStack.namespace 111 | :param classname: the classname of the exception raised 112 | :param exc_args: the exception args as a list 113 | """ 114 | 115 | uid: str 116 | namespace: str 117 | classname: str # noqa 118 | exc_args: List 119 | 120 | 121 | # List of serializable model to 122 | # register in the serialization 123 | SERIALIZABLE_MODELS = ( 124 | RPCMessage, 125 | RPCCall, 126 | RPCStack, 127 | RPCSubStack, 128 | RPCUnSubStack, 129 | RPCResult, 130 | RPCPubResult, 131 | RPCException, 132 | ) 133 | -------------------------------------------------------------------------------- /asyncio_rpc/pubsub.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import builtins 3 | from typing import Any, AsyncIterator 4 | 5 | from asyncio_rpc.exceptions import WrappedException 6 | from asyncio_rpc.models import RPCException, RPCPubResult, RPCStack, RPCUnSubStack 7 | 8 | 9 | class Publisher: 10 | def __init__(self, server, rpc_stack: RPCStack): 11 | self._rpc_stack = rpc_stack 12 | self._server = server 13 | self._is_active = True 14 | 15 | def set_is_active(self, is_active: bool): 16 | self._is_active = is_active 17 | 18 | @property 19 | def is_active(self): 20 | return self._is_active 21 | 22 | @property 23 | def rpc_stack(self): 24 | return self._rpc_stack 25 | 26 | async def publish(self, data: Any): 27 | """ 28 | Publish data to the client 29 | """ 30 | if not self.is_active: 31 | return 0 32 | 33 | # Publish the data as partial data 34 | publication = RPCPubResult(self._rpc_stack.uid, self._rpc_stack.namespace, data) 35 | receiver_count = await self._server.rpc_commlayer.publish( 36 | publication, channel=self._rpc_stack.respond_to 37 | ) 38 | 39 | if receiver_count == 0: 40 | self._is_active = False 41 | self._server.publishers.pop(self._rpc_stack.uid, None) 42 | return 0 43 | 44 | return receiver_count 45 | 46 | def __del__(self): 47 | self._server.publishers.pop(self._rpc_stack.uid, None) 48 | 49 | 50 | class Subscription: 51 | def __init__(self, client, rpc_stack: RPCStack): 52 | self.queue = asyncio.Queue() 53 | self._client = client 54 | self._rpc_stack = rpc_stack 55 | 56 | async def enqueue(self, data: Any): 57 | await self.queue.put(data) 58 | 59 | async def close(self): 60 | self._client.subscriptions.pop(self._rpc_stack.uid, None) 61 | 62 | rpc_unsub_stack = RPCUnSubStack( 63 | self._rpc_stack.uid, self._rpc_stack.namespace, 300, [None] 64 | ) 65 | 66 | # 
Publish to RPCServer 67 | await self._client.rpc_commlayer.publish(rpc_unsub_stack) 68 | 69 | self.queue._queue.clear() 70 | self.queue._finished.set() 71 | self.queue._unfinished_tasks = 0 72 | await self.queue.put(b"STOP") 73 | 74 | async def enumerate(self) -> AsyncIterator[Any]: 75 | while True: 76 | result = await self.queue.get() 77 | if result == b"STOP": 78 | break 79 | 80 | if isinstance(result, RPCException): 81 | # Try to resolve builtin errors 82 | try: 83 | exception_class = getattr(builtins, result.classname) 84 | except AttributeError: 85 | # Default to WrappedException if 86 | # returned exception is not a builtin error 87 | exception_class = WrappedException 88 | 89 | raise exception_class(*result.exc_args) 90 | 91 | yield result.data 92 | 93 | def __del__(self): 94 | self._client.subscriptions.pop(self._rpc_stack.uid, None) 95 | -------------------------------------------------------------------------------- /asyncio_rpc/serialization/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nens/asyncio-rpc/c86257df1af3f9c129444c046557169ab4fd1548/asyncio_rpc/serialization/__init__.py -------------------------------------------------------------------------------- /asyncio_rpc/serialization/base.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | from typing import Any 3 | 4 | 5 | class AbstractHandler(ABC): 6 | ext_type: int = None # Unique int 7 | obj_type: Any = None # Unique object type 8 | 9 | @classmethod 10 | @abstractmethod 11 | def packb(cls, instance: Any) -> bytes: 12 | """ 13 | Pack the instance into bytes 14 | """ 15 | 16 | @classmethod 17 | @abstractmethod 18 | def unpackb(cls, data: bytes) -> Any: 19 | """ 20 | Unpack the data back into an instance 21 | """ 22 | -------------------------------------------------------------------------------- /asyncio_rpc/serialization/msgpack.py: -------------------------------------------------------------------------------- 1 | import dataclasses 2 | from datetime import datetime 3 | from io import BytesIO 4 | from typing import Any 5 | 6 | import msgpack 7 | from lz4.frame import compress as lz4_compress 8 | from lz4.frame import decompress as lz4_decompress 9 | 10 | from asyncio_rpc.serialization.base import AbstractHandler 11 | 12 | try: 13 | import numpy as np 14 | except ImportError: 15 | np = None 16 | 17 | 18 | # Maximum byte lengths for str/ext 19 | MAX_STR_LEN = 2147483647 20 | MAX_EXT_LEN = 2147483647 21 | 22 | 23 | # Internal registry 24 | # TODO: figure out if it is ok to do 25 | # this on the module... 26 | REGISTRY = {"obj_types": {}, "ext_types": {}, "serializables": {}} 27 | 28 | 29 | def register(obj_def): 30 | """ 31 | Register dataclasses or custom handlers in the registry. 32 | 33 | For example obj_def and required methods, see NumpyArray below 34 | """ 35 | if dataclasses.is_dataclass(obj_def): 36 | # Handle dataclasses, every dataclass needs to be registered 37 | # via register. 38 | class_name = obj_def.__name__ 39 | REGISTRY["serializables"][class_name] = obj_def 40 | REGISTRY["obj_types"][obj_def] = DataclassHandler 41 | 42 | # Register the DataclassHandler if not done already 43 | if DataclassHandler.ext_type not in REGISTRY["ext_types"]: 44 | REGISTRY["ext_types"][DataclassHandler.ext_type] = DataclassHandler 45 | else: 46 | # Assume the obj_def has obj_type and ext_type, as can be 47 | # seen below. 
48 | assert hasattr(obj_def, "obj_type") and hasattr(obj_def, "ext_type") 49 | REGISTRY["obj_types"][obj_def.obj_type] = obj_def 50 | REGISTRY["ext_types"][obj_def.ext_type] = obj_def 51 | 52 | 53 | if np is not None: 54 | 55 | class NumpyArrayHandler(AbstractHandler): 56 | """ 57 | Use np.save and np.load to serialize/deserialize 58 | numpy array's. 59 | """ 60 | 61 | ext_type = 1 62 | obj_type = np.ndarray 63 | 64 | # Note: 65 | # More generic approach, but a bit slower than 66 | # packing it as a list/tuple with (dtype, shape, bytes) 67 | @classmethod 68 | def packb(cls, array: np.ndarray) -> bytes: 69 | buf = BytesIO() 70 | np.save(buf, array) 71 | buf.seek(0) 72 | return buf.read() 73 | 74 | @classmethod 75 | def unpackb(cls, data: bytes) -> np.ndarray: 76 | buf = BytesIO(data) 77 | buf.seek(0) 78 | return np.load(buf) 79 | 80 | class NumpyStructuredArrayHandler(NumpyArrayHandler): 81 | ext_type = 2 82 | obj_type = np.void # = the type of structured array's... 83 | 84 | class NumpyInt32Handler(AbstractHandler): 85 | """ 86 | Serialize np.int32 87 | """ 88 | 89 | ext_type = 6 90 | obj_type = np.int32 91 | 92 | @classmethod 93 | def packb(cls, data: np.int32) -> bytes: 94 | return data.tobytes() 95 | 96 | @classmethod 97 | def unpackb(cls, data: bytes) -> np.int32: 98 | return np.frombuffer(data, dtype=np.int32)[0] 99 | 100 | class NumpyInt64Handler(AbstractHandler): 101 | """ 102 | Serialize np.int64 103 | """ 104 | 105 | ext_type = 7 106 | obj_type = np.int64 107 | 108 | @classmethod 109 | def packb(cls, data: np.int64) -> bytes: 110 | return data.tobytes() 111 | 112 | @classmethod 113 | def unpackb(cls, data: bytes) -> np.int64: 114 | return np.frombuffer(data, dtype=np.int64)[0] 115 | 116 | register(NumpyArrayHandler) 117 | register(NumpyStructuredArrayHandler) 118 | register(NumpyInt32Handler) 119 | register(NumpyInt64Handler) 120 | 121 | 122 | class DatetimeHandler: 123 | """ 124 | Serialize datetime instances as timestamps. 125 | """ 126 | 127 | ext_type = 3 128 | obj_type = datetime 129 | 130 | @classmethod 131 | def packb(cls, dt: datetime) -> bytes: 132 | return b"%f" % dt.timestamp() 133 | 134 | @classmethod 135 | def unpackb(cls, data: bytes) -> datetime: 136 | return datetime.fromtimestamp(float(data)) 137 | 138 | 139 | class DataclassHandler: 140 | """ 141 | Serialize dataclasses by serializing the .__dict__ 142 | of dataclasses. This allows recursively serialization for example: 143 | dataclasses in dataclasses or Numpy array's in dataclasses. 
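A round trip could look like this (sketch; XY is a hypothetical dataclass used
only for illustration)::

    @dataclasses.dataclass
    class XY:
        x: int
        y: int

    register(XY)
    packed = dumpb(XY(x=1, y=2))
    assert loadb(packed) == XY(x=1, y=2)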
144 | """ 145 | 146 | ext_type = 4 147 | 148 | @classmethod 149 | def packb(cls, obj) -> bytes: 150 | dataclass_name = obj.__class__.__name__ 151 | if isinstance(dataclass_name, str): 152 | dataclass_name = dataclass_name 153 | 154 | # Recursively process dataclasses of the dataclass, 155 | # serialize as tuple(dataclass_name, __dict__) 156 | return dumpb((dataclass_name, obj.__dict__), do_compress=False) 157 | 158 | @classmethod 159 | def unpackb(cls, data): 160 | # Recursively process the contents of the dataclass 161 | classname, data = loadb(data, do_decompress=False, raw=False) 162 | # Return registered class or Serializable (as default) 163 | assert ( 164 | classname in REGISTRY["serializables"] 165 | ), f"class {classname} not yet registered" 166 | klass = REGISTRY["serializables"][classname] 167 | return klass(**data) 168 | 169 | 170 | class SliceHandler: 171 | """ 172 | Serialize slices 173 | """ 174 | 175 | ext_type = 5 176 | obj_type = slice 177 | 178 | @classmethod 179 | def packb(cls, obj) -> bytes: 180 | return dumpb((obj.start, obj.stop, obj.step)) 181 | 182 | @classmethod 183 | def unpackb(cls, data): 184 | return slice(*loadb(data)) 185 | 186 | 187 | def default(obj: Any): 188 | """ 189 | Serialize (dumpb) hook for obj types that msgpack does not 190 | process out of the box. 191 | """ 192 | if type(obj) in REGISTRY["obj_types"]: 193 | # If the type is in the registry, use the 194 | # handler to serialize the obj 195 | handler = REGISTRY["obj_types"][type(obj)] 196 | return msgpack.ExtType(handler.ext_type, handler.packb(obj)) 197 | 198 | raise TypeError("Unknown type: %r" % (obj,)) 199 | 200 | 201 | def ext_hook(ext_type: int, bytes_data: bytes): 202 | """ 203 | Deserialize (loadb) hook for ext_types that are 204 | not default in msgpack. 205 | 206 | ext_types are user defined numbers for special 207 | deserialization handling. 
208 | """ 209 | if ext_type in REGISTRY["ext_types"]: 210 | # If the ext_type is in the registry, use the 211 | # handler to deserialize the bytes_data 212 | handler = REGISTRY["ext_types"][ext_type] 213 | return handler.unpackb(bytes_data) 214 | 215 | raise TypeError("Unknown ext_type: %r" % (ext_type,)) # pragma: no cover 216 | 217 | 218 | def do_nothing(x): 219 | return x 220 | 221 | 222 | def dumpb( 223 | instance: Any, do_compress=True, compress_func=lz4_compress, use_bin_type=True 224 | ): 225 | """ 226 | Dump/pack instance with msgpack to bytes 227 | """ 228 | if not do_compress: 229 | compress_func = do_nothing 230 | return compress_func( 231 | msgpack.packb(instance, default=default, use_bin_type=use_bin_type) 232 | ) 233 | 234 | 235 | def loadb( 236 | packed: bytes, 237 | do_decompress=True, 238 | decompress_func=lz4_decompress, 239 | raw=False, 240 | strict_map_key=True, 241 | ): 242 | """ 243 | Load/unpack bytes back to instance 244 | """ 245 | if packed is None: 246 | return None 247 | if not do_decompress: 248 | decompress_func = do_nothing 249 | return msgpack.unpackb( 250 | decompress_func(packed), 251 | ext_hook=ext_hook, 252 | max_ext_len=MAX_EXT_LEN, 253 | max_str_len=MAX_STR_LEN, 254 | raw=raw, 255 | strict_map_key=strict_map_key, 256 | ) 257 | 258 | 259 | # Register custom handlers 260 | register(DatetimeHandler) 261 | register(SliceHandler) 262 | 263 | 264 | try: 265 | from asyncio_rpc.serialization.shapely_models import ( 266 | GeometryCollectionHandler, 267 | LinearRingHandler, 268 | LineStringHandler, 269 | MultiLineStringHandler, 270 | MultiPointHandler, 271 | MultiPolygonHandler, 272 | PointHandler, 273 | PolygonHandler, 274 | ) 275 | 276 | register(PointHandler) 277 | register(LineStringHandler) 278 | register(LinearRingHandler) 279 | register(PolygonHandler) 280 | register(MultiPointHandler) 281 | register(MultiLineStringHandler) 282 | register(MultiPolygonHandler) 283 | register(GeometryCollectionHandler) 284 | except ImportError: 285 | # Shapely is not installed, skip shapely handlers 286 | pass 287 | -------------------------------------------------------------------------------- /asyncio_rpc/serialization/shapely_models.py: -------------------------------------------------------------------------------- 1 | try: 2 | import shapely as shp 3 | from shapely.geometry import ( 4 | GeometryCollection, 5 | LinearRing, 6 | LineString, 7 | MultiLineString, 8 | MultiPoint, 9 | MultiPolygon, 10 | Point, 11 | Polygon, 12 | ) 13 | from shapely.geometry.base import BaseGeometry 14 | from shapely.wkb import dumps, loads 15 | 16 | from asyncio_rpc.serialization.base import AbstractHandler 17 | except ImportError: 18 | shp = None 19 | 20 | 21 | if shp is not None: 22 | 23 | class ShapelyBaseHandler(AbstractHandler): 24 | """ 25 | Base handler for all Shapely geometry objects. 26 | Uses WKB (Well-Known Binary) format for serialization. 
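For example (sketch; assumes shapely is installed, in which case
asyncio_rpc.serialization.msgpack registers the handlers below on import)::

    from shapely.geometry import Point

    from asyncio_rpc.serialization.msgpack import dumpb, loadb

    packed = dumpb(Point(1.0, 2.0))
    assert loadb(packed).equals(Point(1.0, 2.0))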
27 | """ 28 | 29 | ext_type = 100 # Starting ext_type for Shapely objects 30 | 31 | @classmethod 32 | def packb(cls, geom: BaseGeometry) -> bytes: 33 | """Pack a shapely geometry into bytes using WKB format""" 34 | return dumps(geom) 35 | 36 | @classmethod 37 | def unpackb(cls, data: bytes): 38 | """Unpack bytes into a shapely geometry using WKB format""" 39 | return loads(data) 40 | 41 | class PointHandler(ShapelyBaseHandler): 42 | """Handler for Shapely Point objects""" 43 | 44 | ext_type = 101 45 | obj_type = Point 46 | 47 | class LineStringHandler(ShapelyBaseHandler): 48 | """Handler for Shapely LineString objects""" 49 | 50 | ext_type = 102 51 | obj_type = LineString 52 | 53 | class LinearRingHandler(ShapelyBaseHandler): 54 | """Handler for Shapely LinearRing objects""" 55 | 56 | ext_type = 103 57 | obj_type = LinearRing 58 | 59 | class PolygonHandler(ShapelyBaseHandler): 60 | """Handler for Shapely Polygon objects""" 61 | 62 | ext_type = 104 63 | obj_type = Polygon 64 | 65 | class MultiPointHandler(ShapelyBaseHandler): 66 | """Handler for Shapely MultiPoint objects""" 67 | 68 | ext_type = 105 69 | obj_type = MultiPoint 70 | 71 | class MultiLineStringHandler(ShapelyBaseHandler): 72 | """Handler for Shapely MultiLineString objects""" 73 | 74 | ext_type = 106 75 | obj_type = MultiLineString 76 | 77 | class MultiPolygonHandler(ShapelyBaseHandler): 78 | """Handler for Shapely MultiPolygon objects""" 79 | 80 | ext_type = 107 81 | obj_type = MultiPolygon 82 | 83 | class GeometryCollectionHandler(ShapelyBaseHandler): 84 | """Handler for Shapely GeometryCollection objects""" 85 | 86 | ext_type = 108 87 | obj_type = GeometryCollection 88 | -------------------------------------------------------------------------------- /asyncio_rpc/server.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | from typing import List 4 | 5 | from asyncio_rpc.commlayers.base import AbstractRPCCommLayer 6 | from asyncio_rpc.models import ( 7 | RPCCall, 8 | RPCException, 9 | RPCResult, 10 | RPCStack, 11 | RPCSubStack, 12 | RPCUnSubStack, 13 | ) 14 | from asyncio_rpc.pubsub import Publisher 15 | 16 | logger = logging.getLogger("asyncio-rpc-server") 17 | 18 | 19 | class NamespaceError(Exception): 20 | """ 21 | Exception raised when a namespace unknown or 22 | already present. 23 | """ 24 | 25 | 26 | class RPCServer(object): 27 | """ 28 | Remote procedure server class. Allows to register executors 29 | by namespace and execute RPC calls from a RPCClient 30 | """ 31 | 32 | def __init__(self, rpc_commlayer: AbstractRPCCommLayer = None): 33 | assert isinstance(rpc_commlayer, AbstractRPCCommLayer) 34 | self.queue = asyncio.Queue() 35 | self._alive = True 36 | 37 | # Allow multiple executors to be registered by 38 | # namespace 39 | self.registry = {} 40 | self.publishers = {} 41 | self.rpc_commlayer = rpc_commlayer 42 | logger.debug("Initialized RPCServer") 43 | 44 | def register_models(self, models): 45 | """ 46 | Register all given models to the rpc_commlayer serialization 47 | 48 | Intended usage is to register dataclasses 49 | """ 50 | for model in models: 51 | self.rpc_commlayer.serialization.register(model) 52 | 53 | def register(self, executor): 54 | """ 55 | Register an executor for a namespace, the namespace 56 | should be unique and is used to route RPC calls from 57 | the client to the correct executor in the registry of 58 | the RPCServer. 
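A minimal executor could look like this (illustrative sketch; the 'math' namespace
and the method body are made up — an executor needs a namespace attribute and an
async rpc_call(stack) coroutine, plus subscribe_call for subscriptions)::

    class MathExecutor:
        namespace = 'math'

        async def rpc_call(self, stack):
            # Resolve and execute the RPCCall items in the stack and
            # return the final result to be sent back to the client.
            ...

    server.register(MathExecutor())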
59 | """ 60 | assert hasattr(executor, "namespace") 61 | 62 | if executor.namespace in self.registry: 63 | raise NamespaceError("Namespace already exists") 64 | 65 | # Register executor for this namespaces 66 | self.registry[executor.namespace] = executor 67 | 68 | async def rpc_call(self, rpc_func_stack: RPCStack): 69 | """ 70 | Incoming rpc_call, execute it via the registered 71 | executor and return the result or exception 72 | """ 73 | assert isinstance(rpc_func_stack, RPCStack) 74 | logger.debug( 75 | "Received RPC call rpc_func_stack %s, %s", 76 | rpc_func_stack.uid, 77 | rpc_func_stack, 78 | ) 79 | 80 | # Create a default result 81 | result = RPCResult( 82 | uid=rpc_func_stack.uid, namespace=rpc_func_stack.namespace, data=None 83 | ) 84 | 85 | if rpc_func_stack.namespace not in self.registry: 86 | logger.debug("Unknown namespace for rpc_func_stack: %s", rpc_func_stack.uid) 87 | raise NamespaceError("Unknown namespace") 88 | 89 | executor = self.registry[rpc_func_stack.namespace] 90 | 91 | try: 92 | # Wait for result from executor 93 | logger.debug( 94 | "Going to run executor for rpc_func_stack: %s", rpc_func_stack.uid 95 | ) 96 | result.data = await asyncio.wait_for( 97 | executor.rpc_call(rpc_func_stack.stack), timeout=rpc_func_stack.timeout 98 | ) 99 | logger.debug( 100 | "Got result for rpc_func_stack: %s, %s", rpc_func_stack.uid, result 101 | ) 102 | except Exception as e: 103 | # For now catch all exceptions here... 104 | # TODO: debug mode with stacktrace 105 | result = RPCException( 106 | uid=rpc_func_stack.uid, 107 | namespace=rpc_func_stack.namespace, 108 | classname=e.__class__.__name__, 109 | exc_args=e.args, 110 | ) 111 | logger.debug( 112 | "Exception occurred for rpc_funct_stack: %s, %s", rpc_func_stack.uid, e 113 | ) 114 | 115 | return result 116 | 117 | async def subscribe_call(self, rpc_sub_stack: RPCSubStack): 118 | assert isinstance(rpc_sub_stack, RPCSubStack) 119 | if rpc_sub_stack.namespace not in self.registry: 120 | raise NamespaceError("Unknown namespace") 121 | 122 | executor = self.registry[rpc_sub_stack.namespace] 123 | if not hasattr(executor, "subscribe_call"): 124 | raise NotImplementedError( 125 | f"Executor for namespace: {rpc_sub_stack.namespace} has " 126 | f"no subscribe_call function" 127 | ) 128 | 129 | publisher = Publisher(self, rpc_sub_stack) 130 | self.publishers[rpc_sub_stack.uid] = publisher 131 | 132 | # Create task for this publisher 133 | asyncio.create_task(executor.subscribe_call(publisher)) 134 | 135 | async def _on_rpc_event(self, rpc_func_stack: RPCStack, channel: bytes = None): 136 | """ 137 | Callback function sent to rpc_commlayer, is called 138 | when a RPCStack is received by the rpc_commlayer subscription 139 | """ 140 | await self.queue.put((rpc_func_stack, channel)) 141 | 142 | async def _process_queue(self): 143 | """ 144 | Background queue processing function, processes 145 | the internal self.queue until b'END' is received 146 | 147 | 148 | if on_rpc_message has been set, it will be called 149 | whenever a RPCMessage is popped from the queue. 
150 | """ 151 | 152 | while self._alive: 153 | item = await self.queue.get() 154 | 155 | if item == b"END": 156 | break 157 | 158 | rpc_func_stack, channel = item 159 | 160 | assert isinstance(rpc_func_stack, RPCStack) 161 | 162 | logger.debug( 163 | "Processing rpcstack %s, %s", rpc_func_stack.uid, rpc_func_stack 164 | ) 165 | 166 | if isinstance(rpc_func_stack, RPCSubStack): 167 | try: 168 | # Process rpc_func_call_stack 169 | await self.subscribe_call(rpc_func_stack) 170 | except Exception as e: 171 | # Log everything that is not an 172 | # instance of RPCException 173 | if not isinstance(e, RPCException): 174 | logger.exception(e) 175 | result = RPCException( 176 | uid=rpc_func_stack.uid, 177 | namespace=rpc_func_stack.namespace, 178 | classname=e.__class__.__name__, 179 | exc_args=e.args, 180 | ) 181 | # Publish exception 182 | await self.rpc_commlayer.publish( 183 | result, channel=rpc_func_stack.respond_to 184 | ) 185 | elif isinstance(rpc_func_stack, RPCUnSubStack): 186 | publisher = self.publishers.pop(rpc_func_stack.uid, None) 187 | if publisher is not None: 188 | publisher.set_is_active(False) 189 | else: 190 | try: 191 | # Process rpc_func_call_stack 192 | result = await self.rpc_call(rpc_func_stack) 193 | 194 | logger.debug( 195 | "Publishing result for %s, %s", 196 | rpc_func_stack.uid, 197 | rpc_func_stack, 198 | ) 199 | # Publish result of rpc call 200 | await self.rpc_commlayer.publish( 201 | result, channel=rpc_func_stack.respond_to 202 | ) 203 | logger.debug("Publishing done for %s", rpc_func_stack.uid) 204 | except Exception as e: 205 | logger.debug("Error occured for %s: %s", rpc_func_stack.uid, e) 206 | # Log everything that is not an 207 | # instance of RPCException 208 | if not isinstance(e, RPCException): 209 | logger.exception(e) 210 | 211 | result = RPCException( 212 | uid=rpc_func_stack.uid, 213 | namespace=rpc_func_stack.namespace, 214 | classname=e.__class__.__name__, 215 | exc_args=e.args, 216 | ) 217 | # Try to publish error 218 | await self.rpc_commlayer.publish( 219 | result, channel=rpc_func_stack.respond_to 220 | ) 221 | 222 | async def serve(self): 223 | """ 224 | Main entry point for RPCServer. 225 | 226 | Starts RPCServer background processing, blocks 227 | until self.rpc_commlayer.unsubscribe() is called. 
228 | """ 229 | task_args_map = { 230 | self.rpc_commlayer.subscribe: [self._on_rpc_event], 231 | self._process_queue: [], 232 | } 233 | 234 | # create the main tasks 235 | main_tasks = { 236 | asyncio.ensure_future(coro(*args)): (coro, args) 237 | for coro, args in task_args_map.items() 238 | } 239 | 240 | running = set(main_tasks.keys()) 241 | 242 | except_cnt = -1 243 | 244 | while running: 245 | except_cnt += 1 246 | finished, running = await asyncio.wait( 247 | running, return_when=asyncio.FIRST_EXCEPTION 248 | ) 249 | for task in finished: 250 | if task.exception(): 251 | logger.exception(task.exception()) 252 | task.print_stack() 253 | coro, args = main_tasks[task] 254 | new_task = asyncio.ensure_future(coro(*args)) 255 | main_tasks[new_task] = (coro, args) 256 | running.add(new_task) 257 | 258 | 259 | class DefaultExecutor: 260 | """ 261 | Default executor implementation, override if necessary 262 | """ 263 | 264 | def __init__(self, namespace, instance): 265 | assert namespace is not None 266 | assert instance is not None 267 | self.namespace = namespace 268 | self.instance = instance 269 | 270 | async def subscribe_call(self, publisher: Publisher): 271 | """ 272 | Use the Publisher to publish results to the client 273 | """ 274 | # if publisher.is_active: 275 | # await publisher.publish(b'blaat') 276 | pass 277 | 278 | async def rpc_call(self, stack: List[RPCCall] = []): 279 | """ 280 | Process incoming rpc call stack. 281 | The stack can contain multiple chained function calls for example: 282 | node.filter(id=1).reproject_to('4326').data 283 | """ 284 | 285 | resource = self.instance 286 | 287 | for rpc_func_call in stack: 288 | assert isinstance(rpc_func_call, RPCCall) 289 | 290 | # Try to get the function/property from self.instance 291 | instance_attr = getattr(resource, rpc_func_call.func_name) 292 | 293 | if callable(instance_attr): 294 | # Function 295 | resource = instance_attr( 296 | *rpc_func_call.func_args, **rpc_func_call.func_kwargs 297 | ) 298 | else: 299 | # Asume property 300 | resource = instance_attr 301 | 302 | return resource 303 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.0' 2 | 3 | services: 4 | 5 | redis: 6 | image: redis:6-alpine 7 | expose: 8 | - 6379 9 | 10 | asyncio_rpc: 11 | build: 12 | context: ./ 13 | dockerfile: Dockerfile 14 | environment: 15 | - REDIS_HOST=redis 16 | depends_on: 17 | - redis 18 | volumes: 19 | - ./:/code 20 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 
2 | 3 | # -- Project information 4 | 5 | project = "Asyncio-rpc" 6 | copyright = "2023, Jelle Prins" 7 | author = "Jelle Prins" 8 | 9 | release = "0.1" 10 | version = "0.1.0" 11 | 12 | # -- General configuration 13 | 14 | extensions = [ 15 | "sphinx.ext.duration", 16 | "sphinx.ext.doctest", 17 | "sphinx.ext.autodoc", 18 | "sphinx.ext.autosummary", 19 | "sphinx.ext.intersphinx", 20 | ] 21 | 22 | intersphinx_mapping = { 23 | "python": ("https://docs.python.org/3/", None), 24 | "sphinx": ("https://www.sphinx-doc.org/en/master/", None), 25 | } 26 | intersphinx_disabled_domains = ["std"] 27 | 28 | templates_path = ["_templates"] 29 | 30 | # -- Options for HTML output 31 | 32 | html_theme = "sphinx_rtd_theme" 33 | 34 | # -- Options for EPUB output 35 | epub_show_urls = "footnote" 36 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | Welcome to asyncio-rpc documentation! 2 | ===================================== 3 | 4 | .. toctree:: 5 | :maxdepth: 3 6 | :caption: Contents: 7 | 8 | readme 9 | installation 10 | 11 | Indices and tables 12 | ================== 13 | * :ref:`genindex` 14 | * :ref:`modindex` 15 | * :ref:`search` 16 | -------------------------------------------------------------------------------- /docs/installation.rst: -------------------------------------------------------------------------------- 1 | .. highlight:: shell 2 | 3 | ============ 4 | Installation 5 | ============ 6 | 7 | Requirements 8 | ----------- 9 | 10 | asyncio-rpc requires Python 3.10 or higher. 11 | 12 | Stable release 13 | -------------- 14 | 15 | The standard asyncio-rpc distribution is pretty lightweight, installing as little dependencies 16 | as possible. Install it like this:: 17 | 18 | $ pip install asyncio-rpc 19 | 20 | If you don't have `pip`_ installed, this `Python installation guide`_ can guide 21 | you through the process. 22 | 23 | .. _pip: https://pip.pypa.io 24 | .. _Python installation guide: http://docs.python-guide.org/en/latest/starting/installation/ 25 | 26 | 27 | From sources 28 | ------------ 29 | 30 | The sources for asyncio-rpc can be downloaded from the `Github repo`_. 31 | 32 | You can either clone the public repository: 33 | 34 | .. code-block:: console 35 | 36 | $ git clone git://github.com/nens/asyncio-rpc 37 | 38 | Or download the `tarball`_: 39 | 40 | .. code-block:: console 41 | 42 | $ curl -OL https://github.com/nens/asyncio-rpc/tarball/master 43 | 44 | Once you have a copy of the source, you can install it with: 45 | 46 | .. code-block:: console 47 | 48 | $ python setup.py install 49 | 50 | 51 | .. _Github repo: https://github.com/nens/asyncio-rpc 52 | .. _tarball: https://github.com/nens/ascynio-rpc/tarball/master 53 | -------------------------------------------------------------------------------- /docs/readme.rst: -------------------------------------------------------------------------------- 1 | .. 
include:: ../README.rst 2 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | sphinx==7.1.2 2 | sphinx-rtd-theme==1.3.0rc1 -------------------------------------------------------------------------------- /examples/basic/README.rst: -------------------------------------------------------------------------------- 1 | Basic example 2 | ============= 3 | 4 | The basic example shows the code needed to expose a 'multiply' 5 | function via RPC. 6 | 7 | 8 | Running example 9 | --------------- 10 | 11 | - Run Redis on localhost. 12 | - Execute from the root directory in two different terminals: 13 | 14 | >>> python3.7 -m examples.basic.server localhost 15 | >>> python3.7 -m examples.basic.client localhost 16 | -------------------------------------------------------------------------------- /examples/basic/client.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from argparse import ArgumentParser 3 | from uuid import uuid4 4 | 5 | from asyncio_rpc.client import RPCClient 6 | from asyncio_rpc.commlayers.redis import RPCRedisCommLayer 7 | from asyncio_rpc.models import RPCCall, RPCStack 8 | from asyncio_rpc.serialization import msgpack as msgpack_serialization 9 | 10 | 11 | class ServiceClient: 12 | """ 13 | 'Proxy' class exposing the same functions as the 14 | executor on the server side. 15 | """ 16 | 17 | def __init__(self, client: RPCClient, namespace=None): 18 | """ 19 | Set the RPC client and the namespace (should be same as server side) 20 | """ 21 | assert namespace is not None 22 | assert client is not None 23 | self.client = client 24 | self.namespace = namespace 25 | 26 | async def _rpc_call(self, func_name, func_args, func_kwargs): 27 | """ 28 | Helper function to wrap a Python function call into a RPCCall. 29 | 30 | A RPCCall wraps a function by a function's: 31 | name: str 32 | args: List 33 | kwargs: Dict 34 | 35 | A RPCStack can have multiple RPCCall's. The RPCStack is sent 36 | to the RPC server. 37 | """ 38 | rpc_func_call = RPCCall(func_name, func_args, func_kwargs) 39 | rpc_func_stack = RPCStack(uuid4().hex, self.namespace, 300, [rpc_func_call]) 40 | 41 | # Let the client sent the RPCStack to the server. 42 | # The server executes the function specified 43 | # by the RPCStack and returns the result to the client. 44 | # This result is returned by this method. 45 | return await self.client.rpc_call(rpc_func_stack) 46 | 47 | async def multiply(self, x, y): 48 | """ 49 | Multiply (proxy) function. This function is executed 50 | server side and returns the returned result. 51 | """ 52 | return await self._rpc_call( 53 | func_name="multiply", func_args=[x, y], func_kwargs={} 54 | ) 55 | 56 | 57 | async def main(args): 58 | """ 59 | The RPC client (and server) need a communicationlayer to communicate. 60 | Below the default Redis implementation is used together with the 61 | default msgpack serialization. 62 | """ 63 | rpc_commlayer = await RPCRedisCommLayer.create( 64 | subchannel=b"sub", 65 | pubchannel=b"pub", 66 | host=args.redis_host, 67 | serialization=msgpack_serialization, 68 | ) 69 | 70 | # Create a nwe RPCClient with the defined commlayer 71 | rpc_client = RPCClient(rpc_commlayer) 72 | 73 | # Create a ServiceClient which is a proxy class exposing 74 | # the same methods as available server side in the executor. 
75 | service_client = ServiceClient(rpc_client, "TEST") 76 | 77 | # Execute the multiply function via RPC 78 | result = await service_client.multiply(100, 100) 79 | 80 | print(result) 81 | 82 | 83 | if __name__ == "__main__": 84 | parser = ArgumentParser() 85 | # Provide Redis host that is accessible for both client/server. 86 | parser.add_argument( 87 | "redis_host", metavar="H", type=str, help="Redis host IP address" 88 | ) 89 | args = parser.parse_args() 90 | 91 | # Create asyncio loop and execute main method 92 | loop = asyncio.get_event_loop() 93 | loop.run_until_complete(main(args)) 94 | -------------------------------------------------------------------------------- /examples/basic/server.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from argparse import ArgumentParser 3 | 4 | from asyncio_rpc.commlayers.redis import RPCRedisCommLayer 5 | from asyncio_rpc.serialization import msgpack as msgpack_serialization 6 | from asyncio_rpc.server import DefaultExecutor, RPCServer 7 | 8 | 9 | class Service: 10 | """ 11 | Class holding the real multiply function. 12 | This function is executed by the RPC Server via 13 | the DefaultExecutor. 14 | """ 15 | 16 | def multiply(self, x, y): 17 | return x * y 18 | 19 | 20 | async def main(args): 21 | """ 22 | The RPC client (and server) need a communicationlayer to communicate. 23 | Below the default Redis implementation is used together with the 24 | default msgpack serialization. 25 | """ 26 | rpc_commlayer = await RPCRedisCommLayer.create( 27 | subchannel=b"pub", 28 | pubchannel=b"sub", # Inverse of client 29 | host=args.redis_host, 30 | serialization=msgpack_serialization, 31 | ) 32 | 33 | # Create a RPC Server with the commlayer 34 | rpc_server = RPCServer(rpc_commlayer) 35 | 36 | # Register the Service above with the default executor in 37 | # the TEST namespace 38 | # 39 | # The executor receives a RPCStack from the commlayer 40 | # and will try to execute the provided function names (with 41 | # args & kwargs) 42 | executor = DefaultExecutor(namespace="TEST", instance=Service()) 43 | 44 | # Register executor. 45 | rpc_server.register(executor) 46 | 47 | print("Start serving") 48 | await rpc_server.serve() 49 | 50 | 51 | if __name__ == "__main__": 52 | parser = ArgumentParser() 53 | 54 | # Provide Redis host that is accessible for both client/server. 55 | parser.add_argument( 56 | "redis_host", metavar="H", type=str, help="Redis host IP address" 57 | ) 58 | args = parser.parse_args() 59 | 60 | # Create asyncio loop and execute main method 61 | loop = asyncio.get_event_loop() 62 | loop.run_until_complete(main(args)) 63 | -------------------------------------------------------------------------------- /examples/dataclass/README.rst: -------------------------------------------------------------------------------- 1 | Dataclasses example 2 | =================== 3 | 4 | The dataclasses example shows the code needed to expose the same 5 | 'multiply' function as the basic example, only now the function 6 | arguments are dataclasses and the function result is also 7 | a dataclass. 8 | 9 | 10 | Note: don't forget to register dataclasses, they won't be picked 11 | up by the serialization/deserialization automatically. 12 | 13 | 14 | Running example 15 | --------------- 16 | 17 | - Run Redis on localhost. 
18 | - Execute from the root directory in two different terminals: 19 | 20 | >>> python3.7 -m examples.dataclass.server localhost 21 | >>> python3.7 -m examples.dataclass.client localhost 22 | -------------------------------------------------------------------------------- /examples/dataclass/client.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from argparse import ArgumentParser 3 | from uuid import uuid4 4 | 5 | from asyncio_rpc.client import RPCClient 6 | from asyncio_rpc.commlayers.redis import RPCRedisCommLayer 7 | from asyncio_rpc.models import RPCCall, RPCStack 8 | from asyncio_rpc.serialization import msgpack as msgpack_serialization 9 | 10 | from .models import Integer, MultiplyResult 11 | 12 | # Note: This example only contains documentation for dataclasses, 13 | # for more basic documentation see the basic example. 14 | 15 | 16 | class ServiceClient: 17 | def __init__(self, client: RPCClient, namespace=None): 18 | assert namespace is not None 19 | assert client is not None 20 | self.client = client 21 | self.namespace = namespace 22 | 23 | async def _rpc_call(self, func_name, func_args, func_kwargs): 24 | rpc_func_call = RPCCall(func_name, func_args, func_kwargs) 25 | rpc_func_stack = RPCStack(uuid4().hex, self.namespace, 300, [rpc_func_call]) 26 | 27 | return await self.client.rpc_call(rpc_func_stack) 28 | 29 | async def multiply(self, x: Integer, y: Integer) -> MultiplyResult: 30 | assert isinstance(x, Integer) and isinstance(y, Integer) 31 | return await self._rpc_call( 32 | func_name="multiply", func_args=[x, y], func_kwargs={} 33 | ) 34 | 35 | 36 | async def main(args): 37 | rpc_commlayer = await RPCRedisCommLayer.create( 38 | subchannel=b"sub", 39 | pubchannel=b"pub", 40 | host=args.redis_host, 41 | serialization=msgpack_serialization, 42 | ) 43 | 44 | rpc_client = RPCClient(rpc_commlayer) 45 | 46 | # Register dataclasses to allow serialized/deserialized 47 | rpc_client.register_models([Integer, MultiplyResult]) 48 | 49 | service_client = ServiceClient(rpc_client, "TEST") 50 | 51 | # Now we can do the same thing as in the basic example only we can 52 | # use dataclasses as function arguments. 
53 | result = await service_client.multiply(Integer(100), Integer(100)) 54 | 55 | # And the result will also be a dataclass 56 | assert isinstance(result, MultiplyResult) 57 | 58 | print(result) 59 | 60 | 61 | if __name__ == "__main__": 62 | parser = ArgumentParser() 63 | parser.add_argument( 64 | "redis_host", metavar="H", type=str, help="Redis host IP address" 65 | ) 66 | args = parser.parse_args() 67 | 68 | loop = asyncio.get_event_loop() 69 | loop.run_until_complete(main(args)) 70 | -------------------------------------------------------------------------------- /examples/dataclass/models.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | 3 | 4 | @dataclass 5 | class Integer: 6 | """ 7 | Dataclass wrapper around an integer value 8 | """ 9 | 10 | value: int 11 | 12 | 13 | @dataclass 14 | class MultiplyResult: 15 | """ 16 | Dataclass wrapper around a multiplication result 17 | """ 18 | 19 | value: int 20 | -------------------------------------------------------------------------------- /examples/dataclass/server.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from argparse import ArgumentParser 3 | 4 | from asyncio_rpc.commlayers.redis import RPCRedisCommLayer 5 | from asyncio_rpc.serialization import msgpack as msgpack_serialization 6 | from asyncio_rpc.server import DefaultExecutor, RPCServer 7 | 8 | from .models import Integer, MultiplyResult 9 | 10 | 11 | class Service: 12 | """ 13 | Same service as the basic example, only now it uses 14 | dataclasses as arguments and returns a dataclass with 15 | the result. 16 | """ 17 | 18 | def multiply(self, x: Integer, y: Integer) -> MultiplyResult: 19 | return MultiplyResult(x.value * y.value) 20 | 21 | 22 | async def main(args): 23 | rpc_commlayer = await RPCRedisCommLayer.create( 24 | subchannel=b"pub", 25 | pubchannel=b"sub", # Inverse of client 26 | host=args.redis_host, 27 | serialization=msgpack_serialization, 28 | ) 29 | 30 | rpc_server = RPCServer(rpc_commlayer) 31 | 32 | # Register the Service above with the the default executor in 33 | # the TEST namespace 34 | executor = DefaultExecutor(namespace="TEST", instance=Service()) 35 | 36 | # IMPORTANT: Register dataclasses to allow serialization/deserialization 37 | rpc_server.register_models([Integer, MultiplyResult]) 38 | 39 | # Register executor 40 | rpc_server.register(executor) 41 | 42 | print("Start serving") 43 | await rpc_server.serve() 44 | 45 | 46 | if __name__ == "__main__": 47 | parser = ArgumentParser() 48 | parser.add_argument( 49 | "redis_host", metavar="H", type=str, help="Redis host IP address" 50 | ) 51 | args = parser.parse_args() 52 | 53 | loop = asyncio.get_event_loop() 54 | loop.run_until_complete(main(args)) 55 | -------------------------------------------------------------------------------- /examples/decorators/README.rst: -------------------------------------------------------------------------------- 1 | Decorator example 2 | ================= 3 | 4 | The decorator example shows the code needed to expose the same 5 | 'multiply' function as the basic example, only now the client side 6 | methods can be decoratored. 7 | 8 | 9 | Running example 10 | --------------- 11 | 12 | - Run Redis on localhost. 
13 | - Execute from the root directory in two different terminals: 14 | 15 | >>> python3.7 -m examples.decorators.server localhost 16 | >>> python3.7 -m examples.decorators.client localhost 17 | -------------------------------------------------------------------------------- /examples/decorators/client.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from argparse import ArgumentParser 3 | from uuid import uuid4 4 | 5 | from asyncio_rpc.client import RPCClient 6 | from asyncio_rpc.commlayers.redis import RPCRedisCommLayer 7 | from asyncio_rpc.models import RPCCall, RPCStack 8 | from asyncio_rpc.serialization import msgpack as msgpack_serialization 9 | 10 | # Note: This example only contains documentation for how 11 | # a decorator could be used for more basic documentation 12 | # see the basic example. 13 | 14 | 15 | def rpc_method(func): 16 | """ 17 | Decorator function that can be used to decorate 18 | (proxy) functions client side. It uses the same code as in 19 | the basic example for executing the rpc call. 20 | 21 | Note: it has drawbacks, see below under multiply. 22 | """ 23 | 24 | def rpc_method(self, *args, **kwargs): 25 | rpc_func_call = RPCCall(func.__name__, args, kwargs) 26 | rpc_func_stack = RPCStack(uuid4().hex, self.namespace, 300, [rpc_func_call]) 27 | return self.client.rpc_call(rpc_func_stack) 28 | 29 | rpc_method._is_rpc_method = True 30 | return rpc_method 31 | 32 | 33 | class ServiceClient: 34 | def __init__(self, client: RPCClient, namespace=None): 35 | assert namespace is not None 36 | assert client is not None 37 | self.client = client 38 | self.namespace = namespace 39 | 40 | @rpc_method 41 | async def multiply(self, x, y): 42 | """ 43 | The decorator takes care of sending the function 44 | name & params to the RPCServer 45 | 46 | Note: 47 | A (big) drawback of the decorator is that wrapped function 48 | do not seem to return anything. Think well 49 | before applying it everywhere.... 
50 | """ 51 | 52 | @rpc_method 53 | async def not_decorated_method(self, x, y): 54 | """ 55 | This method is not decorated and therefore 56 | should not trigger a RPC call 57 | """ 58 | 59 | 60 | async def main(args): 61 | rpc_commlayer = await RPCRedisCommLayer.create( 62 | subchannel=b"sub", 63 | pubchannel=b"pub", 64 | host=args.redis_host, 65 | serialization=msgpack_serialization, 66 | ) 67 | 68 | rpc_client = RPCClient(rpc_commlayer) 69 | 70 | service_client = ServiceClient(rpc_client, "TEST") 71 | 72 | result = await service_client.multiply(100, 100) 73 | 74 | print(result) 75 | 76 | try: 77 | await service_client.not_decorated_method(100, 100) 78 | except AttributeError as e: 79 | print(e) 80 | 81 | 82 | if __name__ == "__main__": 83 | parser = ArgumentParser() 84 | parser.add_argument( 85 | "redis_host", metavar="H", type=str, help="Redis host IP address" 86 | ) 87 | args = parser.parse_args() 88 | 89 | loop = asyncio.get_event_loop() 90 | loop.run_until_complete(main(args)) 91 | -------------------------------------------------------------------------------- /examples/decorators/server.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from argparse import ArgumentParser 3 | from typing import List 4 | 5 | from asyncio_rpc.commlayers.redis import RPCRedisCommLayer 6 | from asyncio_rpc.models import RPCCall 7 | from asyncio_rpc.serialization import msgpack as msgpack_serialization 8 | from asyncio_rpc.server import DefaultExecutor, RPCServer 9 | 10 | 11 | def rpc_method(func): 12 | """ 13 | Server side decorator for methods 14 | that need to be exposed via RPC. 15 | """ 16 | 17 | def rpc_method(*args, **kwargs): 18 | return func(*args, **kwargs) 19 | 20 | rpc_method._is_rpc_method = True 21 | return rpc_method 22 | 23 | 24 | class Service: 25 | @rpc_method 26 | def multiply(self, x, y): 27 | return x * y 28 | 29 | def not_decorated_method(self, x, y): 30 | return x * y 31 | 32 | 33 | class DecoratorFilterExecutor(DefaultExecutor): 34 | async def rpc_call(self, stack: List[RPCCall] = []): 35 | """ 36 | Process incoming rpc call stack. 
37 | The stack can contain multiple chained function calls for example: 38 | node.filter(id=1).reproject_to('4326').data 39 | """ 40 | 41 | resource = self.instance 42 | 43 | for rpc_func_call in stack: 44 | assert isinstance(rpc_func_call, RPCCall) 45 | 46 | # Try to get the function/property from self.instance 47 | instance_attr = getattr(resource, rpc_func_call.func_name) 48 | 49 | if not hasattr(instance_attr, "_is_rpc_method"): 50 | raise AttributeError("%s is not a RPC method" % rpc_func_call.func_name) 51 | 52 | if callable(instance_attr): 53 | # Function 54 | resource = instance_attr( 55 | *rpc_func_call.func_args, **rpc_func_call.func_kwargs 56 | ) 57 | else: 58 | # Asume property 59 | resource = instance_attr 60 | 61 | return resource 62 | 63 | 64 | async def main(args): 65 | rpc_commlayer = await RPCRedisCommLayer.create( 66 | subchannel=b"pub", 67 | pubchannel=b"sub", # Inverse of client 68 | host=args.redis_host, 69 | serialization=msgpack_serialization, 70 | ) 71 | 72 | rpc_server = RPCServer(rpc_commlayer) 73 | 74 | # Register the Service above with the the default executor in 75 | # the TEST namespace 76 | executor = DecoratorFilterExecutor(namespace="TEST", instance=Service()) 77 | 78 | # Register executor 79 | rpc_server.register(executor) 80 | 81 | print("Start serving") 82 | await rpc_server.serve() 83 | 84 | 85 | if __name__ == "__main__": 86 | parser = ArgumentParser() 87 | parser.add_argument( 88 | "redis_host", metavar="H", type=str, help="Redis host IP address" 89 | ) 90 | args = parser.parse_args() 91 | 92 | loop = asyncio.get_event_loop() 93 | loop.run_until_complete(main(args)) 94 | -------------------------------------------------------------------------------- /examples/stacked/README.rst: -------------------------------------------------------------------------------- 1 | Stacked example 2 | =============== 3 | 4 | The stacked example shows the code needed to expose the same 5 | 'multiply' function as the basic example, only now allowing 6 | to execute stacked function calls like: 7 | 8 | res = service_client.integer.multiply(100, 100) 9 | 10 | 11 | Running example 12 | --------------- 13 | 14 | - Run Redis on localhost. 15 | - Execute from the root directory in two different terminals: 16 | 17 | >>> python3.7 -m examples.stacked.server localhost 18 | >>> python3.7 -m examples.stacked.client localhost 19 | -------------------------------------------------------------------------------- /examples/stacked/client.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from argparse import ArgumentParser 3 | from typing import List 4 | from uuid import uuid4 5 | 6 | from asyncio_rpc.client import RPCClient 7 | from asyncio_rpc.commlayers.redis import RPCRedisCommLayer 8 | from asyncio_rpc.models import RPCCall, RPCStack 9 | from asyncio_rpc.serialization import msgpack as msgpack_serialization 10 | 11 | # Create a stacked object structure.. 12 | # res = service_client.integer.multiply(100, 100) 13 | 14 | 15 | class IntegerClient: 16 | """ 17 | Proxy class that is exposed via the ServiceClient below. 
18 | """ 19 | 20 | def __init__(self, client: RPCClient, namespace, stack: List[RPCCall]): 21 | self.client = client 22 | self.namespace = namespace 23 | self.stack = stack 24 | 25 | async def _rpc_call(self, func_name, func_args, func_kwargs): 26 | rpc_func_call = RPCCall(func_name, func_args, func_kwargs) 27 | 28 | # Add rpc_func_call to the stack of methods to be executed 29 | stack = self.stack + [rpc_func_call] 30 | 31 | rpc_func_stack = RPCStack(uuid4().hex, self.namespace, 300, stack) 32 | 33 | return await self.client.rpc_call(rpc_func_stack) 34 | 35 | async def multiply(self, x, y): 36 | return await self._rpc_call( 37 | func_name="multiply", func_args=[x, y], func_kwargs={} 38 | ) 39 | 40 | 41 | class ServiceClient: 42 | def __init__(self, client: RPCClient, namespace=None): 43 | assert namespace is not None 44 | assert client is not None 45 | self.client = client 46 | self.namespace = namespace 47 | 48 | @property 49 | def integer(self): 50 | """ 51 | Instead of providing the multiply function directly it is now available 52 | via the 'integer' property. 53 | 54 | Note that an RPCCall with 'integer' is added to the RPCStack before 55 | any functions on the IntegerClient are executed. This way 56 | server-side first 'integer' is executed before 'multiply', allowing 57 | to stack functions calls like: 58 | 59 | res = service_client.integer.multiply(100, 100) 60 | """ 61 | return IntegerClient(self.client, self.namespace, [RPCCall("integer", (), {})]) 62 | 63 | 64 | async def main(args): 65 | rpc_commlayer = await RPCRedisCommLayer.create( 66 | subchannel=b"sub", 67 | pubchannel=b"pub", 68 | host=args.redis_host, 69 | serialization=msgpack_serialization, 70 | ) 71 | 72 | rpc_client = RPCClient(rpc_commlayer) 73 | 74 | service_client = ServiceClient(rpc_client, "TEST") 75 | 76 | # Execute the multiply on the integer 77 | result = await service_client.integer.multiply(100, 100) 78 | 79 | print(result) 80 | 81 | 82 | if __name__ == "__main__": 83 | parser = ArgumentParser() 84 | parser.add_argument( 85 | "redis_host", metavar="H", type=str, help="Redis host IP address" 86 | ) 87 | args = parser.parse_args() 88 | 89 | loop = asyncio.get_event_loop() 90 | loop.run_until_complete(main(args)) 91 | -------------------------------------------------------------------------------- /examples/stacked/server.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from argparse import ArgumentParser 3 | 4 | from asyncio_rpc.commlayers.redis import RPCRedisCommLayer 5 | from asyncio_rpc.serialization import msgpack as msgpack_serialization 6 | from asyncio_rpc.server import DefaultExecutor, RPCServer 7 | 8 | # Create a stacked object structure.. 
9 | # res = Service().integer.multiply(100, 100) 10 | 11 | 12 | class Integer: 13 | """ 14 | Server side implementation for Integer multiplication 15 | """ 16 | 17 | def multiply(self, x, y): 18 | return x * y 19 | 20 | 21 | class Service: 22 | @property 23 | def integer(self): 24 | """ 25 | Expose Integer via a property (just like the client side) 26 | """ 27 | return Integer() 28 | 29 | 30 | async def main(args): 31 | rpc_commlayer = await RPCRedisCommLayer.create( 32 | subchannel=b"pub", 33 | pubchannel=b"sub", # Inverse of client 34 | host=args.redis_host, 35 | serialization=msgpack_serialization, 36 | ) 37 | 38 | rpc_server = RPCServer(rpc_commlayer) 39 | 40 | # Register the Service above with the the default executor in 41 | # the TEST namespace 42 | executor = DefaultExecutor(namespace="TEST", instance=Service()) 43 | 44 | # Register executor 45 | rpc_server.register(executor) 46 | 47 | print("Start serving") 48 | await rpc_server.serve() 49 | 50 | 51 | if __name__ == "__main__": 52 | parser = ArgumentParser() 53 | parser.add_argument( 54 | "redis_host", metavar="H", type=str, help="Redis host IP address" 55 | ) 56 | args = parser.parse_args() 57 | 58 | loop = asyncio.get_event_loop() 59 | loop.run_until_complete(main(args)) 60 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=77.0"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [tool.poetry] 6 | name = "asyncio_rpc" 7 | version = "0.3.1.dev0" 8 | description = "Asyncio RPC client/server with redis/msgpack/dataclasses" 9 | authors = ["Jelle Prins "] 10 | license = "BSD license" 11 | readme = "README.rst" 12 | repository = "https://github.com/nens/asyncio-rpc" 13 | keywords = ["asyncio", "rpc"] 14 | classifiers = [ 15 | "Development Status :: 4 - Beta", 16 | "Intended Audience :: Developers", 17 | "License :: OSI Approved :: BSD License", 18 | "Natural Language :: English", 19 | "Programming Language :: Python :: 3", 20 | "Programming Language :: Python :: 3.10", 21 | "Topic :: Scientific/Engineering", 22 | ] 23 | packages = [ 24 | {include = "asyncio_rpc"} 25 | ] 26 | include = ["HISTORY.rst"] 27 | 28 | [project] 29 | name = "asyncio_rpc" 30 | dynamic = ["version"] 31 | description = "Asyncio RPC client/server with redis/msgpack/dataclasses" 32 | readme = "README.rst" 33 | authors = [ 34 | {name = "Jelle Prins", email = "jelle.prins@nelen-schuurmans.nl"} 35 | ] 36 | license = {text = "BSD license"} 37 | requires-python = ">=3.10" 38 | classifiers = [ 39 | "Development Status :: 4 - Beta", 40 | "Intended Audience :: Developers", 41 | "License :: OSI Approved :: BSD License", 42 | "Natural Language :: English", 43 | "Programming Language :: Python :: 3", 44 | "Programming Language :: Python :: 3.10", 45 | "Topic :: Scientific/Engineering", 46 | ] 47 | dependencies = [ 48 | "msgpack>=1.0.7", 49 | "lz4>=2.1.6", 50 | "redis[hiredis]<5.0", 51 | ] 52 | 53 | [project.optional-dependencies] 54 | test = ["pytest", "pytest-asyncio"] 55 | numpy = ["numpy==1.26.*"] 56 | shapely = ["shapely>=2.0.1"] 57 | dev = ["ipdb", "mypy", "ruff"] 58 | 59 | [project.urls] 60 | Homepage = "https://github.com/nens/asyncio-rpc" 61 | 62 | [tool.zest-releaser] 63 | release = false 64 | python-file-with-version = "asyncio_rpc/__init__.py" 65 | 66 | [tool.setuptools.dynamic] 67 | version = {attr = "asyncio_rpc.__version__"} 
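Tying back to examples/stacked above: the proxy classes there ultimately build a single RPCStack whose stack holds two RPCCalls, which the DefaultExecutor in asyncio_rpc/server.py resolves one getattr() at a time. Below is a minimal stand-alone sketch of that stack, not part of the repository; the "TEST" namespace and the timeout value of 300 are taken from the example.

from uuid import uuid4

from asyncio_rpc.models import RPCCall, RPCStack

# Equivalent of `service_client.integer.multiply(100, 100)` in examples/stacked
stack = [
    RPCCall("integer", (), {}),           # resolves the `integer` property first
    RPCCall("multiply", [100, 100], {}),  # then calls multiply() on that result
]
rpc_stack = RPCStack(uuid4().hex, "TEST", 300, stack)
# Passed to RPCClient.rpc_call(), this makes DefaultExecutor walk the stack:
# callables are invoked with their args/kwargs, non-callables are read as
# properties, and the final value is returned as the RPC result.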
-------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | asyncio_mode=auto -------------------------------------------------------------------------------- /requirements.in: -------------------------------------------------------------------------------- 1 | msgpack>=1.0.7 2 | lz4>=2.1.6 3 | numpy>=1.23 4 | redis[hiredis]<5.0 5 | shapely>=2.0.1 6 | pytest 7 | pytest-asyncio -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | # This file was autogenerated by uv via the following command: 2 | # uv pip compile requirements.in --output-file requirements.txt --no-cache --no-strip-extras 3 | hiredis==3.1.0 4 | # via redis 5 | iniconfig==2.1.0 6 | # via pytest 7 | lz4==4.4.4 8 | # via -r requirements.in 9 | msgpack==1.1.0 10 | # via -r requirements.in 11 | numpy==2.2.4 12 | # via 13 | # -r requirements.in 14 | # shapely 15 | packaging==24.2 16 | # via pytest 17 | pluggy==1.5.0 18 | # via pytest 19 | pytest==8.3.5 20 | # via 21 | # -r requirements.in 22 | # pytest-asyncio 23 | pytest-asyncio==0.26.0 24 | # via -r requirements.in 25 | redis[hiredis]==4.6.0 26 | # via -r requirements.in 27 | shapely==2.1.0 28 | # via -r requirements.in 29 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nens/asyncio-rpc/c86257df1af3f9c129444c046557169ab4fd1548/tests/__init__.py -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import pytest 4 | 5 | from asyncio_rpc.client import RPCClient 6 | from asyncio_rpc.serialization import msgpack as msgpack_serialization 7 | from asyncio_rpc.server import RPCServer 8 | 9 | from .utils import rpc_commlayer, stop_rpc_server_on_result_of 10 | 11 | 12 | @pytest.fixture 13 | async def rpc_client(): 14 | return RPCClient(await rpc_commlayer(b"pub", b"sub")) 15 | 16 | 17 | @pytest.fixture 18 | async def rpc_server(): 19 | return RPCServer(await rpc_commlayer(b"sub", b"pub")) 20 | 21 | 22 | @pytest.fixture() 23 | def serialize_deserialize(): 24 | def func(value, strict_map_key=True): 25 | return msgpack_serialization.loadb( 26 | msgpack_serialization.dumpb(value), strict_map_key=strict_map_key 27 | ) 28 | 29 | return func 30 | 31 | 32 | @pytest.fixture 33 | async def do_rpc_call(): 34 | async def wrapper( 35 | service_client, executor, func, custom_dataclasses=[], client_processing=False 36 | ): 37 | # Initialize both client & server 38 | rpc_client = RPCClient(await rpc_commlayer(b"pub", b"sub")) 39 | rpc_server = RPCServer(await rpc_commlayer(b"sub", b"pub")) 40 | 41 | service_client.client = rpc_client 42 | rpc_server.register(executor) 43 | 44 | # Register any given custom dataclasses on both ends 45 | rpc_client.register_models(custom_dataclasses) 46 | rpc_server.register_models(custom_dataclasses) 47 | 48 | # Already start subscribing, to be sure 49 | # we don't miss a message 50 | await rpc_server.rpc_commlayer.do_subscribe() 51 | 52 | # Execute rpc call and stop rpc_server when 53 | # a result has been returned 54 | async_funcs = [ 55 | stop_rpc_server_on_result_of( 56 | func, rpc_server, 
rpc_client, client_processing 57 | ), 58 | rpc_server.serve(), 59 | ] 60 | 61 | # Add rpc_client.serve if client processing 62 | if client_processing: 63 | async_funcs.append(rpc_client.serve()) 64 | 65 | try: 66 | result = (await asyncio.gather(*async_funcs))[0] 67 | finally: 68 | # Close all rpc_commlayers 69 | await rpc_client.rpc_commlayer.close() 70 | await rpc_server.rpc_commlayer.close() 71 | 72 | # Clean exit 73 | tasks = [t for t in asyncio.all_tasks() if t is not asyncio.current_task()] 74 | if tasks: 75 | await asyncio.gather(*tasks) 76 | 77 | return result 78 | 79 | return wrapper 80 | -------------------------------------------------------------------------------- /tests/test_msgpack_serialization.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | from dataclasses import dataclass 3 | 4 | import numpy as np 5 | import pytest 6 | 7 | from asyncio_rpc.serialization import msgpack as msgpack_serialization 8 | 9 | 10 | def test_dict_serialization(serialize_deserialize): 11 | value = {"1": 2, 10: 1.10} 12 | assert value == serialize_deserialize(value, strict_map_key=False) 13 | 14 | 15 | def test_byte_and_str_serialization(serialize_deserialize): 16 | value = {"1": b"2", b"10": "1.10"} 17 | assert value == serialize_deserialize(value) 18 | 19 | 20 | def test_numpy_serialization(serialize_deserialize): 21 | value = np.arange(100, dtype=np.float64) 22 | assert np.all(value == serialize_deserialize(value)) 23 | 24 | 25 | def test_datetime_serialization(serialize_deserialize): 26 | value = datetime.datetime.now() 27 | assert value == serialize_deserialize(value) 28 | 29 | 30 | def test_none_serialization(serialize_deserialize): 31 | assert serialize_deserialize(None) is None 32 | 33 | 34 | def test_none_deserialization(): 35 | assert msgpack_serialization.loadb(None) is None 36 | 37 | 38 | def test_slice_serialization(serialize_deserialize): 39 | value = slice(1, 2, 3) 40 | assert value == serialize_deserialize(value) 41 | 42 | 43 | def test_numpy_int32_serialization(serialize_deserialize): 44 | value = np.int32(123) 45 | deserialized = serialize_deserialize(value) 46 | assert value == deserialized 47 | assert type(value) is type(deserialized) 48 | 49 | 50 | def test_numpy_int64_serialization(serialize_deserialize): 51 | value = np.int64(123) 52 | deserialized = serialize_deserialize(value) 53 | assert value == deserialized 54 | assert type(value) is type(deserialized) 55 | 56 | 57 | @dataclass 58 | class DataclassTest: 59 | uid: int 60 | data: np.ndarray 61 | 62 | 63 | @dataclass 64 | class DataclassWrapper: 65 | dataclass_test: DataclassTest 66 | 67 | 68 | msgpack_serialization.register(DataclassTest) 69 | msgpack_serialization.register(DataclassWrapper) 70 | 71 | 72 | def test_dataclass_serialization(serialize_deserialize): 73 | value = DataclassTest(101, np.arange(100, dtype=np.float32)) 74 | deserialized = serialize_deserialize(value) 75 | assert value.uid == deserialized.uid 76 | assert np.all(value.data == deserialized.data) 77 | 78 | 79 | def test_dataclass_wrapper_serialization(serialize_deserialize): 80 | value = DataclassWrapper(DataclassTest(101, np.arange(100, dtype=np.float32))) 81 | deserialized = serialize_deserialize(value) 82 | assert value.dataclass_test.uid == deserialized.dataclass_test.uid 83 | assert np.all(value.dataclass_test.data == deserialized.dataclass_test.data) 84 | 85 | 86 | @dataclass 87 | class UnregisteredTest: 88 | uid: int 89 | data: np.ndarray 90 | 91 | 92 | def 
test_unregistered_serialization(serialize_deserialize): 93 | value = UnregisteredTest(101, np.arange(100, dtype=np.float32)) 94 | with pytest.raises(TypeError): 95 | serialize_deserialize(value) 96 | -------------------------------------------------------------------------------- /tests/test_pubsub.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from uuid import uuid4 3 | 4 | from asyncio_rpc.client import RPCClient 5 | from asyncio_rpc.models import RPCCall, RPCSubStack 6 | from asyncio_rpc.pubsub import Publisher 7 | from asyncio_rpc.server import RPCServer 8 | 9 | from .conftest import rpc_commlayer 10 | 11 | 12 | class Executor: 13 | """ 14 | Default executor implementation, override if necessary 15 | """ 16 | 17 | def __init__(self, namespace, instance): 18 | self.namespace = namespace 19 | self.instance = instance 20 | 21 | async def subscribe_call(self, publisher: Publisher): 22 | """ 23 | Use the Publisher to publish results to the client 24 | """ 25 | for i in range(0, 20): 26 | if publisher.is_active: 27 | await publisher.publish(i) 28 | for i in range(0, 20): 29 | if not publisher.is_active: 30 | break 31 | await publisher.publish(i) 32 | 33 | # Clean-up 34 | rpc_server: RPCServer = publisher._server 35 | await rpc_server.queue.put(b"END") 36 | await rpc_server.rpc_commlayer.unsubscribe() 37 | 38 | async def rpc_call(self, rpc_stack): 39 | pass 40 | 41 | 42 | async def test_pubsub(): 43 | rpc_client = RPCClient(await rpc_commlayer(b"pub", b"sub")) 44 | rpc_server = RPCServer(await rpc_commlayer(b"sub", b"pub")) 45 | executor = Executor("PUBSUB", None) 46 | rpc_server.register(executor) 47 | 48 | await rpc_server.rpc_commlayer.do_subscribe() 49 | 50 | rpc_func_call = RPCCall("get_item", [1], {}) 51 | rpc_func_stack = RPCSubStack(uuid4().hex, "PUBSUB", 300, [rpc_func_call]) 52 | 53 | async def process_subscriber(rpc_func_stack): 54 | subscriber = await rpc_client.subscribe_call(rpc_func_stack) 55 | 56 | async for item in subscriber.enumerate(): 57 | if item > 5: 58 | await subscriber.close() 59 | 60 | # Clean-up 61 | await rpc_client.queue.put(b"END") 62 | await rpc_client.rpc_commlayer.unsubscribe() 63 | 64 | funcs = [ 65 | rpc_server.serve(), 66 | rpc_client.serve(), 67 | process_subscriber(rpc_func_stack), 68 | ] 69 | await asyncio.gather(*funcs) 70 | 71 | await rpc_client.rpc_commlayer.close() 72 | await rpc_server.rpc_commlayer.close() 73 | -------------------------------------------------------------------------------- /tests/test_rpc_client.py: -------------------------------------------------------------------------------- 1 | from asyncio_rpc.client import RPCClient 2 | from asyncio_rpc.models import RPCMessage 3 | 4 | 5 | async def stop_rpc_client_on_rpc_message( 6 | rpc_client: RPCClient, expected_rpc_message: RPCMessage 7 | ): 8 | async def on_rpc_message(rpc_message: RPCMessage, channel): 9 | assert isinstance(rpc_message, RPCMessage) 10 | 11 | await rpc_client.queue.put(b"END") 12 | await rpc_client.rpc_commlayer.unsubscribe() 13 | 14 | assert rpc_message == expected_rpc_message 15 | 16 | return on_rpc_message 17 | 18 | 19 | async def test_rpc_message(rpc_client: RPCClient): 20 | await rpc_client.rpc_commlayer.do_subscribe() 21 | 22 | rpc_message = RPCMessage(uid="1", namespace="TEST", data={"foo": "bar"}) 23 | 24 | # publish to self 25 | await rpc_client.rpc_commlayer.publish( 26 | rpc_message, channel=rpc_client.rpc_commlayer.subchannel 27 | ) 28 | 29 | await rpc_client.serve( 30 | on_rpc_message=await 
stop_rpc_client_on_rpc_message(rpc_client, rpc_message) 31 | ) 32 | 33 | await rpc_client.rpc_commlayer.close() 34 | -------------------------------------------------------------------------------- /tests/test_rpc_server.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from asyncio_rpc.models import RPCStack 4 | from asyncio_rpc.server import DefaultExecutor, NamespaceError, RPCServer 5 | 6 | 7 | class MockService: 8 | pass 9 | 10 | 11 | async def test_registration(rpc_server: RPCServer): 12 | rpc_server.register(DefaultExecutor("TEST", MockService())) 13 | await rpc_server.rpc_commlayer.close() 14 | 15 | 16 | async def test_double_registration_error(rpc_server: RPCServer): 17 | rpc_server.register(DefaultExecutor("TEST", MockService())) 18 | 19 | with pytest.raises(NamespaceError): 20 | rpc_server.register(DefaultExecutor("TEST", MockService())) 21 | 22 | await rpc_server.rpc_commlayer.close() 23 | 24 | 25 | async def test_unknown_namespace_error(rpc_server: RPCServer): 26 | with pytest.raises(NamespaceError): 27 | await rpc_server.rpc_call( 28 | RPCStack(uid="1", namespace="UNKNOWN", stack=[], timeout=300) 29 | ) 30 | 31 | await rpc_server.rpc_commlayer.close() 32 | -------------------------------------------------------------------------------- /tests/test_shapely_serialization.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | 3 | import pytest 4 | from shapely.geometry import ( 5 | GeometryCollection, 6 | LinearRing, 7 | LineString, 8 | MultiLineString, 9 | MultiPoint, 10 | MultiPolygon, 11 | Point, 12 | Polygon, 13 | ) 14 | 15 | from asyncio_rpc.serialization import msgpack as msgpack_serialization 16 | 17 | # Skip tests if shapely is not installed 18 | shapely = pytest.importorskip("shapely") 19 | 20 | 21 | def test_point_serialization(serialize_deserialize): 22 | """Test serialization of Point geometries""" 23 | value = Point(1.0, 2.0) 24 | deserialized = serialize_deserialize(value) 25 | assert value.equals(deserialized) 26 | assert isinstance(value, type(deserialized)) 27 | 28 | 29 | def test_point_with_z_serialization(serialize_deserialize): 30 | """Test serialization of 3D Point geometries""" 31 | value = Point(1.0, 2.0, 3.0) 32 | deserialized = serialize_deserialize(value) 33 | assert value.equals(deserialized) 34 | assert value.has_z == deserialized.has_z 35 | assert value.z == deserialized.z 36 | 37 | 38 | def test_linestring_serialization(serialize_deserialize): 39 | """Test serialization of LineString geometries""" 40 | value = LineString([(0, 0), (1, 1), (2, 2)]) 41 | deserialized = serialize_deserialize(value) 42 | assert value.equals(deserialized) 43 | assert isinstance(value, type(deserialized)) 44 | assert len(value.coords) == len(deserialized.coords) 45 | 46 | 47 | def test_linearring_serialization(serialize_deserialize): 48 | """Test serialization of LinearRing geometries""" 49 | value = LinearRing([(0, 0), (0, 1), (1, 1), (1, 0), (0, 0)]) 50 | deserialized = serialize_deserialize(value) 51 | assert value.equals(deserialized) 52 | assert isinstance(value, type(deserialized)) 53 | assert len(value.coords) == len(deserialized.coords) 54 | 55 | 56 | def test_polygon_serialization(serialize_deserialize): 57 | """Test serialization of Polygon geometries""" 58 | # Polygon with exterior only 59 | value = Polygon([(0, 0), (1, 0), (1, 1), (0, 1), (0, 0)]) 60 | deserialized = serialize_deserialize(value) 61 | assert 
value.equals(deserialized) 62 | assert isinstance(value, type(deserialized)) 63 | 64 | # Polygon with interior (hole) 65 | value = Polygon( 66 | [(0, 0), (10, 0), (10, 10), (0, 10), (0, 0)], 67 | [[(2, 2), (2, 8), (8, 8), (8, 2), (2, 2)]], 68 | ) 69 | deserialized = serialize_deserialize(value) 70 | assert value.equals(deserialized) 71 | assert len(value.interiors) == len(deserialized.interiors) 72 | 73 | 74 | def test_multipoint_serialization(serialize_deserialize): 75 | """Test serialization of MultiPoint geometries""" 76 | value = MultiPoint([(0, 0), (1, 1), (2, 2)]) 77 | deserialized = serialize_deserialize(value) 78 | assert value.equals(deserialized) 79 | assert isinstance(value, type(deserialized)) 80 | assert len(value.geoms) == len(deserialized.geoms) 81 | 82 | 83 | def test_multilinestring_serialization(serialize_deserialize): 84 | """Test serialization of MultiLineString geometries""" 85 | value = MultiLineString([[(0, 0), (1, 1)], [(2, 2), (3, 3)]]) 86 | deserialized = serialize_deserialize(value) 87 | assert value.equals(deserialized) 88 | assert isinstance(value, type(deserialized)) 89 | assert len(value.geoms) == len(deserialized.geoms) 90 | 91 | 92 | def test_multipolygon_serialization(serialize_deserialize): 93 | """Test serialization of MultiPolygon geometries""" 94 | value = MultiPolygon( 95 | [ 96 | Polygon([(0, 0), (1, 0), (1, 1), (0, 1), (0, 0)]), 97 | Polygon([(2, 2), (3, 2), (3, 3), (2, 3), (2, 2)]), 98 | ] 99 | ) 100 | deserialized = serialize_deserialize(value) 101 | assert value.equals(deserialized) 102 | assert isinstance(value, type(deserialized)) 103 | assert len(value.geoms) == len(deserialized.geoms) 104 | 105 | 106 | def test_geometry_collection_serialization(serialize_deserialize): 107 | """Test serialization of GeometryCollection""" 108 | value = GeometryCollection( 109 | [ 110 | Point(0, 0), 111 | LineString([(0, 0), (1, 1)]), 112 | Polygon([(0, 0), (1, 0), (1, 1), (0, 1), (0, 0)]), 113 | ] 114 | ) 115 | deserialized = serialize_deserialize(value) 116 | assert value.equals(deserialized) 117 | assert isinstance(value, type(deserialized)) 118 | assert len(value.geoms) == len(deserialized.geoms) 119 | 120 | 121 | @dataclass 122 | class GeometryDataclass: 123 | id: int 124 | point: Point 125 | line: LineString 126 | polygon: Polygon 127 | 128 | 129 | # Register the dataclass for serialization 130 | msgpack_serialization.register(GeometryDataclass) 131 | 132 | 133 | def test_dataclass_with_geometries(serialize_deserialize): 134 | """Test serialization of dataclass containing geometries""" 135 | value = GeometryDataclass( 136 | id=42, 137 | point=Point(1, 2), 138 | line=LineString([(0, 0), (1, 1)]), 139 | polygon=Polygon([(0, 0), (1, 0), (1, 1), (0, 1), (0, 0)]), 140 | ) 141 | deserialized = serialize_deserialize(value) 142 | 143 | assert value.id == deserialized.id 144 | assert value.point.equals(deserialized.point) 145 | assert value.line.equals(deserialized.line) 146 | assert value.polygon.equals(deserialized.polygon) 147 | 148 | 149 | def test_list_of_geometries(serialize_deserialize): 150 | """Test serialization of a list containing different geometry types""" 151 | value = [ 152 | Point(1, 2), 153 | LineString([(0, 0), (1, 1)]), 154 | Polygon([(0, 0), (1, 0), (1, 1), (0, 1), (0, 0)]), 155 | ] 156 | deserialized = serialize_deserialize(value) 157 | 158 | assert len(value) == len(deserialized) 159 | for orig, des in zip(value, deserialized): 160 | assert orig.equals(des) 161 | assert type(orig) is type(des) 162 | 163 | 164 | def 
test_dict_of_geometries(serialize_deserialize): 165 | """Test serialization of a dictionary containing geometry values""" 166 | value = { 167 | "point": Point(1, 2), 168 | "line": LineString([(0, 0), (1, 1)]), 169 | "polygon": Polygon([(0, 0), (1, 0), (1, 1), (0, 1), (0, 0)]), 170 | } 171 | deserialized = serialize_deserialize(value) 172 | 173 | assert len(value) == len(deserialized) 174 | for key in value: 175 | assert value[key].equals(deserialized[key]) 176 | assert type(value[key]) is type(deserialized[key]) 177 | -------------------------------------------------------------------------------- /tests/test_simple_rpc_calls.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from uuid import uuid4 3 | 4 | import pytest 5 | 6 | from asyncio_rpc.client import WrappedException 7 | from asyncio_rpc.models import RPCCall, RPCStack 8 | from asyncio_rpc.server import DefaultExecutor 9 | 10 | 11 | @dataclass 12 | class CustomDataModel: 13 | x: int 14 | y: int 15 | 16 | def multiply(self): 17 | return self.x * self.y 18 | 19 | 20 | class CustomException(Exception): 21 | pass 22 | 23 | 24 | class Service(object): 25 | """ 26 | Testing service that is register via the TestExecutor 27 | on the RPCServer 28 | """ 29 | 30 | def __init__(self): 31 | self._data = {"foo": "bar"} 32 | 33 | def multiply(self, x, y=1): 34 | return x * y 35 | 36 | @property 37 | def data(self): 38 | return self._data 39 | 40 | def get_item(self, key): 41 | return self._data[key] 42 | 43 | def custom_error(self): 44 | raise CustomException("Foobar") 45 | 46 | def multiply_with_dataclass(self, x: CustomDataModel): 47 | assert isinstance(x, CustomDataModel) 48 | return x.multiply() 49 | 50 | 51 | class ServiceClient(object): 52 | """ 53 | TestService client, exposing (rpc) functions 54 | that can be called on the TestService instance. 
55 | """ 56 | 57 | def __init__(self, client): 58 | self.client = client 59 | 60 | @property 61 | async def data(self): 62 | rpc_func_call = RPCCall("data", [], {}) 63 | rpc_func_stack = RPCStack(uuid4().hex, "TEST", 300, [rpc_func_call]) 64 | return await self.client.rpc_call(rpc_func_stack) 65 | 66 | async def multiply(self, x, y=100): 67 | rpc_func_call = RPCCall("multiply", [x], {"y": y}) 68 | rpc_func_stack = RPCStack(uuid4().hex, "TEST", 300, [rpc_func_call]) 69 | return await self.client.rpc_call(rpc_func_stack) 70 | 71 | async def get_item(self, key): 72 | rpc_func_call = RPCCall("get_item", [key], {}) 73 | rpc_func_stack = RPCStack(uuid4().hex, "TEST", 300, [rpc_func_call]) 74 | return await self.client.rpc_call(rpc_func_stack) 75 | 76 | async def custom_error(self): 77 | rpc_func_call = RPCCall("custom_error", [], {}) 78 | rpc_func_stack = RPCStack(uuid4().hex, "TEST", 300, [rpc_func_call]) 79 | return await self.client.rpc_call(rpc_func_stack) 80 | 81 | async def multiply_with_dataclass(self, x: CustomDataModel): 82 | assert isinstance(x, CustomDataModel) 83 | rpc_func_call = RPCCall("multiply_with_dataclass", [x], {}) 84 | rpc_func_stack = RPCStack(uuid4().hex, "TEST", 300, [rpc_func_call]) 85 | return await self.client.rpc_call(rpc_func_stack) 86 | 87 | 88 | async def test_simple_call(do_rpc_call): 89 | test_service_client = ServiceClient(None) 90 | result = await do_rpc_call( 91 | test_service_client, 92 | DefaultExecutor("TEST", Service()), 93 | test_service_client.multiply(100, 100), 94 | ) 95 | assert result == 100 * 100 96 | 97 | 98 | async def test_simple_call_with_client_processing(do_rpc_call): 99 | test_service_client = ServiceClient(None) 100 | result = await do_rpc_call( 101 | test_service_client, 102 | DefaultExecutor("TEST", Service()), 103 | test_service_client.multiply(100, 100), 104 | client_processing=True, 105 | ) 106 | assert result == 100 * 100 107 | 108 | 109 | async def test_simple_call2(do_rpc_call): 110 | test_service_client = ServiceClient(None) 111 | result = await do_rpc_call( 112 | test_service_client, 113 | DefaultExecutor("TEST", Service()), 114 | test_service_client.get_item("foo"), 115 | ) 116 | assert result == "bar" 117 | 118 | 119 | async def test_property(do_rpc_call): 120 | test_service_client = ServiceClient(None) 121 | result = await do_rpc_call( 122 | test_service_client, 123 | DefaultExecutor("TEST", Service()), 124 | test_service_client.data, 125 | ) 126 | assert result == {"foo": "bar"} 127 | 128 | 129 | async def test_key_error(do_rpc_call): 130 | test_service_client = ServiceClient(None) 131 | with pytest.raises(KeyError): 132 | await do_rpc_call( 133 | test_service_client, 134 | DefaultExecutor("TEST", Service()), 135 | test_service_client.get_item("bar"), 136 | ) 137 | 138 | 139 | async def test_not_builtin_exception(do_rpc_call): 140 | test_service_client = ServiceClient(None) 141 | with pytest.raises(WrappedException): 142 | await do_rpc_call( 143 | test_service_client, 144 | DefaultExecutor("TEST", Service()), 145 | test_service_client.custom_error(), 146 | ) 147 | 148 | 149 | async def test_custom_data_model(do_rpc_call): 150 | test_service_client = ServiceClient(None) 151 | value = CustomDataModel(100, 100) 152 | result = await do_rpc_call( 153 | test_service_client, 154 | DefaultExecutor("TEST", Service()), 155 | test_service_client.multiply_with_dataclass(value), 156 | custom_dataclasses=[CustomDataModel], 157 | ) 158 | assert result == value.multiply() 159 | 
-------------------------------------------------------------------------------- /tests/utils.py: -------------------------------------------------------------------------------- 1 | from os import environ 2 | from uuid import uuid4 3 | 4 | from asyncio_rpc.commlayers.redis import RPCRedisCommLayer 5 | from asyncio_rpc.models import RPCCall, RPCStack 6 | from asyncio_rpc.serialization import msgpack as msgpack_serialization 7 | 8 | # Set to env_var REDIS_HOST or 'localhost' as default 9 | REDIS_HOST = environ.get("REDIS_HOST", "localhost") 10 | 11 | 12 | class CustomException(Exception): 13 | pass 14 | 15 | 16 | class Service(object): 17 | """ 18 | Testing service that is register via the TestExecutor 19 | on the RPCServer 20 | """ 21 | 22 | def __init__(self): 23 | self.data = {"foo": "bar"} 24 | 25 | def multiply(self, x, y=1): 26 | return x * y 27 | 28 | def get_item(self, key): 29 | return self.data[key] 30 | 31 | def custom_error(self): 32 | raise CustomException("Foobar") 33 | 34 | 35 | class ServiceClient(object): 36 | """ 37 | TestService client, exposing (rpc) functions 38 | that can be called on the TestService instance. 39 | """ 40 | 41 | def __init__(self, client): 42 | self.client = client 43 | 44 | async def multiply(self, x, y=100): 45 | rpc_func_call = RPCCall("multiply", [x], {"y": y}) 46 | rpc_func_stack = RPCStack(uuid4().hex, "TEST", 300, [rpc_func_call]) 47 | return await self.client.rpc_call(rpc_func_stack) 48 | 49 | async def get_item(self, key): 50 | rpc_func_call = RPCCall("get_item", [key], {}) 51 | rpc_func_stack = RPCStack(uuid4().hex, "TEST", 300, [rpc_func_call]) 52 | return await self.client.rpc_call(rpc_func_stack) 53 | 54 | async def custom_error(self): 55 | rpc_func_call = RPCCall("custom_error", [], {}) 56 | rpc_func_stack = RPCStack(uuid4().hex, "TEST", 300, [rpc_func_call]) 57 | return await self.client.rpc_call(rpc_func_stack) 58 | 59 | 60 | async def stop_rpc_server_on_result_of( 61 | async_func, rpc_server, rpc_client, client_processing=False 62 | ): 63 | """ 64 | awaits the given async_func. 65 | stops the rpc_server (background) processing on result, 66 | allowing the rpc_server.serve() to return. 67 | """ 68 | # Await func result 69 | 70 | try: 71 | result = await async_func 72 | finally: 73 | # Stop listening and queue processing in server, 74 | # allowing rpc_serve.serve() to return 75 | await rpc_server.queue.put(b"END") 76 | await rpc_server.rpc_commlayer.unsubscribe() 77 | 78 | if client_processing: 79 | await rpc_client.queue.put(b"END") 80 | await rpc_client.rpc_commlayer.unsubscribe() 81 | 82 | # Return the result 83 | return result 84 | 85 | 86 | async def rpc_commlayer(subchannel, pubchannel, host=REDIS_HOST): 87 | """ 88 | Get a RPCRedisCommLayer with subchannel/pubchannel 89 | """ 90 | return await RPCRedisCommLayer.create( 91 | subchannel=subchannel, 92 | pubchannel=pubchannel, 93 | host=host, 94 | serialization=msgpack_serialization, 95 | ) 96 | --------------------------------------------------------------------------------
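For completeness, a minimal single-process sketch of how the helpers in tests/utils.py and tests/conftest.py are combined: client and server share one Redis instance (reachable at REDIS_HOST) and run on inverse pub/sub channels. This sketch is not part of the repository and mirrors the do_rpc_call fixture without its extra task cleanup.

import asyncio

from asyncio_rpc.client import RPCClient
from asyncio_rpc.server import DefaultExecutor, RPCServer

from tests.utils import (
    Service,
    ServiceClient,
    rpc_commlayer,
    stop_rpc_server_on_result_of,
)


async def run_round_trip():
    rpc_client = RPCClient(await rpc_commlayer(b"pub", b"sub"))
    rpc_server = RPCServer(await rpc_commlayer(b"sub", b"pub"))  # inverse of the client
    rpc_server.register(DefaultExecutor("TEST", Service()))

    # Subscribe before publishing so no message is missed
    await rpc_server.rpc_commlayer.do_subscribe()

    service_client = ServiceClient(rpc_client)
    result, _ = await asyncio.gather(
        stop_rpc_server_on_result_of(
            service_client.multiply(6, 7), rpc_server, rpc_client
        ),
        rpc_server.serve(),
    )

    await rpc_client.rpc_commlayer.close()
    await rpc_server.rpc_commlayer.close()
    return result  # 42


# asyncio.run(run_round_trip())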