├── VERSION ├── tests ├── __init__.py ├── client │ ├── __init__.py │ ├── internal_messaging │ │ ├── __init__.py │ │ ├── conftest.py │ │ └── test_producer.py │ └── test_utilities_unit.py ├── config │ ├── __init__.py │ └── test_structure.py ├── plugins │ ├── __init__.py │ └── test_unit_plugin_base.py ├── schema │ ├── __init__.py │ └── conftest.py ├── transports │ ├── __init__.py │ └── redis │ │ ├── __init__.py │ │ ├── benchmark_integration_redis.py │ │ ├── test_unit_redis_utils.py │ │ ├── conftest.py │ │ ├── test_reliability_redis_events.py │ │ ├── test_unit_redis_schema.py │ │ └── test_reliability_redis_rpc.py ├── utilities │ ├── __init__.py │ ├── benchmark_deforming.py │ └── test_frozendict.py ├── serializers │ ├── __init__.py │ ├── test_blob.py │ └── test_by_field.py ├── docker-compose.yaml ├── tests_testing.py ├── dummy_api.py └── test_creaton.py ├── docs ├── CNAME ├── reference │ ├── code-of-conduct.md │ ├── index.md │ ├── protocols │ │ └── index.md │ ├── authors.md │ ├── release-process.md │ ├── apis.md │ └── command-line-use │ │ ├── shell.md │ │ ├── dumpconfigschema.md │ │ └── dumpschema.md ├── static │ ├── images │ │ ├── quickstart-events.png │ │ ├── simple-processes.png │ │ ├── internal-architecture.png │ │ ├── internal-architecture.graffle │ │ ├── quickstart-lightbus-run.png │ │ ├── worked-example-flask-log.png │ │ ├── worked-example-honcho-startup.png │ │ ├── simple-processes.graffle │ │ │ ├── data.plist │ │ │ ├── image2.png │ │ │ ├── image5.png │ │ │ └── preview.jpeg │ │ └── worked-example-honcho-page-view.png │ ├── stylesheets │ │ ├── extra.css │ │ └── version-select.css │ └── js │ │ └── version-select.js ├── explanation │ ├── index.md │ ├── transports.md │ ├── bus.md │ ├── schema.md │ ├── internal-architecture.md │ ├── performance.md │ ├── services.md │ ├── lightbus-vs-celery.md │ ├── rpcs.md │ ├── marshalling.md │ ├── configuration.md │ └── apis.md ├── howto │ ├── index.md │ ├── run-background-tasks.md │ ├── metrics.md │ ├── access-your-bus-client.md │ ├── schedule-recurring-tasks.md │ ├── event-sourcing.md │ └── modify-lightbus.md ├── includes │ ├── if-you-get-stuck.md │ └── note-configuration-auto-complete.md ├── tutorial │ ├── index.md │ └── getting-involved.md └── check_links.sh ├── .dockerignore ├── lightbus ├── client │ ├── docks │ │ ├── __init__.py │ │ └── base.py │ ├── subclients │ │ ├── __init__.py │ │ └── base.py │ ├── internal_messaging │ │ └── __init__.py │ ├── __init__.py │ ├── commands.py │ └── validator.py ├── utilities │ ├── __init__.py │ ├── io.py │ ├── features.py │ ├── singledispatch.py │ ├── django.py │ ├── importing.py │ ├── frozendict.py │ ├── config.py │ └── deforming.py ├── config │ └── __init__.py ├── schema │ ├── __init__.py │ └── encoder.py ├── serializers │ ├── __init__.py │ ├── blob.py │ ├── base.py │ └── by_field.py ├── transports │ ├── redis │ │ ├── __init__.py │ │ └── schema.py │ └── __init__.py ├── __init__.py ├── commands │ ├── version.py │ ├── dump_config_schema.py │ ├── dump_schema.py │ ├── shell.py │ └── utilities.py ├── hooks.py └── internal_apis.py ├── lightbus_examples ├── __init__.py ├── ex05_schema │ ├── __init__.py │ └── bus.py ├── ex06_django │ ├── __init__.py │ ├── example_app │ │ ├── __init__.py │ │ ├── migrations │ │ │ ├── __init__.py │ │ │ └── 0001_initial.py │ │ ├── tests.py │ │ ├── apps.py │ │ ├── models.py │ │ ├── admin.py │ │ └── views.py │ ├── db.sqlite3 │ ├── wsgi.py │ ├── manage.py │ ├── urls.py │ └── bus.py ├── ex99_schema │ ├── __init__.py │ ├── client.py │ └── bus.py ├── ex01_quickstart │ ├── __init__.py │ ├── 
auth_service │ │ ├── __init__.py │ │ ├── manually_register_user.py │ │ └── bus.py │ └── another_service │ │ ├── __init__.py │ │ ├── check_password.py │ │ └── bus.py ├── ex04_organsing │ ├── __init__.py │ └── bus │ │ └── __init__.py ├── ex03_worked_example │ ├── __init__.py │ ├── image │ │ ├── __init__.py │ │ └── bus.py │ ├── store │ │ ├── __init__.py │ │ ├── bus.py │ │ └── web.py │ ├── dashboard │ │ ├── __init__.py │ │ ├── bus.py │ │ └── web.py │ ├── Procfile_combined │ └── Procfile ├── ex10_recurring_tasks │ ├── __init__.py │ └── bus.py ├── ex11_background_tasks │ ├── __init__.py │ └── bus.py ├── ex98_nested_bus_calls │ ├── __init__.py │ └── bus.py └── requirements.txt ├── lightbus_experiments ├── __init__.py ├── kombu │ ├── __init__.py │ ├── README.md │ ├── hello_publisher.py │ └── hello_consumer.py ├── kombu_zmq │ ├── __init__.py │ ├── timer.py │ ├── README.md │ ├── producer.py │ └── consumer.py ├── nameko │ ├── __init__.py │ ├── helloworld.py │ └── README.md ├── celery_rpc_amqp │ ├── __init__.py │ ├── celery_tasks.py │ ├── consumer_serial.py │ ├── README.md │ └── consumer_parallel.py ├── celery_rpc_redis │ ├── __init__.py │ ├── celery_tasks.py │ ├── consumer_serial.py │ ├── README.md │ └── consumer_parallel.py ├── error_in_callable.py ├── stub_maker.pyi ├── potential_api_fn.py ├── requirements.txt ├── pycharm_completion.py ├── asyncio_tasks_and_threads.py ├── versions_and_migrations.py ├── config.example.yaml ├── structured_logging.py ├── custom_loop.py ├── potential_use.py ├── potential_api_oo.py └── stub_maker.py ├── lightbus_vendored ├── __init__.py ├── jsonpath │ └── __init__.py └── aioredis │ ├── sentinel │ └── __init__.py │ ├── log.py │ ├── commands │ └── scripting.py │ ├── locks.py │ └── __init__.py ├── AUTHORS ├── .bandit ├── .gitignore ├── .pre-commit-config.yaml ├── pytest.ini ├── tox.ini ├── .coveragerc ├── .pylintrc ├── .github └── workflows │ ├── docs.yaml │ └── test.yaml ├── README.md └── pyproject.toml /VERSION: -------------------------------------------------------------------------------- 1 | 0.0.1 -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/CNAME: -------------------------------------------------------------------------------- 1 | lightbus.org 2 | -------------------------------------------------------------------------------- /tests/client/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/config/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/plugins/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/schema/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/transports/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/utilities/__init__.py: 
-------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | **/__pycache__ 2 | -------------------------------------------------------------------------------- /lightbus/client/docks/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lightbus/utilities/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lightbus_examples/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lightbus_experiments/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lightbus_vendored/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/serializers/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lightbus/client/subclients/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lightbus_experiments/kombu/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/transports/redis/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lightbus_examples/ex05_schema/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lightbus_examples/ex06_django/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lightbus_examples/ex99_schema/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lightbus_experiments/kombu_zmq/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lightbus_experiments/nameko/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/client/internal_messaging/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lightbus/client/internal_messaging/__init__.py: 
-------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lightbus_examples/ex01_quickstart/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lightbus_examples/ex04_organsing/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lightbus_experiments/celery_rpc_amqp/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lightbus_examples/ex03_worked_example/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lightbus_examples/ex06_django/example_app/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lightbus_examples/ex10_recurring_tasks/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lightbus_examples/ex11_background_tasks/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lightbus_examples/ex98_nested_bus_calls/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lightbus_experiments/celery_rpc_redis/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /AUTHORS: -------------------------------------------------------------------------------- 1 | See: https://lightbus.org/reference/authors/ 2 | -------------------------------------------------------------------------------- /lightbus/config/__init__.py: -------------------------------------------------------------------------------- 1 | from .config import Config 2 | -------------------------------------------------------------------------------- /lightbus_examples/ex01_quickstart/auth_service/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lightbus_examples/ex03_worked_example/image/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lightbus_examples/ex03_worked_example/store/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lightbus_examples/ex01_quickstart/another_service/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lightbus_examples/ex03_worked_example/dashboard/__init__.py: 
-------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/reference/code-of-conduct.md: -------------------------------------------------------------------------------- 1 | {!docs/CODE_OF_CONDUCT.md!} 2 | -------------------------------------------------------------------------------- /lightbus_examples/ex06_django/example_app/migrations/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lightbus_examples/requirements.txt: -------------------------------------------------------------------------------- 1 | flask 2 | honcho 3 | aiohttp 4 | -------------------------------------------------------------------------------- /lightbus_vendored/jsonpath/__init__.py: -------------------------------------------------------------------------------- 1 | from .jsonpath import jsonpath 2 | -------------------------------------------------------------------------------- /lightbus/client/__init__.py: -------------------------------------------------------------------------------- 1 | from lightbus.client.bus_client import BusClient 2 | -------------------------------------------------------------------------------- /lightbus/schema/__init__.py: -------------------------------------------------------------------------------- 1 | from .schema import Schema, Parameter, WildcardParameter 2 | -------------------------------------------------------------------------------- /lightbus_examples/ex99_schema/client.py: -------------------------------------------------------------------------------- 1 | from .bus import bus 2 | 3 | # TBA 4 | bus.auth 5 | -------------------------------------------------------------------------------- /lightbus/serializers/__init__.py: -------------------------------------------------------------------------------- 1 | from .base import * 2 | from .blob import * 3 | from .by_field import * 4 | -------------------------------------------------------------------------------- /lightbus_examples/ex06_django/example_app/tests.py: -------------------------------------------------------------------------------- 1 | from django.test import TestCase 2 | 3 | # Create your tests here. 
4 | -------------------------------------------------------------------------------- /.bandit: -------------------------------------------------------------------------------- 1 | [bandit] 2 | exclude: /lightbus_experiments,/lightbus_examples,/tests,/build,/dist,/.tox,/.venv 3 | targets: /lightbus 4 | -------------------------------------------------------------------------------- /docs/static/images/quickstart-events.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/adamcharnock/lightbus/HEAD/docs/static/images/quickstart-events.png -------------------------------------------------------------------------------- /docs/static/images/simple-processes.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/adamcharnock/lightbus/HEAD/docs/static/images/simple-processes.png -------------------------------------------------------------------------------- /lightbus_examples/ex06_django/db.sqlite3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/adamcharnock/lightbus/HEAD/lightbus_examples/ex06_django/db.sqlite3 -------------------------------------------------------------------------------- /docs/static/images/internal-architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/adamcharnock/lightbus/HEAD/docs/static/images/internal-architecture.png -------------------------------------------------------------------------------- /docs/static/images/internal-architecture.graffle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/adamcharnock/lightbus/HEAD/docs/static/images/internal-architecture.graffle -------------------------------------------------------------------------------- /docs/static/images/quickstart-lightbus-run.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/adamcharnock/lightbus/HEAD/docs/static/images/quickstart-lightbus-run.png -------------------------------------------------------------------------------- /docs/static/images/worked-example-flask-log.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/adamcharnock/lightbus/HEAD/docs/static/images/worked-example-flask-log.png -------------------------------------------------------------------------------- /docs/static/images/worked-example-honcho-startup.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/adamcharnock/lightbus/HEAD/docs/static/images/worked-example-honcho-startup.png -------------------------------------------------------------------------------- /docs/static/images/simple-processes.graffle/data.plist: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/adamcharnock/lightbus/HEAD/docs/static/images/simple-processes.graffle/data.plist -------------------------------------------------------------------------------- /docs/static/images/simple-processes.graffle/image2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/adamcharnock/lightbus/HEAD/docs/static/images/simple-processes.graffle/image2.png 
-------------------------------------------------------------------------------- /docs/static/images/simple-processes.graffle/image5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/adamcharnock/lightbus/HEAD/docs/static/images/simple-processes.graffle/image5.png -------------------------------------------------------------------------------- /docs/static/images/worked-example-honcho-page-view.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/adamcharnock/lightbus/HEAD/docs/static/images/worked-example-honcho-page-view.png -------------------------------------------------------------------------------- /lightbus_examples/ex06_django/example_app/apps.py: -------------------------------------------------------------------------------- 1 | from django.apps import AppConfig 2 | 3 | 4 | class ExampleAppConfig(AppConfig): 5 | name = "example_app" 6 | -------------------------------------------------------------------------------- /docs/static/images/simple-processes.graffle/preview.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/adamcharnock/lightbus/HEAD/docs/static/images/simple-processes.graffle/preview.jpeg -------------------------------------------------------------------------------- /docs/static/stylesheets/extra.css: -------------------------------------------------------------------------------- 1 | .md-typeset .admonition, .md-typeset details { 2 | font-size: 0.8rem; 3 | } 4 | 5 | .md-typeset h5 { 6 | color: black; 7 | } 8 | -------------------------------------------------------------------------------- /lightbus_examples/ex04_organsing/bus/__init__.py: -------------------------------------------------------------------------------- 1 | import lightbus 2 | 3 | bus = lightbus.create() 4 | 5 | 6 | class FirstApi(lightbus.Api): 7 | 8 | class Meta: 9 | name = "first" 10 | -------------------------------------------------------------------------------- /lightbus_experiments/error_in_callable.py: -------------------------------------------------------------------------------- 1 | import lightbus 2 | 3 | bus = lightbus.create() 4 | 5 | 6 | @bus.client.every(seconds=1) 7 | def do_it(): 8 | raise RuntimeError("Oh no! The kittens are escaping! 
🐈🐈🐈") 9 | -------------------------------------------------------------------------------- /lightbus_experiments/celery_rpc_amqp/celery_tasks.py: -------------------------------------------------------------------------------- 1 | from celery import Celery 2 | 3 | app = Celery("tasks", backend="rpc://", broker="pyamqp://guest@localhost//") 4 | 5 | 6 | @app.task 7 | def add(x, y): 8 | return x + y 9 | -------------------------------------------------------------------------------- /lightbus_experiments/nameko/helloworld.py: -------------------------------------------------------------------------------- 1 | from nameko.rpc import rpc 2 | 3 | 4 | class GreetingService: 5 | name = "greeting_service" 6 | 7 | @rpc 8 | def hello(self, name): 9 | return "Hello, {}!".format(name) 10 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | INTEREST 2 | /docs/_build 3 | /docs/deploy_key 4 | /build 5 | /dist 6 | /.cache 7 | /.coverage 8 | /coverage.xml 9 | /junit.xml 10 | /site 11 | /.env 12 | /.venv 13 | /.tox 14 | /.python-version 15 | /bin/linkcheck 16 | /pip-wheel-metadata 17 | -------------------------------------------------------------------------------- /lightbus_examples/ex06_django/example_app/models.py: -------------------------------------------------------------------------------- 1 | from django.db import models 2 | 3 | 4 | class PageView(models.Model): 5 | url = models.CharField(max_length=255) 6 | viewed_at = models.DateTimeField(auto_now_add=True) 7 | user_agent = models.TextField() 8 | -------------------------------------------------------------------------------- /docs/explanation/index.md: -------------------------------------------------------------------------------- 1 | # Explanation overview 2 | 3 | This section discusses the **theoretical and conceptual aspects 4 | of Lightbus**. This is in contrast to the more practical 5 | [tutorial] and [how to] sections. 6 | 7 | [tutorial]: ../tutorial/index.md 8 | [how to]: ../howto/index.md 9 | -------------------------------------------------------------------------------- /docs/howto/index.md: -------------------------------------------------------------------------------- 1 | # Howto overview 2 | 3 | In this section we address **specific problems and common use cases**. 4 | As with the [tutorials] we will link to concepts as we go, but the 5 | priority here is to provide a clear path to a solution. 
6 | 7 | [tutorials]: ../tutorial/index.md 8 | -------------------------------------------------------------------------------- /lightbus_experiments/stub_maker.pyi: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | from bottle import HTTPResponse 3 | 4 | class MyApi(): 5 | my_event = Event() 6 | def method1(self: Any, user_id: int) -> dict: 7 | pass 8 | def method2(self: Any) -> HTTPResponse: 9 | pass 10 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/ambv/black 3 | rev: 23.7.0 4 | hooks: 5 | - id: black 6 | args: [--line-length=100, --safe] 7 | language_version: python3.11 8 | verbose: true 9 | exclude: ^lightbus_(experiments|vendored)/ 10 | -------------------------------------------------------------------------------- /lightbus/utilities/io.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | logger = logging.getLogger(__name__) 4 | 5 | 6 | def make_file_safe_api_name(api_name): 7 | """Make an api name safe for use in a file name""" 8 | return "".join([c for c in api_name if c.isalpha() or c.isdigit() or c in (".", "_", "-")]) 9 | -------------------------------------------------------------------------------- /lightbus_experiments/kombu/README.md: -------------------------------------------------------------------------------- 1 | Kombu Simple Test 2 | ================= 3 | 4 | Sending 1kb messages, acknowledged, no returned response 5 | 6 | $ python hello_publisher.py 10000 7 | Time per put: 0.12 8 | 9 | $ python hello_consumer.py 10000 10 | Time per get: 0.22 11 | -------------------------------------------------------------------------------- /lightbus_vendored/aioredis/sentinel/__init__.py: -------------------------------------------------------------------------------- 1 | from .commands import RedisSentinel, create_sentinel 2 | from .pool import SentinelPool, create_sentinel_pool 3 | 4 | __all__ = [ 5 | "create_sentinel", 6 | "create_sentinel_pool", 7 | "RedisSentinel", 8 | "SentinelPool", 9 | ] 10 | -------------------------------------------------------------------------------- /lightbus/utilities/features.py: -------------------------------------------------------------------------------- 1 | """Data structures used for specifying which features a lightbus process should provide""" 2 | from enum import Enum 3 | 4 | 5 | class Feature(Enum): 6 | RPCS = "rpcs" 7 | EVENTS = "events" 8 | TASKS = "tasks" 9 | 10 | 11 | ALL_FEATURES = tuple(Feature) 12 | -------------------------------------------------------------------------------- /docs/reference/index.md: -------------------------------------------------------------------------------- 1 | # Reference overview 2 | 3 | This section provides **detailed information regarding the specific 4 | features of Lightbus**. 5 | 6 | A grasp of the [tutorial] and [explantion] sections will 7 | be useful here. 
8 | 9 | [tutorial]: ../tutorial/index.md 10 | [explantion]: ../explanation/index.md 11 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | markers = 3 | unit: Unit tests (fast) 4 | integration: Integration tests (slow) 5 | reliability: Reliability tests (slower) 6 | benchmark: Benchmarking results (slower) 7 | python_files = 8 | test_*.py 9 | benchmark_*.py 10 | python_functions = 11 | test_* 12 | benchmark_* 13 | asyncio_mode = auto 14 | -------------------------------------------------------------------------------- /lightbus/schema/encoder.py: -------------------------------------------------------------------------------- 1 | from json import JSONEncoder 2 | 3 | 4 | def json_encode(obj, indent=None, sort_keys=True, **options): 5 | # TODO: This is also used for non-schema related encoding. Either move 6 | # this elsewhere, or create a new general purpose encoder 7 | return JSONEncoder(indent=indent, sort_keys=sort_keys, **options).encode(obj) 8 | -------------------------------------------------------------------------------- /lightbus_examples/ex06_django/example_app/admin.py: -------------------------------------------------------------------------------- 1 | from django.contrib import admin 2 | 3 | from .models import PageView 4 | 5 | 6 | @admin.register(PageView) 7 | class PageViewAdmin(admin.ModelAdmin): 8 | list_display = ["pk", "url", "viewed_at", "user_agent"] 9 | search_fields = ["pk", "url", "user_agent"] 10 | list_filter = ["viewed_at", "url"] 11 | -------------------------------------------------------------------------------- /lightbus_experiments/celery_rpc_redis/celery_tasks.py: -------------------------------------------------------------------------------- 1 | from celery import Celery 2 | 3 | app = Celery("tasks", backend="redis://localhost", broker="pyamqp://guest@localhost//") 4 | app.conf.task_reject_on_worker_lost = True 5 | app.conf.task_acks_late = True 6 | app.conf.broker_pool_limit = None 7 | 8 | 9 | @app.task 10 | def test_task(): 11 | return 1 12 | -------------------------------------------------------------------------------- /lightbus/transports/redis/__init__.py: -------------------------------------------------------------------------------- 1 | from lightbus.transports.redis.event import RedisEventTransport 2 | from lightbus.transports.redis.result import RedisResultTransport 3 | from lightbus.transports.redis.rpc import RedisRpcTransport 4 | from lightbus.transports.redis.schema import RedisSchemaTransport 5 | from lightbus.transports.redis.utilities import RedisEventMessage 6 | -------------------------------------------------------------------------------- /lightbus_examples/ex11_background_tasks/bus.py: -------------------------------------------------------------------------------- 1 | # bus.py 2 | import asyncio 3 | import lightbus 4 | 5 | bus = lightbus.create() 6 | 7 | 8 | async def my_background_task(): 9 | while True: 10 | await asyncio.sleep(1) 11 | print("Hello!") 12 | 13 | 14 | @bus.client.on_start() 15 | def on_startup(**kwargs): 16 | bus.client.add_background_task(my_background_task()) 17 | -------------------------------------------------------------------------------- /lightbus_examples/ex03_worked_example/store/bus.py: -------------------------------------------------------------------------------- 1 | # store/bus.py 2 | import lightbus 3 | import threading 4 | 5 | 
print(threading.current_thread()) 6 | 7 | bus = lightbus.create(flask=True) 8 | 9 | 10 | class StoreApi(lightbus.Api): 11 | page_view = lightbus.Event(parameters=("url",)) 12 | 13 | class Meta: 14 | name = "store" 15 | 16 | 17 | bus.client.register_api(StoreApi()) 18 | -------------------------------------------------------------------------------- /lightbus/__init__.py: -------------------------------------------------------------------------------- 1 | from lightbus.utilities.logging import configure_logging 2 | from lightbus.transports import * 3 | from lightbus.client import BusClient 4 | from lightbus.path import * 5 | from lightbus.message import * 6 | from lightbus.api import * 7 | from lightbus.schema import * 8 | from lightbus.creation import * 9 | from lightbus.client.utilities import OnError 10 | from lightbus.exceptions import * 11 | -------------------------------------------------------------------------------- /docs/reference/protocols/index.md: -------------------------------------------------------------------------------- 1 | # Protocols 2 | 3 | Here we define the specific interactions between Lightbus and its underlying 4 | communication medium, Redis. The intention is to provide enough information to allow 5 | services written in other languages to interact with Lightbus. 6 | 7 | * [Event protocol (Redis)](event.md) 8 | * [RPC & result protocol (Redis)](rpc-and-result.md) 9 | * [Schema protocol (Redis)](schema.md) 10 | -------------------------------------------------------------------------------- /lightbus_examples/ex99_schema/bus.py: -------------------------------------------------------------------------------- 1 | from lightbus import Api, Event, Parameter 2 | from lightbus.creation import create 3 | 4 | bus = create() 5 | 6 | 7 | class AuthApi(Api): 8 | user_registered = Event(parameters=[Parameter("username", str)]) 9 | 10 | class Meta: 11 | name = "auth" 12 | 13 | def check_password(self, username: str, password: str): 14 | return username == "admin" and password == "secret" 15 | -------------------------------------------------------------------------------- /lightbus_examples/ex10_recurring_tasks/bus.py: -------------------------------------------------------------------------------- 1 | import lightbus 2 | 3 | bus = lightbus.create() 4 | 5 | # Simple 6 | 7 | 8 | @bus.client.every(seconds=1) 9 | def do_it(): 10 | print("Simple hello") 11 | 12 | 13 | # Using the schedule library 14 | import schedule 15 | 16 | # Run the task every 1-3 seconds, varying randomly 17 | @bus.client.schedule(schedule.every(1).to(3).seconds) 18 | def do_it(): 19 | print("Hello using schedule library") 20 | -------------------------------------------------------------------------------- /lightbus_experiments/celery_rpc_redis/consumer_serial.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import timeit 3 | 4 | from .celery_tasks import test_task 5 | 6 | 7 | TOTAL_MESSAGES = int(sys.argv[1]) 8 | 9 | 10 | def do_it(): 11 | for x in range(0, TOTAL_MESSAGES): 12 | result = test_task.delay() 13 | result.get() 14 | 15 | 16 | seconds = timeit.timeit(do_it, number=1) / TOTAL_MESSAGES 17 | print("Time per call: {}ms".format(round(seconds * 1000, 2))) 18 | -------------------------------------------------------------------------------- /lightbus_experiments/potential_api_fn.py: -------------------------------------------------------------------------------- 1 | # api.py 2 | 3 | 4 | @method() 5 | def resize_image(data: bytes) -> dict: 6 | 
pass 7 | 8 | 9 | user_registered = Event() 10 | 11 | # implementation.py 12 | 13 | 14 | @implements(resize_image) 15 | def resize_image(data: bytes) -> dict: 16 | pass 17 | 18 | 19 | # Pros: 20 | # - Appealingly simple 21 | # Cons: 22 | # - IDE's won't warn about definition/implementation signatures not matching 23 | # - Not DRY 24 | -------------------------------------------------------------------------------- /docs/static/stylesheets/version-select.css: -------------------------------------------------------------------------------- 1 | #version-selector { 2 | padding: .6rem .8rem; 3 | } 4 | .md-nav__item--nested #version-selector { 5 | padding: .6rem .6rem; 6 | } 7 | #version-selector select { 8 | display: block; 9 | width: 100%; 10 | font-size: .8rem; 11 | } 12 | @media only screen and (min-width:76.1875em) { 13 | .md-nav__item--nested #version-selector { 14 | padding: .6rem .0rem; 15 | } 16 | #version-selector { 17 | padding: .6rem .6rem; 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /lightbus_experiments/celery_rpc_amqp/consumer_serial.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import timeit 3 | 4 | from .celery_tasks import add 5 | 6 | 7 | TOTAL_MESSAGES = int(sys.argv[1]) 8 | 9 | 10 | def do_it(): 11 | result = add.delay(4, 4) 12 | print(">", end="", flush=True) 13 | result.get() 14 | print("<", end="", flush=True) 15 | 16 | 17 | seconds = timeit.timeit(do_it, number=TOTAL_MESSAGES) / TOTAL_MESSAGES 18 | print("Time per call: {}ms".format(round(seconds * 1000, 2))) 19 | -------------------------------------------------------------------------------- /lightbus_experiments/celery_rpc_amqp/README.md: -------------------------------------------------------------------------------- 1 | Test latency of Celery RPC (AMQP) 2 | ================================= 3 | 4 | Testing latency for simple Celery RPC, using AMQP for the backend: 5 | 6 | $ python consumer_serial.py 20 7 | Time per call: 1296.86ms 8 | 9 | $ python consumer_parallel.py 20 10 | Time per call: 8.1ms 11 | 12 | 13 | Notes 14 | ----- 15 | 16 | * This is very slow 17 | * The consumer spends pretty much all its time waiting on 18 | rpc responses (unsurprisingly) 19 | -------------------------------------------------------------------------------- /lightbus_vendored/aioredis/log.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import logging 4 | 5 | 6 | logger = logging.getLogger('aioredis') 7 | sentinel_logger = logger.getChild('sentinel') 8 | 9 | if os.environ.get("AIOREDIS_DEBUG"): 10 | logger.setLevel(logging.DEBUG) 11 | handler = logging.StreamHandler(stream=sys.stderr) 12 | handler.setFormatter(logging.Formatter( 13 | "%(asctime)s %(name)s %(levelname)s %(message)s")) 14 | logger.addHandler(handler) 15 | os.environ["AIOREDIS_DEBUG"] = "" 16 | -------------------------------------------------------------------------------- /lightbus_examples/ex06_django/wsgi.py: -------------------------------------------------------------------------------- 1 | """ 2 | WSGI config for ex06_django project. 3 | 4 | It exposes the WSGI callable as a module-level variable named ``application``. 
5 | 6 | For more information on this file, see 7 | https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/ 8 | """ 9 | 10 | import os 11 | 12 | from django.core.wsgi import get_wsgi_application 13 | 14 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "lightbus_examples.ex06_django.settings") 15 | 16 | application = get_wsgi_application() 17 | -------------------------------------------------------------------------------- /lightbus_experiments/celery_rpc_redis/README.md: -------------------------------------------------------------------------------- 1 | Test latency of Celery RPC (Redis) 2 | ================================== 3 | 4 | Testing latency for simple Celery RPC, using Redis for the backend: 5 | 6 | $ python consumer_serial.py 20 7 | Time per call: 1281.98ms 8 | 9 | $ python consumer_parallel.py 20 10 | Time per call: 4.92ms 11 | 12 | 13 | Notes 14 | ----- 15 | 16 | * This is very slow 17 | * The consumer spends pretty much all its time waiting on 18 | rpc responses (unsurprisingly) 19 | -------------------------------------------------------------------------------- /lightbus_examples/ex01_quickstart/another_service/check_password.py: -------------------------------------------------------------------------------- 1 | # File: ./another_service/check_password.py 2 | 3 | import logging 4 | 5 | logging.basicConfig(level=logging.DEBUG) 6 | 7 | # Import our service's bus client 8 | from bus import bus 9 | 10 | # Call the check_password() procedure on our auth API 11 | valid = bus.auth.check_password(username="admin", password="secret") 12 | 13 | # Show the result 14 | if valid: 15 | print("Password valid!") 16 | else: 17 | print("Oops, bad username or password") 18 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = 3 | py{37,38},docs,docs_check_links 4 | 5 | 6 | [testenv] 7 | commands = 8 | pip install poetry 9 | 10 | poetry install 11 | poetry run pytest {posargs} 12 | passenv = 13 | encrypted_739cc9c14904_key 14 | encrypted_739cc9c14904_iv 15 | 16 | [testenv:docs] 17 | basepython = python3.7 18 | whitelist_externals = 19 | bash 20 | commands = 21 | poetry install 22 | poetry run mkdocs gh-deploy --force --message="Automated build by CircleCI [ci skip]" 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /lightbus_experiments/nameko/README.md: -------------------------------------------------------------------------------- 1 | Nameko performance test 2 | ======================= 3 | 4 | $ nameko shell 5 | >>> import timeit 6 | >>> timeit.timeit(lambda: n.rpc.greeting_service.hello(name="ナメコ"), number=1000) 7 | 2.4728409260278568 8 | 9 | Notes 10 | ----- 11 | 12 | * The competition :s 13 | * About 2.5ms per call, achieved with RPC going via Kombu only. 14 | * Kombu + ZeroMQ gives 0.8ms. However, presumably Nameko adds some 15 | overhead (just as Warren would), so this may not be a fair 16 | comparison. 
17 | -------------------------------------------------------------------------------- /lightbus/utilities/singledispatch.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | if sys.version_info >= (3, 8): 4 | from functools import singledispatchmethod 5 | else: 6 | from functools import singledispatch, update_wrapper 7 | 8 | def singledispatchmethod(func): 9 | dispatcher = singledispatch(func) 10 | 11 | def wrapper(*args, **kw): 12 | return dispatcher.dispatch(args[1].__class__)(*args, **kw) 13 | 14 | wrapper.register = dispatcher.register 15 | update_wrapper(wrapper, func) 16 | return wrapper 17 | -------------------------------------------------------------------------------- /lightbus_experiments/celery_rpc_redis/consumer_parallel.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import timeit 3 | 4 | from .celery_tasks import test_task 5 | 6 | 7 | TOTAL_MESSAGES = int(sys.argv[1]) 8 | 9 | 10 | def do_it(): 11 | results = [] 12 | for x in range(0, int(TOTAL_MESSAGES)): 13 | results.append(test_task.delay()) 14 | 15 | for result in results: 16 | result.get() 17 | 18 | 19 | do_it() 20 | 21 | seconds = timeit.timeit(do_it, number=1) / TOTAL_MESSAGES 22 | print("Time per call: {}ms".format(round(seconds * 1000, 2))) 23 | -------------------------------------------------------------------------------- /lightbus_experiments/kombu_zmq/timer.py: -------------------------------------------------------------------------------- 1 | from time import time 2 | 3 | 4 | class Timer(): 5 | def __init__(self): 6 | self.totals = [] 7 | self.stack = [] 8 | 9 | def __enter__(self): 10 | self.stack.append(time()) 11 | 12 | def __exit__(self, exc_type, exc_val, exc_tb): 13 | self.totals.append(time() - self.stack.pop()) 14 | 15 | def __str__(self): 16 | return "{}ms".format(round(self.total * 1000, 2)) 17 | 18 | @property 19 | def total(self): 20 | return sum(self.totals) 21 | -------------------------------------------------------------------------------- /lightbus_examples/ex05_schema/bus.py: -------------------------------------------------------------------------------- 1 | import lightbus 2 | 3 | bus = lightbus.create() 4 | 5 | 6 | class AuthApi(lightbus.Api): 7 | user_registered = lightbus.Event( 8 | parameters=( 9 | lightbus.Parameter("username", str), 10 | lightbus.Parameter("email", str), 11 | lightbus.Parameter("is_admin", bool, default=False), 12 | ) 13 | ) 14 | 15 | class Meta: 16 | name = "auth" 17 | 18 | def check_password(self, username: str, password: str) -> bool: 19 | return username == "admin" and password == "secret" 20 | -------------------------------------------------------------------------------- /lightbus_experiments/requirements.txt: -------------------------------------------------------------------------------- 1 | amqp==1.4.9 2 | anyjson==0.3.3 3 | billiard==3.5.0.2 4 | bottle==0.12.13 5 | celery==4.0.2 6 | certifi==2017.4.17 7 | chardet==3.0.4 8 | CProfileV==1.0.7 9 | enum-compat==0.0.2 10 | eventlet==0.21.0 11 | greenlet==0.4.12 12 | idna==2.5 13 | kombu==3.0.37 14 | mock==2.0.0 15 | mypy==0.511 16 | nameko==2.14.0 17 | path.py==10.3.1 18 | pbr==3.1.1 19 | pytz==2017.2 20 | PyYAML==3.12 21 | pyzmq==16.0.2 22 | redis==2.10.5 23 | requests==2.20.1 24 | six==1.10.0 25 | typed-ast==1.0.4 26 | urllib3==1.21.1 27 | vine==1.1.3 28 | Werkzeug==0.12.2 29 | wrapt==1.10.10 30 | -------------------------------------------------------------------------------- 
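Illustrative usage sketch (not a file in this repository): the typed AuthApi defined in lightbus_examples/ex05_schema/bus.py above can be exercised from any process that imports that service's bus client, following the same pattern as the ex01 quickstart scripts. The import path below is an assumption made purely for illustration.

    # Hypothetical client script; the import path is an assumption
    from lightbus_examples.ex05_schema.bus import bus

    # Fire the event. The keyword arguments correspond to the Parameter
    # definitions on AuthApi (is_admin falls back to its default of False).
    bus.auth.user_registered.fire(username="alice", email="alice@example.com")

    # Call the check_password() RPC and use its boolean result
    if bus.auth.check_password(username="admin", password="secret"):
        print("Password valid!")
    else:
        print("Oops, bad username or password")
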
/docs/includes/if-you-get-stuck.md: -------------------------------------------------------------------------------- 1 | !!! note "If you get stuck..." 2 | 3 | It is really useful to hear from people who have encountered a problem 4 | or got stuck. Hearing from you means we can improve our documentation 5 | and error messages. 6 | 7 | **If you get stuck drop then please drop an email to adam@adamcharnock.com, 8 | visit the [Lightbus discord server](https://discord.gg/2j594ws), or call me (Adam) on 9 | +442032896620.** 10 | 11 | The more information 12 | you can include the better (problem description, screenshots, and code are all useful) 13 | -------------------------------------------------------------------------------- /lightbus_examples/ex03_worked_example/Procfile_combined: -------------------------------------------------------------------------------- 1 | # This file defines the processes to start up as part of this example. 2 | 3 | # The image resizer service has no web UI. I does have an ImageApi 4 | # which provides a resize() RPC. 5 | image_resizer_bus: lightbus run --bus=image.bus 6 | 7 | # The store web interface. This will fire the page_view event 8 | # on the StoreApi, and call the ImageApi's resize() RPC. 9 | store_web: FLASK_DEBUG=1 FLASK_APP=store/web.py flask run --port=5001 10 | 11 | # Run the combined web UI + lightbus process 12 | dashboard_combined: python dashboard/combined.py 13 | -------------------------------------------------------------------------------- /lightbus_examples/ex03_worked_example/image/bus.py: -------------------------------------------------------------------------------- 1 | """A simple image resizing service""" 2 | from lightbus import Api, Event 3 | from lightbus.creation import create 4 | 5 | bus = create() 6 | 7 | 8 | class ImageApi(Api): 9 | class Meta: 10 | name = "image" 11 | 12 | def resize(self, url, width, height): 13 | """Resize image at the given URL and return new URL""" 14 | # This is a demo, so just return an animal picture of the correct size 15 | return f"https://placeimg.com/{width}/{height}/animals?_={url}" 16 | 17 | 18 | bus.client.register_api(ImageApi()) 19 | -------------------------------------------------------------------------------- /lightbus_experiments/celery_rpc_amqp/consumer_parallel.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import timeit 3 | 4 | from .celery_tasks import add 5 | 6 | 7 | TOTAL_MESSAGES = int(sys.argv[1]) 8 | 9 | 10 | def do_it(): 11 | results = [] 12 | for x in range(0, TOTAL_MESSAGES): 13 | results.append(add.delay(4, 4)) 14 | print(">", end="", flush=True) 15 | 16 | for result in results: 17 | result.get() 18 | print("<", end="", flush=True) 19 | 20 | 21 | seconds = timeit.timeit(do_it, number=1) / TOTAL_MESSAGES 22 | print("Time per call: {}ms".format(round(seconds * 1000, 2))) 23 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [paths] 2 | source = 3 | lightbus/ 4 | /lightbus/lightbus 5 | /home/circleci/project/lightbus 6 | 7 | [run] 8 | data_file = .coverage/coverage 9 | omit = 10 | lightbus/utilities/testing.py 11 | # Commands are executed in a separate interpreter, so do not 12 | # get included in the coverage results (although maybe we could) 13 | lightbus/commands/*.py 14 | 15 | [report] 16 | exclude_lines = 17 | pragma: no cover 18 | def __repr__ 19 | if self.debug: 20 | if settings.DEBUG 21 | 
raise NotImplementedError 22 | if 0: 23 | if __name__ == .__main__.: 24 | -------------------------------------------------------------------------------- /lightbus_experiments/kombu/hello_publisher.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import timeit 3 | from kombu import Connection 4 | 5 | TOTAL_MESSAGES = int(sys.argv[1]) 6 | 7 | with Connection("amqp://guest:guest@localhost:5672//") as conn: 8 | simple_queue = conn.SimpleQueue("simple_queue") 9 | # Let the consumer know we are ready to start 10 | simple_queue.put("x") 11 | 12 | def put(): 13 | simple_queue.put("x" * 1024) 14 | 15 | seconds = timeit.timeit(put, number=TOTAL_MESSAGES) / TOTAL_MESSAGES 16 | print("Time per put: {}ms".format(round(seconds * 1000, 2))) 17 | simple_queue.close() 18 | -------------------------------------------------------------------------------- /tests/utilities/benchmark_deforming.py: -------------------------------------------------------------------------------- 1 | from copy import copy 2 | 3 | import pytest 4 | 5 | from lightbus.utilities.deforming import deform_to_bus 6 | from tests.utilities.test_unit_deforming import DEFORMATION_TEST_PARAMETERS 7 | 8 | pytestmark = pytest.mark.benchmark 9 | 10 | 11 | @pytest.mark.parametrize( 12 | "test_input,expected", 13 | list(DEFORMATION_TEST_PARAMETERS.values()), 14 | ids=list(DEFORMATION_TEST_PARAMETERS.keys()), 15 | ) 16 | @pytest.mark.benchmark(group="deforming") 17 | def benchmark_deform_to_bus(test_input, expected, benchmark): 18 | benchmark(deform_to_bus, test_input) 19 | -------------------------------------------------------------------------------- /lightbus/transports/__init__.py: -------------------------------------------------------------------------------- 1 | from lightbus.transports.base import ( 2 | RpcTransport, 3 | ResultTransport, 4 | EventTransport, 5 | SchemaTransport, 6 | Transport, 7 | ) 8 | from lightbus.transports.debug import ( 9 | DebugRpcTransport, 10 | DebugResultTransport, 11 | DebugEventTransport, 12 | DebugSchemaTransport, 13 | ) 14 | from lightbus.transports.redis.rpc import RedisRpcTransport 15 | from lightbus.transports.redis.result import RedisResultTransport 16 | from lightbus.transports.redis.event import RedisEventTransport 17 | from lightbus.transports.redis.schema import RedisSchemaTransport 18 | -------------------------------------------------------------------------------- /lightbus_experiments/kombu/hello_consumer.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import timeit 3 | from kombu import Connection 4 | 5 | TOTAL_MESSAGES = int(sys.argv[1]) 6 | 7 | with Connection("amqp://guest:guest@127.0.0.1:5672//") as conn: 8 | simple_queue = conn.SimpleQueue("simple_queue") 9 | # Block until we get the 'ready to start' message 10 | simple_queue.get(block=True) 11 | 12 | def get(): 13 | simple_queue.get(block=True, timeout=0.1).ack() 14 | 15 | seconds = timeit.timeit(get, number=TOTAL_MESSAGES) / TOTAL_MESSAGES 16 | print("Time per get: {}ms".format(round(seconds * 1000, 2))) 17 | simple_queue.close() 18 | -------------------------------------------------------------------------------- /lightbus_experiments/pycharm_completion.py: -------------------------------------------------------------------------------- 1 | from typing import TypeVar, Type 2 | 3 | T = TypeVar("T", bound="Parent") 4 | 5 | 6 | class Parent(): 7 | def instance_method(self: T, x: int) -> T: 8 | return self 9 | 10 | @classmethod 11 | 
def class_method(cls: Type[T]) -> T: 12 | return cls() 13 | 14 | 15 | class Child(Parent): 16 | def foo(self): 17 | pass 18 | 19 | 20 | # Child().instance_method() # No hinting 21 | # Child().class_method() # No hinting 22 | # Child().instance_method().foo() # Ok 23 | # Child().instance_method().bad() # Invalid 24 | 25 | 26 | import pdb 27 | 28 | pdb.set_trace() 29 | -------------------------------------------------------------------------------- /lightbus_examples/ex01_quickstart/auth_service/manually_register_user.py: -------------------------------------------------------------------------------- 1 | # ./auth_service/manually_register_user.py 2 | 3 | # Import the service's bus client from bus.py 4 | from bus import bus 5 | 6 | print("New user creation") 7 | new_username = input("Enter a username: ").strip() 8 | new_email = input("Enter the user's email address: ").strip() 9 | 10 | # You would normally store the new user in your database 11 | # at this point. We don't show this here for simplicity. 12 | 13 | # Let the bus know a user has been registered by firing the event 14 | bus.auth.user_registered.fire(username=new_username, email=new_email) 15 | 16 | print("Done") 17 | -------------------------------------------------------------------------------- /lightbus_experiments/asyncio_tasks_and_threads.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from functools import partial 3 | from threading import Thread 4 | from time import sleep 5 | 6 | 7 | def setter(f: asyncio.Future): 8 | sleep(1) 9 | f.get_loop().call_soon_threadsafe( 10 | partial(f.set_result, None) 11 | ) 12 | print("Setting done") 13 | 14 | 15 | async def main(): 16 | f = asyncio.Future() 17 | setter_thread = Thread(target=setter, args=[f]) 18 | 19 | setter_thread.start() 20 | 21 | await f 22 | print("Waiting complete") 23 | 24 | setter_thread.join() 25 | 26 | 27 | if __name__ == '__main__': 28 | asyncio.run(main()) 29 | -------------------------------------------------------------------------------- /lightbus_examples/ex03_worked_example/dashboard/bus.py: -------------------------------------------------------------------------------- 1 | """ 2 | This bus.py file listens for events only and does not 3 | provide any APIs. It receives page view events and 4 | writes the data to .exampledb.json. 5 | 6 | """ 7 | import json 8 | import lightbus 9 | 10 | bus = lightbus.create() 11 | page_views = {} 12 | 13 | 14 | def handle_page_view(event_message, url): 15 | page_views.setdefault(url, 0) 16 | page_views[url] += 1 17 | with open("/tmp/.dashboard.db.json", "w") as f: 18 | json.dump(page_views, f) 19 | 20 | 21 | @bus.client.on_start() 22 | def my_startup(client): 23 | bus.store.page_view.listen(handle_page_view, listener_name="handle_page_view") 24 | -------------------------------------------------------------------------------- /tests/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: '3.1' 2 | 3 | services: 4 | 5 | db: 6 | image: postgres:9.6 7 | restart: always 8 | ports: 9 | - "54320:5432" 10 | command: ["postgres", "-c", "log_statement=all"] 11 | 12 | redis_a: 13 | image: redis:5.0 14 | ports: 15 | - "60791:6379" 16 | 17 | redis_b: 18 | image: redis:5.0 19 | ports: 20 | - "60792:6379" 21 | 22 | local_tests: 23 | build: 24 | context: .. 
25 | dockerfile: tests/Dockerfile 26 | volumes: 27 | - ..:/lightbus 28 | command: pytest 29 | environment: 30 | PG_URL: "postgres://db:5432/postgres" 31 | depends_on: 32 | - db 33 | - redis_a 34 | - redis_b 35 | -------------------------------------------------------------------------------- /lightbus_examples/ex03_worked_example/dashboard/web.py: -------------------------------------------------------------------------------- 1 | """ 2 | This web server does not access the bus at all. It simply 3 | reads data from the .exampledb.json json file created by 4 | bus.py 5 | """ 6 | 7 | import json 8 | from flask import Flask 9 | 10 | app = Flask(__name__) 11 | 12 | 13 | @app.route("/") 14 | def home(): 15 | html = "
<h1>Dashboard</h1>\n" 16 | html += "<h2>Total store views</h2>
\n" 17 | 18 | with open("/tmp/.dashboard.db.json", "r") as f: 19 | page_views = json.load(f) 20 | 21 | html += "" 25 | 26 | return html 27 | -------------------------------------------------------------------------------- /tests/schema/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from lightbus import Schema, RedisSchemaTransport, DebugSchemaTransport, Api, Event, Parameter 4 | 5 | 6 | @pytest.fixture 7 | def schema(redis_pool): 8 | return Schema(schema_transport=RedisSchemaTransport(redis_pool=redis_pool)) 9 | 10 | 11 | @pytest.fixture 12 | def dummy_schema(redis_pool): 13 | return Schema(schema_transport=DebugSchemaTransport()) 14 | 15 | 16 | @pytest.fixture 17 | def TestApi(): 18 | 19 | class TestApi(Api): 20 | my_event = Event([Parameter("field", bool)]) 21 | 22 | class Meta: 23 | name = "my.test_api" 24 | 25 | def my_proc(self, field: bool = True) -> str: 26 | pass 27 | 28 | return TestApi 29 | -------------------------------------------------------------------------------- /docs/includes/note-configuration-auto-complete.md: -------------------------------------------------------------------------------- 1 | !!! note "Configuration auto-complete using JSON Schema" 2 | 3 | Many code editors support using a JSON schema to provide auto-complete and validation when 4 | editing a JSON file. If you wish, you can write your configuration in JSON (rather than YAML), 5 | and load the following JSON schema into your editor: 6 | 7 | https://lightbus.org/static/default-config-schema.json 8 | 9 | This will provide you with autocomplete and validation for Lightbus' various 10 | configuration options. 11 | 12 | If you are using custom transports or plugins you should 13 | [generate your own config schema](../reference/command-line-use/dumpconfigschema.md). 14 | -------------------------------------------------------------------------------- /lightbus_examples/ex01_quickstart/another_service/bus.py: -------------------------------------------------------------------------------- 1 | # File: ./another_service/bus.py 2 | import lightbus 3 | from lightbus.utilities.async_tools import block 4 | 5 | bus = lightbus.create() 6 | 7 | 8 | def handle_new_user(event, username, email): 9 | # bus.client.enabled = True 10 | block(bus.auth.check_password.call_async(username="admin", password="secret"), timeout=2) 11 | # bus.client.enabled = False 12 | 13 | print(f"A new user was created in the authentication service:") 14 | print(f" Username: {username}") 15 | print(f" Email: {email}") 16 | 17 | 18 | @bus.client.on_start() 19 | def bus_start(**kwargs): 20 | bus.auth.user_registered.listen(handle_new_user, listener_name="print_on_new_registration") 21 | -------------------------------------------------------------------------------- /docs/explanation/transports.md: -------------------------------------------------------------------------------- 1 | # Transports 2 | 3 | Transports provide the communications system for Lightbus. There are four types 4 | of transport: 5 | 6 | * **RPC transports** – sends and consumes RPC calls 7 | * **Result transports** – sends and receives RPC results 8 | * **Event transports** – sends and consumes events 9 | * **Schema transports** – stores and retrieves the [bus schema](schema.md) 10 | 11 | **Lightbus ships with a Redis-backed implementation of each of these transports.** 12 | For configuration details see the [transport configuration reference](../reference/transport-configuration.md). 
13 | 14 | Lightbus can be [configured] to use custom transports either globally, 15 | or on a per-API level. 16 | 17 | [configured]: ../reference/configuration.md 18 | -------------------------------------------------------------------------------- /lightbus_examples/ex06_django/manage.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """Django's command-line utility for administrative tasks.""" 3 | import os 4 | import sys 5 | 6 | 7 | def main(): 8 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "lightbus_examples.ex06_django.settings") 9 | try: 10 | from django.core.management import execute_from_command_line 11 | except ImportError as exc: 12 | raise ImportError( 13 | "Couldn't import Django. Are you sure it's installed and " 14 | "available on your PYTHONPATH environment variable? Did you " 15 | "forget to activate a virtual environment?" 16 | ) from exc 17 | execute_from_command_line(sys.argv) 18 | 19 | 20 | if __name__ == "__main__": 21 | main() 22 | -------------------------------------------------------------------------------- /lightbus_experiments/versions_and_migrations.py: -------------------------------------------------------------------------------- 1 | import lightbus 2 | 3 | bus = lightbus.create() 4 | 5 | 6 | class MyApi(lightbus.Api): 7 | class Meta: 8 | version = 1 9 | 10 | 11 | bus.client.register_api(MyApi()) 12 | 13 | 14 | class MyMigrations(): 15 | def migrate_1_to_2(self, event: lightbus.EventMessage) -> lightbus.EventMessage: 16 | pass 17 | 18 | def migrate_2_to_1(self, event: lightbus.EventMessage) -> lightbus.EventMessage: 19 | pass 20 | 21 | def migrate_2_to_3(self, event: lightbus.EventMessage) -> lightbus.EventMessage: 22 | pass 23 | 24 | 25 | @migrations(MyMigrations(), to_version=4) 26 | def my_listener(): 27 | pass 28 | 29 | 30 | bus.api.event.listen(my_listener, listener_name="my_listener") 31 | -------------------------------------------------------------------------------- /docs/tutorial/index.md: -------------------------------------------------------------------------------- 1 | # Tutorial overview 2 | 3 | These tutorials will give you **a practical concrete introduction to 4 | Lightbus**. We will link to concepts as we go, but the aim here 5 | is to get you up and running quickly. 6 | 7 | Do you prefer to read the theory first? 8 | Feel free to start with the [explanation] section and come 9 | back here later. 10 | 11 | --- 12 | 13 | We recommend you approach the tutorials in the following order: 14 | 15 | 1. [Installation] 16 | 2. [Quick start] 17 | 3. [Worked example] 18 | 19 | [explanation]: index.md 20 | [Installation]: installation.md 21 | [Quick start]: quick-start.md 22 | [Worked example]: worked-example.md 23 | 24 | After completing these tutorials you should make sure you 25 | look over the [explanation] section. 26 | -------------------------------------------------------------------------------- /lightbus_examples/ex01_quickstart/auth_service/bus.py: -------------------------------------------------------------------------------- 1 | # File: auth_service/bus.py 2 | import lightbus 3 | 4 | # Create your service's bus client. 
You can import this elsewere 5 | # in your service's codebase in order to access the bus 6 | bus = lightbus.create() 7 | 8 | 9 | class AuthApi(lightbus.Api): 10 | user_registered = lightbus.Event(parameters=("username", "email")) 11 | 12 | class Meta: 13 | name = "auth" 14 | 15 | def check_password(self, username, password): 16 | return username == "admin" and password == "secret" 17 | 18 | 19 | # Register this API with Lightbus. Lightbus will respond to 20 | # remote procedure calls for registered APIs, as well as allow you 21 | # as the developer to fire events on any registered APIs. 22 | bus.client.register_api(AuthApi()) 23 | -------------------------------------------------------------------------------- /lightbus_examples/ex03_worked_example/Procfile: -------------------------------------------------------------------------------- 1 | # This file defines the processes to start up as part of this example. 2 | 3 | # The image resizer service has no web UI. I does have an ImageApi 4 | # which provides a resize() RPC. 5 | image_resizer_bus: lightbus run --bus=image.bus 6 | 7 | # The store web interface. This will fire the page_view event 8 | # on the StoreApi, and call the ImageApi's resize() RPC. 9 | store_web: FLASK_DEBUG=1 FLASK_APP=store/web.py flask run --port=5001 10 | 11 | # The dashboard's lightbus process. This listens for page_view events 12 | # and writes them a database local to this service. 13 | dashboard_bus: lightbus run --bus=dashboard.bus 14 | 15 | # The dashboard web interface. This reads data from its own database 16 | dashboard_web: FLASK_DEBUG=1 FLASK_APP=dashboard/web.py flask run --port=5000 17 | -------------------------------------------------------------------------------- /tests/tests_testing.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import pytest 4 | 5 | from lightbus.utilities.internal_queue import InternalQueue 6 | from lightbus.utilities.testing import QueueMockContext 7 | 8 | 9 | def test_queue_mock_context_sync(): 10 | queue = InternalQueue() 11 | 12 | with QueueMockContext(queue) as m: 13 | queue.put_nowait(1) 14 | queue.put_nowait(2) 15 | queue.get_nowait() 16 | 17 | assert m.put_items == [1, 2] 18 | assert m.got_items == [1] 19 | 20 | 21 | @pytest.mark.asyncio 22 | async def test_queue_mock_context_async(): 23 | queue = InternalQueue() 24 | 25 | with QueueMockContext(queue) as m: 26 | await queue.put(1) 27 | await queue.put(2) 28 | await queue.get() 29 | 30 | assert m.put_items == [1, 2] 31 | assert m.got_items == [1] 32 | -------------------------------------------------------------------------------- /tests/utilities/test_frozendict.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from lightbus.utilities.frozendict import frozendict 4 | 5 | 6 | @pytest.fixture() 7 | def d(): 8 | return frozendict(a=1, b=2) 9 | 10 | 11 | def test_frozendict_get(d): 12 | assert d["a"] == 1 13 | 14 | 15 | def test_frozendict_get_error(d): 16 | with pytest.raises(KeyError): 17 | d["z"] 18 | 19 | 20 | def test_frozendict_contains(d): 21 | assert "a" in d 22 | assert "z" not in d 23 | 24 | 25 | def test_frozendict_iter(d): 26 | i = iter(d) 27 | assert set(i) == {"a", "b"} 28 | 29 | 30 | def test_frozendict_repr(d): 31 | assert repr(d).startswith(" str: 17 | return "value: {}".format(field) 18 | 19 | def sudden_death(self, n): 20 | raise SuddenDeathException() 21 | 22 | def random_death(self, n, death_every=2): 23 | if n % death_every == 0: 
24 | logger.warning(f"Triggering SuddenDeathException. n={n}") 25 | raise SuddenDeathException() 26 | return n 27 | 28 | def general_error(self): 29 | raise RuntimeError("Oh no, there was some kind of error") 30 | -------------------------------------------------------------------------------- /lightbus_examples/ex06_django/example_app/migrations/0001_initial.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 2.2.4 on 2019-08-07 10:52 2 | 3 | from django.db import migrations, models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | 8 | initial = True 9 | 10 | dependencies = [] 11 | 12 | operations = [ 13 | migrations.CreateModel( 14 | name="PageView", 15 | fields=[ 16 | ( 17 | "id", 18 | models.AutoField( 19 | auto_created=True, primary_key=True, serialize=False, verbose_name="ID" 20 | ), 21 | ), 22 | ("url", models.CharField(max_length=255)), 23 | ("viewed_at", models.DateTimeField(auto_now_add=True)), 24 | ("user_agent", models.TextField()), 25 | ], 26 | ) 27 | ] 28 | -------------------------------------------------------------------------------- /lightbus_experiments/config.example.yaml: -------------------------------------------------------------------------------- 1 | lightbus: 2 | transports: 3 | # Urgh, lots of repeating 4 | rpc: 5 | redis: 6 | name: "redis" 7 | host: "127.0.0.1" 8 | port: "6379" 9 | username: "user" 10 | password: "password" 11 | db: "0" 12 | result: 13 | redis: 14 | name: "redis" 15 | host: "127.0.0.1" 16 | port: "6379" 17 | username: "user" 18 | password: "password" 19 | db: "0" 20 | event: 21 | redis: 22 | name: "redis" 23 | host: "127.0.0.1" 24 | port: "6379" 25 | username: "user" 26 | password: "password" 27 | db: "0" 28 | 29 | apis: 30 | default: 31 | rpc_backend: "redis" 32 | result_backend: "redis" 33 | event_backend: "redis" 34 | 35 | mycompany.auth: 36 | event_backend: "something_else" 37 | -------------------------------------------------------------------------------- /lightbus_examples/ex98_nested_bus_calls/bus.py: -------------------------------------------------------------------------------- 1 | from time import sleep 2 | 3 | import lightbus 4 | 5 | bus = lightbus.create() 6 | 7 | 8 | class AuthApi(lightbus.Api): 9 | user_registered = lightbus.Event() 10 | request_send_email = lightbus.Event() 11 | something_else = lightbus.Event() 12 | 13 | class Meta: 14 | name = "auth" 15 | 16 | def check_password(self): 17 | bus.auth.something_else.fire() 18 | 19 | 20 | bus.client.register_api(AuthApi()) 21 | 22 | 23 | @bus.client.on_start() 24 | def on_startup(**kwargs): 25 | @bus.client.every(seconds=1) 26 | def constantly_register_users(): 27 | bus.auth.user_registered.fire() 28 | 29 | def handle_new_user(event): 30 | bus.auth.request_send_email.fire() 31 | bus.auth.check_password() 32 | 33 | bus.auth.user_registered.listen(handle_new_user, listener_name="ex98_nested_bus_calls") 34 | -------------------------------------------------------------------------------- /lightbus_examples/ex06_django/urls.py: -------------------------------------------------------------------------------- 1 | """ex06_django URL Configuration 2 | 3 | The `urlpatterns` list routes URLs to views. For more information please see: 4 | https://docs.djangoproject.com/en/2.2/topics/http/urls/ 5 | Examples: 6 | Function views 7 | 1. Add an import: from my_app import views 8 | 2. Add a URL to urlpatterns: path('', views.home, name='home') 9 | Class-based views 10 | 1. Add an import: from other_app.views import Home 11 | 2. 
Add a URL to urlpatterns: path('', Home.as_view(), name='home') 12 | Including another URLconf 13 | 1. Import the include() function: from django.urls import include, path 14 | 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) 15 | """ 16 | from django.contrib import admin 17 | from django.urls import path 18 | 19 | from lightbus_examples.ex06_django.example_app.views import home_page 20 | 21 | urlpatterns = [path("", home_page, name="home"), path("admin/", admin.site.urls)] 22 | -------------------------------------------------------------------------------- /lightbus_experiments/kombu_zmq/README.md: -------------------------------------------------------------------------------- 1 | Test of AMQP/Kombu + ZeroMQ 2 | =========================== 3 | 4 | Testing latency & throughput of AMQP (via Kombu), using ZeroMQ as the 5 | return path. 6 | 7 | $ python producer.py 10000 8 | Time per put: 0.8ms 9 | Puts per second: 1257.15 10 | ZeroMQ time: 6420.75ms 11 | AMQP time: 1324.4ms 12 | 13 | $ python consumer.py 10000 14 | Waiting for kick-off message from producer 15 | Got it! Let's go... 16 | Time per get: 0.8ms 17 | Gets per second: 1257.31 18 | ZeroMQ time: 156.04ms 19 | AMQP time: 7672.79ms 20 | 21 | 22 | Notes 23 | ----- 24 | 25 | * Results are returned to the producer via ZeroMQ 26 | * Compared to the RPC celery experiment this is exceptionally fast 27 | * Requires direct network access between producer and consumer 28 | * I believe the high ZeroMQ time in the producer is caused by the 29 | high AMQP time in the consumer. Pre-fetching results from AMQP may 30 | alleviate this. 31 | -------------------------------------------------------------------------------- /lightbus_experiments/structured_logging.py: -------------------------------------------------------------------------------- 1 | """ Experimenting with using structlog with hope of supporting it in Lightbus 2 | """ 3 | import logging 4 | import sys 5 | 6 | import structlog 7 | 8 | 9 | def event_dict_ordering(logger, method_name, event_dict): 10 | ordered = {"event": event_dict.pop("event")} 11 | ordered.update(**event_dict) 12 | return ordered 13 | 14 | 15 | structlog.configure( 16 | processors=[ 17 | event_dict_ordering, 18 | structlog.stdlib.add_log_level, 19 | structlog.stdlib.add_logger_name, 20 | structlog.processors.TimeStamper(fmt="iso"), 21 | structlog.dev.ConsoleRenderer() 22 | if sys.stdout.isatty() 23 | else structlog.processors.JSONRenderer(), 24 | ] 25 | ) 26 | 27 | 28 | if __name__ == "__main__": 29 | log = structlog.wrap_logger(logging.getLogger("test")) 30 | log.warning("hello from std", foo=1) 31 | 32 | log.info("Loaded plugins", plugins={...}, context={"service_name": "..."}) 33 | -------------------------------------------------------------------------------- /docs/reference/authors.md: -------------------------------------------------------------------------------- 1 | # Authors 2 | 3 | ## Current team 4 | 5 | * [Adam Charnock](https://github.com/adamcharnock/), Portugal (Maintainer) 6 | 7 | ## Alumni 8 | 9 | * None yet... 10 | 11 | ## Thanks 12 | 13 | Lightbus would not have been possible without the intellectual, emotional, and logistical support 14 | of the following individuals and companies: 15 | 16 | * [Louis Thibault](https://github.com/lthibault) 17 | – For helping Adam work through many of the knottier problems, and relentlessly cheering him on. 18 | * [Futurepump](https://futurepump.com/) 19 | – For being an early testing ground and ongoing user of Lightbus. 
20 | * [Louis Pilfold](https://github.com/lpil) 21 | – For being an early sounding board, and persuading Adam to take schemas seriously. 22 | * [Presscast](https://presscast.io/) 23 | – For being an early adopter and proving invaluable feedback. 24 | 25 | Additional thanks to everyone (technical and non-technical) who kindly listened to Adam 26 | talk about the reasons and ideas behind Lightbus. 27 | -------------------------------------------------------------------------------- /tests/client/internal_messaging/conftest.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import pytest 4 | 5 | from lightbus.client.internal_messaging.consumer import InternalConsumer 6 | from lightbus.client.internal_messaging.producer import InternalProducer 7 | from lightbus.utilities.internal_queue import InternalQueue 8 | 9 | 10 | @pytest.fixture 11 | async def consumer(): 12 | def _on_exception(e): 13 | raise e 14 | 15 | consumer = InternalConsumer( 16 | name="TestConsumer", queue=InternalQueue(), error_queue=InternalQueue() 17 | ) 18 | yield consumer 19 | await consumer.close() 20 | 21 | 22 | @pytest.fixture 23 | async def producer(): 24 | def _on_exception(e): 25 | raise e 26 | 27 | producer = InternalProducer( 28 | name="TestProducer", queue=InternalQueue(), error_queue=InternalQueue() 29 | ) 30 | yield producer 31 | await producer.close() 32 | 33 | 34 | @pytest.fixture 35 | async def fake_coroutine(): 36 | async def fake_coroutine_(*args, **kwargs): 37 | pass 38 | 39 | return fake_coroutine_ 40 | -------------------------------------------------------------------------------- /docs/explanation/bus.md: -------------------------------------------------------------------------------- 1 | # The bus 2 | 3 | The bus is the communications channel which links all your 4 | services together. Currently this is Redis. 5 | 6 | You use `lightbus.create()` in your `bus.py` file to access 7 | this bus: 8 | 9 | ```python3 10 | # bus.py 11 | 12 | import lightbus 13 | 14 | bus = lightbus.create() 15 | ``` 16 | 17 | This creates a high-level client through which you can 18 | perform [remote procedure calls] and fire [events]. 19 | 20 | ## About buses 21 | 22 | In computing, a bus is a shared communication medium. A bus allows any 23 | software/hardware connected to that medium to communicate, as long as 24 | common rules are obeyed. In this sense a bus is very similar to a conversation 25 | between a group of people. 26 | 27 | In electronics the communication medium can be a simple 28 | copper cable. In software the communication medium is itself defined 29 | by software. 30 | 31 | **Lightbus uses Redis as its communication medium**, although support 32 | for other mediums may be added in future. 33 | 34 | 35 | [remote procedure calls]: rpcs.md 36 | [events]: events.md 37 | -------------------------------------------------------------------------------- /docs/reference/release-process.md: -------------------------------------------------------------------------------- 1 | # Lightbus release process 2 | 3 | Lightbus releases are performed as follows: 4 | 5 | ```shell 6 | # Ensure poetry.lock is up to date 7 | poetry lock 8 | 9 | # Version bump 10 | poetry version {patch,minor,major,prepatch,preminor,premajor,prerelease} 11 | 12 | export VERSION=(lightbus version --pyproject) # v1.2.3 13 | export VERSION_DOCS=(lightbus version --pyproject --docs) # v1.2 14 | 15 | # Commit 16 | git add . 
17 | git commit -m "Releasing version $VERSION" 18 | 19 | # Make docs 20 | git checkout gh-pages 21 | git pull origin gh-pages 22 | git checkout master 23 | 24 | mike deploy v$VERSION_DOCS --message="Build docs for release of $VERSION [ci skip]" 25 | mike delete latest 26 | mike alias v$VERSION_DOCS latest 27 | 28 | # Tagging and branching 29 | git tag "v$VERSION" 30 | git branch "v$VERSION" 31 | git push origin \ 32 | refs/tags/"v$VERSION" \ 33 | refs/heads/"v$VERSION" \ 34 | master \ 35 | gh-pages 36 | 37 | # Wait for CI to pass: https://github.com/adamcharnock/lightbus/actions 38 | 39 | # Build and publish 40 | poetry publish --build 41 | ``` 42 | -------------------------------------------------------------------------------- /docs/check_links.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | 5 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" 6 | LINKCHECK="$DIR/../bin/linkcheck" 7 | 8 | if [ ! -e "$LINKCHECK" ]; then 9 | echo "Downloading linkcheck" 10 | curl -L -o "$LINKCHECK" https://github.com/filiph/linkcheck/releases/download/v2.0.11/linkcheck-mac-x64 11 | chmod +x "$LINKCHECK" 12 | fi 13 | 14 | echo "Starting mkdocs server" 15 | poetry run mkdocs serve & 16 | 17 | 18 | until nc -z 127.0.0.1 8000; do 19 | echo "Waiting for server to start" 20 | sleep 1 21 | done 22 | 23 | echo "Checking links" 24 | 25 | cat >.lightbus-skip-file < 1 42 | exit $EXIT_CODE 43 | fi 44 | 45 | echo "Everything OK" 46 | -------------------------------------------------------------------------------- /lightbus/utilities/django.py: -------------------------------------------------------------------------------- 1 | from functools import wraps 2 | 3 | 4 | def uses_django_db(f): 5 | """Ensures Django discards any broken database connections 6 | 7 | Django normally cleans up connections once a web request has 8 | been processed. However, here we are not serving web requests 9 | and are outside of Django's request handling logic. We therefore 10 | need to make sure we cleanup any broken database connections. 11 | """ 12 | # TODO: Move this into middleware 13 | # (Tracked in: https://github.com/adamcharnock/lightbus/issues/6) 14 | 15 | # Import Django locally as it is not a dependency of Lightbus. 16 | # This will only get run on startup anyway, and we will assume that 17 | # if someone uses this decorator then they have Django installed. 18 | from django.db import reset_queries, close_old_connections 19 | 20 | @wraps(f) 21 | def wrapped(*args, **kwargs): 22 | reset_queries() 23 | close_old_connections() 24 | 25 | try: 26 | return f(*args, **kwargs) 27 | finally: 28 | close_old_connections() 29 | 30 | return wrapped 31 | -------------------------------------------------------------------------------- /docs/howto/run-background-tasks.md: -------------------------------------------------------------------------------- 1 | # How to run background tasks 2 | 3 | Sometimes you may wish to run arbitrary `asyncio` tasks in the background of the 4 | `lightbus run` process. 
You can set these up in your `bus.py` file: 5 | 6 | ```python3 7 | # bus.py 8 | import asyncio 9 | import lightbus 10 | 11 | bus = lightbus.create() 12 | 13 | async def my_background_task(): 14 | while True: 15 | await asyncio.sleep(1) 16 | print("Hello!") 17 | 18 | @bus.client.on_start() 19 | def on_startup(**kwargs): 20 | bus.client.add_background_task(my_background_task()) 21 | ``` 22 | 23 | Important points to note are: 24 | 25 | * The background task will be automatically cancelled when the bus is closed. 26 | * Any errors in the background task will be bubbled up and cause the 27 | Lightbus process to exit. If this is not desired you can implement 28 | your own try/except handling within the function being executed. 29 | 30 | !!! note 31 | 32 | If you wish to schedule a recurring task then you should probably use 33 | `@bus.client.every()` or `@bus.client.schedule()`. See 34 | [how to schedule recurring tasks](run-background-tasks.md). 35 | -------------------------------------------------------------------------------- /lightbus_examples/ex06_django/example_app/views.py: -------------------------------------------------------------------------------- 1 | from django.http import HttpResponse 2 | from django.shortcuts import render 3 | 4 | from lightbus.exceptions import LightbusTimeout 5 | from lightbus_examples.ex06_django.bus import bus 6 | from lightbus_examples.ex06_django.example_app.models import PageView 7 | 8 | 9 | def home_page(request): 10 | current_url = request.META.get("PATH_INFO", "") 11 | 12 | PageView.objects.create(url=current_url, user_agent=request.META.get("HTTP_USER_AGENT", "")) 13 | 14 | html = ( 15 | "
Welcome!\n" 16 | "This is the home page. It's nothing special, but we have logged your page view. " 17 | "Thank you for visiting!\n" 18 | ) 19 | 20 | html += "Total views" 21 | 22 | try: 23 | total_views = bus.analytics.get_total(url=current_url) 24 | html += f"There have been {total_views} views for this page" 25 | except LightbusTimeout: 26 | html += ( 27 | f"The bus did not respond in a timely fashion. Have you started the " 28 | f"Lightbus worker process using lightbus run?
" 29 | ) 30 | 31 | return HttpResponse(html) 32 | -------------------------------------------------------------------------------- /lightbus_examples/ex03_worked_example/store/web.py: -------------------------------------------------------------------------------- 1 | """A simple pet shop 2 | 3 | Shows a list of animals, and you can click on each one. 4 | 5 | Image resizing and page view tracking performed using lightbus. 6 | """ 7 | import lightbus 8 | from flask import Flask 9 | 10 | from lightbus_examples.ex03_worked_example.store.bus import bus 11 | 12 | app = Flask(__name__) 13 | 14 | lightbus.configure_logging() 15 | 16 | PETS = ( 17 | "http://store.company.com/image1.jpg", 18 | "http://store.company.com/image2.jpg", 19 | "http://store.company.com/image3.jpg", 20 | ) 21 | 22 | 23 | @app.route("/") 24 | def home(): 25 | html = "
Online pet store
" 26 | 27 | for pet_num, image_url in enumerate(PETS): 28 | resized_url = bus.image.resize(url=image_url, width=200, height=200) 29 | html += f'' f'' f" " 30 | 31 | bus.store.page_view.fire(url="/") 32 | return html 33 | 34 | 35 | @app.route("/pet/") 36 | def pet(pet_num): 37 | resized_url = bus.image.resize(url=PETS[pet_num], width=200, height=200) 38 | bus.store.page_view.fire(url=f"/pet/{pet_num}") 39 | 40 | html = f"
Pet {pet_num}
" 41 | html = f'
' 42 | return html 43 | -------------------------------------------------------------------------------- /docs/explanation/schema.md: -------------------------------------------------------------------------------- 1 | # Schema 2 | 3 | Lightbus creates a schema for each of your APIs using the type hints 4 | specified on the API. This schema is shared on the bus for consumption 5 | by other Lightbus clients. This provides a number of features: 6 | 7 | * The availability of a particular API can be detected by remote clients 8 | * RPCs, results, and events transmitted on the bus can be validated by both the sender and receiver 9 | * Tooling can load the schema to provide additional functionality. For example, you can 10 | [dump your production schema](https://lightbus.org/reference/command-line-use/dumpschema/) 11 | and run your development environment and tests against it. 12 | 13 | Note that an API's schema will only be available on the bus while there is a worker 14 | running to provides it. Once the worker process for an API shuts down the schema on the 15 | bus will be cleaned up shortly thereafter. 16 | 17 | ## See also 18 | 19 | See the [schema reference](../reference/schema.md) section for details on how this works in practice. 20 | 21 | The schema is created using the [JSON schema] format, see the [schema protocol] for details of 22 | the transmission format. 23 | 24 | 25 | [JSON schema]: https://json-schema.org/ 26 | [schema protocol]: ../reference/protocols/schema.md 27 | -------------------------------------------------------------------------------- /docs/explanation/internal-architecture.md: -------------------------------------------------------------------------------- 1 | # Internal Architecture 2 | 3 | Lightbus' internal workings are composed of: 4 | 5 | * **The user-facing API**. This is provided by the `BusClient` class, which then makes use of the `EventClient` and 6 | `RpcResultClient` classes. This is a friendly API that issues helpful errors where appropriate. This also 7 | orchestrates system startup and shutdown. 8 | * **An internal message queuing system**. This includes four separate internal message queues plus the `EventDock` 9 | & `RpcResultDock` classes. The message queues provide the internal communication medium between the 10 | user-facing API and the Lightbus backend. The `EventDock` & `RpcResultDock` classes convert these messages 11 | into a simplified API for implementation by the transports. The `EventDock` contains the `EventTransport`, 12 | and the `RpcResultDock` contains both the `RpcTransport` and `RpcResultTransport`. 13 | * **The Event & RPC transports** implement Lightbus functionality for a specific backend (e.g. Redis). The main transports 14 | shipped with Lightbus are the `RedisEventTransport`, `RedisRpcTransport`, and `RedisResultTransport`. 
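To make the internal queuing layer in the list above a little more concrete, here is a minimal, hedged sketch using the command objects and `InternalQueue` that the clients and docks communicate with. It is not the real wiring (the producers and consumers normally run inside the clients and docks themselves), but it shows the basic idea of a command flowing from the user-facing API towards a dock.

```python3
# Minimal sketch of the internal command-passing idea (not the real wiring):
# the user-facing side describes work as a command object and puts it on an
# internal queue; a dock consumes the command and drives its transport.
from lightbus.client.commands import ConsumeRpcsCommand
from lightbus.utilities.internal_queue import InternalQueue

queue = InternalQueue()

# User-facing side: enqueue a command describing what should happen.
queue.put_nowait(ConsumeRpcsCommand(api_names=["auth"]))

# Dock side: pick the command up ready to hand to the transport layer.
command = queue.get_nowait()
print(command.api_names)  # ['auth']
```

In the real client the dock would then translate such a command into calls on the relevant transport, which is what keeps the user-facing API independent of any particular backend.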
15 | 16 | ## Diagram 17 | 18 | ![Internal Architecture Diagram][diagram] 19 | 20 | [diagram]: ../static/images/internal-architecture.png 21 | 22 | -------------------------------------------------------------------------------- /lightbus_examples/ex06_django/bus.py: -------------------------------------------------------------------------------- 1 | import os 2 | from datetime import datetime 3 | 4 | import django 5 | import lightbus 6 | from lightbus.utilities.django import uses_django_db 7 | 8 | bus = lightbus.create() 9 | 10 | 11 | class AnalyticsApi(lightbus.Api): 12 | page_view = lightbus.Event( 13 | parameters=( 14 | lightbus.Parameter("pk", int), 15 | lightbus.Parameter("viewed_at", datetime), 16 | lightbus.Parameter("url", str), 17 | lightbus.Parameter("user_agent", str), 18 | ) 19 | ) 20 | 21 | @uses_django_db 22 | def get_total(self, url: str) -> int: 23 | from lightbus_examples.ex06_django.example_app.models import PageView 24 | 25 | return PageView.objects.filter(url=url).count() 26 | 27 | class Meta: 28 | name = "analytics" 29 | 30 | 31 | # Tell the client to respond to this API 32 | bus.client.register_api(AnalyticsApi()) 33 | 34 | 35 | @bus.client.on_start() 36 | async def bus_start(**kwargs): 37 | # Setup the default DJANGO_SETTINGS_MODULE 38 | # (as we also do in manage.py and wsgi.py) 39 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "lightbus_examples.ex06_django.settings") 40 | 41 | # Sets up django. We must do this before importing any models 42 | django.setup() 43 | -------------------------------------------------------------------------------- /docs/explanation/performance.md: -------------------------------------------------------------------------------- 1 | # Performance 2 | 3 | !!! note "Caveats" 4 | 5 | Lightbus has yet to undergo any profiling or optimisation, therefore it is 6 | reasonable to expect performance to improve with time. 7 | 8 | The performance of Lightbus is primarily governed by the [transports](transports.md) used. Lightbus 9 | currently only ships with Redis support. 10 | 11 | ## Individual process performance 12 | 13 | Simple benchmarking[^1] on a 2018 MacBook Pro indicates the following execution times (plugins disabled, schema 14 | and validation enabled, no event/RPC parameters): 15 | 16 | * Firing an event: ≈ 1.7ms (±10%) 17 | * Performing a remote procedure call: ≈ 6.9ms (±10%) 18 | 19 | ## Redis performance 20 | 21 | The Redis server has the potential to be a central bottleneck in Lightbus' performance. You may start to run into 22 | these limits if you are sending tens thousands of events per second. 23 | 24 | In these cases you can either: 25 | 26 | * Scale via Redis by setting up [Redis Cluster](https://redis.io/topics/cluster-tutorial) 27 | * Scale via Lightbus by specifying a different Redis instance per-API. 
See 28 | [configuration](../reference/configuration.md) 29 | 30 | [^1]: See [how to modify Lightbus](../howto/modify-lightbus.md) for details on how to run the benchmarks via `pytest` 31 | -------------------------------------------------------------------------------- /tests/config/test_structure.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from lightbus.config.structure import make_transport_selector_structure, ApiConfig, RootConfig 4 | 5 | pytestmark = pytest.mark.unit 6 | 7 | 8 | def test_make_transport_config_structure(): 9 | EventTransportSelector = make_transport_selector_structure("event") 10 | assert "redis" in EventTransportSelector.__annotations__ 11 | 12 | 13 | def test_make_api_config_structure(): 14 | assert "event_transport" in ApiConfig.__annotations__ 15 | assert "rpc_transport" in ApiConfig.__annotations__ 16 | assert "result_transport" in ApiConfig.__annotations__ 17 | assert "validate" in ApiConfig.__annotations__ 18 | 19 | 20 | def test_root_config_service_name(): 21 | service_name = RootConfig().service_name 22 | assert service_name 23 | assert type(service_name) == str 24 | assert len(service_name) > 3 25 | # No format parameters in there, should have been formatted upon instantiation 26 | assert "{" not in service_name 27 | 28 | 29 | def test_root_config_process_name(): 30 | process_name = RootConfig().process_name 31 | assert process_name 32 | assert type(process_name) == str 33 | assert len(process_name) > 3 34 | # No format parameters in there, should have been formatted upon instantiation 35 | assert "{" not in process_name 36 | -------------------------------------------------------------------------------- /lightbus_vendored/aioredis/commands/scripting.py: -------------------------------------------------------------------------------- 1 | from lightbus_vendored.aioredis.util import wait_ok 2 | 3 | 4 | class ScriptingCommandsMixin: 5 | """Set commands mixin. 
6 | 7 | For commands details see: http://redis.io/commands#scripting 8 | """ 9 | 10 | def eval(self, script, keys=[], args=[]): 11 | """Execute a Lua script server side.""" 12 | return self.execute(b'EVAL', script, len(keys), *(keys + args)) 13 | 14 | def evalsha(self, digest, keys=[], args=[]): 15 | """Execute a Lua script server side by its SHA1 digest.""" 16 | return self.execute(b'EVALSHA', digest, len(keys), *(keys + args)) 17 | 18 | def script_exists(self, digest, *digests): 19 | """Check existence of scripts in the script cache.""" 20 | return self.execute(b'SCRIPT', b'EXISTS', digest, *digests) 21 | 22 | def script_kill(self): 23 | """Kill the script currently in execution.""" 24 | fut = self.execute(b'SCRIPT', b'KILL') 25 | return wait_ok(fut) 26 | 27 | def script_flush(self): 28 | """Remove all the scripts from the script cache.""" 29 | fut = self.execute(b"SCRIPT", b"FLUSH") 30 | return wait_ok(fut) 31 | 32 | def script_load(self, script): 33 | """Load the specified Lua script into the script cache.""" 34 | return self.execute(b"SCRIPT", b"LOAD", script) 35 | -------------------------------------------------------------------------------- /lightbus/utilities/importing.py: -------------------------------------------------------------------------------- 1 | import importlib 2 | import logging 3 | import sys 4 | from typing import Sequence, Tuple, Callable 5 | 6 | if sys.version_info < (3, 10): 7 | from importlib.metadata import entry_points as _entry_points 8 | 9 | def entry_points(group): 10 | return _entry_points()[group] 11 | 12 | else: 13 | from importlib.metadata import entry_points 14 | 15 | 16 | logger = logging.getLogger(__name__) 17 | 18 | 19 | def import_module_from_string(name): 20 | """Import a module if necessary, otherwise return it from the list of already imported modules""" 21 | if name in sys.modules: 22 | return sys.modules[name] 23 | else: 24 | return importlib.import_module(name) 25 | 26 | 27 | def import_from_string(name): 28 | components = name.split(".") 29 | mod = __import__(components[0]) 30 | for comp in components[1:]: 31 | mod = getattr(mod, comp) 32 | return mod 33 | 34 | 35 | def load_entrypoint_classes(entrypoint_name) -> Sequence[Tuple[str, str, Callable]]: 36 | """Load classes specified in an entrypoint 37 | 38 | Entrypoints are specified in pyproject.toml, and Lightbus uses them to 39 | discover plugins & transports. 
40 | """ 41 | found_classes = [] 42 | for entrypoint in entry_points(group=entrypoint_name): 43 | class_ = entrypoint.load() 44 | found_classes.append((entrypoint.module, entrypoint.name, class_)) 45 | return found_classes 46 | -------------------------------------------------------------------------------- /.github/workflows/docs.yaml: -------------------------------------------------------------------------------- 1 | name: Docs 2 | 3 | on: 4 | push: 5 | paths: 6 | - '.github/workflows/docs.yaml' 7 | - 'docs/**' 8 | 9 | # TODO: Trigger on release tag creation 10 | # TODO: Also create workflow for creating releases 11 | 12 | jobs: 13 | docs: 14 | name: Build docs 15 | runs-on: ubuntu-22.04 16 | 17 | steps: 18 | - name: Checkout 19 | uses: actions/checkout@master 20 | 21 | - uses: actions/setup-python@v3 22 | with: 23 | python-version: 3.9 24 | 25 | - name: "Configure git" 26 | run: | 27 | # Mike needs the git config user.name & user.email values set 28 | git config --global user.name "Adam Charnock" 29 | git config --global user.email "adam@adamcharnock.com" 30 | 31 | - name: "Pull the latest docs" 32 | run: | 33 | git fetch origin master gh-pages 34 | git checkout gh-pages 35 | git pull origin gh-pages 36 | git checkout ${{ github.sha }} 37 | 38 | - name: "Install poetry and lightbus" 39 | run: | 40 | pip install poetry 41 | poetry install 42 | 43 | - name: "Dump config schema" 44 | run: | 45 | poetry run lightbus dumpconfigschema > docs/static/default-config-schema.json 46 | 47 | - name: "Build and push docs" 48 | run: | 49 | poetry run mike deploy --message="Automated docs build [ci skip]" dev 50 | git push --force origin gh-pages 51 | 52 | -------------------------------------------------------------------------------- /lightbus/client/subclients/base.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from lightbus.client.internal_messaging.consumer import InternalConsumer 4 | from lightbus.client.internal_messaging.producer import InternalProducer 5 | from lightbus.client.utilities import ErrorQueueType 6 | from lightbus.hooks import HookRegistry 7 | from lightbus.schema import Schema 8 | from lightbus.api import ApiRegistry 9 | from lightbus.config import Config 10 | from lightbus.utilities.internal_queue import InternalQueue 11 | 12 | 13 | class BaseSubClient: 14 | def __init__( 15 | self, 16 | api_registry: ApiRegistry, 17 | hook_registry: HookRegistry, 18 | config: Config, 19 | schema: Schema, 20 | error_queue: ErrorQueueType, 21 | consume_from: InternalQueue, 22 | produce_to: InternalQueue, 23 | ): 24 | self.api_registry = api_registry 25 | self.hook_registry: HookRegistry = hook_registry 26 | self.config = config 27 | self.schema = schema 28 | self.error_queue = error_queue 29 | self.producer = InternalProducer( 30 | name=self.__class__.__name__, queue=produce_to, error_queue=error_queue 31 | ) 32 | self.consumer = InternalConsumer( 33 | name=self.__class__.__name__, queue=consume_from, error_queue=error_queue 34 | ) 35 | 36 | self.producer.start() 37 | self.consumer.start(self.handle) 38 | 39 | async def handle(self, command): 40 | raise NotImplementedError() 41 | 42 | async def close(self): 43 | pass 44 | -------------------------------------------------------------------------------- /lightbus/utilities/frozendict.py: -------------------------------------------------------------------------------- 1 | """ An immutable dictionary 2 | 3 | This has been vendored from 
[python-frozendict](https://github.com/slezica/python-frozendict) 4 | and subsequently modified. 5 | """ 6 | import collections.abc 7 | 8 | 9 | class frozendict(collections.abc.Mapping): 10 | """ 11 | An immutable wrapper around dictionaries that implements the complete :py:class:`collections.Mapping` 12 | interface. It can be used as a drop-in replacement for dictionaries where immutability is desired. 13 | """ 14 | 15 | dict_cls = dict 16 | 17 | def __init__(self, *args, **kwargs): 18 | self._dict = self.dict_cls(*args, **kwargs) 19 | self._hash = None 20 | 21 | def __getitem__(self, key): 22 | return self._dict[key] 23 | 24 | def __contains__(self, key): 25 | return key in self._dict 26 | 27 | def copy(self, **add_or_replace): 28 | return self.__class__(self, **add_or_replace) 29 | 30 | def __iter__(self): 31 | return iter(self._dict) 32 | 33 | def __len__(self): 34 | return len(self._dict) 35 | 36 | def __repr__(self): 37 | return "<%s %r>" % (self.__class__.__name__, self._dict) 38 | 39 | def __hash__(self): 40 | if self._hash is None: 41 | h = 0 42 | for key, value in self._dict.items(): 43 | h ^= hash((key, value)) 44 | self._hash = h 45 | return self._hash 46 | 47 | 48 | class FrozenOrderedDict(frozendict): 49 | """ 50 | A frozendict subclass that maintains key order 51 | """ 52 | 53 | dict_cls = collections.OrderedDict 54 | -------------------------------------------------------------------------------- /lightbus/commands/version.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import logging 3 | import sys 4 | from pathlib import Path 5 | 6 | from importlib.metadata import version as importlib_version 7 | 8 | if sys.version_info < (3, 10): 9 | from importlib.metadata import entry_points as _entry_points 10 | 11 | def entry_points(group): 12 | return _entry_points()[group] 13 | 14 | else: 15 | from importlib.metadata import entry_points 16 | 17 | 18 | logger = logging.getLogger(__name__) 19 | 20 | 21 | class Command: 22 | def setup(self, parser, subparsers): 23 | parser_version = subparsers.add_parser( 24 | "version", help="Show the currently installed Lightbus version" 25 | ) 26 | # Read version directly out of pyproject.toml. Useful for the release process 27 | parser_version.add_argument("--pyproject", action="store_true", help=argparse.SUPPRESS) 28 | # Show the version to be used for creating the docs 29 | parser_version.add_argument("--docs", action="store_true", help=argparse.SUPPRESS) 30 | parser_version.set_defaults(func=self.handle) 31 | 32 | def handle(self, args): 33 | if args.pyproject: 34 | import lightbus 35 | import toml 36 | 37 | file_path = Path(lightbus.__file__).parent.parent / "pyproject.toml" 38 | with file_path.open() as f: 39 | version = toml.load(f)["tool"]["poetry"]["version"] 40 | if args.docs: 41 | version = ".".join(version.split(".")[:2]) 42 | print(version) 43 | else: 44 | print(importlib_version("lightbus")) 45 | -------------------------------------------------------------------------------- /lightbus_vendored/aioredis/locks.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import sys 3 | 4 | from asyncio.locks import Lock as _Lock 5 | 6 | # Fixes an issue with all Python versions that leaves pending waiters 7 | # without being awakened when the first waiter is canceled. 
8 | # Code adapted from the PR https://github.com/python/cpython/pull/1031 9 | # Waiting once it is merged to make a proper condition to relay on 10 | # the stdlib implementation or this one patched 11 | 12 | 13 | class Lock(_Lock): 14 | 15 | if sys.version_info < (3, 7, 0): 16 | async def acquire(self): 17 | """Acquire a lock. 18 | This method blocks until the lock is unlocked, then sets it to 19 | locked and returns True. 20 | """ 21 | if not self._locked and all(w.cancelled() for w in self._waiters): 22 | self._locked = True 23 | return True 24 | 25 | fut = self._loop.create_future() 26 | 27 | self._waiters.append(fut) 28 | try: 29 | await fut 30 | self._locked = True 31 | return True 32 | except asyncio.CancelledError: 33 | if not self._locked: # pragma: no cover 34 | self._wake_up_first() 35 | raise 36 | finally: 37 | self._waiters.remove(fut) 38 | 39 | def _wake_up_first(self): 40 | """Wake up the first waiter who isn't cancelled.""" 41 | for fut in self._waiters: 42 | if not fut.done(): 43 | fut.set_result(True) 44 | break 45 | -------------------------------------------------------------------------------- /lightbus/client/commands.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from typing import NamedTuple, Optional, List, Tuple 4 | 5 | from lightbus.api import Api 6 | from lightbus.message import EventMessage, RpcMessage, ResultMessage 7 | from lightbus.utilities.internal_queue import InternalQueue 8 | 9 | logger = logging.getLogger(__name__) 10 | 11 | 12 | class SendEventCommand(NamedTuple): 13 | message: EventMessage 14 | options: dict = {} 15 | 16 | 17 | class ConsumeEventsCommand(NamedTuple): 18 | events: List[Tuple[str, str]] 19 | listener_name: str 20 | destination_queue: InternalQueue[EventMessage] 21 | options: dict = {} 22 | 23 | 24 | class AcknowledgeEventCommand(NamedTuple): 25 | message: EventMessage 26 | options: dict = {} 27 | 28 | 29 | class CallRpcCommand(NamedTuple): 30 | message: RpcMessage 31 | options: dict = {} 32 | 33 | 34 | class ConsumeRpcsCommand(NamedTuple): 35 | api_names: List[str] 36 | options: dict = {} 37 | 38 | 39 | class ExecuteRpcCommand(NamedTuple): 40 | """An RPC call has been received and must be executed locally""" 41 | 42 | message: RpcMessage 43 | 44 | 45 | class PublishApiSchemaCommand(NamedTuple): 46 | api: Api 47 | 48 | 49 | class CloseCommand(NamedTuple): 50 | pass 51 | 52 | 53 | class SendResultCommand(NamedTuple): 54 | rpc_message: RpcMessage 55 | message: ResultMessage 56 | 57 | 58 | class ReceiveResultCommand(NamedTuple): 59 | message: RpcMessage 60 | destination_queue: InternalQueue 61 | options: dict 62 | 63 | 64 | class ReceiveSchemaUpdateCommand(NamedTuple): 65 | schema: dict 66 | 67 | 68 | class ShutdownCommand(NamedTuple): 69 | exception: Optional[BaseException] 70 | -------------------------------------------------------------------------------- /lightbus_vendored/aioredis/__init__.py: -------------------------------------------------------------------------------- 1 | from .connection import RedisConnection, create_connection 2 | from .commands import ( 3 | Redis, create_redis, 4 | create_redis_pool, 5 | # GeoPoint, GeoMember, 6 | ) 7 | from .pool import ConnectionsPool, create_pool 8 | from .pubsub import Channel 9 | from .sentinel import RedisSentinel, create_sentinel 10 | from .errors import ( 11 | ConnectionClosedError, 12 | ConnectionForcedCloseError, 13 | MasterNotFoundError, 14 | MultiExecError, 15 | PipelineError, 16 | ProtocolError, 17 | ReadOnlyError, 
18 | RedisError, 19 | ReplyError, 20 | MaxClientsError, 21 | AuthError, 22 | ChannelClosedError, 23 | WatchVariableError, 24 | PoolClosedError, 25 | SlaveNotFoundError, 26 | MasterReplyError, 27 | SlaveReplyError, 28 | ) 29 | 30 | 31 | __version__ = '1.3.1' 32 | 33 | __all__ = [ 34 | # Factories 35 | 'create_connection', 36 | 'create_pool', 37 | 'create_redis', 38 | 'create_redis_pool', 39 | 'create_sentinel', 40 | # Classes 41 | 'RedisConnection', 42 | 'ConnectionsPool', 43 | 'Redis', 44 | 'GeoPoint', 45 | 'GeoMember', 46 | 'Channel', 47 | 'RedisSentinel', 48 | # Errors 49 | 'RedisError', 50 | 'ReplyError', 51 | 'MaxClientsError', 52 | 'AuthError', 53 | 'ProtocolError', 54 | 'PipelineError', 55 | 'MultiExecError', 56 | 'WatchVariableError', 57 | 'ConnectionClosedError', 58 | 'ConnectionForcedCloseError', 59 | 'PoolClosedError', 60 | 'ChannelClosedError', 61 | 'MasterNotFoundError', 62 | 'SlaveNotFoundError', 63 | 'ReadOnlyError', 64 | 'MasterReplyError', 65 | 'SlaveReplyError', 66 | ] 67 | -------------------------------------------------------------------------------- /lightbus/commands/dump_config_schema.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import logging 3 | 4 | from lightbus.commands import utilities as command_utilities 5 | from lightbus.config.config import config_as_json_schema 6 | from lightbus.plugins import PluginRegistry 7 | from lightbus.schema.encoder import json_encode 8 | 9 | logger = logging.getLogger(__name__) 10 | 11 | 12 | class Command: 13 | def setup(self, parser, subparsers): 14 | parser_dumpconfigschema = subparsers.add_parser( 15 | "dumpconfigschema", 16 | help=( 17 | "Dumps the lightbus configuration json schema. Can be useful " 18 | "in validating your config. This is not the same as your " 19 | "bus' API schema, for that see the more commonly used 'dumpschema' " 20 | "command" 21 | ), 22 | formatter_class=argparse.ArgumentDefaultsHelpFormatter, 23 | ) 24 | group = parser_dumpconfigschema.add_argument_group( 25 | title="Dump config schema command arguments" 26 | ) 27 | group.add_argument( 28 | "--out", 29 | "-o", 30 | help=( 31 | "File to write config schema to. " 32 | "If omitted the schema will be written to standard out." 33 | ), 34 | metavar="FILE", 35 | ) 36 | parser_dumpconfigschema.set_defaults(func=self.handle) 37 | 38 | def handle(self, args): 39 | schema = json_encode(config_as_json_schema(), indent=2, sort_keys=True) 40 | 41 | if args.out: 42 | with open(args.out, "w", encoding="utf8") as f: 43 | f.write(schema) 44 | else: 45 | print(schema) 46 | -------------------------------------------------------------------------------- /lightbus/client/docks/base.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from lightbus.client.internal_messaging.consumer import InternalConsumer 4 | from lightbus.client.internal_messaging.producer import InternalProducer 5 | from lightbus.api import ApiRegistry 6 | from lightbus.client.utilities import ErrorQueueType 7 | from lightbus.config import Config 8 | from lightbus.transports.registry import TransportRegistry 9 | from lightbus.utilities.internal_queue import InternalQueue 10 | 11 | 12 | class BaseDock: 13 | """The base dock 14 | 15 | A dock is responsible for interfacing a transport with Lightbus' internal 16 | messaging system. 
17 | """ 18 | 19 | def __init__( 20 | self, 21 | transport_registry: TransportRegistry, 22 | api_registry: ApiRegistry, 23 | config: Config, 24 | error_queue: ErrorQueueType, 25 | consume_from: InternalQueue, 26 | produce_to: InternalQueue, 27 | ): 28 | self.transport_registry = transport_registry 29 | self.api_registry = api_registry 30 | self.config = config 31 | self.error_queue = error_queue 32 | self.producer = InternalProducer( 33 | name=self.__class__.__name__, queue=produce_to, error_queue=error_queue 34 | ) 35 | self.consumer = InternalConsumer( 36 | name=self.__class__.__name__, queue=consume_from, error_queue=error_queue 37 | ) 38 | 39 | self.producer.start() 40 | self.consumer.start(self.handle) 41 | 42 | async def handle(self, command): 43 | raise NotImplementedError() 44 | 45 | async def wait_until_ready(self): 46 | await self.producer.wait_until_ready() 47 | await self.consumer.wait_until_ready() 48 | -------------------------------------------------------------------------------- /tests/transports/redis/benchmark_integration_redis.py: -------------------------------------------------------------------------------- 1 | import inspect 2 | import pytest 3 | 4 | import lightbus 5 | import lightbus.path 6 | from lightbus.transports.redis.event import StreamUse 7 | 8 | pytestmark = pytest.mark.integration 9 | 10 | stream_use_test_data = [StreamUse.PER_EVENT, StreamUse.PER_API] 11 | 12 | pytestmark = pytest.mark.benchmark 13 | 14 | 15 | class BenchmarkApi(lightbus.Api): 16 | fire_me = lightbus.Event() 17 | 18 | class Meta: 19 | name = "benchmark" 20 | 21 | def call_me(self): 22 | return True 23 | 24 | 25 | BUS_MODULE_CONTENT = f""" 26 | bus = lightbus.create(plugins=[]) 27 | 28 | {inspect.getsource(BenchmarkApi)} 29 | 30 | bus.client.register_api(BenchmarkApi()) 31 | """ 32 | 33 | 34 | @pytest.fixture() 35 | def run_lightbus(run_lightbus_command, make_test_bus_module): 36 | """Run lightbus in a background process""" 37 | run_lightbus_command( 38 | "run", "--bus", make_test_bus_module(code=BUS_MODULE_CONTENT), env={"LIGHTBUS_MODULE": ""} 39 | ) 40 | 41 | 42 | @pytest.fixture() 43 | def bus(redis_config_file): 44 | """Get a BusPath instance so we can use the bus""" 45 | bus = lightbus.create(config_file=redis_config_file) 46 | yield bus 47 | bus.client.close() 48 | 49 | 50 | @pytest.mark.benchmark(group="network") 51 | def benchmark_call_rpc(run_lightbus, bus, benchmark): 52 | def benchmark_me(): 53 | assert bus.benchmark.call_me() 54 | 55 | benchmark.pedantic(benchmark_me, rounds=20, warmup_rounds=1) 56 | 57 | 58 | @pytest.mark.benchmark(group="network") 59 | def benchmark_fire_event(bus, benchmark): 60 | bus.client.register_api(BenchmarkApi()) 61 | benchmark.pedantic(bus.benchmark.fire_me.fire, rounds=20, warmup_rounds=1) 62 | -------------------------------------------------------------------------------- /docs/explanation/services.md: -------------------------------------------------------------------------------- 1 | # Services 2 | 3 | A *service* is one or more processes handling a common task. 4 | These processes operate as a tightly-coupled whole. 5 | 6 | All processes in a service will generally: 7 | 8 | * Share the same API class definitions 9 | * Moreover, they will normally share the same codebase 10 | * Create a single instance of the bus client in `bus.py` using 11 | `bus = lightbus.create()`. 
12 | 13 | --- 14 | 15 | For example, your company may have the following: 16 | 17 | * An online store 18 | * A price monitoring script 19 | * An image resizing resizing process 20 | 21 | Each of these would be a service. 22 | 23 | The store service would have a web process and a 24 | Lightbus process. The image resizing service & and price monitoring services 25 | would each likely have a Lightbus process only. 26 | 27 | A simple lightbus deployment could therefore look something like this: 28 | 29 | ![A simple Lightbus deployment][simple-processes] 30 | 31 | [simple-processes]: /static/images/simple-processes.png 32 | 33 | In this example the following actions would take place: 34 | 35 | * Django reads from the web service database in order to serve web content 36 | * The online shop's Lightbus process receives pricing events from the 37 | price monitoring service. It updates products in the database using 38 | this new pricing data. 39 | * When the Django app receives an image upload, it performs a RPC to the 40 | image resizing service to resize the image[^1]. 41 | 42 | 43 | 44 | [^1]: Making the Django process wait for an RPC to respond is 45 | probably a bad idea in this case, but it illustrates how it 46 | *could* be done. Using an event (which is fire-and-forget) 47 | could be more suitable in reality. 48 | -------------------------------------------------------------------------------- /tests/test_creaton.py: -------------------------------------------------------------------------------- 1 | import inspect 2 | 3 | import pytest 4 | 5 | from lightbus import import_bus_module, BusPath 6 | from lightbus.creation import get_bus 7 | from lightbus.exceptions import FailedToImportBusModule 8 | 9 | pytestmark = pytest.mark.unit 10 | 11 | 12 | # import_bus_module() 13 | 14 | 15 | def test_import_bus_module_ok(make_test_bus_module): 16 | bus_module_name = make_test_bus_module() 17 | bus_module = import_bus_module(bus_module_name) 18 | 19 | assert inspect.ismodule(bus_module) 20 | assert isinstance(bus_module.bus, BusPath) 21 | 22 | 23 | def test_import_bus_module_does_not_exist(): 24 | bus_module_name = "does_not_exist" 25 | with pytest.raises(FailedToImportBusModule) as e: 26 | import_bus_module(bus_module_name) 27 | 28 | assert "failed to import" in str(e.value).lower() 29 | 30 | 31 | def test_import_bus_module_does_not_contain_bus(make_test_bus_module): 32 | bus_module_name = make_test_bus_module(code="") 33 | with pytest.raises(FailedToImportBusModule) as e: 34 | import_bus_module(bus_module_name) 35 | 36 | assert "attribute" in str(e.value).lower() 37 | 38 | 39 | def test_import_bus_module_contains_bus_but_wrong_type(make_test_bus_module): 40 | bus_module_name = make_test_bus_module(code="bus = 123") 41 | with pytest.raises(FailedToImportBusModule) as e: 42 | import_bus_module(bus_module_name) 43 | 44 | assert "invalid value" in str(e.value).lower() 45 | assert "int" in str(e.value).lower() 46 | 47 | 48 | # get_bus() - a wrapper around import_bus_module() 49 | 50 | 51 | def test_get_bus(make_test_bus_module): 52 | bus_module_name = make_test_bus_module() 53 | bus = get_bus(bus_module_name) 54 | assert isinstance(bus, BusPath) 55 | -------------------------------------------------------------------------------- /docs/howto/metrics.md: -------------------------------------------------------------------------------- 1 | # How to use Lightbus for metrics 2 | 3 | When we talk about metrics we may mean all or any of the following: 4 | 5 | * Current information is most important 6 | * 
Previous events will become irrelevant as soon as new data is received 7 | * Lost events are therefore tolerable, as long as we keep up with new events 8 | * Events may be high volume, so optimisations may be needed 9 | 10 | !!! note 11 | 12 | Your needs may not precisely match this scenario, so be prepared to tweak the following configuration to your needs. 13 | 14 | For the above metrics-based scenario, a sample Lightbus [configuration](../reference/configuration.md) may look like 15 | something like this: 16 | 17 | ```yaml 18 | # Lightbus config for metrics 19 | 20 | bus: 21 | schema: 22 | transport: 23 | redis: 24 | url: "redis://redis_host:6379/0" 25 | 26 | apis: 27 | 28 | # Here we specify the default for your entire bus, but you could 29 | # also specify the config for a specific API by using the API's name 30 | # instead of 'default'. 31 | default: 32 | 33 | # Disable validation to enhance performance 34 | validate: 35 | outgoing: false 36 | incoming: false 37 | 38 | # Assume we will be transmitting simple types, so we can bypass casting for performance 39 | cast_values: false 40 | 41 | event_transport: 42 | redis: 43 | url: 'redis://redis_host:6379/0' 44 | 45 | # Load in many events at once for performance improvements 46 | batch_size: 100 47 | 48 | # No need to keep many historical events around 49 | max_stream_length: 10000 50 | 51 | # Per-event streams, to allow selective consumption of metrics 52 | stream_use: "per_event" 53 | ``` 54 | -------------------------------------------------------------------------------- /lightbus/serializers/blob.py: -------------------------------------------------------------------------------- 1 | """ Serializers suitable for transports which require a single serialised value 2 | 3 | These serializers handle moving data to/from a string-based format. 4 | 5 | """ 6 | from typing import Union, TYPE_CHECKING 7 | 8 | from lightbus.serializers.base import ( 9 | decode_bytes, 10 | sanity_check_metadata, 11 | MessageSerializer, 12 | MessageDeserializer, 13 | ) 14 | 15 | if TYPE_CHECKING: 16 | # pylint: disable=unused-import,cyclic-import 17 | from lightbus import Message 18 | 19 | 20 | class BlobMessageSerializer(MessageSerializer): 21 | def __call__(self, message: "Message") -> str: 22 | """Takes a message object and returns a serialised string representation""" 23 | return self.encoder({"metadata": message.get_metadata(), "kwargs": message.get_kwargs()}) 24 | 25 | 26 | class BlobMessageDeserializer(MessageDeserializer): 27 | def __call__(self, serialized: Union[str, dict], *, native_id=None, **extra): 28 | """ Takes a serialised string representation and returns a Message object 29 | 30 | Reverse of BlobMessageSerializer 31 | """ 32 | # Allow for receiving dicts on the assumption that this will be 33 | # json which has already been decoded. 
34 | if isinstance(serialized, dict): 35 | decoded = serialized 36 | else: 37 | serialized = decode_bytes(serialized) 38 | decoded = self.decoder(serialized) 39 | 40 | metadata = decoded.get("metadata", {}) 41 | kwargs = decoded.get("kwargs", {}) 42 | 43 | sanity_check_metadata(self.message_class, metadata) 44 | 45 | return self.message_class.from_dict( 46 | metadata=metadata, kwargs=kwargs, native_id=native_id, **extra 47 | ) 48 | -------------------------------------------------------------------------------- /lightbus_experiments/kombu_zmq/producer.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import os 3 | import timeit 4 | from random import randint 5 | from socket import gethostbyname, gethostname 6 | 7 | import zmq 8 | from kombu import Connection 9 | from timer import Timer 10 | 11 | TOTAL_MESSAGES = int(sys.argv[1]) 12 | 13 | context = zmq.Context() 14 | socket = context.socket(zmq.PULL) 15 | PORT = socket.bind_to_random_port("tcp://*") 16 | HOST = gethostbyname(gethostname()) 17 | 18 | 19 | amqp_timer = Timer() 20 | zmq_timer = Timer() 21 | 22 | 23 | def log(msg): 24 | pass 25 | # print(msg) 26 | 27 | 28 | log("Listening for results on {}:{}".format(HOST, PORT)) 29 | 30 | with Connection("amqp://guest:guest@localhost:5672//") as conn: 31 | simple_queue = conn.SimpleQueue("simple_queue") 32 | simple_queue.put("kick-off!") 33 | 34 | def put(): 35 | message_id = str(randint(100, 999)) 36 | with amqp_timer: 37 | simple_queue.put( 38 | b"x" * 1024, 39 | headers={"reply-to": "tcp://{}:{}".format(HOST, PORT), "id": message_id}, 40 | ) 41 | 42 | log("Message {} has been put. Waiting for response...".format(message_id)) 43 | with zmq_timer: 44 | content = socket.recv() 45 | 46 | # Make sure we got the response for the message we sent 47 | assert message_id == str(content, "utf8").split(" ", 1)[0] 48 | 49 | log("Response received") 50 | 51 | seconds = timeit.timeit(put, number=TOTAL_MESSAGES) 52 | print("Time per put: {}ms".format(round(seconds * 1000 / TOTAL_MESSAGES, 2))) 53 | print("Puts per second: {}".format(round(TOTAL_MESSAGES / seconds, 2))) 54 | print("ZeroMQ time: {}".format(zmq_timer)) 55 | print("AMQP time: {}".format(amqp_timer)) 56 | simple_queue.close() 57 | -------------------------------------------------------------------------------- /lightbus/commands/dump_schema.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import logging 3 | 4 | import sys 5 | from pathlib import Path 6 | 7 | from lightbus.commands import utilities as command_utilities 8 | from lightbus.plugins import PluginRegistry 9 | from lightbus.utilities.async_tools import block 10 | 11 | logger = logging.getLogger(__name__) 12 | 13 | 14 | class Command: 15 | def setup(self, parser, subparsers): 16 | parser_dumpschema = subparsers.add_parser( 17 | "dumpschema", 18 | help="Dumps all currently present bus schemas to a file or directory", 19 | formatter_class=argparse.ArgumentDefaultsHelpFormatter, 20 | ) 21 | group = parser_dumpschema.add_argument_group(title="Dump config schema command arguments") 22 | group.add_argument( 23 | "--out", 24 | "-o", 25 | help=( 26 | "File or directory to write schema to. If a directory is " 27 | "specified one schema file will be created for each API. " 28 | "If omitted the schema will be written to standard out." 
29 | ), 30 | metavar="FILE_OR_DIRECTORY", 31 | ) 32 | command_utilities.setup_common_arguments(parser_dumpschema) 33 | parser_dumpschema.set_defaults(func=self.handle) 34 | 35 | def handle(self, args, config, plugin_registry: PluginRegistry): 36 | command_utilities.setup_logging(args.log_level or "warning", config) 37 | 38 | bus_module, bus = command_utilities.import_bus(args) 39 | block(bus.client.lazy_load_now()) 40 | bus.schema.save_local(args.out) 41 | 42 | if args.out: 43 | sys.stderr.write( 44 | "Schema for {} APIs saved to {}\n".format( 45 | len(bus.schema.api_names), Path(args.out).resolve() 46 | ) 47 | ) 48 | -------------------------------------------------------------------------------- /docs/explanation/lightbus-vs-celery.md: -------------------------------------------------------------------------------- 1 | # Lightbus vs Celery 2 | 3 | Lightbus was conceived as a result of using Celery to communicate 4 | between multiple Python services. 5 | 6 | ## Differences in principle 7 | 8 | Celery is a task queue: 9 | 10 | * A task queue is tightly coupled. The dispatching code must know what needs to be done 11 | * A task queue typically doesn't return results 12 | 13 | Lightbus is a bus: 14 | 15 | * A bus provides loose coupling. The dispatching code says what did happen, not what should happen (events) 16 | * A bus provides bi-directional communication (remote procedure calls) 17 | 18 | ## Differences in practice 19 | 20 | A number of pain points were identified with Celery that Lightbus 21 | aims to address. In particular: 22 | 23 | * Single vs multi-[service] – Celery is designed as a task queue for a single service, rather than a means for multiple services to interact. 24 | * Conceptual overlap – The mapping between concepts in Celery and the underlying broker (AMQP at the time) is both unclear and overlapping. 25 | Lightbus provides a limited set of well-defined concepts to avoid this confusion. 26 | * Non-sane defaults – Some Celery settings have non-sane defaults, making setup somewhat perilous at times. 27 | Lightbus provides sane defaults for most circumstances, and documentation specifically geared to certain use cases ([metrics], [event sourcing]) 28 | * Tight coupling (as discussed above) – Celery tasks define the action to take, not what happened. Lightbus uses events, 29 | which describe what happened, and listening services decide the action to take. 30 | * General feeling – Celery feels large and opaque, and debugging issues was challenging. Lightbus aims to feel lightweight, with clear 31 | logging and debugging tools.
32 | 33 | [service]: ../explanation/services.md 34 | [metrics]: ../howto/metrics.md 35 | [event sourcing]: ../howto/event-sourcing.md 36 | -------------------------------------------------------------------------------- /tests/client/test_utilities_unit.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import traceback 3 | 4 | import pytest 5 | 6 | from lightbus.client.utilities import queue_exception_checker, Error, ErrorQueueType 7 | from lightbus.utilities.internal_queue import InternalQueue 8 | 9 | pytestmark = pytest.mark.unit 10 | 11 | 12 | class ExampleException(Exception): 13 | pass 14 | 15 | 16 | @pytest.fixture 17 | def erroring_coroutine(): 18 | async def erroring_coroutine_(): 19 | raise ExampleException 20 | 21 | return erroring_coroutine_ 22 | 23 | 24 | @pytest.fixture 25 | def ok_coroutine(): 26 | async def ok_coroutine_(): 27 | pass 28 | 29 | return ok_coroutine_ 30 | 31 | 32 | @pytest.fixture 33 | def error_queue(): 34 | return InternalQueue() 35 | 36 | 37 | @pytest.mark.asyncio 38 | async def test_queue_exception_checker_directly(erroring_coroutine, error_queue: ErrorQueueType): 39 | coroutine = queue_exception_checker(erroring_coroutine(), error_queue) 40 | 41 | with pytest.raises(ExampleException): 42 | await coroutine 43 | 44 | assert error_queue.qsize() == 1 45 | error: Error = error_queue.get_nowait() 46 | 47 | assert error.type == ExampleException 48 | assert isinstance(error.value, ExampleException) 49 | assert "test_utilities_unit.py" in str(error) 50 | assert "ExampleException" in str(error) 51 | 52 | 53 | def test_queue_exception_checker_in_task(erroring_coroutine, error_queue: ErrorQueueType): 54 | coroutine = queue_exception_checker(erroring_coroutine(), error_queue) 55 | 56 | with pytest.raises(ExampleException): 57 | asyncio.run(coroutine) 58 | 59 | assert error_queue.qsize() == 1 60 | error: Error = error_queue.get_nowait() 61 | 62 | assert error.type == ExampleException 63 | assert isinstance(error.value, ExampleException) 64 | assert "test_utilities_unit.py" in str(error) 65 | assert "ExampleException" in str(error) 66 | -------------------------------------------------------------------------------- /docs/howto/access-your-bus-client.md: -------------------------------------------------------------------------------- 1 | # How to access your bus client 2 | 3 | You create your bus client in your bus module 4 | (typically called `bus.py`) with the line: 5 | 6 | ```python3 7 | # Creating your bus client in your bus.py file 8 | import lightbus 9 | 10 | bus = lightbus.create() 11 | ``` 12 | 13 | However, you will often need to make use of your bus client 14 | in other areas of your codebase. For example, you may need to 15 | fire an event when a web form is submitted. 16 | 17 | You can access the bus client in two ways. 18 | 19 | ## Method 1: Direct import (recommended) 20 | 21 | The first approach is to import your bus client directly from 22 | your bus module, in the same way you would import anything else 23 | in your codebase: 24 | 25 | ```python3 26 | # For example 27 | from bus import bus 28 | 29 | # ...or if your service has its own package 30 | from my_service.bus import bus 31 | ``` 32 | 33 | You should use this approach in code which is specific to your 34 | service (i.e. non-shared/non-library code). This approach is more 35 | explicit (good), but hard codes the path to your bus module (bad for shared code). 
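For example, a minimal sketch of firing an event from service-specific code using the directly imported bus client (the `auth` API and its `user_registered` event here are hypothetical placeholders, so substitute your own API):

```python3
# views.py (service-specific code, so importing the bus module directly is fine)
from bus import bus


def register_user(username, email):
    # ... create the user in your own database here ...

    # Fire an event on the bus using the directly imported client
    bus.auth.user_registered.fire(username=username, email=email)
```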
36 | 37 | ## Method 2: `get_bus()` 38 | 39 | The second approach uses the `lightbus.get_bus()` function. This will 40 | use the [module loading configuration] to determine the bus module location. 41 | If the bus module has already been imported, then the module's 42 | `bus` attribute will simply be returned. Otherwise the bus module will be 43 | imported first. 44 | 45 | ```python3 46 | # Anywhere in your codebase 47 | import lightbus 48 | 49 | bus = lightbus.get_bus() 50 | ``` 51 | 52 | This approach is best suited to situations where you do not know where your bus module 53 | will be at runtime. This could be the case when: 54 | 55 | * Writing shared code which is used by multiple services 56 | * Writing a third party library 57 | 58 | 59 | [module loading configuration]: ../reference/configuration.md#1-module-loading 60 | -------------------------------------------------------------------------------- /docs/explanation/rpcs.md: -------------------------------------------------------------------------------- 1 | # Remote Procedure Calls (RPCs) 2 | 3 | A remote procedure call is where you call a procedure available on the bus. 4 | The sequence of events is: 5 | 6 | * You call the RPC, `bus.auth.check_password()` 7 | * An authoritative process for the `auth` API handles the request and sends the response. 8 | * You receive the result 9 | 10 | --- 11 | 12 | Remote Procedure Calls are useful when: 13 | 14 | 1. You require information from a service [^1] 15 | 2. You wish to wait until a remote procedure has completed an action 16 | 17 | You can perform an RPC as follows: 18 | 19 | ```python3 20 | support_case = bus.support.case.get(pk=123) 21 | ``` 22 | 23 | RPCs do not provide a *fire and forget* mode of operation. 24 | Consider using [events] if you need this feature. 25 | 26 | ## At most once semantics 27 | 28 | Remote procedure calls will be processed at most once. In some situations the call will 29 | never be processed, in which case the client will raise a `LightbusTimeout` exception. 30 | 31 | ## Considerations 32 | 33 | Whether to use RPCs or events for communication will depend upon your project's particular needs. 34 | Some considerations are: 35 | 36 | * RPCs are **conceptually simple**. You call a procedure and wait for a response. You do not need to 37 | store any state locally; you can simply request data on demand (performance considerations aside). 38 | * RPCs can be **fragile**. Any errors in the remote service will propagate to the client's service. 39 | You should handle these if possible. 40 | * Their use within a codebase may be non-obvious, leading to poor performance. 41 | Lightbus tries to alleviate this somewhat by using the 42 | `bus.api.method()` calling format, making it clear that this is a 43 | bus-based operation. 44 | 45 | [^1]: This is also achievable with [events]. However you will need to listen 46 | for the events and likely store the data locally. See the [events] 47 | section for further discussion.
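Because a call may time out rather than be processed, calling code should be prepared to handle this. A minimal sketch, assuming `LightbusTimeout` can be imported from `lightbus.exceptions`, and reusing the `support.case` example above:

```python3
from lightbus.exceptions import LightbusTimeout

try:
    support_case = bus.support.case.get(pk=123)
except LightbusTimeout:
    # The remote service did not respond in time; the call may never have
    # been processed. Decide whether to retry, fall back, or surface an error.
    support_case = None
```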
48 | 49 | 50 | [events]: events.md 51 | -------------------------------------------------------------------------------- /tests/serializers/test_blob.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | import pytest 4 | 5 | from lightbus.message import EventMessage 6 | from lightbus.serializers.blob import BlobMessageSerializer, BlobMessageDeserializer 7 | 8 | pytestmark = pytest.mark.unit 9 | 10 | 11 | def test_blob_serializer(): 12 | serializer = BlobMessageSerializer() 13 | serialized = serializer( 14 | EventMessage(api_name="my.api", event_name="my_event", id="123", kwargs={"field": "value"}) 15 | ) 16 | assert json.loads(serialized) == { 17 | "metadata": {"api_name": "my.api", "event_name": "my_event", "id": "123", "version": 1}, 18 | "kwargs": {"field": "value"}, 19 | } 20 | 21 | 22 | def test_blob_deserializer(): 23 | deserializer = BlobMessageDeserializer(EventMessage) 24 | message = deserializer( 25 | json.dumps( 26 | { 27 | "metadata": { 28 | "api_name": "my.api", 29 | "event_name": "my_event", 30 | "id": "123", 31 | "version": 1, 32 | }, 33 | "kwargs": {"field": "value"}, 34 | } 35 | ), 36 | native_id="456", 37 | ) 38 | assert message.api_name == "my.api" 39 | assert message.event_name == "my_event" 40 | assert message.id == "123" 41 | assert message.kwargs == {"field": "value"} 42 | assert message.native_id == "456" 43 | 44 | 45 | def test_blob_deserializer_dict(): 46 | deserializer = BlobMessageDeserializer(EventMessage) 47 | message = deserializer( 48 | { 49 | "metadata": {"api_name": "my.api", "event_name": "my_event", "id": "123", "version": 1}, 50 | "kwargs": {"field": "value"}, 51 | }, 52 | native_id="456", 53 | ) 54 | assert message.api_name == "my.api" 55 | assert message.event_name == "my_event" 56 | assert message.id == "123" 57 | assert message.kwargs == {"field": "value"} 58 | assert message.native_id == "456" 59 | -------------------------------------------------------------------------------- /tests/serializers/test_by_field.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from lightbus.message import EventMessage 4 | from lightbus.serializers.by_field import ByFieldMessageSerializer, ByFieldMessageDeserializer 5 | 6 | pytestmark = pytest.mark.unit 7 | 8 | 9 | def test_by_field_serializer(): 10 | serializer = ByFieldMessageSerializer() 11 | serialized = serializer( 12 | EventMessage( 13 | api_name="my.api", event_name="my_event", kwargs={"field": "value"}, id="123", version=2 14 | ) 15 | ) 16 | assert serialized == { 17 | "api_name": "my.api", 18 | "event_name": "my_event", 19 | ":field": '"value"', 20 | "id": "123", 21 | "version": 2, 22 | } 23 | 24 | 25 | def test_by_field_deserializer(): 26 | deserializer = ByFieldMessageDeserializer(EventMessage) 27 | message = deserializer( 28 | { 29 | "api_name": "my.api", 30 | "event_name": "my_event", 31 | "id": "123", 32 | "version": "2", 33 | ":field": '"value"', 34 | }, 35 | native_id="456", 36 | ) 37 | assert message.api_name == "my.api" 38 | assert message.event_name == "my_event" 39 | assert message.id == "123" 40 | assert message.kwargs == {"field": "value"} 41 | assert message.version == 2 42 | assert message.native_id == "456" 43 | 44 | 45 | def test_by_field_deserializer_empty_keys_and_values(): 46 | deserializer = ByFieldMessageDeserializer(EventMessage) 47 | message = deserializer( 48 | { 49 | "api_name": "my.api", 50 | "event_name": "my_event", 51 | "id": "123", 52 | "version": "2", 53 | 
":field": '"value"', 54 | "": "", 55 | }, 56 | native_id="456", 57 | ) 58 | assert message.api_name == "my.api" 59 | assert message.event_name == "my_event" 60 | assert message.id == "123" 61 | assert message.kwargs == {"field": "value"} 62 | assert message.version == 2 63 | -------------------------------------------------------------------------------- /lightbus_experiments/custom_loop.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | 4 | 5 | # Logging setup 6 | import threading 7 | import time 8 | from concurrent.futures import ThreadPoolExecutor 9 | 10 | 11 | class AsyncioLoggingFilter(logging.Filter): 12 | def filter(self, record): 13 | task = asyncio.Task.current_task() 14 | 15 | record.task = f'[task {id(task)}]' if task else '[NOLOOP ]' 16 | return True 17 | 18 | 19 | logger = logging.getLogger(__name__) 20 | logger.addFilter(AsyncioLoggingFilter()) 21 | logging.getLogger('asyncio').setLevel(logging.CRITICAL) 22 | 23 | 24 | logging.basicConfig(level=logging.INFO, format="%(msecs)f %(threadName)s %(task)s %(msg)s") 25 | 26 | 27 | class ThreadSerializedTask(asyncio.Task): 28 | _lock = threading.Lock() 29 | 30 | def _wakeup(self, *args, **kwargs): 31 | logger.debug("Acquire lock") 32 | ThreadSerializedTask._lock.acquire() 33 | 34 | super()._wakeup(*args, **kwargs) 35 | 36 | logger.debug("Releasing lock") 37 | ThreadSerializedTask._lock.release() 38 | 39 | 40 | def task_factory(loop, coro): 41 | return ThreadSerializedTask(coro, loop=loop) 42 | 43 | 44 | async def one(): 45 | await asyncio.sleep(0.01) 46 | logger.debug("-> One") 47 | await two() 48 | await asyncio.sleep(0.01) 49 | logger.debug("-> Exiting one") 50 | 51 | 52 | async def two(): 53 | await asyncio.sleep(0.01) 54 | 55 | logger.info("--> Should not be interleaved with other threads") 56 | time.sleep(0.01) 57 | logger.info("--> Should not be interleaved with other threads") 58 | time.sleep(0.01) 59 | logger.info("--> Should not be interleaved with other threads") 60 | 61 | 62 | def run_loop(): 63 | loop = asyncio.new_event_loop() 64 | loop.set_task_factory(task_factory) 65 | loop.run_until_complete(one()) 66 | 67 | 68 | if __name__ == '__main__': 69 | 70 | threads = [] 71 | for _ in range(0, 5): 72 | thread = threading.Thread(target=run_loop) 73 | thread.start() 74 | threads.append(thread) 75 | 76 | [t.join() for t in threads] 77 | -------------------------------------------------------------------------------- /lightbus/commands/shell.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import logging 3 | import sys 4 | from inspect import isclass 5 | 6 | import lightbus 7 | from lightbus.commands import utilities as command_utilities 8 | from lightbus.plugins import PluginRegistry 9 | from lightbus.utilities.async_tools import block 10 | 11 | logger = logging.getLogger(__name__) 12 | 13 | 14 | class Command: 15 | def setup(self, parser, subparsers): 16 | parser_shell = subparsers.add_parser( 17 | "shell", 18 | help="Provide an interactive Lightbus shell", 19 | formatter_class=argparse.ArgumentDefaultsHelpFormatter, 20 | ) 21 | command_utilities.setup_common_arguments(parser_shell) 22 | parser_shell.set_defaults(func=self.handle) 23 | 24 | def handle(self, args, config, plugin_registry: PluginRegistry, fake_it=False): 25 | command_utilities.setup_logging(args.log_level or "warning", config) 26 | 27 | try: 28 | # pylint: disable=unused-import,cyclic-import,import-outside-toplevel 29 | import 
bpython 30 | from bpython.curtsies import main as bpython_main 31 | except ImportError: # pragma: no cover 32 | print("Lightbus shell requires bpython. Run `pip install bpython` to install bpython.") 33 | sys.exit(1) 34 | return # noqa 35 | 36 | lightbus_logger = logging.getLogger("lightbus") 37 | lightbus_logger.setLevel(logging.WARNING) 38 | 39 | bus_module, bus = command_utilities.import_bus(args) 40 | block(bus.client.lazy_load_now()) 41 | 42 | objects = {k: v for k, v in lightbus.__dict__.items() if isclass(v)} 43 | objects.update(bus=bus) 44 | 45 | block(plugin_registry.execute_hook("receive_args", args=args), timeout=5) 46 | 47 | # Ability to not start up the repl is useful for testing 48 | if not fake_it: 49 | bpython_main( 50 | args=["-i", "-q"], 51 | locals_=objects, 52 | welcome_message="Welcome to the Lightbus shell. Use `bus` to access your bus.", 53 | ) 54 | -------------------------------------------------------------------------------- /tests/transports/redis/test_unit_redis_utils.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timezone 2 | 3 | import pytest 4 | 5 | from lightbus.transports.redis.utilities import ( 6 | redis_stream_id_subtract_one, 7 | redis_steam_id_to_datetime, 8 | datetime_to_redis_steam_id, 9 | redis_stream_id_add_one, 10 | ) 11 | 12 | pytestmark = pytest.mark.unit 13 | 14 | 15 | def test_redis_stream_id_subtract_one(): 16 | assert redis_stream_id_subtract_one("1514028809812-0") == "1514028809811-9999" 17 | assert redis_stream_id_subtract_one("1514028809812-10") == "1514028809812-9" 18 | assert redis_stream_id_subtract_one("0000000000000-0") == "0000000000000-0" 19 | 20 | 21 | def test_redis_stream_id_add_one(): 22 | assert redis_stream_id_add_one("1514028809812-0") == "1514028809812-1" 23 | assert redis_stream_id_add_one("1514028809812-10") == "1514028809812-11" 24 | assert redis_stream_id_add_one("0000000000000-0") == "0000000000000-1" 25 | 26 | 27 | def test_redis_steam_id_to_datetime(): 28 | assert redis_steam_id_to_datetime("0000000000000-0") == datetime( 29 | 1970, 1, 1, 0, 0, tzinfo=timezone.utc 30 | ) 31 | assert redis_steam_id_to_datetime("0000000000000-1") == datetime( 32 | 1970, 1, 1, 0, 0, 0, 1, tzinfo=timezone.utc 33 | ) 34 | assert redis_steam_id_to_datetime("1514028809812-10") == datetime( 35 | 2017, 12, 23, 11, 33, 29, 812_010, tzinfo=timezone.utc 36 | ) 37 | assert redis_steam_id_to_datetime(b"0000000000000-0") == datetime( 38 | 1970, 1, 1, 0, 0, tzinfo=timezone.utc 39 | ) 40 | 41 | 42 | def test_datetime_to_redis_steam_id(): 43 | assert ( 44 | datetime_to_redis_steam_id(datetime(1970, 1, 1, 0, 0, tzinfo=timezone.utc)) 45 | == "0000000000000-0" 46 | ) 47 | # Microseconds don't get added on as the sequence number 48 | assert ( 49 | datetime_to_redis_steam_id(datetime(1970, 1, 1, 0, 0, 0, 99, tzinfo=timezone.utc)) 50 | == "0000000000000-0" 51 | ) 52 | assert ( 53 | datetime_to_redis_steam_id(datetime(2017, 12, 23, 11, 33, 29, 812_010, tzinfo=timezone.utc)) 54 | == "1514028809812-0" 55 | ) 56 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # What is Lightbus? 
2 | 3 | [![Codacy Badge](https://api.codacy.com/project/badge/Coverage/f5e5fd4eeb57462b80e2a99e957b7baa)](https://app.codacy.com/gh/adamcharnock/lightbus/dashboard) 4 | [![Contributor Covenant](https://img.shields.io/badge/Contributor%20Covenant-v2.0%20adopted-ff69b4.svg)](https://lightbus.org/reference/code-of-conduct/) 5 | 6 | Lightbus allows your backend processes to communicate, run background 7 | tasks, and expose internal APIs. 8 | 9 | Lightbus uses Redis as its underlying transport, although support for 10 | other platforms may eventually be added. 11 | 12 | Lightbus requires Python 3.9 or above. 13 | 14 | **Full documentation can be found at https://lightbus.org** 15 | 16 | **[Adam Charnock](https://adamcharnock.com) is available for freelance/contract work.** 17 | 18 | ## Designed for ease of use 19 | 20 | Lightbus is designed with developers in mind. The syntax aims to be 21 | intuitive and familiar, and common problems are caught with clear and 22 | helpful error messages. 23 | 24 | For example, a naïve authentication API: 25 | 26 | ``` python3 27 | class AuthApi(Api): 28 | user_registered = Event(parameters=('username', 'email')) 29 | 30 | class Meta: 31 | name = 'auth' 32 | 33 | def check_password(self, user, password): 34 | return ( 35 | user == 'admin' 36 | and password == 'secret' 37 | ) 38 | ``` 39 | 40 | This can be called as follows: 41 | 42 | ``` python3 43 | import lightbus 44 | 45 | bus = lightbus.create() 46 | 47 | bus.auth.check_password( 48 | user='admin', 49 | password='secret' 50 | ) 51 | # Returns true 52 | ``` 53 | 54 | You can also listen for events: 55 | 56 | ``` python3 57 | import lightbus 58 | 59 | bus = lightbus.create() 60 | 61 | def send_signup_email(event_message, 62 | username, email): 63 | send_mail(email, 64 | subject=f'Welcome {username}' 65 | ) 66 | 67 | @bus.client.on_start() 68 | def bus_start(client): 69 | bus.auth.user_registered.listen( 70 | send_signup_email 71 | ) 72 | ``` 73 | 74 | **To get started checkout the documentation at https://lightbus.org.** 75 | -------------------------------------------------------------------------------- /docs/howto/schedule-recurring-tasks.md: -------------------------------------------------------------------------------- 1 | # How to schedule recurring tasks 2 | 3 | Recurring tasks can be scheduled in two ways: 4 | 5 | * The `@bus.client.every()` decorator – Will execute a function or coroutine at a given interval 6 | * The `@bus.client.schedule()` decorator – Similar to `every()`, but takes complex schedules as provided by the [schedule] library. 7 | 8 | ## Simple recurring tasks using `@bus.client.every()` 9 | 10 | Lightbus natively supports simple recurring tasks using the `@bus.client.every()` decorator: 11 | 12 | ```python3 13 | # bus.py 14 | import lightbus 15 | 16 | bus = lightbus.create() 17 | 18 | @bus.client.every(seconds=1) 19 | def do_it(): 20 | print("Hello!") 21 | 22 | ``` 23 | 24 | The interval can be specified using the `seconds`, `minutes`, `hours`, and `days` keys. 25 | Pass `also_run_immediately=True` to execute the function/coroutine immediately, as well as 26 | at the given interval. 27 | 28 | ## Complex schedules using `@bus.client.schedule()` 29 | 30 | Lightbus also supports using schedules specified using 31 | the [schedule] library. This allows for schedules 32 | such as 'every Monday at 1am', rather than simple intervals. 
33 | For example: 34 | 35 | ```python3 36 | import lightbus 37 | import schedule 38 | 39 | bus = lightbus.create() 40 | 41 | # Run the task every 1-3 seconds, varying randomly 42 | @bus.client.schedule(schedule.every(1).to(3).seconds) 43 | def do_it(): 44 | print("Hello using schedule library") 45 | 46 | ``` 47 | 48 | ## Long running tasks 49 | 50 | If your tasks are long running you may prefer to handle these in a separate 51 | Lightbus process. This will avoid blocking the processing of incoming events and RPCs. 52 | 53 | For example, the default RPC timeout is 5 seconds. Any task which runs for longer than 54 | this has the possibility of causing incoming RPCs to timeout. 55 | 56 | You can move your task processing to a separate process as follows: 57 | 58 | ``` 59 | # Process 1: Handles scheduled tasks only 60 | lightbus run --only tasks 61 | 62 | # Process 2: Handles everything else (events and rpcs) 63 | lightbus run --skip tasks 64 | ``` 65 | 66 | [schedule]: https://github.com/dbader/schedule 67 | -------------------------------------------------------------------------------- /lightbus/utilities/config.py: -------------------------------------------------------------------------------- 1 | import inspect 2 | import logging 3 | import secrets 4 | import string 5 | 6 | from typing import Type, NamedTuple # pylint: disable=unused-import,cyclic-import 7 | 8 | import itertools 9 | 10 | logger = logging.getLogger(__name__) 11 | 12 | 13 | def make_from_config_structure(class_name, from_config_method, extra_parameters=tuple()) -> Type: 14 | """ 15 | Create a new named tuple based on the method signature of from_config_method. 16 | 17 | This is useful when dynamically creating the config structure for Transports 18 | and Plugins. 19 | """ 20 | # pylint: disable=exec-used 21 | 22 | code = f"class {class_name}Config(NamedTuple):\n pass\n" 23 | variables = dict(p={}) 24 | 25 | parameters = inspect.signature(from_config_method).parameters.values() 26 | for parameter in itertools.chain(parameters, extra_parameters): 27 | if parameter.name == "config": 28 | # The config parameter is always passed to from_config() in order to 29 | # give it access to the global configuration (useful for setting 30 | # sensible defaults) 31 | continue 32 | 33 | if parameter.kind in (parameter.POSITIONAL_ONLY, parameter.VAR_POSITIONAL): 34 | logger.warning( 35 | f"Positional-only arguments are not supported in from_config() on class {class_name}" 36 | ) 37 | elif parameter.kind in (parameter.VAR_KEYWORD,): 38 | logger.warning( 39 | f"**kwargs-style parameters are not supported in from_config() on class {class_name}" 40 | ) 41 | else: 42 | name = parameter.name 43 | variables["p"][name] = parameter 44 | code += f" {name}: p['{name}'].annotation = p['{name}'].default\n" 45 | 46 | globals_ = globals().copy() 47 | globals_.update(variables) 48 | exec(code, globals_) # nosec 49 | return globals_[f"{class_name}Config"] 50 | 51 | 52 | def random_name(length: int) -> str: 53 | """Get a random string suitable for a processes/consumer name""" 54 | return "".join(secrets.choice(string.ascii_lowercase) for _ in range(length)) 55 | -------------------------------------------------------------------------------- /lightbus/hooks.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from collections import defaultdict 3 | from typing import Dict, NamedTuple, List, Callable, Optional 4 | 5 | from lightbus.client.utilities import queue_exception_checker, ErrorQueueType 6 | from 
lightbus.utilities.async_tools import run_user_provided_callable 7 | 8 | 9 | class CallbackKey(NamedTuple): 10 | name: str 11 | run_before_plugins: bool 12 | 13 | 14 | class HookRegistry: 15 | def __init__( 16 | self, 17 | error_queue: ErrorQueueType, 18 | execute_plugin_hooks: Callable, 19 | extra_parameters: Optional[dict] = None, 20 | ): 21 | self._hook_callbacks: Dict[CallbackKey, List[Callable]] = defaultdict(list) 22 | self.error_queue = error_queue 23 | self.execute_plugin_hooks = execute_plugin_hooks 24 | self.extra_parameters = extra_parameters or {} 25 | 26 | def set_extra_parameter(self, name, value): 27 | self.extra_parameters[name] = value 28 | 29 | async def execute(self, name, **kwargs): 30 | # Hooks that need to run before plugins 31 | key = CallbackKey(name, run_before_plugins=True) 32 | for callback in self._hook_callbacks[key]: 33 | await queue_exception_checker( 34 | run_user_provided_callable( 35 | callback, 36 | args=[], 37 | kwargs=dict(**self.extra_parameters, **kwargs), 38 | type_name="hook", 39 | ), 40 | self.error_queue, 41 | ) 42 | 43 | await self.execute_plugin_hooks(name, **self.extra_parameters, **kwargs) 44 | 45 | # Hooks that need to run after plugins 46 | key = CallbackKey(name, run_before_plugins=False) 47 | for callback in self._hook_callbacks[key]: 48 | await run_user_provided_callable( 49 | callback, 50 | args=[], 51 | kwargs=dict(**self.extra_parameters, **kwargs), 52 | type_name="hook", 53 | ) 54 | 55 | def register_callback(self, name, fn, before_plugins=False): 56 | key = CallbackKey(name, bool(before_plugins)) 57 | self._hook_callbacks[key].append(fn) 58 | -------------------------------------------------------------------------------- /lightbus/serializers/base.py: -------------------------------------------------------------------------------- 1 | import inspect 2 | import json 3 | from typing import Union, TypeVar, Type 4 | 5 | from lightbus.exceptions import InvalidMessage, InvalidSerializerConfiguration 6 | from lightbus.schema.encoder import json_encode 7 | 8 | 9 | def decode_bytes(b: Union[str, bytes]): 10 | return b.decode("utf8") if isinstance(b, bytes) else b 11 | 12 | 13 | def sanity_check_metadata(message_class, metadata): 14 | """Takes unserialized metadata and checks it looks sane 15 | 16 | This relies upon the required_metadata of each Message class 17 | to provide a list of metadata fields that are required. 18 | """ 19 | for required_key in message_class.required_metadata: 20 | if required_key not in metadata: 21 | raise InvalidMessage( 22 | "Required key '{key}' missing in {cls} metadata. 
" 23 | "Found keys: {keys}".format( 24 | key=required_key, keys=", ".join(metadata.keys()), cls=message_class.__name__ 25 | ) 26 | ) 27 | elif not metadata.get(required_key): 28 | raise InvalidMessage( 29 | "Required key '{key}' present in {cls} metadata but value was empty" 30 | "".format(key=required_key, cls=message_class.__name__) 31 | ) 32 | 33 | 34 | SerialisedData = TypeVar("SerialisedData") 35 | 36 | 37 | class MessageSerializer: 38 | def __init__(self, encoder=json_encode): 39 | self.encoder = encoder 40 | 41 | def __call__(self, message: "lightbus.Message") -> SerialisedData: 42 | raise NotImplementedError() 43 | 44 | 45 | class MessageDeserializer: 46 | def __init__(self, message_class: Type["lightbus.Message"], decoder=json.loads): 47 | if not inspect.isclass(message_class): 48 | raise InvalidSerializerConfiguration( 49 | "The message_class value provided to JsonMessageDeserializer was not a class, " 50 | "it was actually: {}".format(message_class) 51 | ) 52 | 53 | self.message_class = message_class 54 | self.decoder = decoder 55 | 56 | def __call__(self, serialized: SerialisedData, *, native_id=None) -> "lightbus.Message": 57 | raise NotImplementedError() 58 | -------------------------------------------------------------------------------- /tests/transports/redis/conftest.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | 4 | import pytest 5 | 6 | import lightbus 7 | import lightbus.creation 8 | import lightbus.transports.redis.event 9 | import lightbus.transports.redis.result 10 | import lightbus.transports.redis.rpc 11 | import lightbus.transports.redis.schema 12 | from lightbus.exceptions import BusAlreadyClosed 13 | from lightbus.transports.redis.event import StreamUse 14 | 15 | logger = logging.getLogger(__name__) 16 | 17 | 18 | @pytest.fixture 19 | async def redis_rpc_transport(new_redis_pool, loop): 20 | return lightbus.transports.redis.rpc.RedisRpcTransport( 21 | redis_pool=await new_redis_pool(maxsize=10000) 22 | ) 23 | 24 | 25 | @pytest.fixture 26 | async def redis_result_transport(new_redis_pool, loop): 27 | return lightbus.transports.redis.result.RedisResultTransport( 28 | redis_pool=await new_redis_pool(maxsize=10000) 29 | ) 30 | 31 | 32 | @pytest.fixture 33 | async def redis_event_transport(new_redis_pool, loop): 34 | transport = lightbus.transports.redis.event.RedisEventTransport( 35 | redis_pool=await new_redis_pool(maxsize=10000), 36 | service_name="test_service", 37 | consumer_name="test_consumer", 38 | # This used to be the default, so we still test against it here 39 | stream_use=StreamUse.PER_EVENT, 40 | ) 41 | yield transport 42 | await transport.close() 43 | 44 | 45 | @pytest.fixture 46 | async def redis_schema_transport(new_redis_pool, loop): 47 | return lightbus.transports.redis.schema.RedisSchemaTransport( 48 | redis_pool=await new_redis_pool(maxsize=10000) 49 | ) 50 | 51 | 52 | @pytest.fixture 53 | async def bus(new_bus): 54 | bus = new_bus() 55 | 56 | yield bus 57 | 58 | try: 59 | await bus.client.stop_worker() 60 | await bus.client.close_async() 61 | except BusAlreadyClosed: 62 | pass 63 | 64 | 65 | @pytest.fixture(name="fire_dummy_events") 66 | def fire_dummy_events_fixture(bus): 67 | async def fire_dummy_events(total, initial_delay=0.1): 68 | await asyncio.sleep(initial_delay) 69 | for x in range(0, total): 70 | await bus.my.dummy.my_event.fire_async(field=str(x)) 71 | logger.warning("TEST: fire_dummy_events() completed") 72 | 73 | return fire_dummy_events 74 | 
-------------------------------------------------------------------------------- /lightbus_experiments/kombu_zmq/consumer.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import timeit 3 | 4 | import zmq 5 | from kombu import Connection 6 | from timer import Timer 7 | 8 | TOTAL_MESSAGES = int(sys.argv[1]) 9 | 10 | 11 | amqp_timer = Timer() 12 | zmq_timer = Timer() 13 | 14 | 15 | def log(msg): 16 | pass 17 | # print(msg) 18 | 19 | 20 | def main(): 21 | context = zmq.Context() 22 | sockets = {} 23 | 24 | with Connection("amqp://guest:guest@127.0.0.1:5672//") as conn: 25 | simple_queue = conn.SimpleQueue("simple_queue") 26 | # Block until we get the 'ready to start' message 27 | print("Waiting for kick-off message from producer") 28 | simple_queue.get(block=True).ack() 29 | print("Got it! Let's go...") 30 | 31 | def get(): 32 | nonlocal sockets 33 | 34 | with amqp_timer: 35 | message = simple_queue.get(block=True) 36 | message_id = message.headers.get("id") 37 | addr = message.headers.get("reply-to") 38 | 39 | if not addr: 40 | with amqp_timer: 41 | message.ack() 42 | log("Message with no reply-to header. Ignoring.") 43 | return 44 | 45 | if addr not in sockets: 46 | log("Opening socket to: {}".format(addr)) 47 | with zmq_timer: 48 | socket = context.socket(zmq.PUSH) 49 | socket.connect(addr) 50 | sockets[addr] = socket 51 | 52 | socket = sockets[addr] 53 | log("Sending response for {} to: {}".format(message_id, addr)) 54 | 55 | # Send the message ID back plus some data 56 | with zmq_timer: 57 | socket.send(bytes(message_id, "utf8") + b" x" * 1024) 58 | 59 | log("Sent") 60 | 61 | with amqp_timer: 62 | message.ack() 63 | 64 | seconds = timeit.timeit(get, number=TOTAL_MESSAGES) 65 | print("Time per get: {}ms".format(round(seconds * 1000 / TOTAL_MESSAGES, 2))) 66 | print("Gets per second: {}".format(round(TOTAL_MESSAGES / seconds, 2))) 67 | print("ZeroMQ time: {}".format(zmq_timer)) 68 | print("AMQP time: {}".format(amqp_timer)) 69 | simple_queue.close() 70 | 71 | 72 | if __name__ == "__main__": 73 | main() 74 | -------------------------------------------------------------------------------- /lightbus_experiments/potential_use.py: -------------------------------------------------------------------------------- 1 | # Example use of API once it has been defined 2 | 3 | 4 | from mycompany.common.auth import api 5 | 6 | # Blocking calls 7 | user_info = api.get_user(username="testuser") 8 | password_ok = api.check_passsword(password="Passw0rd1") 9 | api.user_registered.listen(my_callback) 10 | 11 | # Parallel calls 12 | async_result1 = api.get_user.async(username="testuser") 13 | async_result2 = api.check_passsword.async(password="Passw0rd1") 14 | user_info = async_result1.wait() 15 | password_ok = async_result2.wait() 16 | 17 | 18 | # Pro 19 | # - Presence of 'api' indicates that this is an external service 20 | # - As api is an object, one cannot access the methods/events without the prefix 21 | # - Simple, readable 22 | # Con 23 | # - Use of 'api' is by convention only 24 | # - ...but we're all consenting adults. 
25 | 26 | 27 | # Developer tools 28 | 29 | 30 | api.debug.enable() 31 | api.debug.disable() 32 | # Enables/disables debugging for calls using this api instance 33 | 34 | api.debug.info("get_user") 35 | # Shows: number of consumers, consumer information, last call timestamp, last call args, last call response, 36 | # last handled by 37 | 38 | api.debug.trace("get_user", username="testuser") 39 | # Shows: 40 | # - "WARNING: Only works when working with other warren consumers & producers" 41 | # - Total number of handlers listening for this api call/event [1] 42 | # - Raw message as sent to broker 43 | # - Expecting response to tcp://10.1.2.3:54142 44 | # - Raw message received by rabbitmq [1] 45 | # - "Waiting for debug information from handlers..." [3] 46 | # - Consumer ---> Message received from by PID at HOST 47 | # - Consumer ---> Raw message as received from broker: ... 48 | # - Consumer ---> Message being handled by implementation mycompany.common.auth.AuthApi.get_user() 49 | # - Consumer ---> Implementation returned result: ... 50 | # - Consumer ---> Returning result to tcp://10.1.2.3:54142 51 | # - Consumer ---> Acknowledgement sent to broker 52 | # - Consumer ---> All done 53 | # - Response received from consumer: ... 54 | # - Done 55 | 56 | # [1] Requires a debug back chanel of some sort (i.e. a debug exchange) 57 | # [2] Creates a temporary queue on the relevant exchange. 58 | # [3] The debugger sends the message with a 'debug-to' header, which logs back 59 | # via ZeroMQ 60 | -------------------------------------------------------------------------------- /docs/explanation/marshalling.md: -------------------------------------------------------------------------------- 1 | Lightbus has four stages of data marshalling: 2 | 3 | * Encode / Decode 4 | * Serialize / Deserialize 5 | * Validation 6 | * Deform / Cast 7 | 8 | An **inbound message** will go through this process from **top to bottom**. 9 | An **outbound message** will go through this process from **bottom to top**. 10 | 11 | ## Inbound flow 12 | 13 | Messages arriving from the bus go through the following stages 14 | in order to prepare the data for use: 15 | 16 | 1. **Decode:** Decode the incoming data (JSON decoding by default) 17 | 2. **Deserialise:** Convert decoded data into a `Message` object 18 | 3. **Validate:** Validate the incoming message against the JSON schema 19 | available on the bus. 20 | 4. **Cast:** Best effort casting of parameters/results based on 21 | the locally available type hinting. This can be disabled with the 22 | [`cast_values` configuration option](../reference/configuration.md#api-config). 23 | 24 | 25 | ## Outbound flow 26 | 27 | This is the reverse of the inbound flow. Messages being 28 | sent will go through the following process in order to 29 | prepare the data for transmission on bus: 30 | 31 | 1. **Deform:** Lightbus handles [NamedTuples], [dataclasses] 32 | and [other classes] by converting 33 | them into dictionaries. Other common types such as 34 | datetimes, Decimals etc are converted into strings. 35 | Internally this is referred to as the *deform* process and is 36 | the inverse of the *cast* process. 37 | 2. **Validate:** Validate the outgoing message against the JSON schema 38 | available on the bus. 39 | 3. **Serialize:** Structures the data in a way suitable for the 40 | transport. 41 | 4. **Encode:** Converts the data to a form suitable for transmission. 42 | This typically means stringifying it, for which lightbus 43 | uses JSON encoding by default. 
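To make the deform and cast stages more concrete, here is a rough illustrative sketch (not Lightbus's actual implementation, and using a made-up `User` type) of what happens to a NamedTuple parameter on its way out onto the bus and back in again:

```python3
from typing import NamedTuple
from datetime import datetime


class User(NamedTuple):
    username: str
    joined: datetime


# Deform (outbound): complex types are reduced to bus-friendly primitives
user = User(username="admin", joined=datetime(2020, 1, 1))
deformed = {"username": user.username, "joined": user.joined.isoformat()}

# Cast (inbound): the receiving service uses its own local type hints to
# turn those primitives back into useful Python objects
received = User(
    username=deformed["username"],
    joined=datetime.fromisoformat(deformed["joined"]),
)
```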
44 | 45 | ## About casting 46 | 47 | Casting is separate from validation, although both rely on type hints. 48 | Whereas validation uses a shared 49 | bus-wide schema to check data validity, casting uses any Python type hints 50 | available in the **local codebase** to marshall event and RPC parameters 51 | into a format useful to the service's developer. 52 | 53 | 54 | 55 | 56 | [NamedTuples]: ../reference/typing.md#namedtuple-example 57 | [dataclasses]: ../reference/typing.md#dataclass-example 58 | [other classes]: ../reference/typing.md#custom-class-example 59 | -------------------------------------------------------------------------------- /tests/plugins/test_unit_plugin_base.py: -------------------------------------------------------------------------------- 1 | """Test the infrastructure for loading and calling plugins""" 2 | import pytest 3 | from collections import OrderedDict 4 | 5 | from lightbus.config import Config 6 | from lightbus.plugins import LightbusPlugin, PluginRegistry 7 | from lightbus.plugins.metrics import MetricsPlugin 8 | from lightbus.plugins.state import StatePlugin 9 | 10 | 11 | pytestmark = pytest.mark.unit 12 | 13 | 14 | def test_manually_set_plugins(plugin_registry: PluginRegistry): 15 | assert not plugin_registry._plugins 16 | p1 = LightbusPlugin() 17 | p2 = LightbusPlugin() 18 | plugin_registry.set_plugins([p1, p2]) 19 | assert plugin_registry._plugins == [p1, p2] 20 | 21 | 22 | def test_autoload_plugins(plugin_registry: PluginRegistry): 23 | config = Config.load_dict( 24 | {"plugins": {"internal_state": {"enabled": True}, "internal_metrics": {"enabled": True}}} 25 | ) 26 | assert not plugin_registry._plugins 27 | assert plugin_registry.autoload_plugins(config) 28 | assert [type(p) for p in plugin_registry._plugins] == [StatePlugin, MetricsPlugin] 29 | 30 | 31 | @pytest.mark.asyncio 32 | async def test_execute_hook(mocker, plugin_registry: PluginRegistry): 33 | """Ensure calling execute_hook() calls the method on the plugin""" 34 | assert not plugin_registry._plugins 35 | plugin = LightbusPlugin() 36 | plugin_registry.set_plugins([plugin]) 37 | 38 | mocker.spy(plugin, "before_worker_start") 39 | 40 | await plugin_registry.execute_hook("before_worker_start", client=None) 41 | assert plugin.before_worker_start.called 42 | 43 | 44 | def test_is_plugin_loaded(plugin_registry: PluginRegistry): 45 | assert plugin_registry.is_plugin_loaded(LightbusPlugin) == False 46 | plugin_registry.set_plugins([LightbusPlugin()]) 47 | assert plugin_registry.is_plugin_loaded(LightbusPlugin) == True 48 | 49 | 50 | def test_plugin_config(): 51 | # Is the Config attached to the plugin class by the 52 | # base plugin's metaclass? 
53 | class PluginWithConfig(LightbusPlugin): 54 | @classmethod 55 | def from_config(cls, config, first: int = 123): 56 | pass 57 | 58 | assert PluginWithConfig.Config 59 | assert type(PluginWithConfig.Config) == type 60 | assert "config" not in PluginWithConfig.Config.__annotations__ 61 | assert "first" in PluginWithConfig.Config.__annotations__ 62 | assert PluginWithConfig.Config().first == 123 63 | -------------------------------------------------------------------------------- /lightbus/commands/utilities.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from typing import Tuple, Any 4 | 5 | import lightbus.creation 6 | from lightbus import configure_logging, BusPath 7 | import lightbus.client 8 | from lightbus.config import Config 9 | from lightbus.exceptions import NoBusFoundInBusModule 10 | 11 | logger = logging.getLogger(__name__) 12 | 13 | 14 | def import_bus(args) -> Tuple[Any, BusPath]: 15 | bus_module = lightbus.creation.import_bus_module(args.bus_module_name) 16 | try: 17 | return bus_module, bus_module.bus 18 | except AttributeError: 19 | raise NoBusFoundInBusModule( 20 | f"Bus module at {bus_module.__file__} contains no variable named 'bus'. " 21 | f"Your bus module should contain the line 'bus = lightbus.create()'." 22 | ) 23 | 24 | 25 | def setup_logging(override: str, config: Config): 26 | configure_logging(log_level=(override or config.bus().log_level.value).upper()) 27 | 28 | 29 | def setup_common_arguments(parser): 30 | """Set common arguments needed by all commands""" 31 | general_argument_group = parser.add_argument_group(title="Common arguments") 32 | general_argument_group.add_argument( 33 | "--bus", 34 | "-b", 35 | dest="bus_module_name", 36 | metavar="BUS_MODULE", 37 | help=( 38 | "The bus module to import. Example 'bus', 'my_project.bus'. Defaults to " 39 | "the value of the LIGHTBUS_MODULE environment variable, or 'bus'" 40 | ), 41 | ) 42 | general_argument_group.add_argument( 43 | "--service-name", 44 | "-s", 45 | help="Name of service in which this process resides. YOU SHOULD " 46 | "LIKELY SET THIS IN PRODUCTION. Can also be set using the " 47 | "LIGHTBUS_SERVICE_NAME environment. Will default to a random string.", 48 | ) 49 | general_argument_group.add_argument( 50 | "--process-name", 51 | "-p", 52 | help="A unique name of this process within the service. Can also be set using the " 53 | "LIGHTBUS_PROCESS_NAME environment. Will default to a random string.", 54 | ) 55 | general_argument_group.add_argument( 56 | "--config", dest="config_file", help="Config file to load, JSON or YAML", metavar="FILE" 57 | ) 58 | general_argument_group.add_argument( 59 | "--log-level", 60 | help="Set the log level. Overrides any value set in config. 
" 61 | "One of debug, info, warning, critical, exception.", 62 | metavar="LOG_LEVEL", 63 | ) 64 | -------------------------------------------------------------------------------- /tests/transports/redis/test_reliability_redis_events.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | from asyncio import CancelledError 4 | 5 | import pytest 6 | 7 | import lightbus 8 | import lightbus.path 9 | from lightbus import EventMessage 10 | from lightbus.utilities.async_tools import cancel 11 | from tests.conftest import Worker 12 | 13 | pytestmark = pytest.mark.reliability 14 | 15 | logger = logging.getLogger(__name__) 16 | 17 | 18 | @pytest.mark.asyncio 19 | async def test_listener_failures( 20 | bus: lightbus.path.BusPath, new_bus, caplog, redis_client, dummy_api, worker: Worker 21 | ): 22 | """Keep killing bus clients and check that we don't loose any events regardless""" 23 | 24 | caplog.set_level(logging.ERROR) 25 | event_ok_ids = dict() 26 | history = [] 27 | 28 | async def listener(event_message: EventMessage, field, **kwargs): 29 | call_id = int(field) 30 | event_ok_ids.setdefault(call_id, 0) 31 | event_ok_ids[call_id] += 1 32 | await asyncio.sleep(0.1) 33 | 34 | # Put a lot of events onto the bus (we'll pull them off shortly) 35 | bus.client.register_api(dummy_api) 36 | for n in range(0, 50): 37 | await bus.my.dummy.my_event.fire_async(field=str(n)) 38 | 39 | # Now pull the events off, and sometimes kill a worker early. 40 | # We kill 20% of listeners, so run 20% extra workers (we don't kill 41 | # any listeners in that extra 20% because these are just mop-up) 42 | for n in range(0, int(50 * 1.2)): 43 | cursed_bus = new_bus() 44 | cursed_bus.my.dummy.my_event.listen( 45 | listener, listener_name="test", bus_options={"since": "0"} 46 | ) 47 | 48 | async with worker(cursed_bus): 49 | logger.debug(f"Worker {n}") 50 | await asyncio.sleep(0.05) 51 | if n % 5 == 0 and n < 50: 52 | # Cancel 1 in every 5 attempts at handling the event 53 | tasks = cursed_bus.client.event_client._event_listener_tasks 54 | await cancel(list(tasks)[0]) 55 | await asyncio.sleep(0.15) 56 | 57 | await asyncio.sleep(0.1) 58 | 59 | info = await redis_client.xinfo_groups(stream="my.dummy.my_event:stream") 60 | 61 | duplicate_calls = [n for n, v in event_ok_ids.items() if v > 1] 62 | assert len(event_ok_ids) == 50, event_ok_ids 63 | assert len(duplicate_calls) > 0 64 | 65 | assert ( 66 | len(info) == 1 67 | ), "There should only be one consumer group which was reused by every listener above" 68 | assert info[0][b"pending"] == 0 69 | -------------------------------------------------------------------------------- /lightbus/client/validator.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from typing import Union 3 | 4 | from lightbus.exceptions import UnknownApi 5 | from lightbus.message import EventMessage, RpcMessage, ResultMessage 6 | from lightbus.schema import Schema 7 | from lightbus.config import Config 8 | 9 | logger = logging.getLogger(__name__) 10 | 11 | 12 | def validate_incoming( 13 | config: Config, schema: Schema, message: Union[EventMessage, RpcMessage, ResultMessage] 14 | ): 15 | return _validate(config, schema, message, "incoming") 16 | 17 | 18 | def validate_outgoing( 19 | config: Config, schema: Schema, message: Union[EventMessage, RpcMessage, ResultMessage] 20 | ): 21 | return _validate(config, schema, message, "outgoing") 22 | 23 | 24 | def _validate( 25 | config, schema, 
message: Union[EventMessage, RpcMessage, ResultMessage], direction: str 26 | ): 27 | if direction not in ("incoming", "outgoing"): 28 | raise AssertionError("Invalid direction specified") 29 | 30 | # Result messages do not carry the api or procedure name, so allow them to be 31 | # specified manually 32 | api_name = message.api_name 33 | event_or_rpc_name = getattr(message, "procedure_name", None) or getattr(message, "event_name") 34 | api_config = config.api(api_name) 35 | strict_validation = api_config.strict_validation 36 | 37 | if not getattr(api_config.validate, direction): 38 | return 39 | 40 | if api_name not in schema: 41 | if strict_validation: 42 | raise UnknownApi( 43 | f"Validation is enabled for API named '{api_name}', but there is no schema present for this API. " 44 | f"Validation is therefore not possible. You are also seeing this error because the " 45 | f"'strict_validation' setting is enabled. Disabling this setting will turn this exception " 46 | f"into a warning. " 47 | ) 48 | else: 49 | logger.warning( 50 | f"Validation is enabled for API named '{api_name}', but there is no schema present for this API. " 51 | f"Validation is therefore not possible. You can force this to be an error by enabling " 52 | f"the 'strict_validation' config option. You can silence this message by disabling validation " 53 | f"for this API using the 'validate' option." 54 | ) 55 | return 56 | 57 | if isinstance(message, (RpcMessage, EventMessage)): 58 | schema.validate_parameters(api_name, event_or_rpc_name, message.kwargs) 59 | elif isinstance(message, ResultMessage): 60 | schema.validate_response(api_name, event_or_rpc_name, message.result) 61 | -------------------------------------------------------------------------------- /docs/reference/apis.md: -------------------------------------------------------------------------------- 1 | APIs specify the functionality available on the bus. To do this you 2 | define API classes within your `bus.py` file. You can also define 3 | your API elsewhere and import it into your `bus.py` file. 4 | 5 | **For further discussion of APIs [see the concepts section](../explanation/apis.md).** 6 | 7 | ## An example API 8 | 9 | ```python3 10 | # An example API. You can define this in your bus.py, 11 | # or import into your bus.py file from elsewhere 12 | 13 | class SupportCaseApi(Api): 14 | # An event, 15 | # available at bus.support.case.case_created 16 | case_created = Event(parameters=('id', 'sender', 'subject', 'body')) 17 | 18 | # Options for this API 19 | class Meta: 20 | # API name on the bus 21 | name = 'support.case' 22 | 23 | # Will be available as a remote procedure call at 24 | # bus.support.case.get() 25 | def get(self, id): 26 | return get_case_from_db(pk=id) 27 | ``` 28 | 29 | A service can define zero or more APIs, and each API can contain 30 | zero or more events and zero or more procedures. 31 | 32 | The `Meta` class specifies options regarding the API, with `name` being 33 | the only required option. The name specifies how the API will be 34 | accessed on the bus. 35 | 36 | You could call an RPC on the above API as follows: 37 | 38 | ```python3 39 | bus = lightbus.create() 40 | 41 | # Call the get() RPC.
42 | case = bus.support.case.get(id=123) 43 | ``` 44 | 45 | You can also fire an event on this API: 46 | 47 | ```python3 48 | bus = lightbus.create() 49 | 50 | # Fire the case_created event 51 | bus.support.case.case_created.fire( 52 | id=123, 53 | sender='Joe', 54 | subject='I need support please!', 55 | body='...', 56 | ) 57 | ``` 58 | 59 | ## Options 60 | 61 | ### `name (str)` 62 | 63 | Specifies the name of the API. This will determine how the API is addressed 64 | on the bus. See [naming](#naming-your-apis), below. 65 | 66 | `name` is a required option. 67 | 68 | ## Naming your APIs 69 | 70 | As you can see from the `Meta.name` option in the example above, API names 71 | can contain periods which allow you 72 | to structure your bus in a suitable form for your situation. 73 | Some example API naming schemes may look like: 74 | 75 | ```yaml 76 | # Example API naming schemes for use within Meta.name 77 | Format: <service> 78 | Example: support.get_case() 79 | support.get_activity() 80 | 81 | 82 | Format: <service>.<object> 83 | Example: support.case.get() 84 | support.activity.get() 85 | 86 | 87 | Format: <department>.<service>.<object> 88 | Example: marketing.website.stats.get() 89 | ops.monitoring.servers.get_status() 90 | ``` 91 | -------------------------------------------------------------------------------- /tests/client/internal_messaging/test_producer.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | 4 | import pytest 5 | from _pytest.logging import LogCaptureFixture 6 | 7 | from lightbus.client.internal_messaging.producer import InternalProducer 8 | 9 | pytestmark = pytest.mark.unit 10 | 11 | 12 | @pytest.mark.asyncio 13 | async def test_queue_monitor(producer: InternalProducer, caplog: LogCaptureFixture, fake_coroutine): 14 | """Ensure the queue monitor logs as we expect 15 | 16 | Note that something we implicitly test for here is that the monitor 17 | does not log lots of duplicate lines. Rather it only logs when 18 | something changes. 19 | """ 20 | producer.size_warning = 3 21 | producer.monitor_interval = 0.01 22 | caplog.set_level(logging.WARNING) 23 | 24 | # Start the producer running 25 | producer.start() 26 | 27 | # No logging yet 28 | assert not caplog.records 29 | 30 | # Add a couple of items to the queue (still under size_warning) 31 | producer.queue.put_nowait(None) 32 | producer.queue.put_nowait(None) 33 | await asyncio.sleep(0.05) 34 | 35 | # Still no logging yet 36 | assert not caplog.records 37 | 38 | # One more gets us up to the warning level 39 | producer.queue.put_nowait(None) 40 | await asyncio.sleep(0.05) 41 | 42 | # Now we have logging 43 | assert len(caplog.records) == 1 44 | assert caplog.records[0].getMessage() == "Queue in TestProducer now has 3 commands." 45 | caplog.clear() # Clear the log messages 46 | 47 | # Let's check we get another message when the queue gets bigger again 48 | producer.queue.put_nowait(None) 49 | await asyncio.sleep(0.05) 50 | 51 | assert len(caplog.records) == 1 52 | assert caplog.records[0].getMessage() == "Queue in TestProducer now has 4 commands." 53 | caplog.clear() # Clear the log messages 54 | 55 | # Now check we get logging when the queue shrinks, but is still above the warning level 56 | producer.queue.get_nowait() 57 | await asyncio.sleep(0.05) 58 | 59 | assert len(caplog.records) == 1 60 | assert caplog.records[0].getMessage() == ( 61 | "Queue in TestProducer has shrunk back down to 3 commands."
62 | ) 63 | caplog.clear() # Clear the log messages 64 | 65 | # Now check we get logging when the queue shrinks to BELOW the warning level 66 | producer.queue.get_nowait() 67 | await asyncio.sleep(0.05) 68 | 69 | assert len(caplog.records) == 1 70 | assert caplog.records[0].getMessage() == ( 71 | "Queue in TestProducer has shrunk back down to 2 commands. " 72 | "Queue is now at an OK size again." 73 | ) 74 | caplog.clear() # Clear the log messages 75 | -------------------------------------------------------------------------------- /lightbus/serializers/by_field.py: -------------------------------------------------------------------------------- 1 | """ Serializers suitable for transports which support multiple fields per message 2 | 3 | These serializers handle moving data to/from a dictionary 4 | format. The format looks like this:: 5 | 6 | # Message metadata first. Each value is implicitly a utf8 string 7 | id: 'ZOCTLh1CEeimW3gxwcOTbg==' 8 | api_name: 'my_company.auth' 9 | procedure_name: 'check_password' 10 | return_path: 'redis+key://my_company.auth.check_password:result:ZOCTLh1CEeimW3gxwcOTbg==' 11 | 12 | # kwargs follow, each encoded with the provided encoder (in this case JSON) 13 | kw:username: '"admin"' 14 | kw:password: '"secret"' 15 | 16 | """ 17 | from typing import TYPE_CHECKING 18 | 19 | from lightbus.serializers.base import ( 20 | decode_bytes, 21 | sanity_check_metadata, 22 | MessageSerializer, 23 | MessageDeserializer, 24 | ) 25 | 26 | if TYPE_CHECKING: 27 | # pylint: disable=unused-import,cyclic-import 28 | from lightbus import Message 29 | 30 | 31 | class ByFieldMessageSerializer(MessageSerializer): 32 | def __call__(self, message: "Message") -> dict: 33 | """Takes a message object and returns a serialised dictionary representation 34 | 35 | See the module-level docs (above) for further details 36 | """ 37 | serialized = message.get_metadata() 38 | for k, v in message.get_kwargs().items(): 39 | serialized[":{}".format(k)] = self.encoder(v) 40 | return serialized 41 | 42 | 43 | class ByFieldMessageDeserializer(MessageDeserializer): 44 | def __call__(self, serialized: dict, *, native_id=None, **extra): 45 | """Takes a dictionary of serialised fields and returns a Message object 46 | 47 | See the module-level docs (above) for further details 48 | """ 49 | metadata = {} 50 | kwargs = {} 51 | 52 | for k, v in serialized.items(): 53 | k = decode_bytes(k) 54 | v = decode_bytes(v) 55 | 56 | if not k: 57 | continue 58 | 59 | # kwarg fields start with a ':', everything else is metadata 60 | if k[0] == ":": 61 | # kwarg values need decoding 62 | kwargs[k[1:]] = self.decoder(v) 63 | else: 64 | # metadata args are implicitly strings, so we don't need to decode them 65 | metadata[k] = v 66 | 67 | sanity_check_metadata(self.message_class, metadata) 68 | 69 | if "native_id" in metadata: 70 | native_id = metadata.pop("native_id") 71 | 72 | return self.message_class.from_dict( 73 | metadata=metadata, kwargs=kwargs, native_id=native_id, **extra 74 | ) 75 | -------------------------------------------------------------------------------- /docs/tutorial/getting-involved.md: -------------------------------------------------------------------------------- 1 | # Getting involved (even if you don't code) 2 | 3 | **Lightbus can only thrive as a project if it has new contributors getting involved, 4 | this means we need you!** 5 | 6 | *"But I've never contributed to open source before,"* I hear you cry. 
Well, it turns 7 | out most people have never contributed to open source before either, so don't worry, you're 8 | not alone. The Lightbus team (which currently consists of me, Adam) is here to support you. 9 | 10 | ## Code of Conduct 11 | 12 | **Make sure you read over the [Code of Conduct](../reference/code-of-conduct.md)**. This will outline how you should 13 | engage with others, and how you should expect to be treated yourself. 14 | 15 | ## How to get started (non-coding) 16 | 17 | There are lots of ways you can get involved even without getting stuck into code. Even if you are a 18 | Python developer, this can still be a good place to start: 19 | 20 | * **Say hello** [in our community chat](https://discord.gg/2j594ws). We can have a chat about ways 21 | in which you can help. 22 | * Raise an issue and **describe something you find confusing** about Lightbus. Something you think 23 | is not clear or missing from the documentation. 24 | * [Submit a pull request] to **improve the documentation** (for example, I'm sure you can find some typos) 25 | * Tell me how you are using (or would like to use) Lightbus. This kind of insight is invaluable. 26 | 27 | ## How to get started (coding) 28 | 29 | If you want to get stuck into writing some code, here are some good places to start: 30 | 31 | * See [how to modify Lightbus](../howto/modify-lightbus.md) for a guide to getting your local development 32 | environment set up. 33 | * **If you have a specific idea or bug you would like to fix then come and discuss it**, either 34 | [in our community chat](https://discord.gg/2j594ws) or in a 35 | [GitHub issue](https://github.com/adamcharnock/lightbus/issues/new). It would be great to meet you, 36 | and I may be able to advise you of any pitfalls to be aware of. 37 | * **Read over the code** to get a feel for how things work. For a high-level view take a look at 38 | the `BusClient` class. For a low-level view of the messaging system take a look at 39 | the redis transports (`lightbus/transports/redis`). You'll likely find some typos as you go, so 40 | feel free to [submit a pull request]! 41 | * **Work on one of the [project's issues]**. I recommend you 42 | talk your plan over with me first, that way we can make sure you develop something that fits with 43 | the project as a whole.
44 | 45 | [submit a pull request]: https://help.github.com/en/github/collaborating-with-issues-and-pull-requests 46 | [project's issues]: https://github.com/adamcharnock/lightbus/issues 47 | -------------------------------------------------------------------------------- /tests/transports/redis/test_unit_redis_schema.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | import pytest 4 | 5 | from lightbus import RedisSchemaTransport 6 | 7 | pytestmark = pytest.mark.unit 8 | 9 | 10 | @pytest.mark.asyncio 11 | async def test_store(redis_schema_transport: RedisSchemaTransport, redis_client): 12 | await redis_schema_transport.store("my.api", {"key": "value"}, ttl_seconds=60) 13 | assert set(await redis_client.keys("*")) == {b"schemas", b"schema:my.api"} 14 | 15 | schemas = await redis_client.smembers("schemas") 16 | assert schemas == [b"my.api"] 17 | 18 | my_api_schema = await redis_client.get("schema:my.api") 19 | assert json.loads(my_api_schema) == {"key": "value"} 20 | 21 | ttl = await redis_client.ttl("schema:my.api") 22 | assert 59 <= ttl <= 60 23 | 24 | 25 | @pytest.mark.asyncio 26 | async def test_store_no_ttl(redis_schema_transport: RedisSchemaTransport, redis_client): 27 | await redis_schema_transport.store("my.api", {"key": "value"}, ttl_seconds=None) 28 | ttl = await redis_client.ttl("schema:my.api") 29 | assert ttl == -1 30 | 31 | 32 | @pytest.mark.asyncio 33 | async def test_load(redis_schema_transport: RedisSchemaTransport, redis_client): 34 | await redis_client.sadd("schemas", "my.api", "old.api") 35 | await redis_client.set("schema:my.api", json.dumps({"key": "value"})) 36 | 37 | schemas = await redis_schema_transport.load() 38 | 39 | assert schemas == {"my.api": {"key": "value"}} 40 | 41 | 42 | @pytest.mark.asyncio 43 | async def test_load(redis_schema_transport: RedisSchemaTransport, redis_client): 44 | await redis_client.sadd("schemas", "my.api", "old.api") 45 | await redis_client.set("schema:my.api", json.dumps({"key": "value"})) 46 | 47 | schemas = await redis_schema_transport.load() 48 | 49 | assert schemas == {"my.api": {"key": "value"}} 50 | 51 | 52 | @pytest.mark.asyncio 53 | async def test_load_no_apis(redis_schema_transport: RedisSchemaTransport, redis_client): 54 | schemas = await redis_schema_transport.load() 55 | assert schemas == {} 56 | 57 | 58 | @pytest.mark.asyncio 59 | async def test_from_config(redis_client): 60 | await redis_client.select(5) 61 | host, port = redis_client.address 62 | transport = RedisSchemaTransport.from_config( 63 | config=None, url=f"redis://127.0.0.1:{port}/5", connection_parameters=dict(maxsize=3) 64 | ) 65 | with await transport.connection_manager() as transport_client: 66 | assert transport_client.connection.address == ("127.0.0.1", port) 67 | assert transport_client.connection.db == 5 68 | await transport_client.set("x", 1) 69 | assert await redis_client.get("x") 70 | 71 | assert transport._redis_pool.connection.maxsize == 3 72 | await transport.close() 73 | -------------------------------------------------------------------------------- /docs/howto/event-sourcing.md: -------------------------------------------------------------------------------- 1 | # How to use Lightbus for event sourcing 2 | 3 | We won't go into the details of event sourcing here, but we can roughly 4 | describe our messaging needs as follows: 5 | 6 | * We are optimising for reliability and completeness, performance is secondary 7 | * Sent events must be valid 8 | * Received events must be processed 
regardless of their validity 9 | * Event history is very important 10 | 11 | !!! note 12 | 13 | Your needs may not precisely match this scenario, so be prepared to tweak the following configuration accordingly. 14 | 15 | ## Global configuration 16 | 17 | For the above event-sourced scenario, a sample Lightbus [configuration](../reference/configuration.md) may look 18 | something like this: 19 | 20 | ```yaml 21 | # Lightbus config for event sourcing 22 | 23 | bus: 24 | schema: 25 | transport: 26 | redis: 27 | url: "redis://redis_host:6379/0" 28 | 29 | apis: 30 | 31 | # Here we specify the default for your entire bus, but you could 32 | # also specify the config for a specific API by using the API's name 33 | # instead of 'default'. 34 | default: 35 | 36 | validate: 37 | # Sent (outgoing) events must be valid 38 | outgoing: true 39 | # Received (incoming) events must be processed 40 | # regardless of their validity 41 | incoming: false 42 | 43 | event_transport: 44 | redis: 45 | url: 'redis://redis_host:6379/0' 46 | 47 | # Load only a few events at a time in order to prioritise consistency 48 | batch_size: 1 49 | 50 | # Do not truncate the event stream. We keep all events 51 | # as these events are our source of truth 52 | max_stream_length: null 53 | 54 | # Per-API streams, as we wish to prioritise 55 | # ordering (this is the default) 56 | stream_use: "per_api" 57 | ``` 58 | 59 | ## Run a single Lightbus worker 60 | 61 | Running only a single Lightbus worker process 62 | (with a specific process name) will ensure messages are processed in 63 | order. 64 | 65 | lightbus run --service-name=example_service --process-name=worker 66 | 67 | ## Set process names 68 | 69 | If your event volume requires multiple workers then ensure you set 70 | a deterministic per-process name for each. 71 | This will allow restarted workers to immediately pick up any previously claimed messages 72 | without needing to wait for a timeout.
73 | 74 | For example, if you have three Lightbus workers you can start each as follows: 75 | 76 | lightbus run --service-name=example_service --process-name=worker1 77 | lightbus run --service-name=example_service --process-name=worker2 78 | lightbus run --service-name=example_service --process-name=worker3 79 | 80 | **Ordering will *not* be maintained when running multiple workers.** 81 | -------------------------------------------------------------------------------- /lightbus/transports/redis/schema.py: -------------------------------------------------------------------------------- 1 | import json 2 | import logging 3 | from typing import Mapping, Dict, Optional 4 | 5 | from lightbus.transports.base import SchemaTransport 6 | from lightbus.schema.encoder import json_encode 7 | from lightbus.transports.redis.utilities import RedisTransportMixin 8 | from lightbus.utilities.frozendict import frozendict 9 | 10 | logger = logging.getLogger("lightbus.transports.redis") 11 | 12 | 13 | class RedisSchemaTransport(RedisTransportMixin, SchemaTransport): 14 | def __init__( 15 | self, 16 | *, 17 | redis_pool=None, 18 | url: str = "redis://127.0.0.1:6379/0", 19 | connection_parameters: Mapping = frozendict(), 20 | ): 21 | self.set_redis_pool(redis_pool, url, connection_parameters) 22 | self._latest_ids = {} 23 | super().__init__() 24 | 25 | @classmethod 26 | def from_config( 27 | cls, 28 | config, 29 | url: str = "redis://127.0.0.1:6379/0", 30 | connection_parameters: Mapping = frozendict(), 31 | ): 32 | return cls(url=url, connection_parameters=connection_parameters) 33 | 34 | def schema_key(self, api_name): 35 | return "schema:{}".format(api_name) 36 | 37 | def schema_set_key(self): 38 | """Maintains a set of api names in redis which can be used to retrieve individual schemas""" 39 | return "schemas" 40 | 41 | async def store(self, api_name: str, schema: Dict, ttl_seconds: Optional[int]): 42 | """Store an individual schema""" 43 | with await self.connection_manager() as redis: 44 | schema_key = self.schema_key(api_name) 45 | 46 | p = redis.pipeline() 47 | p.set(schema_key, json_encode(schema)) 48 | if ttl_seconds is not None: 49 | p.expire(schema_key, ttl_seconds) 50 | p.sadd(self.schema_set_key(), api_name) 51 | await p.execute() 52 | 53 | async def load(self) -> Dict[str, Dict]: 54 | """Load all schemas""" 55 | schemas = {} 56 | with await self.connection_manager() as redis: 57 | # Get & decode the api names 58 | api_names = list(await redis.smembers(self.schema_set_key())) 59 | api_names = [api_name.decode("utf8") for api_name in api_names] 60 | 61 | # Convert the api names into redis keys 62 | keys = [self.schema_key(api_name) for api_name in api_names] 63 | 64 | if not keys: 65 | return {} 66 | 67 | # Get the schemas from the keys 68 | encoded_schemas = await redis.mget(*keys) 69 | for api_name, schema in zip(api_names, encoded_schemas): 70 | # Schema may have expired 71 | if schema: 72 | schemas[api_name] = json.loads(schema) 73 | return schemas 74 | -------------------------------------------------------------------------------- /docs/static/js/version-select.js: -------------------------------------------------------------------------------- 1 | window.addEventListener("DOMContentLoaded", function() { 2 | function normalizePath(path) { 3 | var normalized = []; 4 | path.split("/").forEach(function(bit, i) { 5 | if (bit === "." || (bit === "" && i !== 0)) { 6 | return; 7 | } else if (bit === "..") { 8 | if (normalized.length === 1 && normalized[0] === "") { 9 | // We must be trying to .. 
past the root! 10 | throw new Error("invalid path"); 11 | } else if (normalized.length === 0 || 12 | normalized[normalized.length - 1] === "..") { 13 | normalized.push(".."); 14 | } else { 15 | normalized.pop(); 16 | } 17 | } else { 18 | normalized.push(bit); 19 | } 20 | }); 21 | return normalized.join("/"); 22 | } 23 | 24 | // `base_url` comes from the base.html template for this theme. 25 | // Lightbus note: The base_url js variable wasn't immediately obviously 26 | // available on the mkdocs-material theme. So we were 27 | // simply assume the first part of the URL is the version. 28 | var CURRENT_VERSION = window.location.pathname.split("/")[1]; 29 | 30 | function makeSelect(options, selected) { 31 | var select = document.createElement("select"); 32 | 33 | options.forEach(function(i) { 34 | var option = new Option(i.text, i.value, undefined, 35 | i.value === selected); 36 | select.add(option); 37 | }); 38 | 39 | return select; 40 | } 41 | 42 | var xhr = new XMLHttpRequest(); 43 | // Lightbus note: Again, we make assumptions about the path 44 | xhr.open("GET", "/versions.json"); 45 | xhr.onload = function() { 46 | var versions = JSON.parse(this.responseText); 47 | 48 | var realVersion = versions.find(function(i) { 49 | return i.version === CURRENT_VERSION || 50 | i.aliases.includes(CURRENT_VERSION); 51 | }).version; 52 | 53 | var select = makeSelect(versions.map(function(i) { 54 | return {text: i.title, value: i.version}; 55 | }), realVersion); 56 | select.addEventListener("change", function(event) { 57 | window.location.href = "/" + this.value; 58 | }); 59 | 60 | var selectInLi = document.createElement('li'); 61 | selectInLi.appendChild(select); 62 | selectInLi.className = 'md-nav__item'; 63 | selectInLi.id = 'version-selector'; 64 | var primarySidebarUl = document.querySelector(".md-nav--primary > .md-nav__list"); 65 | var secondarySidebarUl = document.querySelector(".md-nav--primary > .md-nav__list > .md-nav__item--active.md-nav__item--nested .md-nav__list"); 66 | if(secondarySidebarUl) { 67 | secondarySidebarUl.appendChild(selectInLi); 68 | } else { 69 | primarySidebarUl.appendChild(selectInLi); 70 | } 71 | }; 72 | xhr.send(); 73 | }); 74 | -------------------------------------------------------------------------------- /lightbus/utilities/deforming.py: -------------------------------------------------------------------------------- 1 | # The opposite of casting. 
See lightbus.utilities.casting 2 | from collections import OrderedDict 3 | from datetime import datetime, date 4 | from decimal import Decimal 5 | from enum import Enum 6 | from uuid import UUID 7 | from base64 import b64encode 8 | 9 | from lightbus.exceptions import DeformError 10 | from lightbus.utilities.frozendict import frozendict 11 | from lightbus.utilities.type_checks import is_namedtuple, is_dataclass, isinstance_safe 12 | 13 | 14 | def deform_to_bus(value): 15 | """Convert value into one which can be safely serialised 16 | and encoded onto the bus 17 | 18 | The opposite of cast_to_signature() 19 | """ 20 | # pylint: disable=too-many-return-statements,unidiomatic-typecheck,import-outside-toplevel 21 | if value is None: 22 | return value 23 | elif hasattr(value, "__to_bus__"): 24 | return deform_to_bus(value.__to_bus__()) 25 | elif isinstance(value, OrderedDict): 26 | return deform_to_bus(dict(value)) 27 | elif isinstance_safe(value, dict): 28 | new_dict = {} 29 | for dict_key, dict_value in value.items(): 30 | new_dict[dict_key] = deform_to_bus(dict_value) 31 | return new_dict 32 | elif is_namedtuple(value): 33 | return deform_to_bus(dict(value._asdict())) 34 | elif is_dataclass(value): 35 | from dataclasses import asdict 36 | 37 | return deform_to_bus(asdict(value)) 38 | elif isinstance_safe(value, frozendict): 39 | return deform_to_bus(value._dict) 40 | elif isinstance_safe(value, Enum): 41 | return deform_to_bus(value.value) 42 | elif isinstance_safe(value, (datetime, date)): 43 | return value.isoformat() 44 | elif isinstance_safe(value, UUID): 45 | return str(value) 46 | elif isinstance_safe(value, set): 47 | return [deform_to_bus(v) for v in value] 48 | elif type(value) == tuple: 49 | return [deform_to_bus(v) for v in value] 50 | elif isinstance_safe(value, list): 51 | return [deform_to_bus(v) for v in value] 52 | elif isinstance_safe(value, (int, float, str)): 53 | return value 54 | elif isinstance_safe(value, (bytes, memoryview)): 55 | return b64encode(bytes(value)).decode("utf8") 56 | elif isinstance_safe(value, (Decimal, complex)): 57 | return str(value) 58 | elif hasattr(value, "__module__"): 59 | # some kind of custom object we don't recognise 60 | raise DeformError( 61 | f"Failed to deform value of type {type(value)} for " 62 | f"transmission on the bus. Perhaps specify the " 63 | f"__to_bus__() and __from_bus__() methods on the class. Alternatively, " 64 | f"transform the data before placing it onto the bus." 65 | ) 66 | else: 67 | # A built-in that we missed in the above checks? 
68 | return value 69 | -------------------------------------------------------------------------------- /lightbus_experiments/potential_api_oo.py: -------------------------------------------------------------------------------- 1 | Api = object # Base API class 2 | Event = object # A simple event/signal system 3 | 4 | ############ 5 | # LONG FORM: Separate classes for definition & implementation allow 6 | # putting definitions in a common python package available 7 | # across all apps 8 | ############ 9 | 10 | # client.py 11 | 12 | 13 | class AuthApi(Api): 14 | user_registered = Event() 15 | user_account_closed = Event() 16 | 17 | def get_user(self, username: str) -> dict: 18 | pass 19 | 20 | def check_password(self, password: str) -> bool: 21 | pass 22 | 23 | 24 | api = AuthApi.as_client() 25 | 26 | 27 | # server.py 28 | 29 | 30 | class AuthImplementation(AuthApi): # Inherits from client definition 31 | def get_user(self, username: str) -> dict: 32 | # Actual implementation 33 | return {"name": "Test User", "email": "test@example.com"} 34 | 35 | def check_password(self, password: str) -> dict: 36 | return password == "Passw0rd!" 37 | 38 | 39 | api = AuthImplementation.as_server() 40 | 41 | 42 | ############## 43 | # ALTERNATIVE: Can combine both definitions if separation is not required. 44 | ############## 45 | 46 | # client_server.py 47 | 48 | 49 | class AuthImplementation(Api): 50 | user_registered = Event() 51 | user_account_closed = Event() 52 | 53 | def get_user(self, username: str) -> dict: 54 | # Actual implementation 55 | return {"name": "Test User", "email": "test@example.com"} 56 | 57 | def check_password(self, password: str) -> dict: 58 | return password == "Passw0rd!" 59 | 60 | 61 | client = AuthImplementation.as_client() 62 | server = AuthImplementation.as_server() 63 | 64 | 65 | # Pros: 66 | # - Personal preference: I find this more readable 67 | # - IDE's will warn about definition/implementation signatures not matching 68 | # - Makes our implementation different(/easier?) 69 | # - Has the option of being DRY where client/server separation is not required 70 | # Cons: 71 | # - Not DRY in it's long form 72 | # - Forcing an OO design 73 | 74 | ####################### 75 | # Additional thoughts # 76 | ####################### 77 | 78 | # We could have a top level apis.py, much like Django's urls.py: 79 | 80 | # /apis.py 81 | 82 | apis = [ 83 | SparePartsApi.as_server(), # This is the spare parts application, so serve its API 84 | AuthApi.as_client(), # We need the Auth API in order to authenticate clients 85 | CustomersApi.as_client(only=["support_ticket_opened"]), # Select only certain events 86 | MetricsApi.as_client( 87 | exclude=["page_view"] 88 | ), # Filter out high-volume events we don't care about 89 | ] 90 | 91 | # Warren would be able to read this list of APIs and setup the necessary AMQP bindings. 92 | # Each API gets its own queue to avoid high activity on one API blocking all others. 93 | -------------------------------------------------------------------------------- /docs/reference/command-line-use/shell.md: -------------------------------------------------------------------------------- 1 | # `lightbus shell` 2 | 3 | The `lightbus shell` command provides an interactive prompt through which 4 | you can interface with the bus. 5 | 6 | To use this command you must first install `bpython`: 7 | 8 | ``` 9 | pip install bpython 10 | ``` 11 | 12 | ## Examples 13 | 14 | You should see the following when starting up the shell. 
This is a fully functional 15 | Python shell, with your bus loaded in and ready to be used: 16 | 17 | ``` 18 | $ lightbus shell 19 | >>> █ 20 | Welcome to the Lightbus shell. Use `bus` to access your bus.\ 21 | ``` 22 | 23 | Upon typing, the shell will begin to auto-complete based on the locally available APIs: 24 | 25 | ``` 26 | $ lightbus shell 27 | >>> bus.au█ 28 | ┌──────────────────────────────────────────────────────────────────────────┐ 29 | │ auth │ 30 | └──────────────────────────────────────────────────────────────────────────┘ 31 | ``` 32 | 33 | You can fire events and call RPCs as follows: 34 | 35 | ``` 36 | # Fire an event 37 | >>> bus.auth.user_registered.fire(email="joe@example.com", username="joe") 38 | 39 | # Call an RPC 40 | >>> bus.auth.check_password(username="admin", password="secret") 41 | True 42 | ``` 43 | 44 | ## Option reference 45 | 46 | ``` 47 | $ lightbus shell --help 48 | usage: lightbus shell [-h] [--bus BUS_MODULE] [--service-name SERVICE_NAME] 49 | [--process-name PROCESS_NAME] [--config FILE] 50 | [--log-level LOG_LEVEL] 51 | 52 | optional arguments: 53 | -h, --help show this help message and exit 54 | 55 | Common arguments: 56 | --bus BUS_MODULE, -b BUS_MODULE 57 | The bus module to import. Example 'bus', 58 | 'my_project.bus'. Defaults to the value of the 59 | LIGHTBUS_MODULE environment variable, or 'bus' 60 | (default: None) 61 | --service-name SERVICE_NAME, -s SERVICE_NAME 62 | Name of service in which this process resides. YOU 63 | SHOULD LIKELY SET THIS IN PRODUCTION. Can also be set 64 | using the LIGHTBUS_SERVICE_NAME environment. Will 65 | default to a random string. (default: None) 66 | --process-name PROCESS_NAME, -p PROCESS_NAME 67 | A unique name of this process within the service. Can 68 | also be set using the LIGHTBUS_PROCESS_NAME 69 | environment. Will default to a random string. 70 | (default: None) 71 | --config FILE Config file to load, JSON or YAML (default: None) 72 | --log-level LOG_LEVEL 73 | Set the log level. Overrides any value set in config. 74 | One of debug, info, warning, critical, exception.
75 | (default: None) 76 | ``` 77 | -------------------------------------------------------------------------------- /lightbus_experiments/stub_maker.py: -------------------------------------------------------------------------------- 1 | """ Some spaghetti code to test the practicality of stub generation 2 | 3 | Use: 4 | 5 | python stub_maker.py 6 | 7 | """ 8 | from collections import OrderedDict 9 | from typing import Any 10 | 11 | from bottle import HTTPResponse 12 | 13 | 14 | class Event(): 15 | pass 16 | 17 | 18 | class MyApi(): 19 | my_event = Event() 20 | 21 | def _util(self): 22 | pass 23 | 24 | def method1(self, user_id: int, *args, **kwargs) -> dict: 25 | return {} 26 | 27 | def method2(self) -> HTTPResponse: 28 | return HTTPResponse() 29 | 30 | 31 | def parse_type(t): 32 | if t.__module__ == "typing": 33 | return t, repr(t).split(".", maxsplit=1)[1] 34 | else: 35 | return t, t.__name__ 36 | 37 | 38 | if __name__ == "__main__": 39 | import inspect 40 | 41 | api = MyApi 42 | events = [] 43 | methods = [] 44 | for k, v in api.__dict__.items(): 45 | if isinstance(v, Event): 46 | events.append(k) 47 | elif callable(v) and not k.startswith("_"): 48 | methods.append(k) 49 | 50 | imports_needed = set() 51 | 52 | imports_needed |= set(api.__bases__) 53 | stub = "class {}({}):\n".format(api.__name__, ",".join(c.__name__ for c in api.__bases__)) 54 | 55 | for event in events: 56 | imports_needed.add(Event) 57 | stub += " {} = Event()\n".format(event) 58 | 59 | for method in methods: 60 | arg_spec = inspect.getfullargspec(getattr(api, method)) 61 | annotated_args = OrderedDict() 62 | for arg_name in arg_spec.args: 63 | type_ = arg_spec.annotations.get(arg_name, Any) 64 | imports_needed.add(type_) 65 | annotated_args[arg_name] = type_ 66 | 67 | annotated_args_formatted = [] 68 | for i, (name, t) in enumerate(annotated_args.items()): 69 | if i == 0: 70 | annotated_args_formatted.append(name) # 'self' doesn't need an annotation 71 | else: 72 | t, type_name = parse_type(t) 73 | imports_needed.add(t) 74 | annotated_args_formatted.append("{}: {}".format(name, type_name)) 75 | 76 | return_type = arg_spec.annotations.get("return", Any) 77 | return_type, return_type_name = parse_type(return_type) 78 | imports_needed.add(return_type) 79 | 80 | stub += " def {}({}) -> {}:\n".format( 81 | method, ", ".join(annotated_args_formatted), return_type_name 82 | ) 83 | stub += " pass\n\n" 84 | 85 | import_statements = [] 86 | for type_ in imports_needed: 87 | type_, name = parse_type(type_) 88 | if type_.__module__ in ("builtins", "__main__"): 89 | continue 90 | import_statements.append("from {} import {}".format(type_.__module__, name)) 91 | stub = "\n".join(import_statements) + "\n\n" + stub 92 | 93 | print(stub) 94 | -------------------------------------------------------------------------------- /lightbus/internal_apis.py: -------------------------------------------------------------------------------- 1 | from lightbus.api import Api, Event 2 | 3 | 4 | class LightbusStateApi(Api): 5 | """The API for the state plugin""" 6 | 7 | worker_started = Event( 8 | parameters=[ 9 | "service_name", 10 | "process_name", 11 | "metrics_enabled", 12 | "api_names", 13 | "listening_for", 14 | "timestamp", 15 | "ping_interval", 16 | ] 17 | ) 18 | worker_ping = Event( 19 | parameters=[ 20 | "service_name", 21 | "process_name", 22 | "metrics_enabled", 23 | "api_names", 24 | "listening_for", 25 | "timestamp", 26 | "ping_interval", 27 | ] 28 | ) 29 | worker_stopped = Event(parameters=["process_name", "timestamp"]) 30 | 31 | 
class Meta: 32 | name = "internal.state" 33 | internal = True 34 | 35 | 36 | class LightbusMetricsApi(Api): 37 | """The API for the metrics plugin""" 38 | 39 | rpc_call_sent = Event( 40 | parameters=[ 41 | "service_name", 42 | "process_name", 43 | "id", 44 | "api_name", 45 | "procedure_name", 46 | "kwargs", 47 | "timestamp", 48 | ] 49 | ) 50 | rpc_call_received = Event( 51 | parameters=["service_name", "process_name", "id", "api_name", "procedure_name", "timestamp"] 52 | ) 53 | rpc_response_sent = Event( 54 | parameters=[ 55 | "service_name", 56 | "process_name", 57 | "id", 58 | "api_name", 59 | "procedure_name", 60 | "result", 61 | "timestamp", 62 | ] 63 | ) 64 | rpc_response_received = Event( 65 | parameters=["service_name", "process_name", "id", "api_name", "procedure_name", "timestamp"] 66 | ) 67 | 68 | event_fired = Event( 69 | parameters=[ 70 | "service_name", 71 | "process_name", 72 | "event_id", 73 | "api_name", 74 | "event_name", 75 | "kwargs", 76 | "timestamp", 77 | ] 78 | ) 79 | event_received = Event( 80 | parameters=[ 81 | "service_name", 82 | "process_name", 83 | "event_id", 84 | "api_name", 85 | "event_name", 86 | "kwargs", 87 | "timestamp", 88 | ] 89 | ) 90 | event_processed = Event( 91 | parameters=[ 92 | "service_name", 93 | "process_name", 94 | "event_id", 95 | "api_name", 96 | "event_name", 97 | "kwargs", 98 | "timestamp", 99 | ] 100 | ) 101 | 102 | class Meta: 103 | name = "internal.metrics" 104 | internal = True 105 | -------------------------------------------------------------------------------- /docs/reference/command-line-use/dumpconfigschema.md: -------------------------------------------------------------------------------- 1 | # `lightbus dumpconfigschema` 2 | 3 | This command will output a [JSON schema](https://json-schema.org/) for the 4 | global bus configuration file. 5 | 6 | The global bus configuration file is typically written as YAML, but it can also be written as JSON. 7 | In which case, you can validate the structure against the JSON schema produced by this 8 | command. 9 | 10 | This schema can also be loaded into some editors to provide auto-completion when 11 | editing your bus' configuration file. 12 | 13 | !!! important 14 | 15 | Be careful not to confuse this command with `dumpschema`. The `dumpschema` 16 | command dumps your bus' schema, whereas this `dumpconfigschema` simply 17 | dumps the schema for your bus' configuration. 18 | 19 | 20 | ## Examples 21 | 22 | Dump the configuration file schema to standard out: 23 | 24 | ``` 25 | lightbus dumpconfigschema 26 | ``` 27 | 28 | Dump the configuration file schema to a file: 29 | 30 | ``` 31 | lightbus dumpconfigschema --out my_schema.json 32 | 33 | ``` 34 | 35 | ## Option reference 36 | 37 | ``` 38 | $ lightbus dumpconfigschema --help 39 | usage: lightbus dumpconfigschema [-h] [--out FILE] [--bus BUS_MODULE] 40 | [--service-name SERVICE_NAME] 41 | [--process-name PROCESS_NAME] [--config FILE] 42 | [--log-level LOG_LEVEL] 43 | 44 | optional arguments: 45 | -h, --help show this help message and exit 46 | 47 | Dump config schema command arguments: 48 | --out FILE, -o FILE File to write config schema to. If omitted the schema 49 | will be written to standard out. (default: None) 50 | 51 | Common arguments: 52 | --bus BUS_MODULE, -b BUS_MODULE 53 | The bus module to import. Example 'bus', 54 | 'my_project.bus'. 
Defaults to the value of the 55 | LIGHTBUS_MODULE environment variable, or 'bus' 56 | (default: None) 57 | --service-name SERVICE_NAME, -s SERVICE_NAME 58 | Name of service in which this process resides. YOU 59 | SHOULD LIKELY SET THIS IN PRODUCTION. Can also be set 60 | using the LIGHTBUS_SERVICE_NAME environment. Will 61 | default to a random string. (default: None) 62 | --process-name PROCESS_NAME, -p PROCESS_NAME 63 | A unique name of this process within the service. Can 64 | also be set using the LIGHTBUS_PROCESS_NAME 65 | environment. Will default to a random string. 66 | (default: None) 67 | --config FILE Config file to load, JSON or YAML (default: None) 68 | --log-level LOG_LEVEL 69 | Set the log level. Overrides any value set in config. 70 | One of debug, info, warning, critical, exception. 71 | (default: None) 72 | ``` 73 | -------------------------------------------------------------------------------- /tests/transports/redis/test_reliability_redis_rpc.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import logging 4 | import os 5 | import resource 6 | from asyncio import CancelledError 7 | 8 | import pytest 9 | 10 | import lightbus 11 | import lightbus.path 12 | from lightbus.exceptions import LightbusTimeout 13 | 14 | 15 | pytestmark = pytest.mark.reliability 16 | 17 | logger = logging.getLogger(__name__) 18 | 19 | 20 | @pytest.mark.asyncio 21 | async def test_many_calls_and_clients(bus: lightbus.path.BusPath, new_bus, caplog, dummy_api, loop): 22 | caplog.set_level(logging.WARNING) 23 | loop.slow_callback_duration = 0.01 24 | results = [] 25 | 26 | async def do_single_call(client_bus): 27 | nonlocal results 28 | result = await client_bus.my.dummy.my_proc.call_async(field="x") 29 | results.append(result) 30 | 31 | client_buses = [new_bus() for n in range(0, 100)] 32 | server_buses = [new_bus() for n in range(0, 10)] 33 | 34 | for server_bus in server_buses: 35 | server_bus.client.register_api(dummy_api) 36 | await server_bus.client.consume_rpcs(apis=[dummy_api]) 37 | 38 | # Perform a lot of calls in parallel 39 | await asyncio.gather(*[do_single_call(client_bus) for client_bus in client_buses]) 40 | 41 | for bus_ in server_buses + client_buses: 42 | await bus_.client.close_async() 43 | 44 | assert len(results) == 100 45 | 46 | 47 | @pytest.mark.skipif(bool(os.environ.get("CI")), reason="This test does not pass reliably in CI") 48 | @pytest.mark.asyncio 49 | async def test_timeouts(bus: lightbus.path.BusPath, new_bus, caplog, dummy_api, loop): 50 | caplog.set_level(logging.ERROR) 51 | loop.slow_callback_duration = 0.01 52 | results = [] 53 | 54 | async def do_single_call(n, client_bus): 55 | nonlocal results 56 | try: 57 | result = await client_bus.my.dummy.random_death.call_async( 58 | n=n, death_every=20, bus_options={"timeout": 1} 59 | ) 60 | results.append(result) 61 | except LightbusTimeout: 62 | results.append(None) 63 | 64 | client_buses = [new_bus() for n in range(0, 100)] 65 | # Create a lot of servers so we have enough to handle all the RPCs before the timeout 66 | server_buses = [new_bus() for n in range(0, 20)] 67 | 68 | for server_bus in server_buses: 69 | server_bus.client.register_api(dummy_api) 70 | await server_bus.client.consume_rpcs(apis=[dummy_api]) 71 | 72 | # Perform a lot of calls in parallel 73 | await asyncio.gather( 74 | *[do_single_call(n, client_bus) for n, client_bus in enumerate(client_buses)] 75 | ) 76 | 77 | for bus_ in server_buses: 78 | await bus_.client.stop_worker() 79 | 80 | for 
bus_ in server_buses + client_buses: 81 | await bus_.client.close_async() 82 | 83 | total_successful = len([r for r in results if r is not None]) 84 | total_timeouts = len([r for r in results if r is None]) 85 | assert len(results) == 100 86 | assert total_timeouts == 5 87 | assert total_successful == 95 88 | -------------------------------------------------------------------------------- /docs/explanation/configuration.md: -------------------------------------------------------------------------------- 1 | # Configuration 2 | 3 | As discussed in the [configuration reference], Lightbus has three stages of configuration: 4 | 5 | 1. Module loading 6 | 2. Service-level configuration 7 | 3. Global bus configuration 8 | 9 | See the [configuration reference] for details on how this works in practice. Here we will discuss 10 | the reasoning behind this system. 11 | 12 | ## 1. Module loading 13 | 14 | Lightbus needs to know how to bootstrap itself. It needs to know where to start. 15 | This module loading step is how we provide Lightbus with this information, via the 16 | `LIGHTBUS_MODULE` environment variable. 17 | 18 | The module loading was inspired by [Django]'s `DJANGO_SETTINGS_MODULE` environment variable. 19 | 20 | `LIGHTBUS_MODULE` has a sensible default of `bus` in the same way that `DJANGO_SETTINGS_MODULE` 21 | has a sensible default of `settings`. This default will work in many scenarios, but may also 22 | need to be customised depending on one's project structure. 23 | 24 | ## 2. Service-level configuration 25 | 26 | Some configuration must by its nature be specific to a service, and not global to the 27 | entire bus. These options are broadly: 28 | 29 | 1. Configuration which distinguishes this service from any other service (`service_name` / `process_name`) 30 | 1. Configuration related to the specific deployment of this service (`features`) 31 | 1. A pointer to the global bus configuration 32 | 33 | ## 3. Global bus configuration 34 | 35 | The global bus configuration provides the bulk of the Lightbus options. 36 | This configuration should be consistent across all Lightbus clients. 37 | 38 | But what is the reasoning here? The reasoning is that the bus is a globally shared 39 | resource, and therefore everything that uses the bus is going to need to follow a 40 | common configuration in order to function. 41 | 42 | For example, consider the case where one bus client is 43 | configured to connect to redis server A for the `customers` API, 44 | and another bus client is configured to connect to redis server B for the same API. 45 | What will happen? 46 | 47 | The result will be that you will have effectively created a network partition. Each bus 48 | client will operate in total ignorance of each other. Events could be lost or ignored, and 49 | RPCs may never be processed. 50 | 51 | **Some** configuration **must** therefore be common to all clients, and that is 52 | what the global configuration provides. 53 | 54 | ### Configuration loading over HTTP(S) 55 | 56 | To this end, Lightbus supports loading configuration over HTTP(S). **The intention is not 57 | for you to host your Lightbus configuration on the public internet!** Rather, and if you wish, 58 | you may find it useful to host your configuration on an internal-only endpoint. 59 | 60 | Alternatively, you may decide to ensure your build/deploy process distributes a copy 61 | of the global configuration file to every service.
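As a rough sketch only (the host name and file path below are invented, and this assumes the `--config` option will accept an HTTP(S) URL as well as a local path, as described above), a worker could be pointed at the global configuration in either way:

    # Load the global bus configuration from an internal-only HTTPS endpoint
    lightbus run --bus=bus --config=https://config.internal.example.com/lightbus.yaml

    # Or load a copy placed on disk by your build/deploy process
    lightbus run --bus=bus --config=/etc/lightbus/global.yaml
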
62 | [The discussion on monorepositories in Architecture Tips](architecture-tips.md#use-a-monorepository) is relevant here. 63 | 64 | [configuration reference]: ../reference/configuration.md 65 | [Django]: https://www.djangoproject.com/ 66 | -------------------------------------------------------------------------------- /.github/workflows/test.yaml: -------------------------------------------------------------------------------- 1 | name: Test 2 | 3 | on: 4 | push: 5 | paths: 6 | - '.github/workflows/test.yaml' 7 | - 'lightbus/**' 8 | - 'tests/**' 9 | - 'lightbus_vendored/**' 10 | - 'poetry.lock' 11 | - 'pyproject.toml' 12 | - 'pytest.ini' 13 | - '.coveragerc' 14 | 15 | jobs: 16 | test: 17 | name: Test 18 | runs-on: ubuntu-22.04 19 | 20 | strategy: 21 | fail-fast: false 22 | matrix: 23 | python: 24 | - '3.9' 25 | - '3.10' 26 | - '3.11' 27 | # Remove greenlet entry from pyproject.toml when greenlet 3.0 is released 28 | - '3.12.0-rc.1' 29 | 30 | services: 31 | redis: 32 | image: redis:5 33 | ports: 34 | - 6379:6379 35 | options: >- 36 | --health-cmd "redis-cli ping" 37 | --health-interval 10s 38 | --health-timeout 5s 39 | --health-retries 5 40 | 41 | steps: 42 | - name: Checkout 43 | uses: actions/checkout@master 44 | 45 | - uses: actions/setup-python@v3 46 | with: 47 | python-version: ${{ matrix.python }} 48 | 49 | - name: Install 50 | run: | 51 | pip install poetry 52 | poetry install 53 | mkdir -p .coverage .test-reports 54 | 55 | - name: Test 56 | run: | 57 | poetry run pytest --cov=lightbus --junit-xml=junit.xml -v -m "not benchmark" --maxfail=10 58 | mv .coverage/coverage .coverage/coverage-${{ matrix.python }} 59 | 60 | - name: Upload code coverage report 61 | uses: actions/upload-artifact@v3 62 | with: 63 | name: code-coverage-reports 64 | path: .coverage/* 65 | 66 | code_coverage: 67 | name: Code Coverage Reports 68 | runs-on: ubuntu-22.04 69 | needs: test 70 | 71 | steps: 72 | - name: Checkout 73 | uses: actions/checkout@master 74 | 75 | - uses: actions/setup-python@v3 76 | with: 77 | python-version: 3.11 78 | 79 | - name: Download coverage reports 80 | uses: actions/download-artifact@v3 81 | with: 82 | name: code-coverage-reports 83 | 84 | - name: Install tools 85 | run: | 86 | # Coverage 87 | pip install coverage 88 | 89 | # Codacy reporter 90 | curl -Ls -o codacy-coverage-reporter "$(curl -Ls https://api.github.com/repos/codacy/codacy-coverage-reporter/releases/latest | jq -r '.assets | map({name, browser_download_url} | select(.name | contains("codacy-coverage-reporter-linux"))) | .[0].browser_download_url')" 91 | chmod +x codacy-coverage-reporter 92 | 93 | - name: Combine reports 94 | run: | 95 | coverage combine coverage* 96 | coverage xml -i 97 | 98 | - name: Upload report 99 | run: | 100 | ./codacy-coverage-reporter report --project-token ${{ secrets.CODACY_PROJECT_TOKEN }} --organization-provider gh --username adamcharnock --project-name lightbus -r coverage.xml 101 | -------------------------------------------------------------------------------- /docs/reference/command-line-use/dumpschema.md: -------------------------------------------------------------------------------- 1 | # `lightbus dumpschema` 2 | 3 | The `lightbus dumpschema` command will dump the bus' JSON schema to either a file or 4 | standard out. 5 | 6 | This schema file can then be manually provided to `lightbus run` using the 7 | `--schema` option. 8 | 9 | ## Why is this useful? 10 | 11 | The idea behind this command is to aid in testing and local development. 
12 | You can take a dump of your production bus' schema and use it in your 13 | local development or testing environment. 14 | 15 | This will allow Lightbus to validate your locally emitted events and RPCs 16 | against the expectations of your production environment. 17 | 18 | See [manual validation](../schema.md#manual-validation) for more information. 19 | 20 | ## Examples 21 | 22 | Dump the schema to standard out: 23 | 24 | ``` 25 | lightbus dumpschema 26 | ``` 27 | 28 | Dump the schema to a file: 29 | 30 | ``` 31 | lightbus dumpschema --out my_schema.json 32 | Schema for 3 APIs saved to my_schema.json 33 | ``` 34 | 35 | ## Options reference 36 | 37 | ``` 38 | $ lightbus dumpschema --help 39 | usage: lightbus dumpschema [-h] [--out FILE_OR_DIRECTORY] [--bus BUS_MODULE] 40 | [--service-name SERVICE_NAME] 41 | [--process-name PROCESS_NAME] [--config FILE] 42 | [--log-level LOG_LEVEL] 43 | 44 | optional arguments: 45 | -h, --help show this help message and exit 46 | 47 | Dump config schema command arguments: 48 | --out FILE_OR_DIRECTORY, -o FILE_OR_DIRECTORY 49 | File or directory to write schema to. If a directory 50 | is specified one schema file will be created for each 51 | API. If omitted the schema will be written to standard 52 | out. (default: None) 53 | 54 | Common arguments: 55 | --bus BUS_MODULE, -b BUS_MODULE 56 | The bus module to import. Example 'bus', 57 | 'my_project.bus'. Defaults to the value of the 58 | LIGHTBUS_MODULE environment variable, or 'bus' 59 | (default: None) 60 | --service-name SERVICE_NAME, -s SERVICE_NAME 61 | Name of service in which this process resides. YOU 62 | SHOULD LIKELY SET THIS IN PRODUCTION. Can also be set 63 | using the LIGHTBUS_SERVICE_NAME environment. Will 64 | default to a random string. (default: None) 65 | --process-name PROCESS_NAME, -p PROCESS_NAME 66 | A unique name of this process within the service. Can 67 | also be set using the LIGHTBUS_PROCESS_NAME 68 | environment. Will default to a random string. 69 | (default: None) 70 | --config FILE Config file to load, JSON or YAML (default: None) 71 | --log-level LOG_LEVEL 72 | Set the log level. Overrides any value set in config. 73 | One of debug, info, warning, critical, exception. 74 | (default: None) 75 | ``` 76 | -------------------------------------------------------------------------------- /docs/howto/modify-lightbus.md: -------------------------------------------------------------------------------- 1 | # How to modify Lightbus 2 | 3 | Contributions to Lightbus are very welcome. This will talk you through setting up a 5 | development installation of Lightbus. Using this installation you will be able to: 6 | 7 | * Modify the Lightbus source code and/or documentation 8 | * Run the Lightbus test suite 9 | * View any modified documentation locally 10 | * Use your development Lightbus install within another project 11 | 12 | ## Prerequisites 13 | 14 | You will need: 15 | 16 | * Redis running locally 17 | 18 | ## Getting the code 19 | 20 | Check out the Lightbus repository from GitHub: 21 | 22 | git clone https://github.com/adamcharnock/lightbus.git 23 | cd lightbus 24 | 25 | ## Environment setup 26 | 27 | It is a good idea to put `asyncio` into debug mode. You can do this by setting the following in 28 | your shell's environment: 29 | 30 | PYTHONASYNCIODEBUG=1 31 | 32 | The testing framework will also need to know where your redis instance is running.
32 | This is set using the `REDIS_URL` and `REDIS_URL_B` environment variables: 33 | 34 | # Default values shown below 35 | REDIS_URL=redis://127.0.0.1:6379/10 36 | REDIS_URL_B=redis://127.0.0.1:6379/11 37 | 38 | ## Installation 39 | 40 | You will need to install Lightbus' standard dependencies, as well as Lightbus' development 41 | dependencies. Note that you may need to [install poetry](https://poetry.eustace.io/docs/#installation) 42 | if you do not already have it. 43 | 44 | You can install both of these groups as follows: 45 | 46 | # Install standard & dev dependencies into a virtual environment 47 | poetry install 48 | 49 | # Enter the virtual environment you have created, 50 | # thereby giving you access to the pytest and mkdocs commands (below) 51 | poetry shell 52 | 53 | ## Running the tests 54 | 55 | You can run the tests once you have completed the above steps: 56 | 57 | pytest 58 | 59 | Note that you can run subsets of the tests as follows: 60 | 61 | pytest -m unit # Fast with high coverage 62 | pytest -m integration 63 | pytest -m reliability 64 | pytest -m benchmark 65 | 66 | ## Viewing the Lightbus documentation locally 67 | 68 | You can view the documentation of your local Lightbus install as follows: 69 | 70 | # Serve the docs locally 71 | mkdocs serve 72 | 73 | You can now view the documentation at http://127.0.0.1:8000. 74 | The documentation source can be found in `docs/`. 75 | 76 | You can also check for broken links within the docs by running the 77 | `check_links.sh` script: 78 | 79 | # Check for broken links 80 | ./docs/check_links.sh 81 | 82 | ## Using within your project 83 | 84 | You can install your development copy of Lightbus within your 85 | project as follows: 86 | 87 | # Within your own project 88 | 89 | # Make sure you remove any existing lightbus version 90 | pip uninstall lightbus 91 | 92 | # Install your local development lightbus 93 | pip install --editable /path/to/your/local/lightbus 94 | 95 | ## See also 96 | 97 | Being familiar with the [explanation](../explanation/index.md) section is highly recommended 98 | if modifying the Lightbus source. 99 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool] 2 | 3 | [tool.poetry] 4 | name = "lightbus" 5 | version = "1.3.0a6" 6 | authors = [ 7 | "Adam Charnock ", 8 | ] 9 | readme = "README.md" 10 | homepage = "https://lightbus.org" 11 | documentation = "https://lightbus.org" 12 | repository = "https://github.com/adamcharnock/lightbus/" 13 | keywords = ["python", "messaging", "redis", "bus", "queue"] 14 | description = "RPC & event framework for Python 3" 15 | classifiers = [ 16 | "Development Status :: 5 - Production/Stable", 17 | "Framework :: AsyncIO", 18 | "Intended Audience :: Developers", 19 | "License :: OSI Approved :: Apache Software License", 20 | "Natural Language :: English", 21 | "Operating System :: MacOS :: MacOS X", 22 | "Operating System :: POSIX", 23 | "Programming Language :: Python :: 3", 24 | "Topic :: System :: Networking", 25 | "Topic :: Communications", 26 | ] 27 | packages = [ 28 | { include = "lightbus" }, 29 | { include = "lightbus_vendored" }, 30 | ] 31 | include = [ 32 | "CHANGELOG.md", 33 | "VERSION", 34 | ] 35 | 36 | [tool.poetry.dependencies] 37 | python = ">=3.9" 38 | jsonschema = "^4.19.0" 39 | pyyaml = ">=3.12" 40 | python-dateutil = "^2.8.1" 41 | async-timeout = "^4.0.3" 42 | 43 | [tool.poetry.group.dev.dependencies] 44 | bpython = "*" 45 | 
colorama = "*" 46 | coverage = "*" 47 | flake8 = "*" 48 | markdown = "*" 49 | markdown-include = "*" 50 | pre-commit = "*" 51 | pylint = "*" 52 | pymdown-extensions = "*" 53 | pytest = ">=5.1.1" 54 | pytest-asyncio = ">=0.12.0" 55 | pytest-benchmark = "*" 56 | pytest-cov = "*" 57 | pytest-mock = "3.2.0" 58 | pytest-repeat = "*" 59 | pytest-timeout = "*" 60 | pytest-xdist = "*" 61 | schedule = "*" 62 | structlog = "*" 63 | tox = "*" 64 | codacy-coverage = "^1.3" 65 | black = "*" 66 | greenlet = { version = "^3.0.0a1", python = "3.12", allow-prereleases = true } 67 | 68 | mkdocs-material = "<5,>=4" 69 | mike = "^1.1.2" 70 | mkdocs = "^1.5.2" 71 | 72 | [tool.poetry.scripts] 73 | lightbus = 'lightbus.commands:lightbus_entry_point' 74 | # These are our entry points (which poetry calls 'plugins') 75 | 76 | [tool.poetry.plugins.lightbus_plugins] 77 | internal_state = "lightbus.plugins.state:StatePlugin" 78 | internal_metrics = "lightbus.plugins.metrics:MetricsPlugin" 79 | 80 | [tool.poetry.plugins.lightbus_event_transports] 81 | redis = "lightbus:RedisEventTransport" 82 | debug = "lightbus:DebugEventTransport" 83 | 84 | [tool.poetry.plugins.lightbus_rpc_transports] 85 | redis = "lightbus:RedisRpcTransport" 86 | debug = "lightbus:DebugRpcTransport" 87 | 88 | [tool.poetry.plugins.lightbus_result_transports] 89 | redis = "lightbus:RedisResultTransport" 90 | debug = "lightbus:DebugResultTransport" 91 | 92 | [tool.poetry.plugins.lightbus_schema_transports] 93 | redis = "lightbus:RedisSchemaTransport" 94 | debug = "lightbus:DebugSchemaTransport" 95 | 96 | [build-system] 97 | requires = ["poetry>=1.0.0"] 98 | build-backend = "poetry.core.masonry.api" 99 | -------------------------------------------------------------------------------- /docs/explanation/apis.md: -------------------------------------------------------------------------------- 1 | # APIs 2 | 3 | When we refer to an *API*, we are referring to an `Api` class definition. 4 | **All functionality on the bus is defined using APIs.** 5 | 6 | For example, consider an API for support tickets within a company's 7 | help desk: 8 | 9 | ```python3 10 | class TicketApi(Api): 11 | ticket_created = Event(parameters=('id', 'sender', 'subject', 'body')) 12 | 13 | class Meta: 14 | name = 'help_desk.ticket' 15 | 16 | def get(self, id): 17 | return get_ticket_from_db(pk=id) 18 | ``` 19 | 20 | This API defines an event, a procedure, and the name used to address the API 21 | on the bus. The help desk service could define multiple additional APIs as needed 22 | (perhaps for listing help desk staff or retrieving reports). 23 | 24 | 25 | ## API registration & authoritative/non-authoritative APIs 26 | 27 | An API can be registered with your service's bus client as follows: 28 | 29 | ```python3 30 | import lightbus 31 | from my_apis import HelpDeskApi 32 | 33 | bus = lightbus.create() 34 | 35 | # Register the API with your service's client 36 | bus.client.register_api(HelpDeskApi()) 37 | ``` 38 | 39 | Registering an API will: 40 | 41 | 1. Allow you to **fire events** on the API using the service's client 42 | 1. Cause the lightbus worker for this service (i.e. `lightbus run`) 43 | to **respond to remote procedure calls** on the registered API 44 | 45 | We say that a service which registers an API is *authoritative* for that API. 46 | Services which do not register a given API are *non-authoritative* for the API. 
47 | **Both authoritative and non-authoritative services can listen for events on any API and 48 | call remote procedures on any API.** 49 | 50 | For example, a separate online store service could not fire the `help_desk.ticket_created` 51 | event on the API we defined above. Nor would you reasonably expect the online store to 52 | service remote procedure calls for `help_desk.ticket.get()`. 53 | 54 | ## Why? 55 | 56 | Preventing the online store service from responding to **remote procedure calls** for the 57 | help desk service makes sense. There is no reason the online store should have any 58 | awareness of the help desk, so you would not expect it to respond to remote 59 | procedure calls regarding tickets. 60 | 61 | Therefore, the logic for allowing only authoritative services to respond to remote procedure calls 62 | is hopefully compelling. 63 | 64 | The case for limiting event firing to authoritative services is one of architecture, maintainability, 65 | and consistency: 66 | 67 | * Allowing any event to be fired by any service within your organisation could quickly 68 | lead to spiraling complexity. 69 | * The authoritative service will always have sufficient information to guarantee basic validity of an 70 | emitted message (for example, the event exists, required parameters are present, etc.). As a result, errors 71 | can be caught earlier, rather than allowing them to propagate onto the bus and potentially impact distant 72 | services. 73 | 74 | We welcome discussion on this topic; [open a GitHub issue] if you would like to discuss this further. 75 | 76 | [services]: services.md 77 | [open a GitHub issue]: https://github.com/adamcharnock/lightbus/issues 78 | --------------------------------------------------------------------------------