├── .github
│   ├── FUNDING.yml
│   └── workflows
│       ├── publish.yml
│       └── test-suite.yml
├── .gitignore
├── LICENSE.md
├── README.md
├── broadcaster
│   ├── __init__.py
│   ├── _base.py
│   ├── backends
│   │   ├── __init__.py
│   │   ├── base.py
│   │   ├── kafka.py
│   │   ├── memory.py
│   │   ├── postgres.py
│   │   └── redis.py
│   └── py.typed
├── docker-compose.yaml
├── docs
│   └── demo.gif
├── example
│   ├── README.md
│   ├── app.py
│   ├── requirements.txt
│   └── templates
│       └── index.html
├── pyproject.toml
├── requirements.txt
├── scripts
│   ├── build
│   ├── check
│   ├── clean
│   ├── coverage
│   ├── install
│   ├── lint
│   ├── publish
│   ├── start
│   └── test
└── tests
    ├── __init__.py
    ├── test_broadcast.py
    └── test_unsubscribe.py
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | github: encode
2 |
--------------------------------------------------------------------------------
/.github/workflows/publish.yml:
--------------------------------------------------------------------------------
1 | name: Publish
2 |
3 | on:
4 | push:
5 | tags:
6 | - "*"
7 |
8 | jobs:
9 | publish:
10 | name: "Publish release"
11 | runs-on: "ubuntu-latest"
12 |
13 | steps:
14 | - uses: "actions/checkout@v3"
15 | - uses: "actions/setup-python@v4"
16 | with:
17 | python-version: "3.10"
18 |
19 | - name: "Install dependencies"
20 | run: "scripts/install"
21 |
22 | - name: "Build package & docs"
23 | run: "scripts/build"
24 |
25 | - name: "Publish to PyPI & deploy docs"
26 | run: "scripts/publish"
27 | env:
28 | TWINE_USERNAME: __token__
29 | TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
30 |
--------------------------------------------------------------------------------
/.github/workflows/test-suite.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Test Suite
3 |
4 | on:
5 | push:
6 | branches: ["master"]
7 | pull_request:
8 | branches: ["master"]
9 |
10 | jobs:
11 | tests:
12 | name: "Python ${{ matrix.python-version }}"
13 | runs-on: "ubuntu-latest"
14 |
15 | strategy:
16 | matrix:
17 | python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
18 |
19 | services:
20 | zookeeper:
21 | image: confluentinc/cp-zookeeper
22 | ports:
23 | - 32181:32181
24 | env:
25 | ZOOKEEPER_CLIENT_PORT: 32181
26 | ALLOW_ANONYMOUS_LOGIN: yes
27 | options: --hostname zookeeper
28 | kafka:
29 | image: confluentinc/cp-kafka
30 | ports:
31 | - 9092:9092
32 | - 29092:29092
33 | env:
34 | KAFKA_ZOOKEEPER_CONNECT: "zookeeper:32181"
35 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
36 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: "PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT"
37 | KAFKA_ADVERTISED_LISTENERS: "PLAINTEXT_HOST://localhost:29092,PLAINTEXT://localhost:9092"
38 | KAFKA_BROKER_ID: 1
39 | ALLOW_PLAINTEXT_LISTENER: yes
40 | options: --hostname kafka
41 | redis:
42 | image: redis:alpine
43 | ports:
44 | - 6379:6379
45 | postgres:
46 | image: postgres:12
47 | env:
48 | POSTGRES_DB: broadcaster
49 | POSTGRES_PASSWORD: postgres
50 | POSTGRES_HOST_AUTH_METHOD: trust
51 | POSTGRES_USER: postgres
52 | ports:
53 | - 5432:5432
54 |
55 | steps:
56 | - uses: "actions/checkout@v2"
57 | - uses: "actions/setup-python@v2"
58 | with:
59 | python-version: "${{ matrix.python-version }}"
60 | - name: "Install dependencies"
61 | run: "scripts/install"
62 | - name: "Run linting checks"
63 | run: "scripts/check"
64 | - name: "Build package & docs"
65 | run: "scripts/build"
66 | - name: "Run tests"
67 | run: "scripts/test"
68 | - name: "Enforce coverage"
69 | run: "scripts/coverage"
70 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | test.db
3 | .coverage
4 | .pytest_cache/
5 | .mypy_cache/
6 | *.egg-info/
7 | venv/
8 | build/
9 | dist/
10 |
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | Copyright © 2020, [Encode OSS Ltd](https://www.encode.io/).
2 | All rights reserved.
3 |
4 | Redistribution and use in source and binary forms, with or without
5 | modification, are permitted provided that the following conditions are met:
6 |
7 | * Redistributions of source code must retain the above copyright notice, this
8 | list of conditions and the following disclaimer.
9 |
10 | * Redistributions in binary form must reproduce the above copyright notice,
11 | this list of conditions and the following disclaimer in the documentation
12 | and/or other materials provided with the distribution.
13 |
14 | * Neither the name of the copyright holder nor the names of its
15 | contributors may be used to endorse or promote products derived from
16 | this software without specific prior written permission.
17 |
18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
19 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
20 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
21 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
22 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
23 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
24 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
25 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
26 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
27 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Broadcaster
2 |
3 | Broadcaster helps you develop realtime streaming functionality by providing
4 | a simple broadcast API on top of a number of different backend services.
5 |
6 | It currently supports [Redis PUB/SUB](https://redis.io/topics/pubsub), [Redis Streams](https://redis.io/docs/latest/develop/data-types/streams/), [Apache Kafka](https://kafka.apache.org/), and [Postgres LISTEN/NOTIFY](https://www.postgresql.org/docs/current/sql-notify.html), plus a simple in-memory backend that you can use for local development or during testing.
7 |
8 | ![Demo](docs/demo.gif)
9 |
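The core API is small: connect, subscribe to a channel, publish to it, and iterate over the events you receive. As a minimal sketch using only the public API, here's a publish/subscribe round-trip over the in-memory backend:

```python
import asyncio

from broadcaster import Broadcast


async def main() -> None:
    # `Broadcast` is an async context manager: it connects on entry
    # and disconnects on exit.
    async with Broadcast("memory://") as broadcast:
        async with broadcast.subscribe(channel="chatroom") as subscriber:
            await broadcast.publish(channel="chatroom", message="hello")
            event = await subscriber.get()
            print(event.message)  # prints "hello"


asyncio.run(main())
```
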
10 | Here's a complete example of the backend code for a simple websocket chat app:
11 |
12 | **app.py**
13 |
14 | ```python
15 | # Requires: `starlette`, `uvicorn`, `jinja2`, and `broadcaster[redis]`
16 | # Run with `uvicorn app:app`
17 | import anyio
18 | from broadcaster import Broadcast
19 | from starlette.applications import Starlette
20 | from starlette.routing import Route, WebSocketRoute
21 | from starlette.templating import Jinja2Templates
22 |
23 |
24 | broadcast = Broadcast("redis://localhost:6379")
25 | templates = Jinja2Templates("templates")
26 |
27 |
28 | async def homepage(request):
29 | template = "index.html"
30 | context = {"request": request}
31 | return templates.TemplateResponse(template, context)
32 |
33 |
34 | async def chatroom_ws(websocket):
35 | await websocket.accept()
36 |
37 | async with anyio.create_task_group() as task_group:
38 | # run until first is complete
39 | async def run_chatroom_ws_receiver() -> None:
40 | await chatroom_ws_receiver(websocket=websocket)
41 | task_group.cancel_scope.cancel()
42 |
43 | task_group.start_soon(run_chatroom_ws_receiver)
44 | await chatroom_ws_sender(websocket)
45 |
46 |
47 | async def chatroom_ws_receiver(websocket):
48 | async for message in websocket.iter_text():
49 | await broadcast.publish(channel="chatroom", message=message)
50 |
51 |
52 | async def chatroom_ws_sender(websocket):
53 | async with broadcast.subscribe(channel="chatroom") as subscriber:
54 | async for event in subscriber:
55 | await websocket.send_text(event.message)
56 |
57 |
58 | routes = [
59 | Route("/", homepage),
60 | WebSocketRoute("/", chatroom_ws, name='chatroom_ws'),
61 | ]
62 |
63 |
64 | app = Starlette(
65 | routes=routes, on_startup=[broadcast.connect], on_shutdown=[broadcast.disconnect],
66 | )
67 | ```
68 |
69 | The HTML template for the front end [is available here](https://github.com/encode/broadcaster/blob/master/example/templates/index.html), and is adapted from [Pieter Noordhuis's PUB/SUB demo](https://gist.github.com/pietern/348262).
70 |
71 | ## Requirements
72 |
73 | Python 3.8+
74 |
75 | ## Installation
76 |
77 | * `pip install broadcaster`
78 | * `pip install broadcaster[redis]`
79 | * `pip install broadcaster[postgres]`
80 | * `pip install broadcaster[kafka]`
81 |
82 | ## Available backends
83 |
84 | * `Broadcast("memory://")`
85 | * `Broadcast("redis://localhost:6379")`
86 | * `Broadcast("redis-stream://localhost:6379")`
87 | * `Broadcast("postgres://localhost:5432/broadcaster")`
88 | * `Broadcast("kafka://localhost:9092")`
89 |
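Because the backend is selected from the URL scheme, you can switch backends through configuration alone. A small sketch mirroring the bundled example app:

```python
import os

from broadcaster import Broadcast

# Fall back to the in-memory backend for local development.
broadcast = Broadcast(os.environ.get("BROADCAST_URL", "memory://"))
```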
90 |
91 | ### Using custom backends
92 |
93 | You can create your own backend and use it with `broadcaster`.
94 | To do so, create a class that extends `BroadcastBackend`
95 | and pass an instance of it to `Broadcast` via the `backend` argument.
96 |
97 | ```python
98 | from broadcaster import Broadcast, BroadcastBackend
99 |
100 | class MyBackend(BroadcastBackend):
101 |     ...  # implement connect, disconnect, subscribe, unsubscribe, publish, next_published
102 |
103 | broadcast = Broadcast(backend=MyBackend())
104 | ```
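
As a fuller illustration, here is a minimal sketch of a complete custom backend modeled on the built-in memory backend (`QueueBackend` is a hypothetical name, not part of the library):

```python
import asyncio
from typing import Any

from broadcaster import Broadcast, BroadcastBackend, Event


class QueueBackend(BroadcastBackend):
    """Toy backend: a single in-process queue, like `memory://`."""

    def __init__(self, url: str = "") -> None:
        self._subscribed: set[str] = set()
        self._published: asyncio.Queue[Event] = asyncio.Queue()

    async def connect(self) -> None:
        pass

    async def disconnect(self) -> None:
        pass

    async def subscribe(self, channel: str) -> None:
        self._subscribed.add(channel)

    async def unsubscribe(self, channel: str) -> None:
        self._subscribed.discard(channel)

    async def publish(self, channel: str, message: Any) -> None:
        await self._published.put(Event(channel=channel, message=message))

    async def next_published(self) -> Event:
        # Only deliver events for channels that still have subscribers.
        while True:
            event = await self._published.get()
            if event.channel in self._subscribed:
                return event


broadcast = Broadcast(backend=QueueBackend())
```
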
105 | ## Where next?
106 |
107 | At the moment `broadcaster` is in Alpha, and should be considered a working design document.
108 |
109 | The API should be considered subject to change. If you *do* want to use Broadcaster in its current
110 | state, make sure to strictly pin your requirements to `broadcaster==0.3.2`.
111 |
112 | To be more capable, we'd want to add some additional backends, provide API support for reading recent event history from persistent stores, and provide a serialization/deserialization API:
113 |
114 | * Serialization / deserialization to support broadcasting structured data.
115 | * A backend for RabbitMQ.
116 | * Add support for `subscribe('chatroom', history=100)` for backends which provide persistence (Redis Streams, Apache Kafka). This will allow applications to subscribe to channel updates, while also being given an initial window onto the most recent events. We *might* also want to support some basic paging operations, to allow applications to scan back in the event history.
117 | * Support for pattern subscribes in backends that support it.
118 |
119 | ## Third Party Packages
120 |
121 | ### MQTT backend
122 | [Gist](https://gist.github.com/alex-oleshkevich/68411a0e7ad24d53afd28c3fa5da468c)
123 |
124 | Integrates MQTT with Broadcaster.
125 |
--------------------------------------------------------------------------------
/broadcaster/__init__.py:
--------------------------------------------------------------------------------
1 | from ._base import Broadcast, Event
2 | from .backends.base import BroadcastBackend
3 |
4 | __version__ = "0.3.2"
5 | __all__ = ["Broadcast", "Event", "BroadcastBackend"]
6 |
--------------------------------------------------------------------------------
/broadcaster/_base.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import asyncio
4 | from contextlib import asynccontextmanager
5 | from typing import TYPE_CHECKING, Any, AsyncGenerator, AsyncIterator, cast
6 | from urllib.parse import urlparse
7 |
8 | if TYPE_CHECKING: # pragma: no cover
9 | from broadcaster.backends.base import BroadcastBackend
10 |
11 |
12 | class Event:
13 | def __init__(self, channel: str, message: str) -> None:
14 | self.channel = channel
15 | self.message = message
16 |
17 | def __eq__(self, other: object) -> bool:
18 | return isinstance(other, Event) and self.channel == other.channel and self.message == other.message
19 |
20 | def __repr__(self) -> str:
21 | return f"Event(channel={self.channel!r}, message={self.message!r})"
22 |
23 |
24 | class Unsubscribed(Exception):
25 | pass
26 |
27 |
28 | class Broadcast:
29 | def __init__(self, url: str | None = None, *, backend: BroadcastBackend | None = None) -> None:
30 | assert url or backend, "Either `url` or `backend` must be provided."
31 | self._backend = backend or self._create_backend(cast(str, url))
32 | self._subscribers: dict[str, set[asyncio.Queue[Event | None]]] = {}
33 |
34 | def _create_backend(self, url: str) -> BroadcastBackend:
35 | parsed_url = urlparse(url)
36 | if parsed_url.scheme in ("redis", "rediss"):
37 | from broadcaster.backends.redis import RedisBackend
38 |
39 | return RedisBackend(url)
40 |
41 | elif parsed_url.scheme == "redis-stream":
42 | from broadcaster.backends.redis import RedisStreamBackend
43 |
44 | return RedisStreamBackend(url)
45 |
46 | elif parsed_url.scheme in ("postgres", "postgresql"):
47 | from broadcaster.backends.postgres import PostgresBackend
48 |
49 | return PostgresBackend(url)
50 |
51 |         elif parsed_url.scheme == "kafka":
52 | from broadcaster.backends.kafka import KafkaBackend
53 |
54 | return KafkaBackend(url)
55 |
56 | elif parsed_url.scheme == "memory":
57 | from broadcaster.backends.memory import MemoryBackend
58 |
59 | return MemoryBackend(url)
60 | raise ValueError(f"Unsupported backend: {parsed_url.scheme}")
61 |
62 | async def __aenter__(self) -> Broadcast:
63 | await self.connect()
64 | return self
65 |
66 | async def __aexit__(self, *args: Any, **kwargs: Any) -> None:
67 | await self.disconnect()
68 |
69 | async def connect(self) -> None:
70 | await self._backend.connect()
71 | self._listener_task = asyncio.create_task(self._listener())
72 |
73 | async def disconnect(self) -> None:
74 | if self._listener_task.done():
75 | self._listener_task.result()
76 | else:
77 | self._listener_task.cancel()
78 | await self._backend.disconnect()
79 |
80 | async def _listener(self) -> None:
81 | while True:
82 | event = await self._backend.next_published()
83 | for queue in list(self._subscribers.get(event.channel, [])):
84 | await queue.put(event)
85 |
86 | async def publish(self, channel: str, message: Any) -> None:
87 | await self._backend.publish(channel, message)
88 |
89 | @asynccontextmanager
90 | async def subscribe(self, channel: str) -> AsyncIterator[Subscriber]:
91 | queue: asyncio.Queue[Event | None] = asyncio.Queue()
92 |
93 | try:
94 | if not self._subscribers.get(channel):
95 | await self._backend.subscribe(channel)
96 | self._subscribers[channel] = {queue}
97 | else:
98 | self._subscribers[channel].add(queue)
99 |
100 | yield Subscriber(queue)
101 | finally:
102 | self._subscribers[channel].remove(queue)
103 | if not self._subscribers.get(channel):
104 | del self._subscribers[channel]
105 | await self._backend.unsubscribe(channel)
106 | await queue.put(None)
107 |
108 |
109 | class Subscriber:
110 | def __init__(self, queue: asyncio.Queue[Event | None]) -> None:
111 | self._queue = queue
112 |
113 | async def __aiter__(self) -> AsyncGenerator[Event | None, None]:
114 | try:
115 | while True:
116 | yield await self.get()
117 | except Unsubscribed:
118 | pass
119 |
120 | async def get(self) -> Event:
121 | item = await self._queue.get()
122 | if item is None:
123 | raise Unsubscribed()
124 | return item
125 |
--------------------------------------------------------------------------------
/broadcaster/backends/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/encode/broadcaster/6b3ea71d4f8fb038fa7d357a1fb3750d58ac614d/broadcaster/backends/__init__.py
--------------------------------------------------------------------------------
/broadcaster/backends/base.py:
--------------------------------------------------------------------------------
1 | from typing import Any
2 |
3 | from .._base import Event
4 |
5 |
6 | class BroadcastBackend:
7 | def __init__(self, url: str) -> None:
8 | raise NotImplementedError()
9 |
10 | async def connect(self) -> None:
11 | raise NotImplementedError()
12 |
13 | async def disconnect(self) -> None:
14 | raise NotImplementedError()
15 |
16 | async def subscribe(self, channel: str) -> None:
17 | raise NotImplementedError()
18 |
19 | async def unsubscribe(self, channel: str) -> None:
20 | raise NotImplementedError()
21 |
22 | async def publish(self, channel: str, message: Any) -> None:
23 | raise NotImplementedError()
24 |
25 | async def next_published(self) -> Event:
26 | raise NotImplementedError()
27 |
--------------------------------------------------------------------------------
/broadcaster/backends/kafka.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import asyncio
4 | import typing
5 | from urllib.parse import urlparse
6 |
7 | from aiokafka import AIOKafkaConsumer, AIOKafkaProducer
8 |
9 | from .._base import Event
10 | from .base import BroadcastBackend
11 |
12 |
13 | class KafkaBackend(BroadcastBackend):
14 | def __init__(self, urls: str | list[str]) -> None:
15 | urls = [urls] if isinstance(urls, str) else urls
16 | self._servers = [urlparse(url).netloc for url in urls]
17 | self._consumer_channels: set[str] = set()
18 | self._ready = asyncio.Event()
19 |
20 | async def connect(self) -> None:
21 | self._producer = AIOKafkaProducer(bootstrap_servers=self._servers) # pyright: ignore
22 | self._consumer = AIOKafkaConsumer(bootstrap_servers=self._servers) # pyright: ignore
23 | await self._producer.start()
24 | await self._consumer.start()
25 |
26 | async def disconnect(self) -> None:
27 | await self._producer.stop()
28 | await self._consumer.stop()
29 |
30 | async def subscribe(self, channel: str) -> None:
31 | self._consumer_channels.add(channel)
32 | self._consumer.subscribe(topics=self._consumer_channels)
33 | await self._wait_for_assignment()
34 |
35 | async def unsubscribe(self, channel: str) -> None:
36 | self._consumer.unsubscribe()
37 |
38 | async def publish(self, channel: str, message: typing.Any) -> None:
39 | await self._producer.send_and_wait(channel, message.encode("utf8"))
40 |
41 | async def next_published(self) -> Event:
42 | await self._ready.wait()
43 | message = await self._consumer.getone()
44 | value = message.value
45 |
46 | # for type compatibility:
47 | # we declare Event.message as str, so convert None to empty string
48 | if value is None:
49 | value = b""
50 | return Event(channel=message.topic, message=value.decode("utf8"))
51 |
52 | async def _wait_for_assignment(self) -> None:
53 | """Wait for the consumer to be assigned to the partition."""
54 | while not self._consumer.assignment():
55 | await asyncio.sleep(0.001)
56 |
57 | self._ready.set()
58 |
--------------------------------------------------------------------------------
/broadcaster/backends/memory.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import asyncio
4 | import typing
5 |
6 | from .._base import Event
7 | from .base import BroadcastBackend
8 |
9 |
10 | class MemoryBackend(BroadcastBackend):
11 | def __init__(self, url: str):
12 | self._subscribed: set[str] = set()
13 |
14 | async def connect(self) -> None:
15 | self._published: asyncio.Queue[Event] = asyncio.Queue()
16 |
17 | async def disconnect(self) -> None:
18 | pass
19 |
20 | async def subscribe(self, channel: str) -> None:
21 | self._subscribed.add(channel)
22 |
23 | async def unsubscribe(self, channel: str) -> None:
24 | self._subscribed.remove(channel)
25 |
26 | async def publish(self, channel: str, message: typing.Any) -> None:
27 | event = Event(channel=channel, message=message)
28 | await self._published.put(event)
29 |
30 | async def next_published(self) -> Event:
31 | while True:
32 | event = await self._published.get()
33 | if event.channel in self._subscribed:
34 | return event
35 |
--------------------------------------------------------------------------------
/broadcaster/backends/postgres.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | from typing import Any
3 |
4 | import asyncpg
5 |
6 | from .._base import Event
7 | from .base import BroadcastBackend
8 |
9 |
10 | class PostgresBackend(BroadcastBackend):
11 | def __init__(self, url: str):
12 | self._url = url
13 |
14 | async def connect(self) -> None:
15 | self._conn = await asyncpg.connect(self._url)
16 | self._listen_queue: asyncio.Queue[Event] = asyncio.Queue()
17 |
18 | async def disconnect(self) -> None:
19 | await self._conn.close()
20 |
21 | async def subscribe(self, channel: str) -> None:
22 | await self._conn.add_listener(channel, self._listener)
23 |
24 | async def unsubscribe(self, channel: str) -> None:
25 | await self._conn.remove_listener(channel, self._listener)
26 |
27 | async def publish(self, channel: str, message: str) -> None:
28 | await self._conn.execute("SELECT pg_notify($1, $2);", channel, message)
29 |
30 | def _listener(self, *args: Any) -> None:
31 | connection, pid, channel, payload = args
32 | event = Event(channel=channel, message=payload)
33 | self._listen_queue.put_nowait(event)
34 |
35 | async def next_published(self) -> Event:
36 | return await self._listen_queue.get()
37 |
--------------------------------------------------------------------------------
/broadcaster/backends/redis.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import asyncio
4 | import typing
5 |
6 | from redis import asyncio as redis
7 |
8 | from .._base import Event
9 | from .base import BroadcastBackend
10 |
11 |
12 | class RedisBackend(BroadcastBackend):
13 | _conn: redis.Redis
14 |
15 | def __init__(self, url: str | None = None, *, conn: redis.Redis | None = None):
16 | if url is None:
17 | assert conn is not None, "conn must be provided if url is not"
18 | self._conn = conn
19 | else:
20 | self._conn = redis.Redis.from_url(url)
21 |
22 | self._pubsub = self._conn.pubsub()
23 | self._ready = asyncio.Event()
24 | self._queue: asyncio.Queue[Event] = asyncio.Queue()
25 | self._listener: asyncio.Task[None] | None = None
26 |
27 | async def connect(self) -> None:
28 | self._listener = asyncio.create_task(self._pubsub_listener())
29 | await self._pubsub.connect() # type: ignore[no-untyped-call]
30 |
31 | async def disconnect(self) -> None:
32 | await self._pubsub.aclose() # type: ignore[no-untyped-call]
33 | await self._conn.aclose()
34 | if self._listener is not None:
35 | self._listener.cancel()
36 |
37 | async def subscribe(self, channel: str) -> None:
38 | self._ready.set()
39 | await self._pubsub.subscribe(channel)
40 |
41 | async def unsubscribe(self, channel: str) -> None:
42 | await self._pubsub.unsubscribe(channel)
43 |
44 | async def publish(self, channel: str, message: typing.Any) -> None:
45 | await self._conn.publish(channel, message)
46 |
47 | async def next_published(self) -> Event:
48 | return await self._queue.get()
49 |
50 | async def _pubsub_listener(self) -> None:
51 | # redis-py does not listen to the pubsub connection if there are no channels subscribed
52 | # so we need to wait until the first channel is subscribed to start listening
53 | while True:
54 | await self._ready.wait()
55 | async for message in self._pubsub.listen():
56 | if message["type"] == "message":
57 | event = Event(
58 | channel=message["channel"].decode(),
59 | message=message["data"].decode(),
60 | )
61 | await self._queue.put(event)
62 |
63 |             # When all channels are unsubscribed, `listen()` exits the loop.
64 |             # Clear the event so the next iteration blocks until a new channel
65 |             # is subscribed, keeping this listener task alive.
66 | self._ready.clear()
67 |
68 |
69 | StreamMessageType = typing.Tuple[bytes, typing.Tuple[typing.Tuple[bytes, typing.Dict[bytes, bytes]]]]
70 |
71 |
72 | class RedisStreamBackend(BroadcastBackend):
73 | def __init__(self, url: str):
74 | url = url.replace("redis-stream", "redis", 1)
75 | self.streams: dict[bytes | str | memoryview, int | bytes | str | memoryview] = {}
76 | self._ready = asyncio.Event()
77 | self._producer = redis.Redis.from_url(url)
78 | self._consumer = redis.Redis.from_url(url)
79 |
80 | async def connect(self) -> None:
81 | pass
82 |
83 | async def disconnect(self) -> None:
84 | await self._producer.aclose()
85 | await self._consumer.aclose()
86 |
87 | async def subscribe(self, channel: str) -> None:
88 | try:
89 | info = await self._consumer.xinfo_stream(channel)
90 | last_id = info["last-generated-id"]
91 | except redis.ResponseError:
92 | last_id = "0"
93 | self.streams[channel] = last_id
94 | self._ready.set()
95 |
96 | async def unsubscribe(self, channel: str) -> None:
97 | self.streams.pop(channel, None)
98 |
99 | async def publish(self, channel: str, message: typing.Any) -> None:
100 | await self._producer.xadd(channel, {"message": message})
101 |
102 | async def wait_for_messages(self) -> list[StreamMessageType]:
103 | await self._ready.wait()
104 | messages = None
105 | while not messages:
106 | messages = await self._consumer.xread(self.streams, count=1, block=100)
107 | return messages
108 |
109 | async def next_published(self) -> Event:
110 | messages = await self.wait_for_messages()
111 | stream, events = messages[0]
112 | _msg_id, message = events[0]
113 | self.streams[stream.decode("utf-8")] = _msg_id.decode("utf-8")
114 | return Event(
115 | channel=stream.decode("utf-8"),
116 | message=message.get(b"message", b"").decode("utf-8"),
117 | )
118 |
--------------------------------------------------------------------------------
/broadcaster/py.typed:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/encode/broadcaster/6b3ea71d4f8fb038fa7d357a1fb3750d58ac614d/broadcaster/py.typed
--------------------------------------------------------------------------------
/docker-compose.yaml:
--------------------------------------------------------------------------------
1 | version: '3'
2 | services:
3 | zookeeper:
4 | image: "confluentinc/cp-zookeeper"
5 | hostname: zookeeper
6 | ports:
7 | - 32181:32181
8 | environment:
9 | - ZOOKEEPER_CLIENT_PORT=32181
10 | - ALLOW_ANONYMOUS_LOGIN=yes
11 | kafka:
12 | image: confluentinc/cp-kafka
13 | hostname: kafka
14 | ports:
15 | - 9092:9092
16 | - 29092:29092
17 | depends_on:
18 | - zookeeper
19 | environment:
20 | - KAFKA_ZOOKEEPER_CONNECT=zookeeper:32181
21 | - KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1
22 | - KAFKA_LISTENER_SECURITY_PROTOCOL_MAP=PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
23 | - KAFKA_ADVERTISED_LISTENERS=PLAINTEXT_HOST://localhost:29092,PLAINTEXT://localhost:9092
24 | - KAFKA_BROKER_ID=1
25 | - ALLOW_PLAINTEXT_LISTENER=yes
26 | redis:
27 | image: "redis:alpine"
28 | ports:
29 | - 6379:6379
30 | postgres:
31 | image: "postgres:12"
32 | environment:
33 | - POSTGRES_DB=broadcaster
34 | - POSTGRES_PASSWORD=postgres
35 | - POSTGRES_HOST_AUTH_METHOD=trust
36 | - POSTGRES_USER=postgres
37 | ports:
38 | - 5432:5432
39 |
--------------------------------------------------------------------------------
/docs/demo.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/encode/broadcaster/6b3ea71d4f8fb038fa7d357a1fb3750d58ac614d/docs/demo.gif
--------------------------------------------------------------------------------
/example/README.md:
--------------------------------------------------------------------------------
1 | # Setup
2 |
3 | Install the Python dependencies in your virtualenv:
4 |
5 | ```bash
6 | pip install -r requirements.txt
7 | ```
8 |
9 | Run the example with the in-memory backend (the default):
10 |
11 | ```bash
12 | uvicorn example.app:app
13 | ```
14 |
15 | You can also install broadcaster locally using `pip install -e .`.
16 |
17 | To run the app with a different backend, set the `BROADCAST_URL`
18 | environment variable and start the corresponding Docker service:
19 |
20 | | Backend | Env | Service command |
21 | | -------- | ------------------------------------------------------------ | ---------------------------- |
22 | | kafka | `export BROADCAST_URL=kafka://localhost:9092` | `docker-compose up kafka` |
23 | | redis | `export BROADCAST_URL=redis://localhost:6379` | `docker-compose up redis` |
24 | | postgres | `export BROADCAST_URL=postgres://localhost:5432/broadcaster` | `docker-compose up postgres` |
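
For example, to run against Redis (a sketch assuming Docker is available and you are in the repository root):

```bash
docker-compose up -d redis
export BROADCAST_URL=redis://localhost:6379
uvicorn example.app:app
```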
25 |
--------------------------------------------------------------------------------
/example/app.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 |
4 | import anyio
5 | from starlette.applications import Starlette
6 | from starlette.routing import Route, WebSocketRoute
7 | from starlette.templating import Jinja2Templates
8 |
9 | from broadcaster import Broadcast
10 |
11 | BROADCAST_URL = os.environ.get("BROADCAST_URL", "memory://")
12 |
13 | broadcast = Broadcast(BROADCAST_URL)
14 | templates = Jinja2Templates("example/templates")
15 |
16 |
17 | async def homepage(request):
18 | template = "index.html"
19 | context = {"request": request}
20 | return templates.TemplateResponse(template, context)
21 |
22 |
23 | async def chatroom_ws(websocket):
24 | await websocket.accept()
25 |
26 | async with anyio.create_task_group() as task_group:
27 | # run until first is complete
28 | async def run_chatroom_ws_receiver() -> None:
29 | await chatroom_ws_receiver(websocket=websocket)
30 | task_group.cancel_scope.cancel()
31 |
32 | task_group.start_soon(run_chatroom_ws_receiver)
33 | await chatroom_ws_sender(websocket)
34 |
35 |
36 | async def chatroom_ws_receiver(websocket):
37 | async for message in websocket.iter_text():
38 | await broadcast.publish(channel="chatroom", message=message)
39 |
40 |
41 | async def chatroom_ws_sender(websocket):
42 | async with broadcast.subscribe(channel="chatroom") as subscriber:
43 | async for event in subscriber:
44 | await websocket.send_text(event.message)
45 |
46 |
47 | routes = [
48 | Route("/", homepage),
49 | WebSocketRoute("/", chatroom_ws, name="chatroom_ws"),
50 | ]
51 |
52 |
53 | app = Starlette(
54 | routes=routes, on_startup=[broadcast.connect], on_shutdown=[broadcast.disconnect],
55 | )
56 |
--------------------------------------------------------------------------------
/example/requirements.txt:
--------------------------------------------------------------------------------
1 | uvicorn
2 | websockets
3 | starlette
4 | jinja2
5 | broadcaster[redis,postgres,kafka]
6 |
--------------------------------------------------------------------------------
/example/templates/index.html:
--------------------------------------------------------------------------------
1 |
2 |