├── esdb ├── py.typed ├── generated │ ├── __init__.py │ ├── code_pb2_grpc.py │ ├── shared_pb2_grpc.py │ ├── status_pb2_grpc.py │ ├── status_pb2.py │ ├── code_pb2.py │ ├── serverfeatures_pb2.py │ ├── monitoring_pb2.py │ ├── serverfeatures_pb2.pyi │ ├── monitoring_pb2.pyi │ ├── gossip_pb2_grpc.py │ ├── monitoring_pb2_grpc.py │ ├── gossip_pb2.py │ ├── serverfeatures_pb2_grpc.py │ ├── status_pb2.pyi │ ├── operations_pb2.py │ ├── shared_pb2.py │ ├── operations_pb2.pyi │ ├── gossip_pb2.pyi │ ├── users_pb2.py │ ├── streams_pb2_grpc.py │ ├── shared_pb2.pyi │ └── projections_pb2.py ├── __init__.py ├── streams │ ├── __init__.py │ ├── types.py │ └── streams.py ├── subscriptions │ ├── __init__.py │ ├── types.py │ └── subscriptions.py ├── exceptions.py ├── shared.py ├── gossip.py └── client.py ├── tests ├── __init__.py ├── subscriptions │ └── __init__.py ├── conftest.py ├── gossip │ └── test_gossip.py ├── streams │ ├── test_tombstone.py │ ├── test_read_all.py │ ├── test_delete.py │ ├── test_append.py │ ├── test_batch_append.py │ └── test_read.py └── connection │ ├── test_connection.py │ └── test_connection_string_parsing.py ├── es_config └── logconfig.json ├── scripts └── fix_protoc_imports.py ├── protos ├── monitoring.proto ├── serverfeatures.proto ├── gossip.proto ├── operations.proto ├── shared.proto ├── status.proto ├── users.proto ├── projections.proto ├── cluster.proto ├── code.proto ├── streams.proto └── persistent.proto ├── shared.env ├── .github ├── dependabot.yml └── workflows │ └── workflow.yml ├── Makefile ├── LICENSE ├── pyproject.toml ├── .gitignore ├── docker-compose.yml └── README.md /esdb/py.typed: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- 
/esdb/generated/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/subscriptions/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /esdb/__init__.py: -------------------------------------------------------------------------------- 1 | from esdb.client import ESClient 2 | 3 | __all__ = ["ESClient"] 4 | -------------------------------------------------------------------------------- /esdb/streams/__init__.py: -------------------------------------------------------------------------------- 1 | from esdb.streams.streams import Streams 2 | from esdb.streams.types import Message, StreamState 3 | 4 | __all__ = ["Message", "StreamState", "Streams"] 5 | -------------------------------------------------------------------------------- /es_config/logconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "Logging": { 3 | "LogLevel": { 4 | "Default": "Debug", 5 | "System": "Warning", 6 | "Microsoft": "Warning", 7 | "Grpc": "Debug" 8 | } 9 | } 10 | } -------------------------------------------------------------------------------- /esdb/generated/code_pb2_grpc.py: -------------------------------------------------------------------------------- 1 | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 2 | """Client and server classes corresponding to protobuf-defined services.""" 3 | import grpc 4 | 5 | -------------------------------------------------------------------------------- /esdb/generated/shared_pb2_grpc.py: -------------------------------------------------------------------------------- 1 | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
#!/usr/bin/env python
"""Rewrite absolute pb2 imports in protoc-generated modules to be package-relative.

``grpc_tools.protoc`` emits absolute statements such as ``import shared_pb2 as
shared__pb2``, which break once the generated modules live inside the
``esdb.generated`` package.  This script rewrites each file given on the
command line in place (see the ``protoc`` target in the Makefile).
"""
import fileinput
import re

# Matches a top-level generated import, capturing the module name.
_PB2_IMPORT = re.compile(r"^import (.*_pb2) as")


def rewrite_import(line: str) -> str:
    """Return *line* with ``import X_pb2 as`` rewritten to ``from . import X_pb2 as``.

    Lines that do not match the generated-import pattern are returned unchanged.
    """
    return _PB2_IMPORT.sub(r"from . import \g<1> as", line)


def main() -> None:
    # fileinput(inplace=True) redirects stdout into the file being processed,
    # so print() writes the (possibly rewritten) line back in place.
    with fileinput.input(inplace=True) as files:
        for line in files:
            print(rewrite_import(line), end="")


if __name__ == "__main__":
    main()
-------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package manifests are located. 3 | # Please see the documentation for all configuration options: 4 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 5 | 6 | version: 2 7 | updates: 8 | - package-ecosystem: "pip" # See documentation for possible values 9 | directory: "/" # Location of package manifests 10 | schedule: 11 | interval: "weekly" 12 | -------------------------------------------------------------------------------- /protos/serverfeatures.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package event_store.client.server_features; 3 | option java_package = "com.eventstore.dbclient.proto.serverfeatures"; 4 | import "shared.proto"; 5 | 6 | service ServerFeatures { 7 | rpc GetSupportedMethods (event_store.client.Empty) returns (SupportedMethods); 8 | } 9 | 10 | message SupportedMethods { 11 | repeated SupportedMethod methods = 1; 12 | string event_store_server_version = 2; 13 | } 14 | 15 | message SupportedMethod { 16 | string method_name = 1; 17 | string service_name = 2; 18 | repeated string features = 3; 19 | } 20 | 21 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | run-esdb: 2 | docker-compose up -d 3 | 4 | protoc: 5 | python -m grpc_tools.protoc \ 6 | -I=protos \ 7 | --python_out=esdb/generated \ 8 | --grpc_python_out=esdb/generated \ 9 | --mypy_out=esdb/generated protos/*.proto 10 | 11 | ./scripts/fix_protoc_imports.py esdb/generated/*.py* 12 | 13 | 14 | pretty: 15 | poetry run black . 16 | poetry run isort . 17 | 18 | lint: 19 | poetry run black --check . 20 | poetry run isort --check-only . 
21 | poetry run ruff check . 22 | poetry run mypy . 23 | 24 | test: 25 | poetry run pytest --cov esdb 26 | 27 | test-ci: 28 | poetry run pytest --cov esdb --cov-report=xml 29 | 30 | html-cov: test 31 | poetry run coverage html 32 | open htmlcov/index.html 33 | 34 | cleanup: 35 | docker-compose down -v 36 | rm -rf dist 37 | rm -rf htmlcov -------------------------------------------------------------------------------- /.github/workflows/workflow.yml: -------------------------------------------------------------------------------- 1 | name: esdb-py 2 | 3 | on: [push] 4 | 5 | jobs: 6 | build: 7 | runs-on: ubuntu-latest 8 | strategy: 9 | matrix: 10 | python-version: ["3.9", "3.10", "3.11", "3.12"] 11 | 12 | steps: 13 | - uses: actions/checkout@v3 14 | - name: Set up Python ${{ matrix.python-version }} 15 | uses: actions/setup-python@v4 16 | with: 17 | python-version: ${{ matrix.python-version }} 18 | - name: Start EventStore 19 | run: docker-compose up -d 20 | - name: Install poetry 21 | run: curl -sSL https://install.python-poetry.org | python - 22 | - name: Install deps 23 | run: poetry install 24 | - name: Lint 25 | run: make lint 26 | - name: Tests 27 | run: make test-ci 28 | - name: Coverage 29 | uses: codecov/codecov-action@v3 30 | 31 | -------------------------------------------------------------------------------- /protos/gossip.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package event_store.client.gossip; 3 | option java_package = "com.eventstore.dbclient.proto.gossip"; 4 | 5 | import "shared.proto"; 6 | 7 | service Gossip { 8 | rpc Read (event_store.client.Empty) returns (ClusterInfo); 9 | } 10 | 11 | message ClusterInfo { 12 | repeated MemberInfo members = 1; 13 | } 14 | 15 | message EndPoint { 16 | string address = 1; 17 | uint32 port = 2; 18 | } 19 | 20 | message MemberInfo { 21 | enum VNodeState { 22 | Initializing = 0; 23 | DiscoverLeader = 1; 24 | Unknown = 2; 25 | PreReplica = 3; 26 
| CatchingUp = 4; 27 | Clone = 5; 28 | Follower = 6; 29 | PreLeader = 7; 30 | Leader = 8; 31 | Manager = 9; 32 | ShuttingDown = 10; 33 | Shutdown = 11; 34 | ReadOnlyLeaderless = 12; 35 | PreReadOnlyReplica = 13; 36 | ReadOnlyReplica = 14; 37 | ResigningLeader = 15; 38 | } 39 | event_store.client.UUID instance_id = 1; 40 | int64 time_stamp = 2; 41 | VNodeState state = 3; 42 | bool is_alive = 4; 43 | EndPoint http_end_point = 5; 44 | } 45 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022 Andrii Kohut 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
from typing import cast

from esdb.generated.shared_pb2 import Empty
from esdb.generated.streams_pb2 import AppendResp, ReadResp


class ClientException(Exception):
    """Base class for every error raised by the esdb client."""


class WrongExpectedVersion(ClientException):
    """Raised when an append's expected stream state does not match the server's."""

    def __init__(self, error: AppendResp.WrongExpectedVersion) -> None:
        def describe(which_oneof: str, prefix: str) -> str:
            # WhichOneof names the populated oneof field; Empty-valued options
            # carry no revision number, so only the option name is shown.
            option = cast(str, error.WhichOneof(which_oneof))
            value = getattr(error, option)
            label = option if isinstance(value, Empty) else f"{option}={value}"
            return label.replace(prefix, "")

        expected = describe("expected_revision_option", "expected_")
        current = describe("current_revision_option", "current_")
        super().__init__(f"Expected state '{expected}', got '{current}'")


class StreamNotFound(ClientException):
    """Raised when reading from a stream that does not exist."""

    def __init__(self, error: ReadResp.StreamNotFound) -> None:
        super().__init__(f"Stream '{error.stream_identifier.stream_name.decode()}' not found")


class DiscoveryError(ClientException):
    """Raised when cluster node discovery fails."""
import enum
from dataclasses import dataclass
from typing import Optional, Type, TypeVar

from esdb.generated.persistent_pb2 import CreateReq
from esdb.generated.shared_pb2 import Empty
from esdb.generated.streams_pb2 import ReadReq

# Read filters and persistent-subscription filters share the same protobuf
# shape, so a single Filter can be rendered as either message type.
MessageType = TypeVar("MessageType", ReadReq.Options.FilterOptions, CreateReq.AllOptions.FilterOptions)


@dataclass
class Filter:
    """Server-side event filter for reads and persistent subscriptions."""

    class Kind(enum.Enum):
        STREAM = "stream"
        EVENT_TYPE = "event_type"

    # kind selects whether regex/prefixes match the stream name or the event type
    kind: Kind
    regex: str
    prefixes: Optional[list[str]] = None
    checkpoint_interval_multiplier: Optional[int] = None

    def to_protobuf(self, message_type: Type[MessageType]) -> MessageType:
        """Build a protobuf FilterOptions message of the requested *message_type*."""
        fields = {}
        if self.kind == Filter.Kind.STREAM:
            fields["stream_identifier"] = message_type.Expression(regex=self.regex, prefix=self.prefixes)
        elif self.kind == Filter.Kind.EVENT_TYPE:
            fields["event_type"] = message_type.Expression(regex=self.regex, prefix=self.prefixes)
        options = message_type(
            max=0,  # TODO: This apparently does nothing ¯\_(ツ)_/¯
            count=Empty(),
            **fields,
        )
        if self.checkpoint_interval_multiplier:
            options.checkpointIntervalMultiplier = self.checkpoint_interval_multiplier
        return options
import grpc
import pytest

from esdb import ESClient
from esdb.exceptions import DiscoveryError
from esdb.gossip import Gossip

# NOTE(review): these tests assume a local EventStore cluster is running on
# port 2111 with TLS certs under certs/ (see docker-compose.yml) — confirm
# before running outside CI.


@pytest.mark.asyncio
async def test_invalid_cert():
    # An empty root certificate must make the TLS handshake (and thus the
    # first RPC) fail with a gRPC transport error.
    client = ESClient("esdb://admin:changeit@localhost:2111", root_certificates=b"")
    async with client.connect() as conn:
        with pytest.raises(grpc.aio._call.AioRpcError):
            await conn.streams.append("foo", "test_event", b"data")


@pytest.mark.asyncio
@pytest.mark.parametrize(
    ["user", "password"],
    (
        ("admin", "foobar"),  # valid user, wrong password
        ("foobar", "changeit"),  # unknown user, valid password
    ),
)
async def test_invalid_user_pass(user, password):
    # Bad credentials surface as an UNAUTHENTICATED gRPC status on the RPC,
    # not at connect time.
    client = ESClient(f"esdb://{user}:{password}@localhost:2111?tlscafile=certs/ca/ca.crt")
    async with client.connect() as conn:
        with pytest.raises(grpc.aio._call.AioRpcError) as err:
            await conn.streams.append("foo", "test_event", b"data")

        assert "UNAUTHENTICATED" in str(err.value)


@pytest.mark.asyncio
async def test_discovery_failed(monkeypatch):
    # With gossip patched to report an empty cluster, discovery should retry
    # maxdiscoverattempts times (interval 0 keeps the test fast) and give up.
    client = ESClient(
        "esdb+discover://admin:changeit@localhost:2111?"
        "discoveryinterval=0&maxdiscoverattempts=3&tlscafile=certs/ca/ca.crt"
    )

    async def _get_members(*_):
        return []

    monkeypatch.setattr(Gossip, "get_members", _get_members)
    with pytest.raises(DiscoveryError) as err:
        async with client.connect():
            ...

    assert str(err.value) == "Discovery failed after 3 attempt(s)"
import code_pb2 as code__pb2 17 | 18 | 19 | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0cstatus.proto\x12\ngoogle.rpc\x1a\x19google/protobuf/any.proto\x1a\ncode.proto\"`\n\x06Status\x12\x1e\n\x04\x63ode\x18\x01 \x01(\x0e\x32\x10.google.rpc.Code\x12\x0f\n\x07message\x18\x02 \x01(\t\x12%\n\x07\x64\x65tails\x18\x03 \x01(\x0b\x32\x14.google.protobuf.AnyBa\n\x0e\x63om.google.rpcB\x0bStatusProtoP\x01Z7google.golang.org/genproto/googleapis/rpc/status;status\xf8\x01\x01\xa2\x02\x03RPCb\x06proto3') 20 | 21 | _globals = globals() 22 | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) 23 | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'status_pb2', _globals) 24 | if _descriptor._USE_C_DESCRIPTORS == False: 25 | _globals['DESCRIPTOR']._options = None 26 | _globals['DESCRIPTOR']._serialized_options = b'\n\016com.google.rpcB\013StatusProtoP\001Z7google.golang.org/genproto/googleapis/rpc/status;status\370\001\001\242\002\003RPC' 27 | _globals['_STATUS']._serialized_start=67 28 | _globals['_STATUS']._serialized_end=163 29 | # @@protoc_insertion_point(module_scope) 30 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "esdb" 3 | version = "0.3.5" 4 | description = "gRPC client for EventStore DB" 5 | authors = ["Andrii Kohut "] 6 | license = "MIT" 7 | readme = "README.md" 8 | homepage = "https://github.com/andriykohut/esdb-py" 9 | repository = "https://github.com/andriykohut/esdb-py" 10 | keywords = ["eventstore", "esdb", "event sourcing", "cqrs", "event-sourcing", "grpcio", "grpc"] 11 | 12 | packages = [ 13 | { include = "esdb" }, 14 | ] 15 | 16 | exclude = ["EventStore", "certs", "es_config", "protos"] 17 | 18 | [tool.poetry.dependencies] 19 | python = ">=3.9,<4.0" 20 | grpcio = ">=1.4.0" 21 | protobuf = ">=4.0" 22 | 23 | [tool.poetry.group.dev.dependencies] 24 | mypy = "^1.3.0" 25 | 
mypy-protobuf = "^3.4.0" 26 | black = ">=23.3,<25.0" 27 | pytest = ">=7.3.1,<9.0.0" 28 | isort = "^5.12.0" 29 | pytest-asyncio = ">=0.21,<0.24" 30 | grpcio-tools = "^1.54.2" 31 | grpc-stubs = "^1.53.0.2" 32 | pytest-cov = ">=4.1,<6.0" 33 | ruff = ">=0.5.1,<0.5.6" 34 | 35 | [build-system] 36 | requires = ["poetry-core>=1.0.0"] 37 | build-backend = "poetry.core.masonry.api" 38 | 39 | [tool.mypy] 40 | exclude = ["esdb/generated"] 41 | 42 | [tool.black] 43 | line-length = 119 44 | exclude = "esdb/generated" 45 | 46 | [tool.isort] 47 | profile = "black" 48 | skip_glob = "esdb/generated/*" 49 | 50 | [tool.pytest.ini_options] 51 | filterwarnings = [ 52 | "ignore:::.*grpc.aio._channel:287", 53 | "ignore:::.*grpc.aio._channel:288", 54 | ] 55 | 56 | [tool.coverage.run] 57 | branch = true 58 | omit = ["esdb/generated/*"] 59 | 60 | [tool.coverage.report] 61 | exclude_lines = [ 62 | "pragma: no cover", 63 | "def __repr__", 64 | "raise AssertionError", 65 | "raise NotImplementedError", 66 | "if __name__ == .__main__.:", 67 | "@(abc.)?abstractmethod", 68 | ] 69 | 70 | [tool.ruff] 71 | line-length = 119 72 | exclude = ["esdb/generated/*"] 73 | -------------------------------------------------------------------------------- /esdb/generated/code_pb2.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Generated by the protocol buffer compiler. DO NOT EDIT! 
3 | # source: code.proto 4 | # Protobuf Python Version: 4.25.0 5 | """Generated protocol buffer code.""" 6 | from google.protobuf import descriptor as _descriptor 7 | from google.protobuf import descriptor_pool as _descriptor_pool 8 | from google.protobuf import symbol_database as _symbol_database 9 | from google.protobuf.internal import builder as _builder 10 | # @@protoc_insertion_point(imports) 11 | 12 | _sym_db = _symbol_database.Default() 13 | 14 | 15 | 16 | 17 | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\ncode.proto\x12\ngoogle.rpc*\xb7\x02\n\x04\x43ode\x12\x06\n\x02OK\x10\x00\x12\r\n\tCANCELLED\x10\x01\x12\x0b\n\x07UNKNOWN\x10\x02\x12\x14\n\x10INVALID_ARGUMENT\x10\x03\x12\x15\n\x11\x44\x45\x41\x44LINE_EXCEEDED\x10\x04\x12\r\n\tNOT_FOUND\x10\x05\x12\x12\n\x0e\x41LREADY_EXISTS\x10\x06\x12\x15\n\x11PERMISSION_DENIED\x10\x07\x12\x13\n\x0fUNAUTHENTICATED\x10\x10\x12\x16\n\x12RESOURCE_EXHAUSTED\x10\x08\x12\x17\n\x13\x46\x41ILED_PRECONDITION\x10\t\x12\x0b\n\x07\x41\x42ORTED\x10\n\x12\x10\n\x0cOUT_OF_RANGE\x10\x0b\x12\x11\n\rUNIMPLEMENTED\x10\x0c\x12\x0c\n\x08INTERNAL\x10\r\x12\x0f\n\x0bUNAVAILABLE\x10\x0e\x12\r\n\tDATA_LOSS\x10\x0f\x42X\n\x0e\x63om.google.rpcB\tCodeProtoP\x01Z3google.golang.org/genproto/googleapis/rpc/code;code\xa2\x02\x03RPCb\x06proto3') 18 | 19 | _globals = globals() 20 | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) 21 | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'code_pb2', _globals) 22 | if _descriptor._USE_C_DESCRIPTORS == False: 23 | _globals['DESCRIPTOR']._options = None 24 | _globals['DESCRIPTOR']._serialized_options = b'\n\016com.google.rpcB\tCodeProtoP\001Z3google.golang.org/genproto/googleapis/rpc/code;code\242\002\003RPC' 25 | _globals['_CODE']._serialized_start=27 26 | _globals['_CODE']._serialized_end=338 27 | # @@protoc_insertion_point(module_scope) 28 | -------------------------------------------------------------------------------- /esdb/gossip.py: 
-------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import datetime 4 | import enum 5 | from dataclasses import dataclass 6 | from typing import Optional 7 | 8 | from esdb.generated.gossip_pb2 import ClusterInfo, MemberInfo 9 | from esdb.generated.gossip_pb2_grpc import GossipStub 10 | from esdb.generated.shared_pb2 import Empty 11 | 12 | 13 | class State(enum.IntEnum): 14 | Initializing = 0 15 | DiscoverLeader = enum.auto() 16 | Unknown = enum.auto() 17 | PreReplica = enum.auto() 18 | CatchingUp = enum.auto() 19 | Clone = enum.auto() 20 | Follower = enum.auto() 21 | PreLeader = enum.auto() 22 | Leader = enum.auto() 23 | Manager = enum.auto() 24 | ShuttingDown = enum.auto() 25 | Shutdown = enum.auto() 26 | ReadOnlyLeaderless = enum.auto() 27 | PreReadOnlyReplica = enum.auto() 28 | ReadOnlyReplica = enum.auto() 29 | ResigningLeader = enum.auto() 30 | 31 | 32 | @dataclass 33 | class Member: 34 | @dataclass 35 | class Endpoint: 36 | address: str 37 | port: int 38 | 39 | timestamp: datetime.datetime 40 | state: State 41 | is_alive: bool 42 | endpoint: Optional[Endpoint] 43 | 44 | @classmethod 45 | def from_protobuf(cls, m: MemberInfo) -> Member: 46 | return cls( 47 | timestamp=datetime.datetime.fromtimestamp(m.time_stamp / 10000000, datetime.timezone.utc), 48 | state=State(m.state), 49 | is_alive=m.is_alive, 50 | endpoint=( 51 | cls.Endpoint(m.http_end_point.address, m.http_end_point.port) if m.http_end_point.address else None 52 | ), 53 | ) 54 | 55 | 56 | class Gossip: 57 | def __init__(self, stub: GossipStub) -> None: 58 | self._stub = stub 59 | 60 | async def get_members(self, timeout: int) -> list[Member]: 61 | info: ClusterInfo = await self._stub.Read(Empty(), timeout=timeout) 62 | return [Member.from_protobuf(m) for m in info.members] 63 | -------------------------------------------------------------------------------- /esdb/generated/serverfeatures_pb2.py: 
-------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Generated by the protocol buffer compiler. DO NOT EDIT! 3 | # source: serverfeatures.proto 4 | # Protobuf Python Version: 4.25.0 5 | """Generated protocol buffer code.""" 6 | from google.protobuf import descriptor as _descriptor 7 | from google.protobuf import descriptor_pool as _descriptor_pool 8 | from google.protobuf import symbol_database as _symbol_database 9 | from google.protobuf.internal import builder as _builder 10 | # @@protoc_insertion_point(imports) 11 | 12 | _sym_db = _symbol_database.Default() 13 | 14 | 15 | from . import shared_pb2 as shared__pb2 16 | 17 | 18 | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x14serverfeatures.proto\x12\"event_store.client.server_features\x1a\x0cshared.proto\"|\n\x10SupportedMethods\x12\x44\n\x07methods\x18\x01 \x03(\x0b\x32\x33.event_store.client.server_features.SupportedMethod\x12\"\n\x1a\x65vent_store_server_version\x18\x02 \x01(\t\"N\n\x0fSupportedMethod\x12\x13\n\x0bmethod_name\x18\x01 \x01(\t\x12\x14\n\x0cservice_name\x18\x02 \x01(\t\x12\x10\n\x08\x66\x65\x61tures\x18\x03 \x03(\t2x\n\x0eServerFeatures\x12\x66\n\x13GetSupportedMethods\x12\x19.event_store.client.Empty\x1a\x34.event_store.client.server_features.SupportedMethodsB.\n,com.eventstore.dbclient.proto.serverfeaturesb\x06proto3') 19 | 20 | _globals = globals() 21 | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) 22 | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'serverfeatures_pb2', _globals) 23 | if _descriptor._USE_C_DESCRIPTORS == False: 24 | _globals['DESCRIPTOR']._options = None 25 | _globals['DESCRIPTOR']._serialized_options = b'\n,com.eventstore.dbclient.proto.serverfeatures' 26 | _globals['_SUPPORTEDMETHODS']._serialized_start=74 27 | _globals['_SUPPORTEDMETHODS']._serialized_end=198 28 | _globals['_SUPPORTEDMETHOD']._serialized_start=200 29 | _globals['_SUPPORTEDMETHOD']._serialized_end=278 30 | 
_globals['_SERVERFEATURES']._serialized_start=280 31 | _globals['_SERVERFEATURES']._serialized_end=400 32 | # @@protoc_insertion_point(module_scope) 33 | -------------------------------------------------------------------------------- /protos/status.proto: -------------------------------------------------------------------------------- 1 | // Copyright 2020 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | syntax = "proto3"; 16 | 17 | package google.rpc; 18 | 19 | import "google/protobuf/any.proto"; 20 | import "code.proto"; 21 | 22 | option cc_enable_arenas = true; 23 | option go_package = "google.golang.org/genproto/googleapis/rpc/status;status"; 24 | option java_multiple_files = true; 25 | option java_outer_classname = "StatusProto"; 26 | option java_package = "com.google.rpc"; 27 | option objc_class_prefix = "RPC"; 28 | 29 | // The `Status` type defines a logical error model that is suitable for 30 | // different programming environments, including REST APIs and RPC APIs. It is 31 | // used by [gRPC](https://github.com/grpc). Each `Status` message contains 32 | // three pieces of data: error code, error message, and error details. 33 | // 34 | // You can find out more about this error model and how to work with it in the 35 | // [API Design Guide](https://cloud.google.com/apis/design/errors). 
36 | message Status { 37 | // The status code, which should be an enum value of [google.rpc.Code][google.rpc.Code]. 38 | google.rpc.Code code = 1; 39 | 40 | // A developer-facing error message, which should be in English. Any 41 | // user-facing error message should be localized and sent in the 42 | // [google.rpc.Status.details][google.rpc.Status.details] field, or localized by the client. 43 | string message = 2; 44 | 45 | // A list of messages that carry the error details. There is a common set of 46 | // message types for APIs to use. 47 | google.protobuf.Any details = 3; 48 | } 49 | -------------------------------------------------------------------------------- /esdb/generated/monitoring_pb2.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Generated by the protocol buffer compiler. DO NOT EDIT! 3 | # source: monitoring.proto 4 | # Protobuf Python Version: 4.25.0 5 | """Generated protocol buffer code.""" 6 | from google.protobuf import descriptor as _descriptor 7 | from google.protobuf import descriptor_pool as _descriptor_pool 8 | from google.protobuf import symbol_database as _symbol_database 9 | from google.protobuf.internal import builder as _builder 10 | # @@protoc_insertion_point(imports) 11 | 12 | _sym_db = _symbol_database.Default() 13 | 14 | 15 | 16 | 17 | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10monitoring.proto\x12\x1d\x65vent_store.client.monitoring\"C\n\x08StatsReq\x12\x14\n\x0cuse_metadata\x18\x01 \x01(\x08\x12!\n\x19refresh_time_period_in_ms\x18\x04 \x01(\x04\"}\n\tStatsResp\x12\x42\n\x05stats\x18\x01 \x03(\x0b\x32\x33.event_store.client.monitoring.StatsResp.StatsEntry\x1a,\n\nStatsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\x32j\n\nMonitoring\x12\\\n\x05Stats\x12\'.event_store.client.monitoring.StatsReq\x1a(.event_store.client.monitoring.StatsResp0\x01\x42*\n(com.eventstore.dbclient.proto.monitoringb\x06proto3') 18 | 19 | _globals = globals() 20 | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) 21 | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'monitoring_pb2', _globals) 22 | if _descriptor._USE_C_DESCRIPTORS == False: 23 | _globals['DESCRIPTOR']._options = None 24 | _globals['DESCRIPTOR']._serialized_options = b'\n(com.eventstore.dbclient.proto.monitoring' 25 | _globals['_STATSRESP_STATSENTRY']._options = None 26 | _globals['_STATSRESP_STATSENTRY']._serialized_options = b'8\001' 27 | _globals['_STATSREQ']._serialized_start=51 28 | _globals['_STATSREQ']._serialized_end=118 29 | _globals['_STATSRESP']._serialized_start=120 30 | _globals['_STATSRESP']._serialized_end=245 31 | _globals['_STATSRESP_STATSENTRY']._serialized_start=201 32 | _globals['_STATSRESP_STATSENTRY']._serialized_end=245 33 | _globals['_MONITORING']._serialized_start=247 34 | _globals['_MONITORING']._serialized_end=353 35 | # @@protoc_insertion_point(module_scope) 36 | -------------------------------------------------------------------------------- /esdb/generated/serverfeatures_pb2.pyi: -------------------------------------------------------------------------------- 1 | """ 2 | @generated by mypy-protobuf. Do not edit manually! 
3 | isort:skip_file 4 | """ 5 | import builtins 6 | import collections.abc 7 | import google.protobuf.descriptor 8 | import google.protobuf.internal.containers 9 | import google.protobuf.message 10 | import sys 11 | 12 | if sys.version_info >= (3, 8): 13 | import typing as typing_extensions 14 | else: 15 | import typing_extensions 16 | 17 | DESCRIPTOR: google.protobuf.descriptor.FileDescriptor 18 | 19 | @typing_extensions.final 20 | class SupportedMethods(google.protobuf.message.Message): 21 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 22 | 23 | METHODS_FIELD_NUMBER: builtins.int 24 | EVENT_STORE_SERVER_VERSION_FIELD_NUMBER: builtins.int 25 | @property 26 | def methods(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___SupportedMethod]: ... 27 | event_store_server_version: builtins.str 28 | def __init__( 29 | self, 30 | *, 31 | methods: collections.abc.Iterable[global___SupportedMethod] | None = ..., 32 | event_store_server_version: builtins.str = ..., 33 | ) -> None: ... 34 | def ClearField(self, field_name: typing_extensions.Literal["event_store_server_version", b"event_store_server_version", "methods", b"methods"]) -> None: ... 35 | 36 | global___SupportedMethods = SupportedMethods 37 | 38 | @typing_extensions.final 39 | class SupportedMethod(google.protobuf.message.Message): 40 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 41 | 42 | METHOD_NAME_FIELD_NUMBER: builtins.int 43 | SERVICE_NAME_FIELD_NUMBER: builtins.int 44 | FEATURES_FIELD_NUMBER: builtins.int 45 | method_name: builtins.str 46 | service_name: builtins.str 47 | @property 48 | def features(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... 49 | def __init__( 50 | self, 51 | *, 52 | method_name: builtins.str = ..., 53 | service_name: builtins.str = ..., 54 | features: collections.abc.Iterable[builtins.str] | None = ..., 55 | ) -> None: ... 
56 | def ClearField(self, field_name: typing_extensions.Literal["features", b"features", "method_name", b"method_name", "service_name", b"service_name"]) -> None: ... 57 | 58 | global___SupportedMethod = SupportedMethod 59 | -------------------------------------------------------------------------------- /esdb/generated/monitoring_pb2.pyi: -------------------------------------------------------------------------------- 1 | """ 2 | @generated by mypy-protobuf. Do not edit manually! 3 | isort:skip_file 4 | """ 5 | import builtins 6 | import collections.abc 7 | import google.protobuf.descriptor 8 | import google.protobuf.internal.containers 9 | import google.protobuf.message 10 | import sys 11 | 12 | if sys.version_info >= (3, 8): 13 | import typing as typing_extensions 14 | else: 15 | import typing_extensions 16 | 17 | DESCRIPTOR: google.protobuf.descriptor.FileDescriptor 18 | 19 | @typing_extensions.final 20 | class StatsReq(google.protobuf.message.Message): 21 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 22 | 23 | USE_METADATA_FIELD_NUMBER: builtins.int 24 | REFRESH_TIME_PERIOD_IN_MS_FIELD_NUMBER: builtins.int 25 | use_metadata: builtins.bool 26 | refresh_time_period_in_ms: builtins.int 27 | def __init__( 28 | self, 29 | *, 30 | use_metadata: builtins.bool = ..., 31 | refresh_time_period_in_ms: builtins.int = ..., 32 | ) -> None: ... 33 | def ClearField(self, field_name: typing_extensions.Literal["refresh_time_period_in_ms", b"refresh_time_period_in_ms", "use_metadata", b"use_metadata"]) -> None: ... 
34 | 35 | global___StatsReq = StatsReq 36 | 37 | @typing_extensions.final 38 | class StatsResp(google.protobuf.message.Message): 39 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 40 | 41 | @typing_extensions.final 42 | class StatsEntry(google.protobuf.message.Message): 43 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 44 | 45 | KEY_FIELD_NUMBER: builtins.int 46 | VALUE_FIELD_NUMBER: builtins.int 47 | key: builtins.str 48 | value: builtins.str 49 | def __init__( 50 | self, 51 | *, 52 | key: builtins.str = ..., 53 | value: builtins.str = ..., 54 | ) -> None: ... 55 | def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... 56 | 57 | STATS_FIELD_NUMBER: builtins.int 58 | @property 59 | def stats(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... 60 | def __init__( 61 | self, 62 | *, 63 | stats: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., 64 | ) -> None: ... 65 | def ClearField(self, field_name: typing_extensions.Literal["stats", b"stats"]) -> None: ... 66 | 67 | global___StatsResp = StatsResp 68 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | 131 | # dev certs 132 | certs 133 | -------------------------------------------------------------------------------- /protos/users.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package event_store.client.users; 3 | option java_package = "com.eventstore.dbclient.proto.users"; 4 | 5 | service Users { 6 | rpc Create (CreateReq) returns (CreateResp); 7 | rpc Update (UpdateReq) returns (UpdateResp); 8 | rpc Delete (DeleteReq) returns (DeleteResp); 9 | rpc Disable (DisableReq) returns (DisableResp); 10 | rpc Enable (EnableReq) returns (EnableResp); 11 | rpc Details (DetailsReq) returns (stream DetailsResp); 12 | rpc ChangePassword (ChangePasswordReq) returns (ChangePasswordResp); 13 | rpc ResetPassword (ResetPasswordReq) returns (ResetPasswordResp); 14 | } 15 | 16 | message CreateReq { 17 | Options options = 1; 18 | message Options { 19 | string login_name = 1; 20 | string password = 2; 21 | string full_name = 3; 22 | repeated string groups = 4; 23 | } 24 | } 25 | 26 | message CreateResp { 27 | 28 | } 29 | 30 | message UpdateReq { 31 | Options options = 1; 32 | message Options { 33 | string login_name = 1; 34 | string password = 2; 35 | string full_name = 3; 36 | repeated string groups = 4; 37 | } 38 | } 39 | 40 | message UpdateResp { 41 | 42 | } 43 | 44 | message DeleteReq { 45 | Options options = 1; 46 | message Options 
{ 47 | string login_name = 1; 48 | } 49 | } 50 | 51 | message DeleteResp { 52 | 53 | } 54 | 55 | message EnableReq { 56 | Options options = 1; 57 | message Options { 58 | string login_name = 1; 59 | } 60 | } 61 | 62 | message EnableResp { 63 | 64 | } 65 | 66 | message DisableReq { 67 | Options options = 1; 68 | message Options { 69 | string login_name = 1; 70 | } 71 | } 72 | 73 | message DisableResp { 74 | } 75 | 76 | message DetailsReq { 77 | Options options = 1; 78 | message Options { 79 | string login_name = 1; 80 | } 81 | } 82 | 83 | message DetailsResp { 84 | UserDetails user_details = 1; 85 | message UserDetails { 86 | string login_name = 1; 87 | string full_name = 2; 88 | repeated string groups = 3; 89 | DateTime last_updated = 4; 90 | bool disabled = 5; 91 | 92 | message DateTime { 93 | int64 ticks_since_epoch = 1; 94 | } 95 | } 96 | } 97 | 98 | message ChangePasswordReq { 99 | Options options = 1; 100 | message Options { 101 | string login_name = 1; 102 | string current_password = 2; 103 | string new_password = 3; 104 | } 105 | } 106 | 107 | message ChangePasswordResp { 108 | } 109 | 110 | message ResetPasswordReq { 111 | Options options = 1; 112 | message Options { 113 | string login_name = 1; 114 | string new_password = 2; 115 | } 116 | } 117 | 118 | message ResetPasswordResp { 119 | } 120 | -------------------------------------------------------------------------------- /esdb/generated/gossip_pb2_grpc.py: -------------------------------------------------------------------------------- 1 | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 2 | """Client and server classes corresponding to protobuf-defined services.""" 3 | import grpc 4 | 5 | from . import gossip_pb2 as gossip__pb2 6 | from . import shared_pb2 as shared__pb2 7 | 8 | 9 | class GossipStub(object): 10 | """Missing associated documentation comment in .proto file.""" 11 | 12 | def __init__(self, channel): 13 | """Constructor. 14 | 15 | Args: 16 | channel: A grpc.Channel. 
17 | """ 18 | self.Read = channel.unary_unary( 19 | '/event_store.client.gossip.Gossip/Read', 20 | request_serializer=shared__pb2.Empty.SerializeToString, 21 | response_deserializer=gossip__pb2.ClusterInfo.FromString, 22 | ) 23 | 24 | 25 | class GossipServicer(object): 26 | """Missing associated documentation comment in .proto file.""" 27 | 28 | def Read(self, request, context): 29 | """Missing associated documentation comment in .proto file.""" 30 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 31 | context.set_details('Method not implemented!') 32 | raise NotImplementedError('Method not implemented!') 33 | 34 | 35 | def add_GossipServicer_to_server(servicer, server): 36 | rpc_method_handlers = { 37 | 'Read': grpc.unary_unary_rpc_method_handler( 38 | servicer.Read, 39 | request_deserializer=shared__pb2.Empty.FromString, 40 | response_serializer=gossip__pb2.ClusterInfo.SerializeToString, 41 | ), 42 | } 43 | generic_handler = grpc.method_handlers_generic_handler( 44 | 'event_store.client.gossip.Gossip', rpc_method_handlers) 45 | server.add_generic_rpc_handlers((generic_handler,)) 46 | 47 | 48 | # This class is part of an EXPERIMENTAL API. 
49 | class Gossip(object): 50 | """Missing associated documentation comment in .proto file.""" 51 | 52 | @staticmethod 53 | def Read(request, 54 | target, 55 | options=(), 56 | channel_credentials=None, 57 | call_credentials=None, 58 | insecure=False, 59 | compression=None, 60 | wait_for_ready=None, 61 | timeout=None, 62 | metadata=None): 63 | return grpc.experimental.unary_unary(request, target, '/event_store.client.gossip.Gossip/Read', 64 | shared__pb2.Empty.SerializeToString, 65 | gossip__pb2.ClusterInfo.FromString, 66 | options, channel_credentials, 67 | insecure, call_credentials, compression, wait_for_ready, timeout, metadata) 68 | -------------------------------------------------------------------------------- /esdb/generated/monitoring_pb2_grpc.py: -------------------------------------------------------------------------------- 1 | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 2 | """Client and server classes corresponding to protobuf-defined services.""" 3 | import grpc 4 | 5 | from . import monitoring_pb2 as monitoring__pb2 6 | 7 | 8 | class MonitoringStub(object): 9 | """Missing associated documentation comment in .proto file.""" 10 | 11 | def __init__(self, channel): 12 | """Constructor. 13 | 14 | Args: 15 | channel: A grpc.Channel. 
16 | """ 17 | self.Stats = channel.unary_stream( 18 | '/event_store.client.monitoring.Monitoring/Stats', 19 | request_serializer=monitoring__pb2.StatsReq.SerializeToString, 20 | response_deserializer=monitoring__pb2.StatsResp.FromString, 21 | ) 22 | 23 | 24 | class MonitoringServicer(object): 25 | """Missing associated documentation comment in .proto file.""" 26 | 27 | def Stats(self, request, context): 28 | """Missing associated documentation comment in .proto file.""" 29 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 30 | context.set_details('Method not implemented!') 31 | raise NotImplementedError('Method not implemented!') 32 | 33 | 34 | def add_MonitoringServicer_to_server(servicer, server): 35 | rpc_method_handlers = { 36 | 'Stats': grpc.unary_stream_rpc_method_handler( 37 | servicer.Stats, 38 | request_deserializer=monitoring__pb2.StatsReq.FromString, 39 | response_serializer=monitoring__pb2.StatsResp.SerializeToString, 40 | ), 41 | } 42 | generic_handler = grpc.method_handlers_generic_handler( 43 | 'event_store.client.monitoring.Monitoring', rpc_method_handlers) 44 | server.add_generic_rpc_handlers((generic_handler,)) 45 | 46 | 47 | # This class is part of an EXPERIMENTAL API. 
48 | class Monitoring(object): 49 | """Missing associated documentation comment in .proto file.""" 50 | 51 | @staticmethod 52 | def Stats(request, 53 | target, 54 | options=(), 55 | channel_credentials=None, 56 | call_credentials=None, 57 | insecure=False, 58 | compression=None, 59 | wait_for_ready=None, 60 | timeout=None, 61 | metadata=None): 62 | return grpc.experimental.unary_stream(request, target, '/event_store.client.monitoring.Monitoring/Stats', 63 | monitoring__pb2.StatsReq.SerializeToString, 64 | monitoring__pb2.StatsResp.FromString, 65 | options, channel_credentials, 66 | insecure, call_credentials, compression, wait_for_ready, timeout, metadata) 67 | -------------------------------------------------------------------------------- /tests/streams/test_read_all.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | 3 | import pytest 4 | 5 | from esdb.shared import Filter 6 | 7 | 8 | @pytest.mark.asyncio 9 | async def test_read_all_filter_by_event_type(client): 10 | event_type = str(uuid.uuid4()) 11 | async with client.connect() as conn: 12 | for _ in range(20): 13 | await conn.streams.append( 14 | stream=str(uuid.uuid4()), 15 | event_type=event_type, 16 | data=b"", 17 | ) 18 | 19 | events = [ 20 | evt 21 | async for evt in conn.streams.read_all( 22 | count=500, 23 | filter_by=Filter( 24 | kind=Filter.Kind.EVENT_TYPE, 25 | regex=event_type, 26 | ), 27 | ) 28 | ] 29 | 30 | assert len(events) == 20 31 | assert all(e.event_type == event_type for e in events) 32 | 33 | 34 | @pytest.mark.asyncio 35 | async def test_read_all_filter_by_stream_name(client): 36 | stream_prefix = str(uuid.uuid4()) 37 | async with client.connect() as conn: 38 | for i in range(20): 39 | await conn.streams.append( 40 | stream=f"{stream_prefix}-{i}", 41 | event_type="i-dont-care", 42 | data=b"", 43 | ) 44 | 45 | events = [ 46 | evt 47 | async for evt in conn.streams.read_all( 48 | count=500, 49 | filter_by=Filter( 50 | 
kind=Filter.Kind.STREAM, 51 | regex=stream_prefix, 52 | ), 53 | ) 54 | ] 55 | assert len(events) == 20 56 | assert all(e.stream_name.startswith(stream_prefix) for e in events) 57 | 58 | 59 | @pytest.mark.asyncio 60 | async def test_read_all_filter_by_stream_name_subscribe(client): 61 | stream_prefix = str(uuid.uuid4()) 62 | 63 | class Done_(Exception): ... 64 | 65 | async with client.connect() as conn: 66 | for i in range(20): 67 | await conn.streams.append( 68 | stream=f"{stream_prefix}-{i}", 69 | event_type="i-dont-care", 70 | data=b"", 71 | ) 72 | 73 | count = 0 74 | with pytest.raises(Done_): 75 | async for event in conn.streams.read_all( 76 | subscribe=True, 77 | filter_by=Filter( 78 | kind=Filter.Kind.STREAM, 79 | regex=stream_prefix, 80 | checkpoint_interval_multiplier=10, 81 | ), 82 | ): 83 | assert event.stream_name.startswith(stream_prefix) 84 | count += 1 85 | if count == 20: 86 | raise Done_() 87 | -------------------------------------------------------------------------------- /tests/connection/test_connection_string_parsing.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from esdb.client import Configuration, Member, Preference, parse_connection_string 4 | 5 | 6 | @pytest.mark.parametrize( 7 | "connection_string, config", 8 | ( 9 | ( 10 | "esdb://host1:2113,host2:2113?keepAliveTimeout=10000&keepAliveInterval=10000", 11 | Configuration( 12 | gossip_seed=[Member.Endpoint("host1", 2113), Member.Endpoint("host2", 2113)], 13 | dns_discover=False, 14 | keep_alive_timeout=10000, 15 | keep_alive_interval=10000, 16 | ), 17 | ), 18 | ( 19 | "esdb+discover://host:2113", 20 | Configuration(dns_discover=True, address=Member.Endpoint(address="host", port=2113)), 21 | ), 22 | ( 23 | "esdb://user:pass@host:2113?tls=False", 24 | Configuration( 25 | dns_discover=False, 26 | address=Member.Endpoint(address="host", port=2113), 27 | username="user", 28 | password="pass", 29 | disable_tls=True, 30 | ), 31 | ), 
32 | ( 33 | "esdb://host:2113?nodePreference=follower&tlsVerifyCert=false&defaultDeadline=21&gossipTimeout=12", 34 | Configuration( 35 | dns_discover=False, 36 | address=Member.Endpoint(address="host", port=2113), 37 | node_preference=Preference.FOLLOWER, 38 | verify_cert=False, 39 | default_deadline=21, 40 | gossip_timeout=12, 41 | ), 42 | ), 43 | ( 44 | "esdb://host", 45 | Configuration( 46 | dns_discover=False, 47 | address=Member.Endpoint(address="host", port=2113), 48 | ), 49 | ), 50 | ), 51 | ) 52 | def test_parse_connection_string(connection_string, config): 53 | assert parse_connection_string(connection_string) == config 54 | 55 | 56 | @pytest.mark.parametrize( 57 | "connection_string, error_msg", 58 | ( 59 | ("foo://host:2113", "esdb:// or esdb+discover:// scheme is required"), 60 | ("esdb://ho:st:2113", "Too many colons in a host"), 61 | ("esdb://host:foo", "foo port is not a number"), 62 | ("esdb://host?tls=true&tls=false", "Too many values for tls"), 63 | ("esdb://host?foo=1", "Invalid option foo"), 64 | ("esdb://user@host?foo=1", "Invalid user credentials"), 65 | ("esdb://user:@host?foo=1", "Password is required"), 66 | ("esdb://:password@host?foo=1", "Username is required"), 67 | ), 68 | ) 69 | def test_invalid_string(connection_string, error_msg): 70 | with pytest.raises(ValueError) as err: 71 | parse_connection_string(connection_string) 72 | 73 | assert str(err.value) == error_msg 74 | -------------------------------------------------------------------------------- /esdb/generated/gossip_pb2.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Generated by the protocol buffer compiler. DO NOT EDIT! 
3 | # source: gossip.proto 4 | # Protobuf Python Version: 4.25.0 5 | """Generated protocol buffer code.""" 6 | from google.protobuf import descriptor as _descriptor 7 | from google.protobuf import descriptor_pool as _descriptor_pool 8 | from google.protobuf import symbol_database as _symbol_database 9 | from google.protobuf.internal import builder as _builder 10 | # @@protoc_insertion_point(imports) 11 | 12 | _sym_db = _symbol_database.Default() 13 | 14 | 15 | from . import shared_pb2 as shared__pb2 16 | 17 | 18 | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0cgossip.proto\x12\x19\x65vent_store.client.gossip\x1a\x0cshared.proto\"E\n\x0b\x43lusterInfo\x12\x36\n\x07members\x18\x01 \x03(\x0b\x32%.event_store.client.gossip.MemberInfo\")\n\x08\x45ndPoint\x12\x0f\n\x07\x61\x64\x64ress\x18\x01 \x01(\t\x12\x0c\n\x04port\x18\x02 \x01(\r\"\xfc\x03\n\nMemberInfo\x12-\n\x0binstance_id\x18\x01 \x01(\x0b\x32\x18.event_store.client.UUID\x12\x12\n\ntime_stamp\x18\x02 \x01(\x03\x12?\n\x05state\x18\x03 \x01(\x0e\x32\x30.event_store.client.gossip.MemberInfo.VNodeState\x12\x10\n\x08is_alive\x18\x04 \x01(\x08\x12;\n\x0ehttp_end_point\x18\x05 \x01(\x0b\x32#.event_store.client.gossip.EndPoint\"\x9a\x02\n\nVNodeState\x12\x10\n\x0cInitializing\x10\x00\x12\x12\n\x0e\x44iscoverLeader\x10\x01\x12\x0b\n\x07Unknown\x10\x02\x12\x0e\n\nPreReplica\x10\x03\x12\x0e\n\nCatchingUp\x10\x04\x12\t\n\x05\x43lone\x10\x05\x12\x0c\n\x08\x46ollower\x10\x06\x12\r\n\tPreLeader\x10\x07\x12\n\n\x06Leader\x10\x08\x12\x0b\n\x07Manager\x10\t\x12\x10\n\x0cShuttingDown\x10\n\x12\x0c\n\x08Shutdown\x10\x0b\x12\x16\n\x12ReadOnlyLeaderless\x10\x0c\x12\x16\n\x12PreReadOnlyReplica\x10\r\x12\x13\n\x0fReadOnlyReplica\x10\x0e\x12\x13\n\x0fResigningLeader\x10\x0f\x32S\n\x06Gossip\x12I\n\x04Read\x12\x19.event_store.client.Empty\x1a&.event_store.client.gossip.ClusterInfoB&\n$com.eventstore.dbclient.proto.gossipb\x06proto3') 19 | 20 | _globals = globals() 21 | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, 
_globals) 22 | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'gossip_pb2', _globals) 23 | if _descriptor._USE_C_DESCRIPTORS == False: 24 | _globals['DESCRIPTOR']._options = None 25 | _globals['DESCRIPTOR']._serialized_options = b'\n$com.eventstore.dbclient.proto.gossip' 26 | _globals['_CLUSTERINFO']._serialized_start=57 27 | _globals['_CLUSTERINFO']._serialized_end=126 28 | _globals['_ENDPOINT']._serialized_start=128 29 | _globals['_ENDPOINT']._serialized_end=169 30 | _globals['_MEMBERINFO']._serialized_start=172 31 | _globals['_MEMBERINFO']._serialized_end=680 32 | _globals['_MEMBERINFO_VNODESTATE']._serialized_start=398 33 | _globals['_MEMBERINFO_VNODESTATE']._serialized_end=680 34 | _globals['_GOSSIP']._serialized_start=682 35 | _globals['_GOSSIP']._serialized_end=765 36 | # @@protoc_insertion_point(module_scope) 37 | -------------------------------------------------------------------------------- /tests/streams/test_delete.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | 3 | import pytest 4 | 5 | from esdb.exceptions import ClientException, StreamNotFound 6 | from esdb.streams import StreamState 7 | from esdb.streams.types import DeleteResult 8 | 9 | 10 | @pytest.mark.asyncio 11 | async def test_delete_stream(client): 12 | stream = str(uuid.uuid4()) 13 | async with client.connect() as conn: 14 | await conn.streams.append(stream=stream, event_type="foobar", data=b"a") 15 | await conn.streams.append(stream=stream, event_type="foobar", data=b"b") 16 | 17 | assert len([e async for e in conn.streams.read(stream=stream, count=20)]) == 2 18 | 19 | result = await conn.streams.delete(stream=stream) 20 | assert isinstance(result, DeleteResult) 21 | assert isinstance(result.commit_position, int) 22 | assert isinstance(result.prepare_position, int) 23 | 24 | with pytest.raises(StreamNotFound) as err: 25 | async for _ in conn.streams.read(stream=stream, count=20): 26 | ... 
27 | 28 | assert str(err.value) == f"Stream '{stream}' not found" 29 | 30 | 31 | @pytest.mark.asyncio 32 | async def test_delete_stream_with_revision(client): 33 | stream = str(uuid.uuid4()) 34 | async with client.connect() as conn: 35 | for i in range(3): 36 | await conn.streams.append(stream=stream, event_type="foobar", data=f"{i}".encode()) 37 | 38 | assert len([e async for e in conn.streams.read(stream=stream, count=20)]) == 3 39 | 40 | with pytest.raises(ClientException) as err: 41 | await conn.streams.delete(stream=stream, revision=1) 42 | 43 | assert "Expected version: 1, Actual version: 2" in str(err.value) 44 | await conn.streams.delete(stream=stream, revision=2) 45 | 46 | 47 | @pytest.mark.asyncio 48 | async def test_delete_with_stream_state(client): 49 | stream1 = str(uuid.uuid4()) 50 | stream2 = str(uuid.uuid4()) 51 | stream3 = str(uuid.uuid4()) 52 | async with client.connect() as conn: 53 | await conn.streams.append(stream=stream1, event_type="foobar", data=b"") 54 | await conn.streams.append(stream=stream2, event_type="foobar", data=b"") 55 | 56 | await conn.streams.delete(stream=stream1, stream_state=StreamState.STREAM_EXISTS) 57 | with pytest.raises(ClientException) as err: 58 | await conn.streams.delete(stream=stream2, stream_state=StreamState.NO_STREAM) 59 | assert "Expected version: -1, Actual version: 0" in str(err) 60 | 61 | with pytest.raises(ClientException): 62 | await conn.streams.delete(stream=stream3, stream_state=StreamState.NO_STREAM) 63 | assert "Expected version: -2, Actual version: -1" in str(err) 64 | 65 | with pytest.raises(ClientException) as err: 66 | await conn.streams.delete(stream=stream3) 67 | assert "Expected version: -2, Actual version: -1" in str(err) 68 | -------------------------------------------------------------------------------- /esdb/generated/serverfeatures_pb2_grpc.py: -------------------------------------------------------------------------------- 1 | # Generated by the gRPC Python protocol compiler plugin. 
DO NOT EDIT! 2 | """Client and server classes corresponding to protobuf-defined services.""" 3 | import grpc 4 | 5 | from . import serverfeatures_pb2 as serverfeatures__pb2 6 | from . import shared_pb2 as shared__pb2 7 | 8 | 9 | class ServerFeaturesStub(object): 10 | """Missing associated documentation comment in .proto file.""" 11 | 12 | def __init__(self, channel): 13 | """Constructor. 14 | 15 | Args: 16 | channel: A grpc.Channel. 17 | """ 18 | self.GetSupportedMethods = channel.unary_unary( 19 | '/event_store.client.server_features.ServerFeatures/GetSupportedMethods', 20 | request_serializer=shared__pb2.Empty.SerializeToString, 21 | response_deserializer=serverfeatures__pb2.SupportedMethods.FromString, 22 | ) 23 | 24 | 25 | class ServerFeaturesServicer(object): 26 | """Missing associated documentation comment in .proto file.""" 27 | 28 | def GetSupportedMethods(self, request, context): 29 | """Missing associated documentation comment in .proto file.""" 30 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 31 | context.set_details('Method not implemented!') 32 | raise NotImplementedError('Method not implemented!') 33 | 34 | 35 | def add_ServerFeaturesServicer_to_server(servicer, server): 36 | rpc_method_handlers = { 37 | 'GetSupportedMethods': grpc.unary_unary_rpc_method_handler( 38 | servicer.GetSupportedMethods, 39 | request_deserializer=shared__pb2.Empty.FromString, 40 | response_serializer=serverfeatures__pb2.SupportedMethods.SerializeToString, 41 | ), 42 | } 43 | generic_handler = grpc.method_handlers_generic_handler( 44 | 'event_store.client.server_features.ServerFeatures', rpc_method_handlers) 45 | server.add_generic_rpc_handlers((generic_handler,)) 46 | 47 | 48 | # This class is part of an EXPERIMENTAL API. 
49 | class ServerFeatures(object): 50 | """Missing associated documentation comment in .proto file.""" 51 | 52 | @staticmethod 53 | def GetSupportedMethods(request, 54 | target, 55 | options=(), 56 | channel_credentials=None, 57 | call_credentials=None, 58 | insecure=False, 59 | compression=None, 60 | wait_for_ready=None, 61 | timeout=None, 62 | metadata=None): 63 | return grpc.experimental.unary_unary(request, target, '/event_store.client.server_features.ServerFeatures/GetSupportedMethods', 64 | shared__pb2.Empty.SerializeToString, 65 | serverfeatures__pb2.SupportedMethods.FromString, 66 | options, channel_credentials, 67 | insecure, call_credentials, compression, wait_for_ready, timeout, metadata) 68 | -------------------------------------------------------------------------------- /esdb/generated/status_pb2.pyi: -------------------------------------------------------------------------------- 1 | """ 2 | @generated by mypy-protobuf. Do not edit manually! 3 | isort:skip_file 4 | Copyright 2020 Google LLC 5 | 6 | Licensed under the Apache License, Version 2.0 (the "License"); 7 | you may not use this file except in compliance with the License. 8 | You may obtain a copy of the License at 9 | 10 | http://www.apache.org/licenses/LICENSE-2.0 11 | 12 | Unless required by applicable law or agreed to in writing, software 13 | distributed under the License is distributed on an "AS IS" BASIS, 14 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | See the License for the specific language governing permissions and 16 | limitations under the License. 
17 | """ 18 | import builtins 19 | import code_pb2 20 | import google.protobuf.any_pb2 21 | import google.protobuf.descriptor 22 | import google.protobuf.message 23 | import sys 24 | 25 | if sys.version_info >= (3, 8): 26 | import typing as typing_extensions 27 | else: 28 | import typing_extensions 29 | 30 | DESCRIPTOR: google.protobuf.descriptor.FileDescriptor 31 | 32 | @typing_extensions.final 33 | class Status(google.protobuf.message.Message): 34 | """The `Status` type defines a logical error model that is suitable for 35 | different programming environments, including REST APIs and RPC APIs. It is 36 | used by [gRPC](https://github.com/grpc). Each `Status` message contains 37 | three pieces of data: error code, error message, and error details. 38 | 39 | You can find out more about this error model and how to work with it in the 40 | [API Design Guide](https://cloud.google.com/apis/design/errors). 41 | """ 42 | 43 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 44 | 45 | CODE_FIELD_NUMBER: builtins.int 46 | MESSAGE_FIELD_NUMBER: builtins.int 47 | DETAILS_FIELD_NUMBER: builtins.int 48 | code: code_pb2.Code.ValueType 49 | """The status code, which should be an enum value of [google.rpc.Code][google.rpc.Code].""" 50 | message: builtins.str 51 | """A developer-facing error message, which should be in English. Any 52 | user-facing error message should be localized and sent in the 53 | [google.rpc.Status.details][google.rpc.Status.details] field, or localized by the client. 54 | """ 55 | @property 56 | def details(self) -> google.protobuf.any_pb2.Any: 57 | """A list of messages that carry the error details. There is a common set of 58 | message types for APIs to use. 59 | """ 60 | def __init__( 61 | self, 62 | *, 63 | code: code_pb2.Code.ValueType = ..., 64 | message: builtins.str = ..., 65 | details: google.protobuf.any_pb2.Any | None = ..., 66 | ) -> None: ... 
67 | def HasField(self, field_name: typing_extensions.Literal["details", b"details"]) -> builtins.bool: ... 68 | def ClearField(self, field_name: typing_extensions.Literal["code", b"code", "details", b"details", "message", b"message"]) -> None: ... 69 | 70 | global___Status = Status 71 | -------------------------------------------------------------------------------- /tests/streams/test_append.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import uuid 3 | 4 | import pytest 5 | 6 | from esdb.exceptions import WrongExpectedVersion 7 | from esdb.streams import StreamState 8 | from esdb.streams.types import AppendResult 9 | 10 | 11 | @pytest.mark.asyncio 12 | async def test_appending_to_unknown_stream_with_stream_exists_state(client): 13 | now = datetime.datetime.utcnow().isoformat() 14 | async with client.connect() as conn: 15 | with pytest.raises(WrongExpectedVersion) as e: 16 | await conn.streams.append( 17 | stream=str(uuid.uuid4()), 18 | event_type="test_event", 19 | data={"now": now}, 20 | stream_state=StreamState.STREAM_EXISTS, 21 | ) 22 | 23 | assert str(e.value) == "Expected state 'stream_exists', got 'no_stream'" 24 | 25 | 26 | @pytest.mark.parametrize("stream_state", [StreamState.NO_STREAM, StreamState.ANY]) 27 | @pytest.mark.asyncio 28 | async def test_appending_to_unknown_stream(client, stream_state): 29 | now = datetime.datetime.utcnow().isoformat() 30 | async with client.connect() as conn: 31 | result = await conn.streams.append( 32 | stream=str(uuid.uuid4()), 33 | event_type="test_event", 34 | data={"now": now}, 35 | stream_state=stream_state, 36 | ) 37 | 38 | assert isinstance(result, AppendResult) 39 | 40 | 41 | @pytest.mark.parametrize("data", [b"some bytes", {"x": 1, "y": 2}]) 42 | @pytest.mark.asyncio 43 | async def test_appending_content_types(client, data): 44 | async with client.connect() as conn: 45 | result = await conn.streams.append(stream=str(uuid.uuid4()), 
@pytest.mark.asyncio
async def test_appending_at_wrong_revision(client):
    """Appending with an expected revision the stream has not reached raises WrongExpectedVersion."""
    stream = str(uuid.uuid4())
    async with client.connect() as conn:
        # Seed the stream so it exists with its head at revision 0.
        await conn.streams.append(stream=stream, event_type="test_event", data=b"")

        # Expecting revision 100 on a one-event stream must be rejected.
        with pytest.raises(WrongExpectedVersion) as exc_info:
            await conn.streams.append(stream=stream, event_type="test_event", data=b"", revision=100)

        assert str(exc_info.value) == "Expected state 'revision=100', got 'revision=0'"


@pytest.mark.asyncio
async def test_appending_at_correct_revision(client):
    """Appending succeeds when the expected revision matches the stream head."""
    stream = str(uuid.uuid4())
    async with client.connect() as conn:
        # Two seed events land at revisions 0 and 1.
        for _ in range(2):
            await conn.streams.append(stream=stream, event_type="test_event", data=b"")
        # Appending with the current head revision (1) is accepted.
        await conn.streams.append(stream=stream, event_type="test_event", data=b"", revision=1)
@pytest.mark.asyncio
async def test_batch_append_to_unknown_stream_expecting_it_exists(client):
    """batch_append with STREAM_EXISTS on a brand-new stream fails with WrongExpectedVersion."""
    stream = str(uuid.uuid4())
    messages = [Message(event_type="foo", data=b"") for _ in range(3)]
    async with client.connect() as conn:
        with pytest.raises(ClientException) as exc_info:
            await conn.streams.batch_append(stream, messages, stream_state=StreamState.STREAM_EXISTS)

        assert "Append failed with WrongExpectedVersion" in str(exc_info.value)


@pytest.mark.asyncio
async def test_batch_append_deadline(client):
    """A zero-millisecond deadline makes batch_append fail with a Timeout."""
    stream = str(uuid.uuid4())
    messages = [Message(event_type="foo", data=b"") for _ in range(3)]
    async with client.connect() as conn:
        with pytest.raises(ClientException) as exc_info:
            await conn.streams.batch_append(stream, messages, deadline_ms=0)

        assert "Append failed with Timeout" in str(exc_info.value)


@pytest.mark.asyncio
async def test_batch_append_stream_position(client):
    """batch_append succeeds at the current stream position and fails at a wrong one."""
    stream = str(uuid.uuid4())
    messages = [Message(event_type="foo", data=b"") for _ in range(3)]
    async with client.connect() as conn:
        # Seed one event: the stream head is now position 0.
        await conn.streams.append(stream, "foo", data=b"")
        # Appending at the head position is accepted...
        await conn.streams.batch_append(stream, messages, stream_position=0)
        # ...while a position the stream has not reached is rejected.
        with pytest.raises(ClientException) as exc_info:
            await conn.streams.batch_append(stream, messages, stream_position=5)
        assert "Append failed with WrongExpectedVersion" in str(exc_info.value)
test_batch_append_stream_position_and_state(client): 66 | async with client.connect() as conn: 67 | with pytest.raises(ValueError) as err: 68 | await conn.streams.batch_append("foo", [], stream_position=0, stream_state=StreamState.ANY) 69 | 70 | assert str(err.value) == "stream_position can't be used with stream_state" 71 | -------------------------------------------------------------------------------- /tests/streams/test_read.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | from unittest.mock import ANY 3 | 4 | import pytest 5 | 6 | from esdb.streams.types import ReadEvent 7 | 8 | 9 | @pytest.mark.asyncio 10 | @pytest.mark.parametrize( 11 | ["data", "expected_content_type"], 12 | ( 13 | (b"some bytes", "application/octet-stream"), 14 | ({"foo": "bar"}, "application/json"), 15 | ), 16 | ) 17 | async def test_append_and_read(data, client, expected_content_type): 18 | stream = str(uuid.uuid4()) 19 | async with client.connect() as conn: 20 | await conn.streams.append(stream=stream, event_type="foobar", data=data, custom_metadata={"raisedBy": "me"}) 21 | [response] = [e async for e in conn.streams.read(stream=stream, count=1)] 22 | 23 | assert isinstance(response, ReadEvent) 24 | assert response.data == data 25 | assert response.commit_position == 0 26 | assert response.prepare_position == 0 27 | assert isinstance(response.id, str) 28 | assert response.metadata == {"created": ANY, "content-type": expected_content_type, "type": "foobar"} 29 | assert response.stream_name == stream 30 | assert response.custom_metadata == {"raisedBy": "me"} 31 | 32 | 33 | @pytest.mark.asyncio 34 | async def test_read_count(client): 35 | stream = str(uuid.uuid4()) 36 | expected_events = list(range(20)) 37 | reversed_events = list(reversed(expected_events)) 38 | 39 | async with client.connect() as conn: 40 | for i in range(20): 41 | await conn.streams.append(stream=stream, event_type="foobar", data={"i": i}) 42 | 43 | all_events = [e 
@pytest.mark.asyncio
async def test_read_from_projection(client):
    """Events of one type are all readable back via the $et-<type> projection stream."""
    event_type = str(uuid.uuid4())
    async with client.connect() as conn:
        # Ten events of the same type, each appended to its own fresh stream.
        for _ in range(10):
            await conn.streams.append(stream=str(uuid.uuid4()), event_type=event_type, data={})

        projection = f"$et-{event_type}"
        projected = [e async for e in conn.streams.read(stream=projection, count=500)]
        assert projected
        # Every event surfaced by the projection carries the original type.
        assert all(e.metadata["type"] == event_type for e in projected)
3 | # source: operations.proto 4 | # Protobuf Python Version: 4.25.0 5 | """Generated protocol buffer code.""" 6 | from google.protobuf import descriptor as _descriptor 7 | from google.protobuf import descriptor_pool as _descriptor_pool 8 | from google.protobuf import symbol_database as _symbol_database 9 | from google.protobuf.internal import builder as _builder 10 | # @@protoc_insertion_point(imports) 11 | 12 | _sym_db = _symbol_database.Default() 13 | 14 | 15 | from . import shared_pb2 as shared__pb2 16 | 17 | 18 | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10operations.proto\x12\x1d\x65vent_store.client.operations\x1a\x0cshared.proto\"\x97\x01\n\x10StartScavengeReq\x12H\n\x07options\x18\x01 \x01(\x0b\x32\x37.event_store.client.operations.StartScavengeReq.Options\x1a\x39\n\x07Options\x12\x14\n\x0cthread_count\x18\x01 \x01(\x05\x12\x18\n\x10start_from_chunk\x18\x02 \x01(\x05\"z\n\x0fStopScavengeReq\x12G\n\x07options\x18\x01 \x01(\x0b\x32\x36.event_store.client.operations.StopScavengeReq.Options\x1a\x1e\n\x07Options\x12\x13\n\x0bscavenge_id\x18\x01 \x01(\t\"\xb4\x01\n\x0cScavengeResp\x12\x13\n\x0bscavenge_id\x18\x01 \x01(\t\x12S\n\x0fscavenge_result\x18\x02 \x01(\x0e\x32:.event_store.client.operations.ScavengeResp.ScavengeResult\":\n\x0eScavengeResult\x12\x0b\n\x07Started\x10\x00\x12\x0e\n\nInProgress\x10\x01\x12\x0b\n\x07Stopped\x10\x02\"&\n\x12SetNodePriorityReq\x12\x10\n\x08priority\x18\x01 
\x01(\x05\x32\xed\x04\n\nOperations\x12m\n\rStartScavenge\x12/.event_store.client.operations.StartScavengeReq\x1a+.event_store.client.operations.ScavengeResp\x12k\n\x0cStopScavenge\x12..event_store.client.operations.StopScavengeReq\x1a+.event_store.client.operations.ScavengeResp\x12@\n\x08Shutdown\x12\x19.event_store.client.Empty\x1a\x19.event_store.client.Empty\x12\x44\n\x0cMergeIndexes\x12\x19.event_store.client.Empty\x1a\x19.event_store.client.Empty\x12\x42\n\nResignNode\x12\x19.event_store.client.Empty\x1a\x19.event_store.client.Empty\x12_\n\x0fSetNodePriority\x12\x31.event_store.client.operations.SetNodePriorityReq\x1a\x19.event_store.client.Empty\x12V\n\x1eRestartPersistentSubscriptions\x12\x19.event_store.client.Empty\x1a\x19.event_store.client.EmptyB*\n(com.eventstore.dbclient.proto.operationsb\x06proto3') 19 | 20 | _globals = globals() 21 | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) 22 | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'operations_pb2', _globals) 23 | if _descriptor._USE_C_DESCRIPTORS == False: 24 | _globals['DESCRIPTOR']._options = None 25 | _globals['DESCRIPTOR']._serialized_options = b'\n(com.eventstore.dbclient.proto.operations' 26 | _globals['_STARTSCAVENGEREQ']._serialized_start=66 27 | _globals['_STARTSCAVENGEREQ']._serialized_end=217 28 | _globals['_STARTSCAVENGEREQ_OPTIONS']._serialized_start=160 29 | _globals['_STARTSCAVENGEREQ_OPTIONS']._serialized_end=217 30 | _globals['_STOPSCAVENGEREQ']._serialized_start=219 31 | _globals['_STOPSCAVENGEREQ']._serialized_end=341 32 | _globals['_STOPSCAVENGEREQ_OPTIONS']._serialized_start=311 33 | _globals['_STOPSCAVENGEREQ_OPTIONS']._serialized_end=341 34 | _globals['_SCAVENGERESP']._serialized_start=344 35 | _globals['_SCAVENGERESP']._serialized_end=524 36 | _globals['_SCAVENGERESP_SCAVENGERESULT']._serialized_start=466 37 | _globals['_SCAVENGERESP_SCAVENGERESULT']._serialized_end=524 38 | _globals['_SETNODEPRIORITYREQ']._serialized_start=526 39 | 
_globals['_SETNODEPRIORITYREQ']._serialized_end=564 40 | _globals['_OPERATIONS']._serialized_start=567 41 | _globals['_OPERATIONS']._serialized_end=1188 42 | # @@protoc_insertion_point(module_scope) 43 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3.5" 2 | 3 | services: 4 | volumes-provisioner: 5 | image: hasnat/volumes-provisioner 6 | environment: 7 | PROVISION_DIRECTORIES: "1000:1000:0755:/tmp/certs" 8 | volumes: 9 | - "./certs:/tmp/certs" 10 | network_mode: none 11 | 12 | cert-gen: 13 | image: eventstore/es-gencert-cli:1.0.2 14 | entrypoint: bash 15 | user: "1000:1000" 16 | command: > 17 | -c "mkdir -p ./certs && cd /certs 18 | && es-gencert-cli create-ca 19 | && es-gencert-cli create-node -out ./node1 -ip-addresses 127.0.0.1,172.30.240.11 -dns-names localhost 20 | && es-gencert-cli create-node -out ./node2 -ip-addresses 127.0.0.1,172.30.240.12 -dns-names localhost 21 | && es-gencert-cli create-node -out ./node3 -ip-addresses 127.0.0.1,172.30.240.13 -dns-names localhost 22 | && find . 
-type f -print0 | xargs -0 chmod 666" 23 | volumes: 24 | - "./certs:/certs" 25 | depends_on: 26 | - volumes-provisioner 27 | 28 | esdb-node1: 29 | image: eventstore/eventstore:21.10.7-bionic 30 | env_file: 31 | - shared.env 32 | environment: 33 | - EVENTSTORE_GOSSIP_SEED=172.30.240.12:2113,172.30.240.13:2113 34 | - EVENTSTORE_INT_IP=172.30.240.11 35 | - EVENTSTORE_CERTIFICATE_FILE=/etc/eventstore/certs/node1/node.crt 36 | - EVENTSTORE_CERTIFICATE_PRIVATE_KEY_FILE=/etc/eventstore/certs/node1/node.key 37 | - EVENTSTORE_ADVERTISE_HOST_TO_CLIENT_AS=127.0.0.1 38 | - EVENTSTORE_ADVERTISE_HTTP_PORT_TO_CLIENT_AS=2111 39 | - EVENTSTORE_ADVERTISE_TCP_PORT_TO_CLIENT_AS=1111 40 | ports: 41 | - 1111:1113 42 | - 2111:2113 43 | networks: 44 | clusternetwork: 45 | ipv4_address: 172.30.240.11 46 | volumes: 47 | - ./certs:/etc/eventstore/certs 48 | - ./es_config:/opt/eventstore/config/ 49 | restart: unless-stopped 50 | depends_on: 51 | - cert-gen 52 | 53 | esdb-node2: 54 | image: eventstore/eventstore:21.10.7-bionic 55 | env_file: 56 | - shared.env 57 | environment: 58 | - EVENTSTORE_GOSSIP_SEED=172.30.240.11:2113,172.30.240.13:2113 59 | - EVENTSTORE_INT_IP=172.30.240.12 60 | - EVENTSTORE_CERTIFICATE_FILE=/etc/eventstore/certs/node2/node.crt 61 | - EVENTSTORE_CERTIFICATE_PRIVATE_KEY_FILE=/etc/eventstore/certs/node2/node.key 62 | - EVENTSTORE_ADVERTISE_HOST_TO_CLIENT_AS=127.0.0.1 63 | - EVENTSTORE_ADVERTISE_HTTP_PORT_TO_CLIENT_AS=2112 64 | - EVENTSTORE_ADVERTISE_TCP_PORT_TO_CLIENT_AS=1112 65 | ports: 66 | - 1112:1113 67 | - 2112:2113 68 | networks: 69 | clusternetwork: 70 | ipv4_address: 172.30.240.12 71 | volumes: 72 | - ./certs:/etc/eventstore/certs 73 | - ./es_config:/opt/eventstore/config/ 74 | restart: unless-stopped 75 | depends_on: 76 | - cert-gen 77 | 78 | esdb-node3: 79 | image: eventstore/eventstore:21.10.7-bionic 80 | env_file: 81 | - shared.env 82 | environment: 83 | - EVENTSTORE_GOSSIP_SEED=172.30.240.11:2113,172.30.240.12:2113 84 | - EVENTSTORE_INT_IP=172.30.240.13 85 | 
- EVENTSTORE_CERTIFICATE_FILE=/etc/eventstore/certs/node3/node.crt 86 | - EVENTSTORE_CERTIFICATE_PRIVATE_KEY_FILE=/etc/eventstore/certs/node3/node.key 87 | - EVENTSTORE_ADVERTISE_HOST_TO_CLIENT_AS=127.0.0.1 88 | - EVENTSTORE_ADVERTISE_HTTP_PORT_TO_CLIENT_AS=2113 89 | - EVENTSTORE_ADVERTISE_TCP_PORT_TO_CLIENT_AS=1113 90 | ports: 91 | - 1113:1113 92 | - 2113:2113 93 | networks: 94 | clusternetwork: 95 | ipv4_address: 172.30.240.13 96 | volumes: 97 | - ./certs:/etc/eventstore/certs 98 | - ./es_config:/opt/eventstore/config/ 99 | restart: unless-stopped 100 | depends_on: 101 | - cert-gen 102 | 103 | networks: 104 | clusternetwork: 105 | name: eventstoredb.local 106 | driver: bridge 107 | ipam: 108 | driver: default 109 | config: 110 | - subnet: 172.30.240.0/24 -------------------------------------------------------------------------------- /esdb/generated/shared_pb2.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Generated by the protocol buffer compiler. DO NOT EDIT! 
3 | # source: shared.proto 4 | # Protobuf Python Version: 4.25.0 5 | """Generated protocol buffer code.""" 6 | from google.protobuf import descriptor as _descriptor 7 | from google.protobuf import descriptor_pool as _descriptor_pool 8 | from google.protobuf import symbol_database as _symbol_database 9 | from google.protobuf.internal import builder as _builder 10 | # @@protoc_insertion_point(imports) 11 | 12 | _sym_db = _symbol_database.Default() 13 | 14 | 15 | from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 16 | 17 | 18 | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0cshared.proto\x12\x12\x65vent_store.client\x1a\x1bgoogle/protobuf/empty.proto\"\xa9\x01\n\x04UUID\x12\x39\n\nstructured\x18\x01 \x01(\x0b\x32#.event_store.client.UUID.StructuredH\x00\x12\x10\n\x06string\x18\x02 \x01(\tH\x00\x1aK\n\nStructured\x12\x1d\n\x15most_significant_bits\x18\x01 \x01(\x03\x12\x1e\n\x16least_significant_bits\x18\x02 \x01(\x03\x42\x07\n\x05value\"\x07\n\x05\x45mpty\"-\n\x10StreamIdentifier\x12\x13\n\x0bstream_name\x18\x03 \x01(\x0cJ\x04\x08\x01\x10\x03\"F\n\x11\x41llStreamPosition\x12\x17\n\x0f\x63ommit_position\x18\x01 \x01(\x04\x12\x18\n\x10prepare_position\x18\x02 \x01(\x04\"\xf7\x02\n\x14WrongExpectedVersion\x12!\n\x17\x63urrent_stream_revision\x18\x01 \x01(\x04H\x00\x12\x33\n\x11\x63urrent_no_stream\x18\x02 \x01(\x0b\x32\x16.google.protobuf.EmptyH\x00\x12\"\n\x18\x65xpected_stream_position\x18\x03 \x01(\x04H\x01\x12.\n\x0c\x65xpected_any\x18\x04 \x01(\x0b\x32\x16.google.protobuf.EmptyH\x01\x12\x38\n\x16\x65xpected_stream_exists\x18\x05 \x01(\x0b\x32\x16.google.protobuf.EmptyH\x01\x12\x34\n\x12\x65xpected_no_stream\x18\x06 \x01(\x0b\x32\x16.google.protobuf.EmptyH\x01\x42 \n\x1e\x63urrent_stream_revision_optionB!\n\x1f\x65xpected_stream_position_option\"\x0e\n\x0c\x41\x63\x63\x65ssDenied\"P\n\rStreamDeleted\x12?\n\x11stream_identifier\x18\x01 
\x01(\x0b\x32$.event_store.client.StreamIdentifier\"\t\n\x07Timeout\"\t\n\x07Unknown\"\x14\n\x12InvalidTransaction\"2\n\x19MaximumAppendSizeExceeded\x12\x15\n\rmaxAppendSize\x18\x01 \x01(\r\"\x1d\n\nBadRequest\x12\x0f\n\x07message\x18\x01 \x01(\tB&\n$com.eventstore.dbclient.proto.sharedb\x06proto3') 19 | 20 | _globals = globals() 21 | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) 22 | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'shared_pb2', _globals) 23 | if _descriptor._USE_C_DESCRIPTORS == False: 24 | _globals['DESCRIPTOR']._options = None 25 | _globals['DESCRIPTOR']._serialized_options = b'\n$com.eventstore.dbclient.proto.shared' 26 | _globals['_UUID']._serialized_start=66 27 | _globals['_UUID']._serialized_end=235 28 | _globals['_UUID_STRUCTURED']._serialized_start=151 29 | _globals['_UUID_STRUCTURED']._serialized_end=226 30 | _globals['_EMPTY']._serialized_start=237 31 | _globals['_EMPTY']._serialized_end=244 32 | _globals['_STREAMIDENTIFIER']._serialized_start=246 33 | _globals['_STREAMIDENTIFIER']._serialized_end=291 34 | _globals['_ALLSTREAMPOSITION']._serialized_start=293 35 | _globals['_ALLSTREAMPOSITION']._serialized_end=363 36 | _globals['_WRONGEXPECTEDVERSION']._serialized_start=366 37 | _globals['_WRONGEXPECTEDVERSION']._serialized_end=741 38 | _globals['_ACCESSDENIED']._serialized_start=743 39 | _globals['_ACCESSDENIED']._serialized_end=757 40 | _globals['_STREAMDELETED']._serialized_start=759 41 | _globals['_STREAMDELETED']._serialized_end=839 42 | _globals['_TIMEOUT']._serialized_start=841 43 | _globals['_TIMEOUT']._serialized_end=850 44 | _globals['_UNKNOWN']._serialized_start=852 45 | _globals['_UNKNOWN']._serialized_end=861 46 | _globals['_INVALIDTRANSACTION']._serialized_start=863 47 | _globals['_INVALIDTRANSACTION']._serialized_end=883 48 | _globals['_MAXIMUMAPPENDSIZEEXCEEDED']._serialized_start=885 49 | _globals['_MAXIMUMAPPENDSIZEEXCEEDED']._serialized_end=935 50 | _globals['_BADREQUEST']._serialized_start=937 51 | 
_globals['_BADREQUEST']._serialized_end=966 52 | # @@protoc_insertion_point(module_scope) 53 | -------------------------------------------------------------------------------- /protos/projections.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package event_store.client.projections; 3 | option java_package = "com.eventstore.dbclient.proto.projections"; 4 | 5 | import "google/protobuf/struct.proto"; 6 | import "shared.proto"; 7 | 8 | service Projections { 9 | rpc Create (CreateReq) returns (CreateResp); 10 | rpc Update (UpdateReq) returns (UpdateResp); 11 | rpc Delete (DeleteReq) returns (DeleteResp); 12 | rpc Statistics (StatisticsReq) returns (stream StatisticsResp); 13 | rpc Disable (DisableReq) returns (DisableResp); 14 | rpc Enable (EnableReq) returns (EnableResp); 15 | rpc Reset (ResetReq) returns (ResetResp); 16 | rpc State (StateReq) returns (StateResp); 17 | rpc Result (ResultReq) returns (ResultResp); 18 | rpc RestartSubsystem (event_store.client.Empty) returns (event_store.client.Empty); 19 | } 20 | 21 | message CreateReq { 22 | Options options = 1; 23 | 24 | message Options { 25 | oneof mode { 26 | event_store.client.Empty one_time = 1; 27 | Transient transient = 2; 28 | Continuous continuous = 3; 29 | } 30 | string query = 4; 31 | 32 | message Transient { 33 | string name = 1; 34 | } 35 | message Continuous { 36 | string name = 1; 37 | bool emit_enabled = 2; 38 | bool track_emitted_streams = 3; 39 | } 40 | } 41 | } 42 | 43 | message CreateResp { 44 | } 45 | 46 | message UpdateReq { 47 | Options options = 1; 48 | 49 | message Options { 50 | string name = 1; 51 | string query = 2; 52 | oneof emit_option { 53 | bool emit_enabled = 3; 54 | event_store.client.Empty no_emit_options = 4; 55 | } 56 | } 57 | } 58 | 59 | message UpdateResp { 60 | } 61 | 62 | message DeleteReq { 63 | Options options = 1; 64 | 65 | message Options { 66 | string name = 1; 67 | bool delete_emitted_streams = 2; 68 | 
bool delete_state_stream = 3; 69 | bool delete_checkpoint_stream = 4; 70 | } 71 | } 72 | 73 | message DeleteResp { 74 | } 75 | 76 | message StatisticsReq { 77 | Options options = 1; 78 | message Options { 79 | oneof mode { 80 | string name = 1; 81 | event_store.client.Empty all = 2; 82 | event_store.client.Empty transient = 3; 83 | event_store.client.Empty continuous = 4; 84 | event_store.client.Empty one_time = 5; 85 | } 86 | } 87 | } 88 | 89 | message StatisticsResp { 90 | Details details = 1; 91 | 92 | message Details { 93 | int64 coreProcessingTime = 1; 94 | int64 version = 2; 95 | int64 epoch = 3; 96 | string effectiveName = 4; 97 | int32 writesInProgress = 5; 98 | int32 readsInProgress = 6; 99 | int32 partitionsCached = 7; 100 | string status = 8; 101 | string stateReason = 9; 102 | string name = 10; 103 | string mode = 11; 104 | string position = 12; 105 | float progress = 13; 106 | string lastCheckpoint = 14; 107 | int64 eventsProcessedAfterRestart = 15; 108 | string checkpointStatus = 16; 109 | int64 bufferedEvents = 17; 110 | int32 writePendingEventsBeforeCheckpoint = 18; 111 | int32 writePendingEventsAfterCheckpoint = 19; 112 | } 113 | } 114 | 115 | message StateReq { 116 | Options options = 1; 117 | 118 | message Options { 119 | string name = 1; 120 | string partition = 2; 121 | } 122 | } 123 | 124 | message StateResp { 125 | google.protobuf.Value state = 1; 126 | } 127 | 128 | message ResultReq { 129 | Options options = 1; 130 | 131 | message Options { 132 | string name = 1; 133 | string partition = 2; 134 | } 135 | } 136 | 137 | message ResultResp { 138 | google.protobuf.Value result = 1; 139 | } 140 | 141 | message ResetReq { 142 | Options options = 1; 143 | 144 | message Options { 145 | string name = 1; 146 | bool write_checkpoint = 2; 147 | } 148 | } 149 | 150 | message ResetResp { 151 | } 152 | 153 | 154 | message EnableReq { 155 | Options options = 1; 156 | 157 | message Options { 158 | string name = 1; 159 | } 160 | } 161 | 162 | message 
from __future__ import annotations

import enum
import json
import logging
import uuid
from dataclasses import dataclass, field
from typing import Mapping, Optional, Type, TypeVar, Union

from esdb.generated.shared_pb2 import UUID
from esdb.generated.streams_pb2 import (
    AppendReq,
    AppendResp,
    BatchAppendReq,
    BatchAppendResp,
    DeleteResp,
    ReadResp,
    TombstoneResp,
)

logger = logging.getLogger(__name__)


@enum.unique
class StreamState(enum.Enum):
    """Expected stream state for optimistic-concurrency checks on append."""

    ANY = "any"  # append regardless of the stream's current state
    NO_STREAM = "no_stream"  # append only if the stream does not exist yet
    STREAM_EXISTS = "stream_exists"  # append only if the stream already exists


@dataclass
class AppendResult:
    """Outcome of a successful single-stream append."""

    current_revision: int  # stream revision after the append
    commit_position: int  # $all commit position of the write
    prepare_position: int  # $all prepare position of the write

    @staticmethod
    def from_response(response: AppendResp) -> AppendResult:
        """Build an :class:`AppendResult` from a raw ``AppendResp``.

        Assumes the caller has already verified that ``response`` carries the
        ``success`` branch; on the error branch these fields are proto defaults.
        """
        success = response.success
        return AppendResult(
            current_revision=success.current_revision,
            commit_position=success.position.commit_position,
            prepare_position=success.position.prepare_position,
        )


@dataclass
class DeleteResult:
    """Position of a successful (soft) stream delete."""

    commit_position: int
    prepare_position: int

    @staticmethod
    def from_response(response: DeleteResp) -> DeleteResult:
        """Build a :class:`DeleteResult` from a raw ``DeleteResp``."""
        return DeleteResult(
            commit_position=response.position.commit_position,
            prepare_position=response.position.prepare_position,
        )


@dataclass
class TombstoneResult:
    """Position of a successful (hard) stream tombstone."""

    commit_position: int
    prepare_position: int

    @staticmethod
    def from_response(response: TombstoneResp) -> TombstoneResult:
        """Build a :class:`TombstoneResult` from a raw ``TombstoneResp``."""
        return TombstoneResult(
            commit_position=response.position.commit_position,
            prepare_position=response.position.prepare_position,
        )


@enum.unique
class ContentType(enum.Enum):
    """MIME content type recorded in event metadata."""

    OCTET_STREAM = "application/octet-stream"
    JSON = "application/json"


@dataclass
class ReadEvent:
    """A recorded event materialized from a read/subscription response."""

    id: str
    stream_name: str
    prepare_position: int
    commit_position: int
    metadata: Mapping[str, str]
    event_type: str
    custom_metadata: Optional[dict]
    data: Union[dict, bytes]  # dict when stored as JSON, raw bytes otherwise

    @staticmethod
    def from_response(response: ReadResp) -> ReadEvent:
        """Build a :class:`ReadEvent` from a ``ReadResp`` carrying the ``event`` branch."""
        event = response.event.event
        is_json = event.metadata["content-type"] == ContentType.JSON.value
        return ReadEvent(
            id=event.id.string,
            stream_name=event.stream_identifier.stream_name.decode(),
            metadata=event.metadata,
            custom_metadata=json.loads(event.custom_metadata) if event.custom_metadata else None,
            data=json.loads(event.data) if is_json else event.data,
            # Fix: this previously copied response.event.commit_position into
            # prepare_position, making the two always equal. Use the recorded
            # event's own prepare_position (streams.proto RecordedEvent field).
            prepare_position=event.prepare_position,
            commit_position=response.event.commit_position,
            event_type=event.metadata["type"],
        )


@dataclass
class SubscriptionConfirmed:
    """Server acknowledgement that a subscription was established."""

    subscription_id: str


@dataclass
class Checkpoint:
    """Periodic position marker emitted on $all subscriptions."""

    commit_position: int
    prepare_position: int


# Proposed-message protobuf class differs between single and batch append.
ProposedMessageType = TypeVar("ProposedMessageType", BatchAppendReq.ProposedMessage, AppendReq.ProposedMessage)


@dataclass
class Message:
    """An event to be appended to a stream.

    ``data`` given as a ``dict`` is serialized as JSON; ``bytes`` are stored
    verbatim with an octet-stream content type.
    """

    event_type: str
    data: Union[bytes, dict]
    id: uuid.UUID = field(default_factory=uuid.uuid4)
    custom_metadata: Optional[dict] = None

    def to_protobuf(self, message_type: Type[ProposedMessageType]) -> ProposedMessageType:
        """Serialize this message as an instance of ``message_type``.

        Raises:
            TypeError: if ``data`` is neither ``bytes`` nor ``dict``.
        """
        # Explicit raise instead of `assert`: asserts are stripped under -O,
        # which would let unsupported payloads reach json.dumps silently.
        if not isinstance(self.data, (bytes, dict)):
            raise TypeError(f"Message.data must be bytes or dict, got {type(self.data).__name__}")
        is_binary = isinstance(self.data, bytes)
        return message_type(
            id=UUID(string=str(self.id)),
            metadata={
                "type": self.event_type,
                "content-type": ContentType.OCTET_STREAM.value if is_binary else ContentType.JSON.value,
            },
            custom_metadata=json.dumps(self.custom_metadata).encode() if self.custom_metadata else b"",
            data=self.data if is_binary else json.dumps(self.data).encode(),
        )


@dataclass
class BatchAppendResult:
    """Outcome of a successful batch append."""

    correlation_id: str  # echoes the client-supplied correlation UUID
    current_revision: int
    commit_position: int
    prepare_position: int

    @staticmethod
    def from_response(response: BatchAppendResp) -> BatchAppendResult:
        """Build a :class:`BatchAppendResult` from a raw ``BatchAppendResp``.

        Assumes the caller has already verified the ``success`` branch.
        """
        success = response.success
        return BatchAppendResult(
            # Fix: the field is annotated `str`, but the proto correlation_id is
            # a shared UUID message; extract its string form, consistent with
            # ReadEvent.id. NOTE(review): assumes string (not structured) UUIDs
            # are sent by this client — verify against streams.Streams.
            correlation_id=response.correlation_id.string,
            current_revision=success.current_revision,
            commit_position=success.position.commit_position,
            prepare_position=success.position.prepare_position,
        )
3 | isort:skip_file 4 | """ 5 | import builtins 6 | import google.protobuf.descriptor 7 | import google.protobuf.internal.enum_type_wrapper 8 | import google.protobuf.message 9 | import sys 10 | import typing 11 | 12 | if sys.version_info >= (3, 10): 13 | import typing as typing_extensions 14 | else: 15 | import typing_extensions 16 | 17 | DESCRIPTOR: google.protobuf.descriptor.FileDescriptor 18 | 19 | @typing_extensions.final 20 | class StartScavengeReq(google.protobuf.message.Message): 21 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 22 | 23 | @typing_extensions.final 24 | class Options(google.protobuf.message.Message): 25 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 26 | 27 | THREAD_COUNT_FIELD_NUMBER: builtins.int 28 | START_FROM_CHUNK_FIELD_NUMBER: builtins.int 29 | thread_count: builtins.int 30 | start_from_chunk: builtins.int 31 | def __init__( 32 | self, 33 | *, 34 | thread_count: builtins.int = ..., 35 | start_from_chunk: builtins.int = ..., 36 | ) -> None: ... 37 | def ClearField(self, field_name: typing_extensions.Literal["start_from_chunk", b"start_from_chunk", "thread_count", b"thread_count"]) -> None: ... 38 | 39 | OPTIONS_FIELD_NUMBER: builtins.int 40 | @property 41 | def options(self) -> global___StartScavengeReq.Options: ... 42 | def __init__( 43 | self, 44 | *, 45 | options: global___StartScavengeReq.Options | None = ..., 46 | ) -> None: ... 47 | def HasField(self, field_name: typing_extensions.Literal["options", b"options"]) -> builtins.bool: ... 48 | def ClearField(self, field_name: typing_extensions.Literal["options", b"options"]) -> None: ... 
49 | 50 | global___StartScavengeReq = StartScavengeReq 51 | 52 | @typing_extensions.final 53 | class StopScavengeReq(google.protobuf.message.Message): 54 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 55 | 56 | @typing_extensions.final 57 | class Options(google.protobuf.message.Message): 58 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 59 | 60 | SCAVENGE_ID_FIELD_NUMBER: builtins.int 61 | scavenge_id: builtins.str 62 | def __init__( 63 | self, 64 | *, 65 | scavenge_id: builtins.str = ..., 66 | ) -> None: ... 67 | def ClearField(self, field_name: typing_extensions.Literal["scavenge_id", b"scavenge_id"]) -> None: ... 68 | 69 | OPTIONS_FIELD_NUMBER: builtins.int 70 | @property 71 | def options(self) -> global___StopScavengeReq.Options: ... 72 | def __init__( 73 | self, 74 | *, 75 | options: global___StopScavengeReq.Options | None = ..., 76 | ) -> None: ... 77 | def HasField(self, field_name: typing_extensions.Literal["options", b"options"]) -> builtins.bool: ... 78 | def ClearField(self, field_name: typing_extensions.Literal["options", b"options"]) -> None: ... 79 | 80 | global___StopScavengeReq = StopScavengeReq 81 | 82 | @typing_extensions.final 83 | class ScavengeResp(google.protobuf.message.Message): 84 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 85 | 86 | class _ScavengeResult: 87 | ValueType = typing.NewType("ValueType", builtins.int) 88 | V: typing_extensions.TypeAlias = ValueType 89 | 90 | class _ScavengeResultEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ScavengeResp._ScavengeResult.ValueType], builtins.type): 91 | DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor 92 | Started: ScavengeResp._ScavengeResult.ValueType # 0 93 | InProgress: ScavengeResp._ScavengeResult.ValueType # 1 94 | Stopped: ScavengeResp._ScavengeResult.ValueType # 2 95 | 96 | class ScavengeResult(_ScavengeResult, metaclass=_ScavengeResultEnumTypeWrapper): ... 
97 | Started: ScavengeResp.ScavengeResult.ValueType # 0 98 | InProgress: ScavengeResp.ScavengeResult.ValueType # 1 99 | Stopped: ScavengeResp.ScavengeResult.ValueType # 2 100 | 101 | SCAVENGE_ID_FIELD_NUMBER: builtins.int 102 | SCAVENGE_RESULT_FIELD_NUMBER: builtins.int 103 | scavenge_id: builtins.str 104 | scavenge_result: global___ScavengeResp.ScavengeResult.ValueType 105 | def __init__( 106 | self, 107 | *, 108 | scavenge_id: builtins.str = ..., 109 | scavenge_result: global___ScavengeResp.ScavengeResult.ValueType = ..., 110 | ) -> None: ... 111 | def ClearField(self, field_name: typing_extensions.Literal["scavenge_id", b"scavenge_id", "scavenge_result", b"scavenge_result"]) -> None: ... 112 | 113 | global___ScavengeResp = ScavengeResp 114 | 115 | @typing_extensions.final 116 | class SetNodePriorityReq(google.protobuf.message.Message): 117 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 118 | 119 | PRIORITY_FIELD_NUMBER: builtins.int 120 | priority: builtins.int 121 | def __init__( 122 | self, 123 | *, 124 | priority: builtins.int = ..., 125 | ) -> None: ... 126 | def ClearField(self, field_name: typing_extensions.Literal["priority", b"priority"]) -> None: ... 127 | 128 | global___SetNodePriorityReq = SetNodePriorityReq 129 | -------------------------------------------------------------------------------- /esdb/generated/gossip_pb2.pyi: -------------------------------------------------------------------------------- 1 | """ 2 | @generated by mypy-protobuf. Do not edit manually! 
3 | isort:skip_file 4 | """ 5 | import builtins 6 | import collections.abc 7 | import google.protobuf.descriptor 8 | import google.protobuf.internal.containers 9 | import google.protobuf.internal.enum_type_wrapper 10 | import google.protobuf.message 11 | import shared_pb2 12 | import sys 13 | import typing 14 | 15 | if sys.version_info >= (3, 10): 16 | import typing as typing_extensions 17 | else: 18 | import typing_extensions 19 | 20 | DESCRIPTOR: google.protobuf.descriptor.FileDescriptor 21 | 22 | @typing_extensions.final 23 | class ClusterInfo(google.protobuf.message.Message): 24 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 25 | 26 | MEMBERS_FIELD_NUMBER: builtins.int 27 | @property 28 | def members(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___MemberInfo]: ... 29 | def __init__( 30 | self, 31 | *, 32 | members: collections.abc.Iterable[global___MemberInfo] | None = ..., 33 | ) -> None: ... 34 | def ClearField(self, field_name: typing_extensions.Literal["members", b"members"]) -> None: ... 35 | 36 | global___ClusterInfo = ClusterInfo 37 | 38 | @typing_extensions.final 39 | class EndPoint(google.protobuf.message.Message): 40 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 41 | 42 | ADDRESS_FIELD_NUMBER: builtins.int 43 | PORT_FIELD_NUMBER: builtins.int 44 | address: builtins.str 45 | port: builtins.int 46 | def __init__( 47 | self, 48 | *, 49 | address: builtins.str = ..., 50 | port: builtins.int = ..., 51 | ) -> None: ... 52 | def ClearField(self, field_name: typing_extensions.Literal["address", b"address", "port", b"port"]) -> None: ... 
53 | 54 | global___EndPoint = EndPoint 55 | 56 | @typing_extensions.final 57 | class MemberInfo(google.protobuf.message.Message): 58 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 59 | 60 | class _VNodeState: 61 | ValueType = typing.NewType("ValueType", builtins.int) 62 | V: typing_extensions.TypeAlias = ValueType 63 | 64 | class _VNodeStateEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[MemberInfo._VNodeState.ValueType], builtins.type): 65 | DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor 66 | Initializing: MemberInfo._VNodeState.ValueType # 0 67 | DiscoverLeader: MemberInfo._VNodeState.ValueType # 1 68 | Unknown: MemberInfo._VNodeState.ValueType # 2 69 | PreReplica: MemberInfo._VNodeState.ValueType # 3 70 | CatchingUp: MemberInfo._VNodeState.ValueType # 4 71 | Clone: MemberInfo._VNodeState.ValueType # 5 72 | Follower: MemberInfo._VNodeState.ValueType # 6 73 | PreLeader: MemberInfo._VNodeState.ValueType # 7 74 | Leader: MemberInfo._VNodeState.ValueType # 8 75 | Manager: MemberInfo._VNodeState.ValueType # 9 76 | ShuttingDown: MemberInfo._VNodeState.ValueType # 10 77 | Shutdown: MemberInfo._VNodeState.ValueType # 11 78 | ReadOnlyLeaderless: MemberInfo._VNodeState.ValueType # 12 79 | PreReadOnlyReplica: MemberInfo._VNodeState.ValueType # 13 80 | ReadOnlyReplica: MemberInfo._VNodeState.ValueType # 14 81 | ResigningLeader: MemberInfo._VNodeState.ValueType # 15 82 | 83 | class VNodeState(_VNodeState, metaclass=_VNodeStateEnumTypeWrapper): ... 
84 | Initializing: MemberInfo.VNodeState.ValueType # 0 85 | DiscoverLeader: MemberInfo.VNodeState.ValueType # 1 86 | Unknown: MemberInfo.VNodeState.ValueType # 2 87 | PreReplica: MemberInfo.VNodeState.ValueType # 3 88 | CatchingUp: MemberInfo.VNodeState.ValueType # 4 89 | Clone: MemberInfo.VNodeState.ValueType # 5 90 | Follower: MemberInfo.VNodeState.ValueType # 6 91 | PreLeader: MemberInfo.VNodeState.ValueType # 7 92 | Leader: MemberInfo.VNodeState.ValueType # 8 93 | Manager: MemberInfo.VNodeState.ValueType # 9 94 | ShuttingDown: MemberInfo.VNodeState.ValueType # 10 95 | Shutdown: MemberInfo.VNodeState.ValueType # 11 96 | ReadOnlyLeaderless: MemberInfo.VNodeState.ValueType # 12 97 | PreReadOnlyReplica: MemberInfo.VNodeState.ValueType # 13 98 | ReadOnlyReplica: MemberInfo.VNodeState.ValueType # 14 99 | ResigningLeader: MemberInfo.VNodeState.ValueType # 15 100 | 101 | INSTANCE_ID_FIELD_NUMBER: builtins.int 102 | TIME_STAMP_FIELD_NUMBER: builtins.int 103 | STATE_FIELD_NUMBER: builtins.int 104 | IS_ALIVE_FIELD_NUMBER: builtins.int 105 | HTTP_END_POINT_FIELD_NUMBER: builtins.int 106 | @property 107 | def instance_id(self) -> shared_pb2.UUID: ... 108 | time_stamp: builtins.int 109 | state: global___MemberInfo.VNodeState.ValueType 110 | is_alive: builtins.bool 111 | @property 112 | def http_end_point(self) -> global___EndPoint: ... 113 | def __init__( 114 | self, 115 | *, 116 | instance_id: shared_pb2.UUID | None = ..., 117 | time_stamp: builtins.int = ..., 118 | state: global___MemberInfo.VNodeState.ValueType = ..., 119 | is_alive: builtins.bool = ..., 120 | http_end_point: global___EndPoint | None = ..., 121 | ) -> None: ... 122 | def HasField(self, field_name: typing_extensions.Literal["http_end_point", b"http_end_point", "instance_id", b"instance_id"]) -> builtins.bool: ... 
123 | def ClearField(self, field_name: typing_extensions.Literal["http_end_point", b"http_end_point", "instance_id", b"instance_id", "is_alive", b"is_alive", "state", b"state", "time_stamp", b"time_stamp"]) -> None: ... 124 | 125 | global___MemberInfo = MemberInfo 126 | -------------------------------------------------------------------------------- /protos/cluster.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package event_store.cluster; 3 | option java_package = "com.eventstore.dbclient.proto.cluster"; 4 | 5 | import "shared.proto"; 6 | 7 | service Gossip { 8 | rpc Update (GossipRequest) returns (ClusterInfo); 9 | rpc Read (event_store.client.Empty) returns (ClusterInfo); 10 | } 11 | 12 | service Elections { 13 | rpc ViewChange (ViewChangeRequest) returns (event_store.client.Empty); 14 | rpc ViewChangeProof (ViewChangeProofRequest) returns (event_store.client.Empty); 15 | rpc Prepare (PrepareRequest) returns (event_store.client.Empty); 16 | rpc PrepareOk (PrepareOkRequest) returns (event_store.client.Empty); 17 | rpc Proposal (ProposalRequest) returns (event_store.client.Empty); 18 | rpc Accept (AcceptRequest) returns (event_store.client.Empty); 19 | 20 | rpc LeaderIsResigning (LeaderIsResigningRequest) returns (event_store.client.Empty); 21 | rpc LeaderIsResigningOk (LeaderIsResigningOkRequest) returns (event_store.client.Empty); 22 | } 23 | 24 | message GossipRequest { 25 | ClusterInfo info = 1; 26 | EndPoint server = 2; 27 | } 28 | 29 | message ViewChangeRequest { 30 | event_store.client.UUID server_id = 1; 31 | EndPoint server_http = 2; 32 | int32 attempted_view = 3; 33 | } 34 | 35 | message ViewChangeProofRequest { 36 | event_store.client.UUID server_id = 1; 37 | EndPoint server_http = 2; 38 | int32 installed_view = 3; 39 | } 40 | 41 | message PrepareRequest { 42 | event_store.client.UUID server_id = 1; 43 | EndPoint server_http = 2; 44 | int32 view = 3; 45 | } 46 | 47 | message 
PrepareOkRequest { 48 | int32 view = 1; 49 | event_store.client.UUID server_id = 2; 50 | EndPoint server_http = 3; 51 | int32 epoch_number = 4; 52 | int64 epoch_position = 5; 53 | event_store.client.UUID epoch_id = 6; 54 | event_store.client.UUID epoch_leader_instance_id = 7; 55 | int64 last_commit_position = 8; 56 | int64 writer_checkpoint = 9; 57 | int64 chaser_checkpoint = 10; 58 | int32 node_priority = 11; 59 | ClusterInfo cluster_info = 12; 60 | } 61 | 62 | message ProposalRequest { 63 | event_store.client.UUID server_id = 1; 64 | EndPoint server_http = 2; 65 | event_store.client.UUID leader_id = 3; 66 | EndPoint leader_http = 4; 67 | int32 view = 5; 68 | int32 epoch_number = 6; 69 | int64 epoch_position = 7; 70 | event_store.client.UUID epoch_id = 8; 71 | event_store.client.UUID epoch_leader_instance_id = 9; 72 | int64 last_commit_position = 10; 73 | int64 writer_checkpoint = 11; 74 | int64 chaser_checkpoint = 12; 75 | int32 node_priority = 13; 76 | } 77 | 78 | message AcceptRequest { 79 | event_store.client.UUID server_id = 1; 80 | EndPoint server_http = 2; 81 | event_store.client.UUID leader_id = 3; 82 | EndPoint leader_http = 4; 83 | int32 view = 5; 84 | } 85 | 86 | message LeaderIsResigningRequest { 87 | event_store.client.UUID leader_id = 1; 88 | EndPoint leader_http = 2; 89 | } 90 | 91 | message LeaderIsResigningOkRequest { 92 | event_store.client.UUID leader_id = 1; 93 | EndPoint leader_http = 2; 94 | event_store.client.UUID server_id = 3; 95 | EndPoint server_http = 4; 96 | } 97 | 98 | message ClusterInfo { 99 | repeated MemberInfo members = 1; 100 | } 101 | 102 | message EndPoint { 103 | string address = 1; 104 | uint32 port = 2; 105 | } 106 | 107 | message MemberInfo { 108 | enum VNodeState { 109 | Initializing = 0; 110 | DiscoverLeader = 1; 111 | Unknown = 2; 112 | PreReplica = 3; 113 | CatchingUp = 4; 114 | Clone = 5; 115 | Follower = 6; 116 | PreLeader = 7; 117 | Leader = 8; 118 | Manager = 9; 119 | ShuttingDown = 10; 120 | Shutdown = 11; 121 | 
ReadOnlyLeaderless = 12; 122 | PreReadOnlyReplica = 13; 123 | ReadOnlyReplica = 14; 124 | ResigningLeader = 15; 125 | } 126 | event_store.client.UUID instance_id = 1; 127 | int64 time_stamp = 2; 128 | VNodeState state = 3; 129 | bool is_alive = 4; 130 | EndPoint http_end_point = 5; 131 | EndPoint internal_tcp = 6; 132 | EndPoint external_tcp = 7; 133 | bool internal_tcp_uses_tls = 8; 134 | bool external_tcp_uses_tls = 9; 135 | 136 | int64 last_commit_position = 10; 137 | int64 writer_checkpoint = 11; 138 | int64 chaser_checkpoint = 12; 139 | int64 epoch_position = 13; 140 | int32 epoch_number = 14; 141 | event_store.client.UUID epoch_id = 15; 142 | 143 | int32 node_priority = 16; 144 | bool is_read_only_replica = 17; 145 | string advertise_host_to_client_as = 18; 146 | uint32 advertise_http_port_to_client_as = 19; 147 | uint32 advertise_tcp_port_to_client_as = 20; 148 | } 149 | 150 | message ReplicaLogWrite{ 151 | int64 log_position = 1; 152 | bytes replica_id = 2; 153 | } 154 | 155 | message ReplicatedTo{ 156 | int64 log_position = 1; 157 | } 158 | 159 | message Epoch{ 160 | int64 epoch_position = 1; 161 | int32 epoch_number = 2; 162 | bytes epoch_id = 3; 163 | } 164 | 165 | message SubscribeReplica{ 166 | int64 log_position = 1; 167 | bytes chunk_id = 2; 168 | repeated Epoch LastEpochs = 3; 169 | bytes ip = 4; 170 | int32 port = 5; 171 | bytes leader_id = 6; 172 | bytes subscription_id = 7; 173 | bool is_promotable = 8; 174 | int32 version = 9; 175 | } 176 | 177 | message ReplicaSubscriptionRetry{ 178 | bytes leader_id = 1; 179 | bytes subscription_id = 2; 180 | } 181 | 182 | message ReplicaSubscribed{ 183 | bytes leader_id = 1; 184 | bytes subscription_id = 2; 185 | int64 subscription_position = 3; 186 | } 187 | 188 | message ReplicaLogPositionAck{ 189 | bytes subscription_id = 1; 190 | int64 replication_log_position = 2; 191 | int64 writer_log_position = 3; 192 | } 193 | 194 | message CreateChunk{ 195 | bytes leader_id = 1; 196 | bytes subscription_id = 2; 197 
| bytes chunk_header_bytes = 3; 198 | int32 file_size = 4; 199 | bool is_completed_chunk = 5; 200 | } 201 | 202 | message RawChunkBulk{ 203 | bytes leader_id = 1; 204 | bytes subscription_id = 2; 205 | int32 chunk_start_number = 3; 206 | int32 chunk_end_number = 4; 207 | int32 raw_position = 5; 208 | bytes raw_bytes = 6; 209 | bool complete_chunk = 7; 210 | } 211 | 212 | message DataChunkBulk{ 213 | bytes leader_id = 1; 214 | bytes subscription_id = 2; 215 | int32 chunk_start_number = 3; 216 | int32 chunk_end_number = 4; 217 | int64 subscription_position = 5; 218 | bytes data_bytes = 6; 219 | bool complete_chunk = 7; 220 | } 221 | 222 | message FollowerAssignment{ 223 | bytes leader_id = 1; 224 | bytes subscription_id = 2; 225 | } 226 | 227 | message CloneAssignment{ 228 | bytes leader_id = 1; 229 | bytes subscription_id = 2; 230 | } 231 | 232 | message DropSubscription{ 233 | bytes leader_id = 1; 234 | bytes subscription_id = 2; 235 | } 236 | -------------------------------------------------------------------------------- /esdb/generated/users_pb2.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Generated by the protocol buffer compiler. DO NOT EDIT! 
3 | # source: users.proto 4 | # Protobuf Python Version: 4.25.0 5 | """Generated protocol buffer code.""" 6 | from google.protobuf import descriptor as _descriptor 7 | from google.protobuf import descriptor_pool as _descriptor_pool 8 | from google.protobuf import symbol_database as _symbol_database 9 | from google.protobuf.internal import builder as _builder 10 | # @@protoc_insertion_point(imports) 11 | 12 | _sym_db = _symbol_database.Default() 13 | 14 | 15 | 16 | 17 | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0busers.proto\x12\x18\x65vent_store.client.users\"\x9d\x01\n\tCreateReq\x12<\n\x07options\x18\x01 \x01(\x0b\x32+.event_store.client.users.CreateReq.Options\x1aR\n\x07Options\x12\x12\n\nlogin_name\x18\x01 \x01(\t\x12\x10\n\x08password\x18\x02 \x01(\t\x12\x11\n\tfull_name\x18\x03 \x01(\t\x12\x0e\n\x06groups\x18\x04 \x03(\t\"\x0c\n\nCreateResp\"\x9d\x01\n\tUpdateReq\x12<\n\x07options\x18\x01 \x01(\x0b\x32+.event_store.client.users.UpdateReq.Options\x1aR\n\x07Options\x12\x12\n\nlogin_name\x18\x01 \x01(\t\x12\x10\n\x08password\x18\x02 \x01(\t\x12\x11\n\tfull_name\x18\x03 \x01(\t\x12\x0e\n\x06groups\x18\x04 \x03(\t\"\x0c\n\nUpdateResp\"h\n\tDeleteReq\x12<\n\x07options\x18\x01 \x01(\x0b\x32+.event_store.client.users.DeleteReq.Options\x1a\x1d\n\x07Options\x12\x12\n\nlogin_name\x18\x01 \x01(\t\"\x0c\n\nDeleteResp\"h\n\tEnableReq\x12<\n\x07options\x18\x01 \x01(\x0b\x32+.event_store.client.users.EnableReq.Options\x1a\x1d\n\x07Options\x12\x12\n\nlogin_name\x18\x01 \x01(\t\"\x0c\n\nEnableResp\"j\n\nDisableReq\x12=\n\x07options\x18\x01 \x01(\x0b\x32,.event_store.client.users.DisableReq.Options\x1a\x1d\n\x07Options\x12\x12\n\nlogin_name\x18\x01 \x01(\t\"\r\n\x0b\x44isableResp\"j\n\nDetailsReq\x12=\n\x07options\x18\x01 \x01(\x0b\x32,.event_store.client.users.DetailsReq.Options\x1a\x1d\n\x07Options\x12\x12\n\nlogin_name\x18\x01 \x01(\t\"\xa8\x02\n\x0b\x44\x65tailsResp\x12G\n\x0cuser_details\x18\x01 
\x01(\x0b\x32\x31.event_store.client.users.DetailsResp.UserDetails\x1a\xcf\x01\n\x0bUserDetails\x12\x12\n\nlogin_name\x18\x01 \x01(\t\x12\x11\n\tfull_name\x18\x02 \x01(\t\x12\x0e\n\x06groups\x18\x03 \x03(\t\x12P\n\x0clast_updated\x18\x04 \x01(\x0b\x32:.event_store.client.users.DetailsResp.UserDetails.DateTime\x12\x10\n\x08\x64isabled\x18\x05 \x01(\x08\x1a%\n\x08\x44\x61teTime\x12\x19\n\x11ticks_since_epoch\x18\x01 \x01(\x03\"\xa8\x01\n\x11\x43hangePasswordReq\x12\x44\n\x07options\x18\x01 \x01(\x0b\x32\x33.event_store.client.users.ChangePasswordReq.Options\x1aM\n\x07Options\x12\x12\n\nlogin_name\x18\x01 \x01(\t\x12\x18\n\x10\x63urrent_password\x18\x02 \x01(\t\x12\x14\n\x0cnew_password\x18\x03 \x01(\t\"\x14\n\x12\x43hangePasswordResp\"\x8c\x01\n\x10ResetPasswordReq\x12\x43\n\x07options\x18\x01 \x01(\x0b\x32\x32.event_store.client.users.ResetPasswordReq.Options\x1a\x33\n\x07Options\x12\x12\n\nlogin_name\x18\x01 \x01(\t\x12\x14\n\x0cnew_password\x18\x02 \x01(\t\"\x13\n\x11ResetPasswordResp2\xe4\x05\n\x05Users\x12S\n\x06\x43reate\x12#.event_store.client.users.CreateReq\x1a$.event_store.client.users.CreateResp\x12S\n\x06Update\x12#.event_store.client.users.UpdateReq\x1a$.event_store.client.users.UpdateResp\x12S\n\x06\x44\x65lete\x12#.event_store.client.users.DeleteReq\x1a$.event_store.client.users.DeleteResp\x12V\n\x07\x44isable\x12$.event_store.client.users.DisableReq\x1a%.event_store.client.users.DisableResp\x12S\n\x06\x45nable\x12#.event_store.client.users.EnableReq\x1a$.event_store.client.users.EnableResp\x12X\n\x07\x44\x65tails\x12$.event_store.client.users.DetailsReq\x1a%.event_store.client.users.DetailsResp0\x01\x12k\n\x0e\x43hangePassword\x12+.event_store.client.users.ChangePasswordReq\x1a,.event_store.client.users.ChangePasswordResp\x12h\n\rResetPassword\x12*.event_store.client.users.ResetPasswordReq\x1a+.event_store.client.users.ResetPasswordRespB%\n#com.eventstore.dbclient.proto.usersb\x06proto3') 18 | 19 | _globals = globals() 20 | 
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) 21 | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'users_pb2', _globals) 22 | if _descriptor._USE_C_DESCRIPTORS == False: 23 | _globals['DESCRIPTOR']._options = None 24 | _globals['DESCRIPTOR']._serialized_options = b'\n#com.eventstore.dbclient.proto.users' 25 | _globals['_CREATEREQ']._serialized_start=42 26 | _globals['_CREATEREQ']._serialized_end=199 27 | _globals['_CREATEREQ_OPTIONS']._serialized_start=117 28 | _globals['_CREATEREQ_OPTIONS']._serialized_end=199 29 | _globals['_CREATERESP']._serialized_start=201 30 | _globals['_CREATERESP']._serialized_end=213 31 | _globals['_UPDATEREQ']._serialized_start=216 32 | _globals['_UPDATEREQ']._serialized_end=373 33 | _globals['_UPDATEREQ_OPTIONS']._serialized_start=117 34 | _globals['_UPDATEREQ_OPTIONS']._serialized_end=199 35 | _globals['_UPDATERESP']._serialized_start=375 36 | _globals['_UPDATERESP']._serialized_end=387 37 | _globals['_DELETEREQ']._serialized_start=389 38 | _globals['_DELETEREQ']._serialized_end=493 39 | _globals['_DELETEREQ_OPTIONS']._serialized_start=117 40 | _globals['_DELETEREQ_OPTIONS']._serialized_end=146 41 | _globals['_DELETERESP']._serialized_start=495 42 | _globals['_DELETERESP']._serialized_end=507 43 | _globals['_ENABLEREQ']._serialized_start=509 44 | _globals['_ENABLEREQ']._serialized_end=613 45 | _globals['_ENABLEREQ_OPTIONS']._serialized_start=117 46 | _globals['_ENABLEREQ_OPTIONS']._serialized_end=146 47 | _globals['_ENABLERESP']._serialized_start=615 48 | _globals['_ENABLERESP']._serialized_end=627 49 | _globals['_DISABLEREQ']._serialized_start=629 50 | _globals['_DISABLEREQ']._serialized_end=735 51 | _globals['_DISABLEREQ_OPTIONS']._serialized_start=117 52 | _globals['_DISABLEREQ_OPTIONS']._serialized_end=146 53 | _globals['_DISABLERESP']._serialized_start=737 54 | _globals['_DISABLERESP']._serialized_end=750 55 | _globals['_DETAILSREQ']._serialized_start=752 56 | _globals['_DETAILSREQ']._serialized_end=858 57 | 
_globals['_DETAILSREQ_OPTIONS']._serialized_start=117 58 | _globals['_DETAILSREQ_OPTIONS']._serialized_end=146 59 | _globals['_DETAILSRESP']._serialized_start=861 60 | _globals['_DETAILSRESP']._serialized_end=1157 61 | _globals['_DETAILSRESP_USERDETAILS']._serialized_start=950 62 | _globals['_DETAILSRESP_USERDETAILS']._serialized_end=1157 63 | _globals['_DETAILSRESP_USERDETAILS_DATETIME']._serialized_start=1120 64 | _globals['_DETAILSRESP_USERDETAILS_DATETIME']._serialized_end=1157 65 | _globals['_CHANGEPASSWORDREQ']._serialized_start=1160 66 | _globals['_CHANGEPASSWORDREQ']._serialized_end=1328 67 | _globals['_CHANGEPASSWORDREQ_OPTIONS']._serialized_start=1251 68 | _globals['_CHANGEPASSWORDREQ_OPTIONS']._serialized_end=1328 69 | _globals['_CHANGEPASSWORDRESP']._serialized_start=1330 70 | _globals['_CHANGEPASSWORDRESP']._serialized_end=1350 71 | _globals['_RESETPASSWORDREQ']._serialized_start=1353 72 | _globals['_RESETPASSWORDREQ']._serialized_end=1493 73 | _globals['_RESETPASSWORDREQ_OPTIONS']._serialized_start=1442 74 | _globals['_RESETPASSWORDREQ_OPTIONS']._serialized_end=1493 75 | _globals['_RESETPASSWORDRESP']._serialized_start=1495 76 | _globals['_RESETPASSWORDRESP']._serialized_end=1514 77 | _globals['_USERS']._serialized_start=1517 78 | _globals['_USERS']._serialized_end=2257 79 | # @@protoc_insertion_point(module_scope) 80 | -------------------------------------------------------------------------------- /protos/code.proto: -------------------------------------------------------------------------------- 1 | // Copyright 2020 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | syntax = "proto3"; 16 | 17 | package google.rpc; 18 | 19 | option go_package = "google.golang.org/genproto/googleapis/rpc/code;code"; 20 | option java_multiple_files = true; 21 | option java_outer_classname = "CodeProto"; 22 | option java_package = "com.google.rpc"; 23 | option objc_class_prefix = "RPC"; 24 | 25 | // The canonical error codes for gRPC APIs. 26 | // 27 | // 28 | // Sometimes multiple error codes may apply. Services should return 29 | // the most specific error code that applies. For example, prefer 30 | // `OUT_OF_RANGE` over `FAILED_PRECONDITION` if both codes apply. 31 | // Similarly prefer `NOT_FOUND` or `ALREADY_EXISTS` over `FAILED_PRECONDITION`. 32 | enum Code { 33 | // Not an error; returned on success 34 | // 35 | // HTTP Mapping: 200 OK 36 | OK = 0; 37 | 38 | // The operation was cancelled, typically by the caller. 39 | // 40 | // HTTP Mapping: 499 Client Closed Request 41 | CANCELLED = 1; 42 | 43 | // Unknown error. For example, this error may be returned when 44 | // a `Status` value received from another address space belongs to 45 | // an error space that is not known in this address space. Also 46 | // errors raised by APIs that do not return enough error information 47 | // may be converted to this error. 48 | // 49 | // HTTP Mapping: 500 Internal Server Error 50 | UNKNOWN = 2; 51 | 52 | // The client specified an invalid argument. Note that this differs 53 | // from `FAILED_PRECONDITION`. 
`INVALID_ARGUMENT` indicates arguments 54 | // that are problematic regardless of the state of the system 55 | // (e.g., a malformed file name). 56 | // 57 | // HTTP Mapping: 400 Bad Request 58 | INVALID_ARGUMENT = 3; 59 | 60 | // The deadline expired before the operation could complete. For operations 61 | // that change the state of the system, this error may be returned 62 | // even if the operation has completed successfully. For example, a 63 | // successful response from a server could have been delayed long 64 | // enough for the deadline to expire. 65 | // 66 | // HTTP Mapping: 504 Gateway Timeout 67 | DEADLINE_EXCEEDED = 4; 68 | 69 | // Some requested entity (e.g., file or directory) was not found. 70 | // 71 | // Note to server developers: if a request is denied for an entire class 72 | // of users, such as gradual feature rollout or undocumented whitelist, 73 | // `NOT_FOUND` may be used. If a request is denied for some users within 74 | // a class of users, such as user-based access control, `PERMISSION_DENIED` 75 | // must be used. 76 | // 77 | // HTTP Mapping: 404 Not Found 78 | NOT_FOUND = 5; 79 | 80 | // The entity that a client attempted to create (e.g., file or directory) 81 | // already exists. 82 | // 83 | // HTTP Mapping: 409 Conflict 84 | ALREADY_EXISTS = 6; 85 | 86 | // The caller does not have permission to execute the specified 87 | // operation. `PERMISSION_DENIED` must not be used for rejections 88 | // caused by exhausting some resource (use `RESOURCE_EXHAUSTED` 89 | // instead for those errors). `PERMISSION_DENIED` must not be 90 | // used if the caller can not be identified (use `UNAUTHENTICATED` 91 | // instead for those errors). This error code does not imply the 92 | // request is valid or the requested entity exists or satisfies 93 | // other pre-conditions. 94 | // 95 | // HTTP Mapping: 403 Forbidden 96 | PERMISSION_DENIED = 7; 97 | 98 | // The request does not have valid authentication credentials for the 99 | // operation. 
100 | // 101 | // HTTP Mapping: 401 Unauthorized 102 | UNAUTHENTICATED = 16; 103 | 104 | // Some resource has been exhausted, perhaps a per-user quota, or 105 | // perhaps the entire file system is out of space. 106 | // 107 | // HTTP Mapping: 429 Too Many Requests 108 | RESOURCE_EXHAUSTED = 8; 109 | 110 | // The operation was rejected because the system is not in a state 111 | // required for the operation's execution. For example, the directory 112 | // to be deleted is non-empty, an rmdir operation is applied to 113 | // a non-directory, etc. 114 | // 115 | // Service implementors can use the following guidelines to decide 116 | // between `FAILED_PRECONDITION`, `ABORTED`, and `UNAVAILABLE`: 117 | // (a) Use `UNAVAILABLE` if the client can retry just the failing call. 118 | // (b) Use `ABORTED` if the client should retry at a higher level 119 | // (e.g., when a client-specified test-and-set fails, indicating the 120 | // client should restart a read-modify-write sequence). 121 | // (c) Use `FAILED_PRECONDITION` if the client should not retry until 122 | // the system state has been explicitly fixed. E.g., if an "rmdir" 123 | // fails because the directory is non-empty, `FAILED_PRECONDITION` 124 | // should be returned since the client should not retry unless 125 | // the files are deleted from the directory. 126 | // 127 | // HTTP Mapping: 400 Bad Request 128 | FAILED_PRECONDITION = 9; 129 | 130 | // The operation was aborted, typically due to a concurrency issue such as 131 | // a sequencer check failure or transaction abort. 132 | // 133 | // See the guidelines above for deciding between `FAILED_PRECONDITION`, 134 | // `ABORTED`, and `UNAVAILABLE`. 135 | // 136 | // HTTP Mapping: 409 Conflict 137 | ABORTED = 10; 138 | 139 | // The operation was attempted past the valid range. E.g., seeking or 140 | // reading past end-of-file. 141 | // 142 | // Unlike `INVALID_ARGUMENT`, this error indicates a problem that may 143 | // be fixed if the system state changes. 
For example, a 32-bit file 144 | // system will generate `INVALID_ARGUMENT` if asked to read at an 145 | // offset that is not in the range [0,2^32-1], but it will generate 146 | // `OUT_OF_RANGE` if asked to read from an offset past the current 147 | // file size. 148 | // 149 | // There is a fair bit of overlap between `FAILED_PRECONDITION` and 150 | // `OUT_OF_RANGE`. We recommend using `OUT_OF_RANGE` (the more specific 151 | // error) when it applies so that callers who are iterating through 152 | // a space can easily look for an `OUT_OF_RANGE` error to detect when 153 | // they are done. 154 | // 155 | // HTTP Mapping: 400 Bad Request 156 | OUT_OF_RANGE = 11; 157 | 158 | // The operation is not implemented or is not supported/enabled in this 159 | // service. 160 | // 161 | // HTTP Mapping: 501 Not Implemented 162 | UNIMPLEMENTED = 12; 163 | 164 | // Internal errors. This means that some invariants expected by the 165 | // underlying system have been broken. This error code is reserved 166 | // for serious errors. 167 | // 168 | // HTTP Mapping: 500 Internal Server Error 169 | INTERNAL = 13; 170 | 171 | // The service is currently unavailable. This is most likely a 172 | // transient condition, which can be corrected by retrying with 173 | // a backoff. Note that it is not always safe to retry 174 | // non-idempotent operations. 175 | // 176 | // See the guidelines above for deciding between `FAILED_PRECONDITION`, 177 | // `ABORTED`, and `UNAVAILABLE`. 178 | // 179 | // HTTP Mapping: 503 Service Unavailable 180 | UNAVAILABLE = 14; 181 | 182 | // Unrecoverable data loss or corruption. 
183 | // 184 | // HTTP Mapping: 500 Internal Server Error 185 | DATA_LOSS = 15; 186 | } 187 | -------------------------------------------------------------------------------- /esdb/subscriptions/types.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import enum 4 | import json 5 | from dataclasses import dataclass 6 | from typing import Mapping, Optional, Type, TypeVar, Union 7 | 8 | from esdb.generated.persistent_pb2 import CreateReq, ReadReq, ReadResp 9 | from esdb.generated.persistent_pb2 import SubscriptionInfo as SubscriptionInfoPB 10 | from esdb.generated.persistent_pb2 import UpdateReq 11 | from esdb.streams.types import ContentType 12 | 13 | 14 | @dataclass 15 | class Event: 16 | id: str 17 | retry_count: int 18 | stream: str 19 | prepare_position: int 20 | commit_position: int 21 | metadata: Mapping[str, str] 22 | type: str 23 | data: Union[bytes, dict] 24 | 25 | @staticmethod 26 | def from_read_response_event(event: ReadResp.ReadEvent) -> Event: 27 | return Event( 28 | id=event.event.id.string, 29 | retry_count=event.retry_count, 30 | stream=event.event.stream_identifier.stream_name.decode(), 31 | prepare_position=event.event.prepare_position, 32 | commit_position=event.event.commit_position, 33 | metadata=event.event.metadata, 34 | type=event.event.metadata["type"], 35 | data=( 36 | json.loads(event.event.data) 37 | if event.event.metadata["content-type"] == ContentType.JSON.value 38 | else event.event.data 39 | ), 40 | ) 41 | 42 | 43 | SettingsType = TypeVar("SettingsType", CreateReq.Settings, UpdateReq.Settings) 44 | 45 | 46 | @dataclass 47 | class SubscriptionSettings: 48 | class ConsumerStrategy(enum.Enum): 49 | DISPATCH_TO_SINGLE = "DispatchToSingle" 50 | ROUND_ROBIN = "RoundRobin" 51 | PINNED = "Pinned" 52 | 53 | live_buffer_size: int 54 | read_batch_size: int 55 | history_buffer_size: int 56 | checkpoint_ms: int 57 | resolve_links: Optional[bool] = None 58 | 
extra_statistics: Optional[bool] = None 59 | max_retry_count: Optional[int] = None 60 | min_checkpoint_count: Optional[int] = None 61 | max_checkpoint_count: Optional[int] = None 62 | max_subscriber_count: Optional[int] = None 63 | message_timeout_ms: Optional[int] = None 64 | consumer_strategy: Optional[ConsumerStrategy] = None 65 | 66 | def to_protobuf(self, cls: Type[SettingsType]) -> SettingsType: 67 | assert ( 68 | self.read_batch_size < self.live_buffer_size 69 | ), "read_batch_size may not be greater than or equal to live_buffer_size" 70 | assert ( 71 | self.read_batch_size < self.history_buffer_size 72 | ), "read_batch_size may not be greater than or equal to history_buffer_size" 73 | settings = cls( 74 | live_buffer_size=self.live_buffer_size, 75 | read_batch_size=self.read_batch_size, 76 | history_buffer_size=self.history_buffer_size, 77 | ) 78 | 79 | if self.resolve_links is not None: 80 | settings.resolve_links = self.resolve_links 81 | if self.extra_statistics is not None: 82 | settings.extra_statistics = self.extra_statistics 83 | if self.max_retry_count is not None: 84 | settings.max_retry_count = self.max_retry_count 85 | if self.min_checkpoint_count is not None: 86 | settings.min_checkpoint_count = self.min_checkpoint_count 87 | if self.max_checkpoint_count is not None: 88 | settings.max_checkpoint_count = self.max_checkpoint_count 89 | if self.max_subscriber_count is not None: 90 | settings.max_subscriber_count = self.max_subscriber_count 91 | if self.consumer_strategy: 92 | if cls is CreateReq.Settings: 93 | settings.consumer_strategy = self.consumer_strategy.value # type: ignore 94 | else: 95 | settings.named_consumer_strategy = { # type: ignore 96 | self.ConsumerStrategy.DISPATCH_TO_SINGLE: 0, 97 | self.ConsumerStrategy.ROUND_ROBIN: 1, 98 | self.ConsumerStrategy.PINNED: 2, 99 | }[self.consumer_strategy] 100 | 101 | settings.checkpoint_after_ms = self.checkpoint_ms 102 | 103 | if self.message_timeout_ms: 104 | settings.message_timeout_ms = 
self.message_timeout_ms 105 | 106 | return settings 107 | 108 | 109 | class NackAction(enum.Enum): 110 | UNKNOWN = ReadReq.Nack.Action.Unknown 111 | PARK = ReadReq.Nack.Action.Park 112 | RETRY = ReadReq.Nack.Action.Retry 113 | SKIP = ReadReq.Nack.Action.Skip 114 | STOP = ReadReq.Nack.Action.Stop 115 | 116 | 117 | @dataclass 118 | class ConnectionInfo: 119 | username: str 120 | average_items_per_second: int 121 | total_items: int 122 | count_since_last_measurement: int 123 | observed_measurements: dict[str, int] 124 | available_slots: int 125 | in_flight_messages: int 126 | connection_name: str 127 | 128 | @classmethod 129 | def from_protobuf(cls, info: SubscriptionInfoPB.ConnectionInfo) -> ConnectionInfo: 130 | return cls( 131 | username=info.username, 132 | average_items_per_second=info.average_items_per_second, 133 | total_items=info.total_items, 134 | count_since_last_measurement=info.count_since_last_measurement, 135 | observed_measurements={m.key: m.value for m in info.observed_measurements}, 136 | available_slots=info.available_slots, 137 | in_flight_messages=info.in_flight_messages, 138 | connection_name=info.connection_name, 139 | ) 140 | 141 | 142 | @dataclass 143 | class SubscriptionInfo: 144 | event_source: str 145 | group_name: str 146 | status: str 147 | connections: list[ConnectionInfo] 148 | average_per_second: int 149 | total_items: int 150 | count_since_last_measurement: int 151 | last_checkpointed_event_position: str 152 | last_known_event_position: str 153 | resolve_link_tos: bool 154 | start_from: str 155 | message_timeout_milliseconds: int 156 | extra_statistics: bool 157 | max_retry_count: int 158 | live_buffer_size: int 159 | buffer_size: int 160 | read_batch_size: int 161 | check_point_after_milliseconds: int 162 | min_check_point_count: int 163 | max_check_point_count: int 164 | read_buffer_count: int 165 | live_buffer_count: int 166 | retry_buffer_count: int 167 | total_in_flight_messages: int 168 | outstanding_messages_count: int 169 | 
consumer_strategy: str 170 | max_subscriber_count: int 171 | parked_message_count: int 172 | 173 | @classmethod 174 | def from_protobuf(cls, info: SubscriptionInfoPB) -> SubscriptionInfo: 175 | return cls( 176 | event_source=info.event_source, 177 | group_name=info.group_name, 178 | status=info.status, 179 | connections=[ConnectionInfo.from_protobuf(c) for c in info.connections], 180 | start_from=info.start_from, 181 | message_timeout_milliseconds=info.message_timeout_milliseconds, 182 | extra_statistics=info.extra_statistics, 183 | max_retry_count=info.max_retry_count, 184 | live_buffer_size=info.live_buffer_size, 185 | buffer_size=info.buffer_size, 186 | read_batch_size=info.read_batch_size, 187 | check_point_after_milliseconds=info.check_point_after_milliseconds, 188 | min_check_point_count=info.min_check_point_count, 189 | max_check_point_count=info.max_check_point_count, 190 | consumer_strategy=info.named_consumer_strategy, 191 | max_subscriber_count=info.max_subscriber_count, 192 | average_per_second=info.average_per_second, 193 | count_since_last_measurement=info.count_since_last_measurement, 194 | last_checkpointed_event_position=info.last_checkpointed_event_position, 195 | last_known_event_position=info.last_known_event_position, 196 | live_buffer_count=info.live_buffer_count, 197 | outstanding_messages_count=info.outstanding_messages_count, 198 | parked_message_count=info.parked_message_count, 199 | read_buffer_count=info.read_buffer_count, 200 | resolve_link_tos=info.resolve_link_tos, 201 | retry_buffer_count=info.retry_buffer_count, 202 | total_in_flight_messages=info.total_in_flight_messages, 203 | total_items=info.total_items, 204 | ) 205 | -------------------------------------------------------------------------------- /protos/streams.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package event_store.client.streams; 3 | option java_package = "com.eventstore.dbclient.proto.streams"; 4 
| 5 | import "shared.proto"; 6 | import "status.proto"; 7 | import "google/protobuf/duration.proto"; 8 | import "google/protobuf/empty.proto"; 9 | import "google/protobuf/timestamp.proto"; 10 | 11 | service Streams { 12 | rpc Read (ReadReq) returns (stream ReadResp); 13 | rpc Append (stream AppendReq) returns (AppendResp); 14 | rpc Delete (DeleteReq) returns (DeleteResp); 15 | rpc Tombstone (TombstoneReq) returns (TombstoneResp); 16 | rpc BatchAppend (stream BatchAppendReq) returns (stream BatchAppendResp); 17 | } 18 | 19 | message ReadReq { 20 | Options options = 1; 21 | 22 | message Options { 23 | oneof stream_option { 24 | StreamOptions stream = 1; 25 | AllOptions all = 2; 26 | } 27 | ReadDirection read_direction = 3; 28 | bool resolve_links = 4; 29 | oneof count_option { 30 | uint64 count = 5; 31 | SubscriptionOptions subscription = 6; 32 | } 33 | oneof filter_option { 34 | FilterOptions filter = 7; 35 | event_store.client.Empty no_filter = 8; 36 | } 37 | UUIDOption uuid_option = 9; 38 | ControlOption control_option = 10; 39 | 40 | enum ReadDirection { 41 | Forwards = 0; 42 | Backwards = 1; 43 | } 44 | message StreamOptions { 45 | event_store.client.StreamIdentifier stream_identifier = 1; 46 | oneof revision_option { 47 | uint64 revision = 2; 48 | event_store.client.Empty start = 3; 49 | event_store.client.Empty end = 4; 50 | } 51 | } 52 | message AllOptions { 53 | oneof all_option { 54 | Position position = 1; 55 | event_store.client.Empty start = 2; 56 | event_store.client.Empty end = 3; 57 | } 58 | } 59 | message SubscriptionOptions { 60 | } 61 | message Position { 62 | uint64 commit_position = 1; 63 | uint64 prepare_position = 2; 64 | } 65 | message FilterOptions { 66 | oneof filter { 67 | Expression stream_identifier = 1; 68 | Expression event_type = 2; 69 | } 70 | oneof window { 71 | uint32 max = 3; 72 | event_store.client.Empty count = 4; 73 | } 74 | uint32 checkpointIntervalMultiplier = 5; 75 | 76 | message Expression { 77 | string regex = 1; 78 | 
repeated string prefix = 2; 79 | } 80 | } 81 | message UUIDOption { 82 | oneof content { 83 | event_store.client.Empty structured = 1; 84 | event_store.client.Empty string = 2; 85 | } 86 | } 87 | message ControlOption { 88 | uint32 compatibility = 1; 89 | } 90 | } 91 | } 92 | 93 | message ReadResp { 94 | oneof content { 95 | ReadEvent event = 1; 96 | SubscriptionConfirmation confirmation = 2; 97 | Checkpoint checkpoint = 3; 98 | StreamNotFound stream_not_found = 4; 99 | uint64 first_stream_position = 5; 100 | uint64 last_stream_position = 6; 101 | AllStreamPosition last_all_stream_position = 7; 102 | } 103 | 104 | message ReadEvent { 105 | RecordedEvent event = 1; 106 | RecordedEvent link = 2; 107 | oneof position { 108 | uint64 commit_position = 3; 109 | event_store.client.Empty no_position = 4; 110 | } 111 | 112 | message RecordedEvent { 113 | event_store.client.UUID id = 1; 114 | event_store.client.StreamIdentifier stream_identifier = 2; 115 | uint64 stream_revision = 3; 116 | uint64 prepare_position = 4; 117 | uint64 commit_position = 5; 118 | map metadata = 6; 119 | bytes custom_metadata = 7; 120 | bytes data = 8; 121 | } 122 | } 123 | message SubscriptionConfirmation { 124 | string subscription_id = 1; 125 | } 126 | message Checkpoint { 127 | uint64 commit_position = 1; 128 | uint64 prepare_position = 2; 129 | } 130 | message StreamNotFound { 131 | event_store.client.StreamIdentifier stream_identifier = 1; 132 | } 133 | } 134 | 135 | message AppendReq { 136 | oneof content { 137 | Options options = 1; 138 | ProposedMessage proposed_message = 2; 139 | } 140 | 141 | message Options { 142 | event_store.client.StreamIdentifier stream_identifier = 1; 143 | oneof expected_stream_revision { 144 | uint64 revision = 2; 145 | event_store.client.Empty no_stream = 3; 146 | event_store.client.Empty any = 4; 147 | event_store.client.Empty stream_exists = 5; 148 | } 149 | } 150 | message ProposedMessage { 151 | event_store.client.UUID id = 1; 152 | map metadata = 2; 153 | 
bytes custom_metadata = 3; 154 | bytes data = 4; 155 | } 156 | } 157 | 158 | message AppendResp { 159 | oneof result { 160 | Success success = 1; 161 | WrongExpectedVersion wrong_expected_version = 2; 162 | } 163 | 164 | message Position { 165 | uint64 commit_position = 1; 166 | uint64 prepare_position = 2; 167 | } 168 | 169 | message Success { 170 | oneof current_revision_option { 171 | uint64 current_revision = 1; 172 | event_store.client.Empty no_stream = 2; 173 | } 174 | oneof position_option { 175 | Position position = 3; 176 | event_store.client.Empty no_position = 4; 177 | } 178 | } 179 | 180 | message WrongExpectedVersion { 181 | oneof current_revision_option_20_6_0 { 182 | uint64 current_revision_20_6_0 = 1; 183 | event_store.client.Empty no_stream_20_6_0 = 2; 184 | } 185 | oneof expected_revision_option_20_6_0 { 186 | uint64 expected_revision_20_6_0 = 3; 187 | event_store.client.Empty any_20_6_0 = 4; 188 | event_store.client.Empty stream_exists_20_6_0 = 5; 189 | } 190 | oneof current_revision_option { 191 | uint64 current_revision = 6; 192 | event_store.client.Empty current_no_stream = 7; 193 | } 194 | oneof expected_revision_option { 195 | uint64 expected_revision = 8; 196 | event_store.client.Empty expected_any = 9; 197 | event_store.client.Empty expected_stream_exists = 10; 198 | event_store.client.Empty expected_no_stream = 11; 199 | } 200 | 201 | } 202 | } 203 | 204 | message BatchAppendReq { 205 | event_store.client.UUID correlation_id = 1; 206 | Options options = 2; 207 | repeated ProposedMessage proposed_messages = 3; 208 | bool is_final = 4; 209 | 210 | message Options { 211 | event_store.client.StreamIdentifier stream_identifier = 1; 212 | oneof expected_stream_position { 213 | uint64 stream_position = 2; 214 | google.protobuf.Empty no_stream = 3; 215 | google.protobuf.Empty any = 4; 216 | google.protobuf.Empty stream_exists = 5; 217 | } 218 | oneof deadline_option { 219 | google.protobuf.Timestamp deadline_21_10_0 = 6; 220 | 
google.protobuf.Duration deadline = 7; 221 | } 222 | } 223 | 224 | message ProposedMessage { 225 | event_store.client.UUID id = 1; 226 | map metadata = 2; 227 | bytes custom_metadata = 3; 228 | bytes data = 4; 229 | } 230 | } 231 | 232 | message BatchAppendResp { 233 | event_store.client.UUID correlation_id = 1; 234 | oneof result { 235 | google.rpc.Status error = 2; 236 | Success success = 3; 237 | } 238 | 239 | event_store.client.StreamIdentifier stream_identifier = 4; 240 | 241 | oneof expected_stream_position { 242 | uint64 stream_position = 5; 243 | google.protobuf.Empty no_stream = 6; 244 | google.protobuf.Empty any = 7; 245 | google.protobuf.Empty stream_exists = 8; 246 | } 247 | 248 | message Success { 249 | oneof current_revision_option { 250 | uint64 current_revision = 1; 251 | google.protobuf.Empty no_stream = 2; 252 | } 253 | oneof position_option { 254 | event_store.client.AllStreamPosition position = 3; 255 | google.protobuf.Empty no_position = 4; 256 | } 257 | } 258 | } 259 | 260 | message DeleteReq { 261 | Options options = 1; 262 | 263 | message Options { 264 | event_store.client.StreamIdentifier stream_identifier = 1; 265 | oneof expected_stream_revision { 266 | uint64 revision = 2; 267 | event_store.client.Empty no_stream = 3; 268 | event_store.client.Empty any = 4; 269 | event_store.client.Empty stream_exists = 5; 270 | } 271 | } 272 | } 273 | 274 | message DeleteResp { 275 | oneof position_option { 276 | Position position = 1; 277 | event_store.client.Empty no_position = 2; 278 | } 279 | 280 | message Position { 281 | uint64 commit_position = 1; 282 | uint64 prepare_position = 2; 283 | } 284 | } 285 | 286 | message TombstoneReq { 287 | Options options = 1; 288 | 289 | message Options { 290 | event_store.client.StreamIdentifier stream_identifier = 1; 291 | oneof expected_stream_revision { 292 | uint64 revision = 2; 293 | event_store.client.Empty no_stream = 3; 294 | event_store.client.Empty any = 4; 295 | event_store.client.Empty stream_exists = 
5; 296 | } 297 | } 298 | } 299 | 300 | message TombstoneResp { 301 | oneof position_option { 302 | Position position = 1; 303 | event_store.client.Empty no_position = 2; 304 | } 305 | 306 | message Position { 307 | uint64 commit_position = 1; 308 | uint64 prepare_position = 2; 309 | } 310 | } 311 | 312 | -------------------------------------------------------------------------------- /esdb/generated/streams_pb2_grpc.py: -------------------------------------------------------------------------------- 1 | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 2 | """Client and server classes corresponding to protobuf-defined services.""" 3 | import grpc 4 | 5 | from . import streams_pb2 as streams__pb2 6 | 7 | 8 | class StreamsStub(object): 9 | """Missing associated documentation comment in .proto file.""" 10 | 11 | def __init__(self, channel): 12 | """Constructor. 13 | 14 | Args: 15 | channel: A grpc.Channel. 16 | """ 17 | self.Read = channel.unary_stream( 18 | '/event_store.client.streams.Streams/Read', 19 | request_serializer=streams__pb2.ReadReq.SerializeToString, 20 | response_deserializer=streams__pb2.ReadResp.FromString, 21 | ) 22 | self.Append = channel.stream_unary( 23 | '/event_store.client.streams.Streams/Append', 24 | request_serializer=streams__pb2.AppendReq.SerializeToString, 25 | response_deserializer=streams__pb2.AppendResp.FromString, 26 | ) 27 | self.Delete = channel.unary_unary( 28 | '/event_store.client.streams.Streams/Delete', 29 | request_serializer=streams__pb2.DeleteReq.SerializeToString, 30 | response_deserializer=streams__pb2.DeleteResp.FromString, 31 | ) 32 | self.Tombstone = channel.unary_unary( 33 | '/event_store.client.streams.Streams/Tombstone', 34 | request_serializer=streams__pb2.TombstoneReq.SerializeToString, 35 | response_deserializer=streams__pb2.TombstoneResp.FromString, 36 | ) 37 | self.BatchAppend = channel.stream_stream( 38 | '/event_store.client.streams.Streams/BatchAppend', 39 | 
request_serializer=streams__pb2.BatchAppendReq.SerializeToString, 40 | response_deserializer=streams__pb2.BatchAppendResp.FromString, 41 | ) 42 | 43 | 44 | class StreamsServicer(object): 45 | """Missing associated documentation comment in .proto file.""" 46 | 47 | def Read(self, request, context): 48 | """Missing associated documentation comment in .proto file.""" 49 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 50 | context.set_details('Method not implemented!') 51 | raise NotImplementedError('Method not implemented!') 52 | 53 | def Append(self, request_iterator, context): 54 | """Missing associated documentation comment in .proto file.""" 55 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 56 | context.set_details('Method not implemented!') 57 | raise NotImplementedError('Method not implemented!') 58 | 59 | def Delete(self, request, context): 60 | """Missing associated documentation comment in .proto file.""" 61 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 62 | context.set_details('Method not implemented!') 63 | raise NotImplementedError('Method not implemented!') 64 | 65 | def Tombstone(self, request, context): 66 | """Missing associated documentation comment in .proto file.""" 67 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 68 | context.set_details('Method not implemented!') 69 | raise NotImplementedError('Method not implemented!') 70 | 71 | def BatchAppend(self, request_iterator, context): 72 | """Missing associated documentation comment in .proto file.""" 73 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 74 | context.set_details('Method not implemented!') 75 | raise NotImplementedError('Method not implemented!') 76 | 77 | 78 | def add_StreamsServicer_to_server(servicer, server): 79 | rpc_method_handlers = { 80 | 'Read': grpc.unary_stream_rpc_method_handler( 81 | servicer.Read, 82 | request_deserializer=streams__pb2.ReadReq.FromString, 83 | response_serializer=streams__pb2.ReadResp.SerializeToString, 84 | ), 85 | 'Append': 
grpc.stream_unary_rpc_method_handler( 86 | servicer.Append, 87 | request_deserializer=streams__pb2.AppendReq.FromString, 88 | response_serializer=streams__pb2.AppendResp.SerializeToString, 89 | ), 90 | 'Delete': grpc.unary_unary_rpc_method_handler( 91 | servicer.Delete, 92 | request_deserializer=streams__pb2.DeleteReq.FromString, 93 | response_serializer=streams__pb2.DeleteResp.SerializeToString, 94 | ), 95 | 'Tombstone': grpc.unary_unary_rpc_method_handler( 96 | servicer.Tombstone, 97 | request_deserializer=streams__pb2.TombstoneReq.FromString, 98 | response_serializer=streams__pb2.TombstoneResp.SerializeToString, 99 | ), 100 | 'BatchAppend': grpc.stream_stream_rpc_method_handler( 101 | servicer.BatchAppend, 102 | request_deserializer=streams__pb2.BatchAppendReq.FromString, 103 | response_serializer=streams__pb2.BatchAppendResp.SerializeToString, 104 | ), 105 | } 106 | generic_handler = grpc.method_handlers_generic_handler( 107 | 'event_store.client.streams.Streams', rpc_method_handlers) 108 | server.add_generic_rpc_handlers((generic_handler,)) 109 | 110 | 111 | # This class is part of an EXPERIMENTAL API. 
112 | class Streams(object): 113 | """Missing associated documentation comment in .proto file.""" 114 | 115 | @staticmethod 116 | def Read(request, 117 | target, 118 | options=(), 119 | channel_credentials=None, 120 | call_credentials=None, 121 | insecure=False, 122 | compression=None, 123 | wait_for_ready=None, 124 | timeout=None, 125 | metadata=None): 126 | return grpc.experimental.unary_stream(request, target, '/event_store.client.streams.Streams/Read', 127 | streams__pb2.ReadReq.SerializeToString, 128 | streams__pb2.ReadResp.FromString, 129 | options, channel_credentials, 130 | insecure, call_credentials, compression, wait_for_ready, timeout, metadata) 131 | 132 | @staticmethod 133 | def Append(request_iterator, 134 | target, 135 | options=(), 136 | channel_credentials=None, 137 | call_credentials=None, 138 | insecure=False, 139 | compression=None, 140 | wait_for_ready=None, 141 | timeout=None, 142 | metadata=None): 143 | return grpc.experimental.stream_unary(request_iterator, target, '/event_store.client.streams.Streams/Append', 144 | streams__pb2.AppendReq.SerializeToString, 145 | streams__pb2.AppendResp.FromString, 146 | options, channel_credentials, 147 | insecure, call_credentials, compression, wait_for_ready, timeout, metadata) 148 | 149 | @staticmethod 150 | def Delete(request, 151 | target, 152 | options=(), 153 | channel_credentials=None, 154 | call_credentials=None, 155 | insecure=False, 156 | compression=None, 157 | wait_for_ready=None, 158 | timeout=None, 159 | metadata=None): 160 | return grpc.experimental.unary_unary(request, target, '/event_store.client.streams.Streams/Delete', 161 | streams__pb2.DeleteReq.SerializeToString, 162 | streams__pb2.DeleteResp.FromString, 163 | options, channel_credentials, 164 | insecure, call_credentials, compression, wait_for_ready, timeout, metadata) 165 | 166 | @staticmethod 167 | def Tombstone(request, 168 | target, 169 | options=(), 170 | channel_credentials=None, 171 | call_credentials=None, 172 | 
insecure=False, 173 | compression=None, 174 | wait_for_ready=None, 175 | timeout=None, 176 | metadata=None): 177 | return grpc.experimental.unary_unary(request, target, '/event_store.client.streams.Streams/Tombstone', 178 | streams__pb2.TombstoneReq.SerializeToString, 179 | streams__pb2.TombstoneResp.FromString, 180 | options, channel_credentials, 181 | insecure, call_credentials, compression, wait_for_ready, timeout, metadata) 182 | 183 | @staticmethod 184 | def BatchAppend(request_iterator, 185 | target, 186 | options=(), 187 | channel_credentials=None, 188 | call_credentials=None, 189 | insecure=False, 190 | compression=None, 191 | wait_for_ready=None, 192 | timeout=None, 193 | metadata=None): 194 | return grpc.experimental.stream_stream(request_iterator, target, '/event_store.client.streams.Streams/BatchAppend', 195 | streams__pb2.BatchAppendReq.SerializeToString, 196 | streams__pb2.BatchAppendResp.FromString, 197 | options, channel_credentials, 198 | insecure, call_credentials, compression, wait_for_ready, timeout, metadata) 199 | -------------------------------------------------------------------------------- /esdb/subscriptions/subscriptions.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import asyncio 4 | from typing import AsyncIterable, AsyncIterator, Optional 5 | 6 | from esdb.generated.persistent_pb2 import ( 7 | CreateReq, 8 | CreateResp, 9 | DeleteReq, 10 | GetInfoReq, 11 | GetInfoResp, 12 | ListReq, 13 | ListResp, 14 | ReadReq, 15 | ReadResp, 16 | UpdateReq, 17 | UpdateResp, 18 | ) 19 | from esdb.generated.persistent_pb2_grpc import PersistentSubscriptionsStub 20 | from esdb.generated.shared_pb2 import UUID, Empty, StreamIdentifier 21 | from esdb.shared import Filter 22 | from esdb.subscriptions.types import ( 23 | Event, 24 | NackAction, 25 | SubscriptionInfo, 26 | SubscriptionSettings, 27 | ) 28 | 29 | 30 | class Subscription: 31 | def __init__( 32 | self, 
class Subscription:
    """Async iterator over events delivered by a persistent subscription.

    Wraps the bidirectional ``PersistentSubscriptions.Read`` stream: outgoing
    requests (the initial options message, then ack/nack messages) are
    funnelled through an ``asyncio.Queue``; incoming responses are yielded
    as ``Event`` objects.
    """

    def __init__(
        self, group_name: str, buffer_size: int, stream: Optional[str], stub: PersistentSubscriptionsStub
    ) -> None:
        self.stream = stream  # None means a subscription to the $all stream
        self.group_name = group_name
        self.buffer_size = buffer_size
        # Outgoing ReadReq messages for the duplex gRPC stream.
        self.send_queue: asyncio.Queue = asyncio.Queue()
        self.stub = stub
        # Set from the server's subscription confirmation on first read.
        self.subscription_id: Optional[str] = None

    async def __aiter__(self) -> AsyncIterator[Event]:
        """Open the read stream and yield events as they arrive.

        The first message sent carries the subscription options; the
        server's ``subscription_confirmation`` response is consumed to
        capture ``subscription_id`` (needed by :meth:`ack`/:meth:`nack`)
        and is not yielded.
        """
        read_request = ReadReq(
            options=ReadReq.Options(
                # Proto oneof: exactly one of stream_identifier / all is set.
                stream_identifier=StreamIdentifier(stream_name=self.stream.encode()) if self.stream else None,
                all=Empty() if self.stream is None else None,
                group_name=self.group_name,
                buffer_size=self.buffer_size,
                uuid_option=ReadReq.Options.UUIDOption(structured=Empty(), string=Empty()),
            ),
            ack=None,
            nack=None,
        )
        await self.send_queue.put(read_request)

        async def queue_iter():
            # Drain the send queue forever; ack()/nack() push further requests.
            while True:
                yield await self.send_queue.get()
                self.send_queue.task_done()

        reader: AsyncIterator[ReadResp] = self.stub.Read(queue_iter())
        async for response in reader:
            if not self.subscription_id and response.WhichOneof("content") == "subscription_confirmation":
                self.subscription_id = response.subscription_confirmation.subscription_id
                continue
            yield Event.from_read_response_event(response.event)

    async def ack(self, events: list[Event]) -> None:
        """Acknowledge *events*; waits until the ack has been handed to the stream."""
        assert self.subscription_id, "Nothing to ack, not reading from a subscription yet"
        await self.send_queue.put(
            ReadReq(
                ack=ReadReq.Ack(
                    id=self.subscription_id.encode(),
                    ids=(UUID(string=evt.id) for evt in events),
                )
            )
        )
        # Block until queue_iter() has consumed the ack request.
        await self.send_queue.join()

    async def nack(self, events: list[Event], action: NackAction, reason: Optional[str] = None) -> None:
        """Negatively acknowledge *events* with the given *action* (park/retry/skip/stop)."""
        assert self.subscription_id, "Nothing to nack, not reading from a subscription yet"
        # NOTE(review): unlike ack(), this does not await send_queue.join(),
        # so the nack may not be flushed before this coroutine returns —
        # confirm whether the asymmetry is intentional.
        await self.send_queue.put(
            ReadReq(
                nack=ReadReq.Nack(
                    id=self.subscription_id.encode(),
                    ids=(UUID(string=evt.id) for evt in events),
                    action=action.value,
                    reason=reason or "",
                )
            )
        )


class PersistentSubscriptions:
    """High-level API over the PersistentSubscriptions gRPC service."""

    def __init__(self, stub: PersistentSubscriptionsStub) -> None:
        self._stub = stub

    async def create_stream_subscription(
        self, stream: str, group_name: str, settings: SubscriptionSettings, backwards: bool = False
    ) -> None:
        """Create a persistent subscription group on a single stream.

        :param backwards: start from the end of the stream instead of the start.
        """
        stream_identifier = StreamIdentifier(stream_name=stream.encode())
        create_request = CreateReq(
            options=CreateReq.Options(
                stream=CreateReq.StreamOptions(
                    stream_identifier=stream_identifier,
                    start=None if backwards else Empty(),
                    end=Empty() if backwards else None,
                ),
                # Deprecated duplicate of StreamOptions.stream_identifier, kept
                # for compatibility with older servers (see persistent.proto).
                stream_identifier=stream_identifier,
                group_name=group_name,
                settings=settings.to_protobuf(CreateReq.Settings),
            )
        )
        response: CreateResp = await self._stub.Create(create_request)
        assert isinstance(response, CreateResp), f"Expected {CreateResp} got {response.__class__}"

    async def create_all_subscription(
        self,
        group_name: str,
        settings: SubscriptionSettings,
        backwards: bool = False,
        filter_by: Optional[Filter] = None,
    ) -> None:
        """Create a persistent subscription group on the $all stream.

        :param filter_by: optional server-side event filter; ``None`` disables filtering.
        """
        create_request = CreateReq(
            options=CreateReq.Options(
                group_name=group_name,
                settings=settings.to_protobuf(CreateReq.Settings),
                all=CreateReq.AllOptions(
                    # position=CreateReq.Position(), TODO: deal with position
                    start=None if backwards else Empty(),
                    end=Empty() if backwards else None,
                    no_filter=Empty() if filter_by is None else None,
                    filter=filter_by.to_protobuf(CreateReq.AllOptions.FilterOptions) if filter_by else None,
                ),
            )
        )
        response: CreateResp = await self._stub.Create(create_request)
        assert isinstance(response, CreateResp), f"Expected {CreateResp} got {response.__class__}"

    def subscribe(
        self,
        group_name: str,
        buffer_size: int,
        stream: Optional[str] = None,
    ) -> Subscription:
        """Return a :class:`Subscription`; pass ``stream=None`` to read from $all."""
        return Subscription(stream=stream, group_name=group_name, buffer_size=buffer_size, stub=self._stub)

    async def update_stream_subscription(
        self, stream: str, group_name: str, settings: SubscriptionSettings, backwards: bool = False
    ) -> None:
        """Update settings of an existing single-stream subscription group."""
        stream_identifier = StreamIdentifier(stream_name=stream.encode())
        update_request = UpdateReq(
            options=UpdateReq.Options(
                stream=UpdateReq.StreamOptions(
                    stream_identifier=stream_identifier,
                    start=None if backwards else Empty(),
                    end=Empty() if backwards else None,
                ),
                # Deprecated duplicate kept for server compatibility.
                stream_identifier=stream_identifier,
                group_name=group_name,
                settings=settings.to_protobuf(UpdateReq.Settings),
            )
        )
        response: UpdateResp = await self._stub.Update(update_request)
        assert isinstance(response, UpdateResp), f"Expected {UpdateResp} got {response.__class__}"

    async def update_all_subscription(
        self, group_name: str, settings: SubscriptionSettings, backwards: bool = False
    ) -> None:
        """Update settings of an existing $all subscription group."""
        update_request = UpdateReq(
            options=UpdateReq.Options(
                group_name=group_name,
                settings=settings.to_protobuf(UpdateReq.Settings),
                all=UpdateReq.AllOptions(
                    # position=CreateReq.Position(), TODO: deal with position
                    start=None if backwards else Empty(),
                    end=Empty() if backwards else None,
                ),
            )
        )
        response: UpdateResp = await self._stub.Update(update_request)
        assert isinstance(response, UpdateResp), f"Expected {UpdateResp} got {response.__class__}"

    async def get_info(self, group_name: str, stream: Optional[str] = None) -> SubscriptionInfo:
        """Fetch server-side info for one subscription group (``stream=None`` → $all)."""
        info_request = GetInfoReq(
            options=GetInfoReq.Options(
                group_name=group_name,
                stream_identifier=StreamIdentifier(stream_name=stream.encode()) if stream else None,
                all=Empty() if stream is None else None,
            )
        )
        response: GetInfoResp = await self._stub.GetInfo(info_request)
        return SubscriptionInfo.from_protobuf(response.subscription_info)

    async def list(self, stream: Optional[str] = None) -> list[SubscriptionInfo]:
        """List subscription groups, either for one *stream* or all of them."""
        request = ListReq(
            options=ListReq.Options(
                list_all_subscriptions=Empty() if stream is None else None,
                list_for_stream=(
                    None
                    if stream is None
                    else ListReq.StreamOption(stream=StreamIdentifier(stream_name=stream.encode()))
                ),
            )
        )
        response: ListResp = await self._stub.List(request)
        return [SubscriptionInfo.from_protobuf(sub) for sub in response.subscriptions]

    async def delete(self, group_name: str, stream: Optional[str] = None) -> None:
        """Delete a subscription group (``stream=None`` → group on $all)."""
        request = DeleteReq(
            options=DeleteReq.Options(
                group_name=group_name,
                all=Empty() if stream is None else None,
                stream_identifier=StreamIdentifier(stream_name=stream.encode()) if stream is not None else None,
            )
        )
        await self._stub.Delete(request)
3 | isort:skip_file 4 | """ 5 | import builtins 6 | import google.protobuf.descriptor 7 | import google.protobuf.empty_pb2 8 | import google.protobuf.message 9 | import sys 10 | import typing 11 | 12 | if sys.version_info >= (3, 8): 13 | import typing as typing_extensions 14 | else: 15 | import typing_extensions 16 | 17 | DESCRIPTOR: google.protobuf.descriptor.FileDescriptor 18 | 19 | @typing_extensions.final 20 | class UUID(google.protobuf.message.Message): 21 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 22 | 23 | @typing_extensions.final 24 | class Structured(google.protobuf.message.Message): 25 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 26 | 27 | MOST_SIGNIFICANT_BITS_FIELD_NUMBER: builtins.int 28 | LEAST_SIGNIFICANT_BITS_FIELD_NUMBER: builtins.int 29 | most_significant_bits: builtins.int 30 | least_significant_bits: builtins.int 31 | def __init__( 32 | self, 33 | *, 34 | most_significant_bits: builtins.int = ..., 35 | least_significant_bits: builtins.int = ..., 36 | ) -> None: ... 37 | def ClearField(self, field_name: typing_extensions.Literal["least_significant_bits", b"least_significant_bits", "most_significant_bits", b"most_significant_bits"]) -> None: ... 38 | 39 | STRUCTURED_FIELD_NUMBER: builtins.int 40 | STRING_FIELD_NUMBER: builtins.int 41 | @property 42 | def structured(self) -> global___UUID.Structured: ... 43 | string: builtins.str 44 | def __init__( 45 | self, 46 | *, 47 | structured: global___UUID.Structured | None = ..., 48 | string: builtins.str = ..., 49 | ) -> None: ... 50 | def HasField(self, field_name: typing_extensions.Literal["string", b"string", "structured", b"structured", "value", b"value"]) -> builtins.bool: ... 51 | def ClearField(self, field_name: typing_extensions.Literal["string", b"string", "structured", b"structured", "value", b"value"]) -> None: ... 52 | def WhichOneof(self, oneof_group: typing_extensions.Literal["value", b"value"]) -> typing_extensions.Literal["structured", "string"] | None: ... 
53 | 54 | global___UUID = UUID 55 | 56 | @typing_extensions.final 57 | class Empty(google.protobuf.message.Message): 58 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 59 | 60 | def __init__( 61 | self, 62 | ) -> None: ... 63 | 64 | global___Empty = Empty 65 | 66 | @typing_extensions.final 67 | class StreamIdentifier(google.protobuf.message.Message): 68 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 69 | 70 | STREAM_NAME_FIELD_NUMBER: builtins.int 71 | stream_name: builtins.bytes 72 | def __init__( 73 | self, 74 | *, 75 | stream_name: builtins.bytes = ..., 76 | ) -> None: ... 77 | def ClearField(self, field_name: typing_extensions.Literal["stream_name", b"stream_name"]) -> None: ... 78 | 79 | global___StreamIdentifier = StreamIdentifier 80 | 81 | @typing_extensions.final 82 | class AllStreamPosition(google.protobuf.message.Message): 83 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 84 | 85 | COMMIT_POSITION_FIELD_NUMBER: builtins.int 86 | PREPARE_POSITION_FIELD_NUMBER: builtins.int 87 | commit_position: builtins.int 88 | prepare_position: builtins.int 89 | def __init__( 90 | self, 91 | *, 92 | commit_position: builtins.int = ..., 93 | prepare_position: builtins.int = ..., 94 | ) -> None: ... 95 | def ClearField(self, field_name: typing_extensions.Literal["commit_position", b"commit_position", "prepare_position", b"prepare_position"]) -> None: ... 
96 | 97 | global___AllStreamPosition = AllStreamPosition 98 | 99 | @typing_extensions.final 100 | class WrongExpectedVersion(google.protobuf.message.Message): 101 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 102 | 103 | CURRENT_STREAM_REVISION_FIELD_NUMBER: builtins.int 104 | CURRENT_NO_STREAM_FIELD_NUMBER: builtins.int 105 | EXPECTED_STREAM_POSITION_FIELD_NUMBER: builtins.int 106 | EXPECTED_ANY_FIELD_NUMBER: builtins.int 107 | EXPECTED_STREAM_EXISTS_FIELD_NUMBER: builtins.int 108 | EXPECTED_NO_STREAM_FIELD_NUMBER: builtins.int 109 | current_stream_revision: builtins.int 110 | @property 111 | def current_no_stream(self) -> google.protobuf.empty_pb2.Empty: ... 112 | expected_stream_position: builtins.int 113 | @property 114 | def expected_any(self) -> google.protobuf.empty_pb2.Empty: ... 115 | @property 116 | def expected_stream_exists(self) -> google.protobuf.empty_pb2.Empty: ... 117 | @property 118 | def expected_no_stream(self) -> google.protobuf.empty_pb2.Empty: ... 119 | def __init__( 120 | self, 121 | *, 122 | current_stream_revision: builtins.int = ..., 123 | current_no_stream: google.protobuf.empty_pb2.Empty | None = ..., 124 | expected_stream_position: builtins.int = ..., 125 | expected_any: google.protobuf.empty_pb2.Empty | None = ..., 126 | expected_stream_exists: google.protobuf.empty_pb2.Empty | None = ..., 127 | expected_no_stream: google.protobuf.empty_pb2.Empty | None = ..., 128 | ) -> None: ... 129 | def HasField(self, field_name: typing_extensions.Literal["current_no_stream", b"current_no_stream", "current_stream_revision", b"current_stream_revision", "current_stream_revision_option", b"current_stream_revision_option", "expected_any", b"expected_any", "expected_no_stream", b"expected_no_stream", "expected_stream_exists", b"expected_stream_exists", "expected_stream_position", b"expected_stream_position", "expected_stream_position_option", b"expected_stream_position_option"]) -> builtins.bool: ... 
130 | def ClearField(self, field_name: typing_extensions.Literal["current_no_stream", b"current_no_stream", "current_stream_revision", b"current_stream_revision", "current_stream_revision_option", b"current_stream_revision_option", "expected_any", b"expected_any", "expected_no_stream", b"expected_no_stream", "expected_stream_exists", b"expected_stream_exists", "expected_stream_position", b"expected_stream_position", "expected_stream_position_option", b"expected_stream_position_option"]) -> None: ... 131 | @typing.overload 132 | def WhichOneof(self, oneof_group: typing_extensions.Literal["current_stream_revision_option", b"current_stream_revision_option"]) -> typing_extensions.Literal["current_stream_revision", "current_no_stream"] | None: ... 133 | @typing.overload 134 | def WhichOneof(self, oneof_group: typing_extensions.Literal["expected_stream_position_option", b"expected_stream_position_option"]) -> typing_extensions.Literal["expected_stream_position", "expected_any", "expected_stream_exists", "expected_no_stream"] | None: ... 135 | 136 | global___WrongExpectedVersion = WrongExpectedVersion 137 | 138 | @typing_extensions.final 139 | class AccessDenied(google.protobuf.message.Message): 140 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 141 | 142 | def __init__( 143 | self, 144 | ) -> None: ... 145 | 146 | global___AccessDenied = AccessDenied 147 | 148 | @typing_extensions.final 149 | class StreamDeleted(google.protobuf.message.Message): 150 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 151 | 152 | STREAM_IDENTIFIER_FIELD_NUMBER: builtins.int 153 | @property 154 | def stream_identifier(self) -> global___StreamIdentifier: ... 155 | def __init__( 156 | self, 157 | *, 158 | stream_identifier: global___StreamIdentifier | None = ..., 159 | ) -> None: ... 160 | def HasField(self, field_name: typing_extensions.Literal["stream_identifier", b"stream_identifier"]) -> builtins.bool: ... 
161 | def ClearField(self, field_name: typing_extensions.Literal["stream_identifier", b"stream_identifier"]) -> None: ... 162 | 163 | global___StreamDeleted = StreamDeleted 164 | 165 | @typing_extensions.final 166 | class Timeout(google.protobuf.message.Message): 167 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 168 | 169 | def __init__( 170 | self, 171 | ) -> None: ... 172 | 173 | global___Timeout = Timeout 174 | 175 | @typing_extensions.final 176 | class Unknown(google.protobuf.message.Message): 177 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 178 | 179 | def __init__( 180 | self, 181 | ) -> None: ... 182 | 183 | global___Unknown = Unknown 184 | 185 | @typing_extensions.final 186 | class InvalidTransaction(google.protobuf.message.Message): 187 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 188 | 189 | def __init__( 190 | self, 191 | ) -> None: ... 192 | 193 | global___InvalidTransaction = InvalidTransaction 194 | 195 | @typing_extensions.final 196 | class MaximumAppendSizeExceeded(google.protobuf.message.Message): 197 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 198 | 199 | MAXAPPENDSIZE_FIELD_NUMBER: builtins.int 200 | maxAppendSize: builtins.int 201 | def __init__( 202 | self, 203 | *, 204 | maxAppendSize: builtins.int = ..., 205 | ) -> None: ... 206 | def ClearField(self, field_name: typing_extensions.Literal["maxAppendSize", b"maxAppendSize"]) -> None: ... 207 | 208 | global___MaximumAppendSizeExceeded = MaximumAppendSizeExceeded 209 | 210 | @typing_extensions.final 211 | class BadRequest(google.protobuf.message.Message): 212 | DESCRIPTOR: google.protobuf.descriptor.Descriptor 213 | 214 | MESSAGE_FIELD_NUMBER: builtins.int 215 | message: builtins.str 216 | def __init__( 217 | self, 218 | *, 219 | message: builtins.str = ..., 220 | ) -> None: ... 221 | def ClearField(self, field_name: typing_extensions.Literal["message", b"message"]) -> None: ... 
222 | 223 | global___BadRequest = BadRequest 224 | -------------------------------------------------------------------------------- /esdb/generated/projections_pb2.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Generated by the protocol buffer compiler. DO NOT EDIT! 3 | # source: projections.proto 4 | # Protobuf Python Version: 4.25.0 5 | """Generated protocol buffer code.""" 6 | from google.protobuf import descriptor as _descriptor 7 | from google.protobuf import descriptor_pool as _descriptor_pool 8 | from google.protobuf import symbol_database as _symbol_database 9 | from google.protobuf.internal import builder as _builder 10 | # @@protoc_insertion_point(imports) 11 | 12 | _sym_db = _symbol_database.Default() 13 | 14 | 15 | from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 16 | from . import shared_pb2 as shared__pb2 17 | 18 | 19 | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x11projections.proto\x12\x1e\x65vent_store.client.projections\x1a\x1cgoogle/protobuf/struct.proto\x1a\x0cshared.proto\"\xb3\x03\n\tCreateReq\x12\x42\n\x07options\x18\x01 \x01(\x0b\x32\x31.event_store.client.projections.CreateReq.Options\x1a\xe1\x02\n\x07Options\x12-\n\x08one_time\x18\x01 \x01(\x0b\x32\x19.event_store.client.EmptyH\x00\x12P\n\ttransient\x18\x02 \x01(\x0b\x32;.event_store.client.projections.CreateReq.Options.TransientH\x00\x12R\n\ncontinuous\x18\x03 \x01(\x0b\x32<.event_store.client.projections.CreateReq.Options.ContinuousH\x00\x12\r\n\x05query\x18\x04 \x01(\t\x1a\x19\n\tTransient\x12\x0c\n\x04name\x18\x01 \x01(\t\x1aO\n\nContinuous\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x65mit_enabled\x18\x02 \x01(\x08\x12\x1d\n\x15track_emitted_streams\x18\x03 \x01(\x08\x42\x06\n\x04mode\"\x0c\n\nCreateResp\"\xd5\x01\n\tUpdateReq\x12\x42\n\x07options\x18\x01 
\x01(\x0b\x32\x31.event_store.client.projections.UpdateReq.Options\x1a\x83\x01\n\x07Options\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05query\x18\x02 \x01(\t\x12\x16\n\x0c\x65mit_enabled\x18\x03 \x01(\x08H\x00\x12\x34\n\x0fno_emit_options\x18\x04 \x01(\x0b\x32\x19.event_store.client.EmptyH\x00\x42\r\n\x0b\x65mit_option\"\x0c\n\nUpdateResp\"\xc7\x01\n\tDeleteReq\x12\x42\n\x07options\x18\x01 \x01(\x0b\x32\x31.event_store.client.projections.DeleteReq.Options\x1av\n\x07Options\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1e\n\x16\x64\x65lete_emitted_streams\x18\x02 \x01(\x08\x12\x1b\n\x13\x64\x65lete_state_stream\x18\x03 \x01(\x08\x12 \n\x18\x64\x65lete_checkpoint_stream\x18\x04 \x01(\x08\"\x0c\n\nDeleteResp\"\xb5\x02\n\rStatisticsReq\x12\x46\n\x07options\x18\x01 \x01(\x0b\x32\x35.event_store.client.projections.StatisticsReq.Options\x1a\xdb\x01\n\x07Options\x12\x0e\n\x04name\x18\x01 \x01(\tH\x00\x12(\n\x03\x61ll\x18\x02 \x01(\x0b\x32\x19.event_store.client.EmptyH\x00\x12.\n\ttransient\x18\x03 \x01(\x0b\x32\x19.event_store.client.EmptyH\x00\x12/\n\ncontinuous\x18\x04 \x01(\x0b\x32\x19.event_store.client.EmptyH\x00\x12-\n\x08one_time\x18\x05 \x01(\x0b\x32\x19.event_store.client.EmptyH\x00\x42\x06\n\x04mode\"\xb0\x04\n\x0eStatisticsResp\x12G\n\x07\x64\x65tails\x18\x01 \x01(\x0b\x32\x36.event_store.client.projections.StatisticsResp.Details\x1a\xd4\x03\n\x07\x44\x65tails\x12\x1a\n\x12\x63oreProcessingTime\x18\x01 \x01(\x03\x12\x0f\n\x07version\x18\x02 \x01(\x03\x12\r\n\x05\x65poch\x18\x03 \x01(\x03\x12\x15\n\reffectiveName\x18\x04 \x01(\t\x12\x18\n\x10writesInProgress\x18\x05 \x01(\x05\x12\x17\n\x0freadsInProgress\x18\x06 \x01(\x05\x12\x18\n\x10partitionsCached\x18\x07 \x01(\x05\x12\x0e\n\x06status\x18\x08 \x01(\t\x12\x13\n\x0bstateReason\x18\t \x01(\t\x12\x0c\n\x04name\x18\n \x01(\t\x12\x0c\n\x04mode\x18\x0b \x01(\t\x12\x10\n\x08position\x18\x0c \x01(\t\x12\x10\n\x08progress\x18\r \x01(\x02\x12\x16\n\x0elastCheckpoint\x18\x0e 
\x01(\t\x12#\n\x1b\x65ventsProcessedAfterRestart\x18\x0f \x01(\x03\x12\x18\n\x10\x63heckpointStatus\x18\x10 \x01(\t\x12\x16\n\x0e\x62ufferedEvents\x18\x11 \x01(\x03\x12*\n\"writePendingEventsBeforeCheckpoint\x18\x12 \x01(\x05\x12)\n!writePendingEventsAfterCheckpoint\x18\x13 \x01(\x05\"y\n\x08StateReq\x12\x41\n\x07options\x18\x01 \x01(\x0b\x32\x30.event_store.client.projections.StateReq.Options\x1a*\n\x07Options\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tpartition\x18\x02 \x01(\t\"2\n\tStateResp\x12%\n\x05state\x18\x01 \x01(\x0b\x32\x16.google.protobuf.Value\"{\n\tResultReq\x12\x42\n\x07options\x18\x01 \x01(\x0b\x32\x31.event_store.client.projections.ResultReq.Options\x1a*\n\x07Options\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tpartition\x18\x02 \x01(\t\"4\n\nResultResp\x12&\n\x06result\x18\x01 \x01(\x0b\x32\x16.google.protobuf.Value\"\x80\x01\n\x08ResetReq\x12\x41\n\x07options\x18\x01 \x01(\x0b\x32\x30.event_store.client.projections.ResetReq.Options\x1a\x31\n\x07Options\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x18\n\x10write_checkpoint\x18\x02 \x01(\x08\"\x0b\n\tResetResp\"h\n\tEnableReq\x12\x42\n\x07options\x18\x01 \x01(\x0b\x32\x31.event_store.client.projections.EnableReq.Options\x1a\x17\n\x07Options\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x0c\n\nEnableResp\"\x84\x01\n\nDisableReq\x12\x43\n\x07options\x18\x01 \x01(\x0b\x32\x32.event_store.client.projections.DisableReq.Options\x1a\x31\n\x07Options\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x18\n\x10write_checkpoint\x18\x02 
\x01(\x08\"\r\n\x0b\x44isableResp2\xcb\x07\n\x0bProjections\x12_\n\x06\x43reate\x12).event_store.client.projections.CreateReq\x1a*.event_store.client.projections.CreateResp\x12_\n\x06Update\x12).event_store.client.projections.UpdateReq\x1a*.event_store.client.projections.UpdateResp\x12_\n\x06\x44\x65lete\x12).event_store.client.projections.DeleteReq\x1a*.event_store.client.projections.DeleteResp\x12m\n\nStatistics\x12-.event_store.client.projections.StatisticsReq\x1a..event_store.client.projections.StatisticsResp0\x01\x12\x62\n\x07\x44isable\x12*.event_store.client.projections.DisableReq\x1a+.event_store.client.projections.DisableResp\x12_\n\x06\x45nable\x12).event_store.client.projections.EnableReq\x1a*.event_store.client.projections.EnableResp\x12\\\n\x05Reset\x12(.event_store.client.projections.ResetReq\x1a).event_store.client.projections.ResetResp\x12\\\n\x05State\x12(.event_store.client.projections.StateReq\x1a).event_store.client.projections.StateResp\x12_\n\x06Result\x12).event_store.client.projections.ResultReq\x1a*.event_store.client.projections.ResultResp\x12H\n\x10RestartSubsystem\x12\x19.event_store.client.Empty\x1a\x19.event_store.client.EmptyB+\n)com.eventstore.dbclient.proto.projectionsb\x06proto3') 20 | 21 | _globals = globals() 22 | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) 23 | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'projections_pb2', _globals) 24 | if _descriptor._USE_C_DESCRIPTORS == False: 25 | _globals['DESCRIPTOR']._options = None 26 | _globals['DESCRIPTOR']._serialized_options = b'\n)com.eventstore.dbclient.proto.projections' 27 | _globals['_CREATEREQ']._serialized_start=98 28 | _globals['_CREATEREQ']._serialized_end=533 29 | _globals['_CREATEREQ_OPTIONS']._serialized_start=180 30 | _globals['_CREATEREQ_OPTIONS']._serialized_end=533 31 | _globals['_CREATEREQ_OPTIONS_TRANSIENT']._serialized_start=419 32 | _globals['_CREATEREQ_OPTIONS_TRANSIENT']._serialized_end=444 33 | 
_globals['_CREATEREQ_OPTIONS_CONTINUOUS']._serialized_start=446 34 | _globals['_CREATEREQ_OPTIONS_CONTINUOUS']._serialized_end=525 35 | _globals['_CREATERESP']._serialized_start=535 36 | _globals['_CREATERESP']._serialized_end=547 37 | _globals['_UPDATEREQ']._serialized_start=550 38 | _globals['_UPDATEREQ']._serialized_end=763 39 | _globals['_UPDATEREQ_OPTIONS']._serialized_start=632 40 | _globals['_UPDATEREQ_OPTIONS']._serialized_end=763 41 | _globals['_UPDATERESP']._serialized_start=765 42 | _globals['_UPDATERESP']._serialized_end=777 43 | _globals['_DELETEREQ']._serialized_start=780 44 | _globals['_DELETEREQ']._serialized_end=979 45 | _globals['_DELETEREQ_OPTIONS']._serialized_start=861 46 | _globals['_DELETEREQ_OPTIONS']._serialized_end=979 47 | _globals['_DELETERESP']._serialized_start=981 48 | _globals['_DELETERESP']._serialized_end=993 49 | _globals['_STATISTICSREQ']._serialized_start=996 50 | _globals['_STATISTICSREQ']._serialized_end=1305 51 | _globals['_STATISTICSREQ_OPTIONS']._serialized_start=1086 52 | _globals['_STATISTICSREQ_OPTIONS']._serialized_end=1305 53 | _globals['_STATISTICSRESP']._serialized_start=1308 54 | _globals['_STATISTICSRESP']._serialized_end=1868 55 | _globals['_STATISTICSRESP_DETAILS']._serialized_start=1400 56 | _globals['_STATISTICSRESP_DETAILS']._serialized_end=1868 57 | _globals['_STATEREQ']._serialized_start=1870 58 | _globals['_STATEREQ']._serialized_end=1991 59 | _globals['_STATEREQ_OPTIONS']._serialized_start=1949 60 | _globals['_STATEREQ_OPTIONS']._serialized_end=1991 61 | _globals['_STATERESP']._serialized_start=1993 62 | _globals['_STATERESP']._serialized_end=2043 63 | _globals['_RESULTREQ']._serialized_start=2045 64 | _globals['_RESULTREQ']._serialized_end=2168 65 | _globals['_RESULTREQ_OPTIONS']._serialized_start=1949 66 | _globals['_RESULTREQ_OPTIONS']._serialized_end=1991 67 | _globals['_RESULTRESP']._serialized_start=2170 68 | _globals['_RESULTRESP']._serialized_end=2222 69 | 
_globals['_RESETREQ']._serialized_start=2225 70 | _globals['_RESETREQ']._serialized_end=2353 71 | _globals['_RESETREQ_OPTIONS']._serialized_start=2304 72 | _globals['_RESETREQ_OPTIONS']._serialized_end=2353 73 | _globals['_RESETRESP']._serialized_start=2355 74 | _globals['_RESETRESP']._serialized_end=2366 75 | _globals['_ENABLEREQ']._serialized_start=2368 76 | _globals['_ENABLEREQ']._serialized_end=2472 77 | _globals['_ENABLEREQ_OPTIONS']._serialized_start=632 78 | _globals['_ENABLEREQ_OPTIONS']._serialized_end=655 79 | _globals['_ENABLERESP']._serialized_start=2474 80 | _globals['_ENABLERESP']._serialized_end=2486 81 | _globals['_DISABLEREQ']._serialized_start=2489 82 | _globals['_DISABLEREQ']._serialized_end=2621 83 | _globals['_DISABLEREQ_OPTIONS']._serialized_start=2304 84 | _globals['_DISABLEREQ_OPTIONS']._serialized_end=2353 85 | _globals['_DISABLERESP']._serialized_start=2623 86 | _globals['_DISABLERESP']._serialized_end=2636 87 | _globals['_PROJECTIONS']._serialized_start=2639 88 | _globals['_PROJECTIONS']._serialized_end=3610 89 | # @@protoc_insertion_point(module_scope) 90 | -------------------------------------------------------------------------------- /protos/persistent.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package event_store.client.persistent_subscriptions; 3 | option java_package = "com.eventstore.dbclient.proto.persistentsubscriptions"; 4 | 5 | import "shared.proto"; 6 | 7 | service PersistentSubscriptions { 8 | rpc Create (CreateReq) returns (CreateResp); 9 | rpc Update (UpdateReq) returns (UpdateResp); 10 | rpc Delete (DeleteReq) returns (DeleteResp); 11 | rpc Read (stream ReadReq) returns (stream ReadResp); 12 | rpc GetInfo (GetInfoReq) returns (GetInfoResp); 13 | rpc ReplayParked (ReplayParkedReq) returns (ReplayParkedResp); 14 | rpc List (ListReq) returns (ListResp); 15 | rpc RestartSubsystem (event_store.client.Empty) returns (event_store.client.Empty); 16 | } 17 | 
18 | message ReadReq { 19 | oneof content { 20 | Options options = 1; 21 | Ack ack = 2; 22 | Nack nack = 3; 23 | } 24 | 25 | message Options { 26 | oneof stream_option { 27 | event_store.client.StreamIdentifier stream_identifier = 1; 28 | event_store.client.Empty all = 5; 29 | } 30 | 31 | string group_name = 2; 32 | int32 buffer_size = 3; 33 | UUIDOption uuid_option = 4; 34 | 35 | message UUIDOption { 36 | oneof content { 37 | event_store.client.Empty structured = 1; 38 | event_store.client.Empty string = 2; 39 | } 40 | } 41 | } 42 | 43 | message Ack { 44 | bytes id = 1; 45 | repeated event_store.client.UUID ids = 2; 46 | } 47 | 48 | message Nack { 49 | bytes id = 1; 50 | repeated event_store.client.UUID ids = 2; 51 | Action action = 3; 52 | string reason = 4; 53 | 54 | enum Action { 55 | Unknown = 0; 56 | Park = 1; 57 | Retry = 2; 58 | Skip = 3; 59 | Stop = 4; 60 | } 61 | } 62 | } 63 | 64 | message ReadResp { 65 | oneof content { 66 | ReadEvent event = 1; 67 | SubscriptionConfirmation subscription_confirmation = 2; 68 | } 69 | message ReadEvent { 70 | RecordedEvent event = 1; 71 | RecordedEvent link = 2; 72 | oneof position { 73 | uint64 commit_position = 3; 74 | event_store.client.Empty no_position = 4; 75 | } 76 | oneof count { 77 | int32 retry_count = 5; 78 | event_store.client.Empty no_retry_count = 6; 79 | } 80 | message RecordedEvent { 81 | event_store.client.UUID id = 1; 82 | event_store.client.StreamIdentifier stream_identifier = 2; 83 | uint64 stream_revision = 3; 84 | uint64 prepare_position = 4; 85 | uint64 commit_position = 5; 86 | map metadata = 6; 87 | bytes custom_metadata = 7; 88 | bytes data = 8; 89 | } 90 | } 91 | message SubscriptionConfirmation { 92 | string subscription_id = 1; 93 | } 94 | } 95 | 96 | message CreateReq { 97 | Options options = 1; 98 | 99 | message Options { 100 | oneof stream_option { 101 | StreamOptions stream = 4; 102 | AllOptions all = 5; 103 | } 104 | event_store.client.StreamIdentifier stream_identifier = 1 
[deprecated=true]; 105 | string group_name = 2; 106 | Settings settings = 3; 107 | } 108 | 109 | message StreamOptions { 110 | event_store.client.StreamIdentifier stream_identifier = 1; 111 | oneof revision_option { 112 | uint64 revision = 2; 113 | event_store.client.Empty start = 3; 114 | event_store.client.Empty end = 4; 115 | } 116 | } 117 | 118 | message AllOptions { 119 | oneof all_option { 120 | Position position = 1; 121 | event_store.client.Empty start = 2; 122 | event_store.client.Empty end = 3; 123 | } 124 | oneof filter_option { 125 | FilterOptions filter = 4; 126 | event_store.client.Empty no_filter = 5; 127 | } 128 | message FilterOptions { 129 | oneof filter { 130 | Expression stream_identifier = 1; 131 | Expression event_type = 2; 132 | } 133 | oneof window { 134 | uint32 max = 3; 135 | event_store.client.Empty count = 4; 136 | } 137 | uint32 checkpointIntervalMultiplier = 5; 138 | 139 | message Expression { 140 | string regex = 1; 141 | repeated string prefix = 2; 142 | } 143 | } 144 | } 145 | 146 | message Position { 147 | uint64 commit_position = 1; 148 | uint64 prepare_position = 2; 149 | } 150 | 151 | message Settings { 152 | bool resolve_links = 1; 153 | uint64 revision = 2 [deprecated = true]; 154 | bool extra_statistics = 3; 155 | int32 max_retry_count = 5; 156 | int32 min_checkpoint_count = 7; 157 | int32 max_checkpoint_count = 8; 158 | int32 max_subscriber_count = 9; 159 | int32 live_buffer_size = 10; 160 | int32 read_batch_size = 11; 161 | int32 history_buffer_size = 12; 162 | ConsumerStrategy named_consumer_strategy = 13 [deprecated = true]; 163 | oneof message_timeout { 164 | int64 message_timeout_ticks = 4; 165 | int32 message_timeout_ms = 14; 166 | } 167 | oneof checkpoint_after { 168 | int64 checkpoint_after_ticks = 6; 169 | int32 checkpoint_after_ms = 15; 170 | } 171 | string consumer_strategy = 16; 172 | } 173 | 174 | enum ConsumerStrategy { 175 | DispatchToSingle = 0; 176 | RoundRobin = 1; 177 | Pinned = 2; 178 | } 179 | } 180 | 
181 | message CreateResp { 182 | } 183 | 184 | message UpdateReq { 185 | Options options = 1; 186 | 187 | message Options { 188 | oneof stream_option { 189 | StreamOptions stream = 4; 190 | AllOptions all = 5; 191 | } 192 | event_store.client.StreamIdentifier stream_identifier = 1 [deprecated = true]; 193 | string group_name = 2; 194 | Settings settings = 3; 195 | } 196 | 197 | message StreamOptions { 198 | event_store.client.StreamIdentifier stream_identifier = 1; 199 | oneof revision_option { 200 | uint64 revision = 2; 201 | event_store.client.Empty start = 3; 202 | event_store.client.Empty end = 4; 203 | } 204 | } 205 | 206 | message AllOptions { 207 | oneof all_option { 208 | Position position = 1; 209 | event_store.client.Empty start = 2; 210 | event_store.client.Empty end = 3; 211 | } 212 | } 213 | 214 | message Position { 215 | uint64 commit_position = 1; 216 | uint64 prepare_position = 2; 217 | } 218 | 219 | message Settings { 220 | bool resolve_links = 1; 221 | uint64 revision = 2 [deprecated = true]; 222 | bool extra_statistics = 3; 223 | int32 max_retry_count = 5; 224 | int32 min_checkpoint_count = 7; 225 | int32 max_checkpoint_count = 8; 226 | int32 max_subscriber_count = 9; 227 | int32 live_buffer_size = 10; 228 | int32 read_batch_size = 11; 229 | int32 history_buffer_size = 12; 230 | ConsumerStrategy named_consumer_strategy = 13; 231 | oneof message_timeout { 232 | int64 message_timeout_ticks = 4; 233 | int32 message_timeout_ms = 14; 234 | } 235 | oneof checkpoint_after { 236 | int64 checkpoint_after_ticks = 6; 237 | int32 checkpoint_after_ms = 15; 238 | } 239 | } 240 | 241 | enum ConsumerStrategy { 242 | DispatchToSingle = 0; 243 | RoundRobin = 1; 244 | Pinned = 2; 245 | } 246 | } 247 | 248 | message UpdateResp { 249 | } 250 | 251 | message DeleteReq { 252 | Options options = 1; 253 | 254 | message Options { 255 | oneof stream_option { 256 | event_store.client.StreamIdentifier stream_identifier = 1; 257 | event_store.client.Empty all = 3; 258 | } 259 
| 260 | string group_name = 2; 261 | } 262 | } 263 | 264 | message DeleteResp { 265 | } 266 | 267 | message GetInfoReq { 268 | Options options = 1; 269 | 270 | message Options { 271 | oneof stream_option { 272 | event_store.client.StreamIdentifier stream_identifier = 1; 273 | event_store.client.Empty all = 2; 274 | } 275 | 276 | string group_name = 3; 277 | } 278 | } 279 | 280 | message GetInfoResp { 281 | SubscriptionInfo subscription_info = 1; 282 | } 283 | 284 | message SubscriptionInfo { 285 | string event_source = 1; 286 | string group_name = 2; 287 | string status = 3; 288 | repeated ConnectionInfo connections = 4; 289 | int32 average_per_second = 5; 290 | int64 total_items = 6; 291 | int64 count_since_last_measurement = 7; 292 | string last_checkpointed_event_position = 8; 293 | string last_known_event_position = 9; 294 | bool resolve_link_tos = 10; 295 | string start_from = 11; 296 | int32 message_timeout_milliseconds = 12; 297 | bool extra_statistics = 13; 298 | int32 max_retry_count = 14; 299 | int32 live_buffer_size = 15; 300 | int32 buffer_size = 16; 301 | int32 read_batch_size = 17; 302 | int32 check_point_after_milliseconds = 18; 303 | int32 min_check_point_count = 19; 304 | int32 max_check_point_count = 20; 305 | int32 read_buffer_count = 21; 306 | int64 live_buffer_count = 22; 307 | int32 retry_buffer_count = 23; 308 | int32 total_in_flight_messages = 24; 309 | int32 outstanding_messages_count = 25; 310 | string named_consumer_strategy = 26; 311 | int32 max_subscriber_count = 27; 312 | int64 parked_message_count = 28; 313 | 314 | message ConnectionInfo { 315 | string from = 1; 316 | string username = 2; 317 | int32 average_items_per_second = 3; 318 | int64 total_items = 4; 319 | int64 count_since_last_measurement = 5; 320 | repeated Measurement observed_measurements = 6; 321 | int32 available_slots = 7; 322 | int32 in_flight_messages = 8; 323 | string connection_name = 9; 324 | } 325 | 326 | message Measurement { 327 | string key = 1; 328 | int64 
value = 2; 329 | } 330 | } 331 | 332 | message ReplayParkedReq { 333 | Options options = 1; 334 | 335 | message Options { 336 | string group_name = 1; 337 | oneof stream_option { 338 | event_store.client.StreamIdentifier stream_identifier = 2; 339 | event_store.client.Empty all = 3; 340 | } 341 | oneof stop_at_option { 342 | int64 stop_at = 4; 343 | event_store.client.Empty no_limit = 5; 344 | } 345 | } 346 | } 347 | 348 | message ReplayParkedResp { 349 | } 350 | 351 | message ListReq { 352 | Options options = 1; 353 | 354 | message Options { 355 | oneof list_option { 356 | event_store.client.Empty list_all_subscriptions = 1; 357 | StreamOption list_for_stream = 2; 358 | } 359 | } 360 | message StreamOption { 361 | oneof stream_option { 362 | event_store.client.StreamIdentifier stream = 1; 363 | event_store.client.Empty all = 2; 364 | } 365 | } 366 | } 367 | 368 | message ListResp { 369 | repeated SubscriptionInfo subscriptions = 1; 370 | } 371 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # esdb-py 2 | 3 | [![PyPI version](https://badge.fury.io/py/esdb.svg)](https://pypi.org/project/esdb/) 4 | [![codecov](https://codecov.io/gh/andriykohut/esdb-py/branch/main/graph/badge.svg?token=YVDPTDBPFB)](https://codecov.io/gh/andriykohut/esdb-py) 5 | 6 | ## EventStoreDB Python gRPC client 7 | 8 | > NOTE: This project is still work in progress 9 | 10 | 11 | * [Completed features](#completed-features) 12 | * [Installation](#installation) 13 | * [Development](#development) 14 | * [Usage](#usage) 15 | * [Connection string](#connection-string) 16 | * [Discovery and node preferences](#discovery-and-node-preferences) 17 | * [Connection configuration](#connection-configuration) 18 | * [Append, Read, Catch-up subscriptions](#append-read-catch-up-subscriptions) 19 | * [Batch append](#batch-append) 20 | * [Catch-up subscription to all events with 
filtering](#catch-up-subscription-to-all-events-with-filtering) 21 | * [Persistent subscriptions](#persistent-subscriptions) 22 | 23 | 24 | ## Completed features 25 | 26 | * [x] secure connection 27 | * [x] basic auth 28 | * [x] connection string parsing 29 | * [x] streams 30 | * [x] append 31 | * [x] batch append (v21.10+) 32 | * [x] delete 33 | * [x] read stream 34 | * [x] read all with stream/event type filters (v21.10+) 35 | * [x] catch-up subscriptions 36 | * [x] tombstone 37 | * [x] filtering 38 | * [x] persistent subscriptions 39 | * [x] create 40 | * [x] read stream 41 | * [x] read all with filter (v21.10+) 42 | * [x] update 43 | * [x] delete 44 | * [x] list 45 | * [x] info 46 | * [ ] replay parked events 47 | * [ ] CRUD for projections 48 | * [ ] users 49 | 50 | ## Installation 51 | 52 | Using pip: 53 | 54 | ```sh 55 | pip install esdb 56 | ``` 57 | 58 | Using poetry: 59 | 60 | ```sh 61 | poetry add esdb 62 | ``` 63 | 64 | ## Development 65 | 66 | 1. Install [poetry](https://python-poetry.org/docs/#installation) 67 | 2. Create virtualenv (e.g. using pyenv): 68 | 69 | ```sh 70 | pyenv install 3.12.0 71 | pyenv virtualenv 3.12.0 esdb-py 72 | pyenv local esdb-py 73 | ``` 74 | 75 | 3. Install deps with `poetry install` 76 | 4. Start eventstore in docker: `make run-esdb` 77 | 5. Run the tests: `pytest tests` 78 | 79 | ## Usage 80 | 81 | Have a look at [tests](https://github.com/andriykohut/esdb-py/tree/main/tests) for more examples. 
82 | 83 | ### Connection string examples 84 | 85 | DNS discovery with credentials, discovery configuration, node preference and ca file path 86 | ``` 87 | esdb+discover://admin:changeit@localhost:2111?discoveryInterval=0&maxDiscoverAttempts=3&tlsCafile=certs/ca/ca.crt&nodePreference=follower 88 | ``` 89 | 90 | Single-node insecure connection 91 | ``` 92 | esdb://localhost:2111?tls=false 93 | ``` 94 | 95 | Supported parameters: 96 | - `discoveryInterval` 97 | - `gossipTimeout` 98 | - `maxDiscoverAttempts` 99 | - `nodePreference` 100 | - `keepAliveInterval` 101 | - `keepAliveTimeout` 102 | - `tls` 103 | - `tlsCafile` 104 | - `tlsVerifyCert` 105 | - `defaultDeadline` 106 | 107 | 108 | Connection string can be generated [here](https://developers.eventstore.com/clients/grpc/#connection-details). 109 | 110 | ### Discovery and node preferences 111 | 112 | ```py 113 | from esdb import ESClient 114 | 115 | client = ESClient("esdb+discover://admin:changeit@localhost:2111?nodePreference=follower") 116 | 117 | ``` 118 | 119 | ### Connection configuration 120 | 121 | ```py 122 | from esdb import ESClient 123 | 124 | # Connect without TLS 125 | client = ESClient("esdb://localhost:2111?tls=false") 126 | 127 | # Secure connection with basic auth and keepalive 128 | client = ESClient("esdb://admin:changeit@localhost:2111?tlsCafile=certs/ca/ca.crt&keepAliveInterval=5&keepAliveTimeout=5") 129 | ``` 130 | 131 | ### Append, Read, Catch-up subscriptions 132 | 133 | ```py 134 | import asyncio 135 | import datetime 136 | import uuid 137 | 138 | from esdb import ESClient 139 | 140 | 141 | client = ESClient("esdb+discover://admin:changeit@localhost:2111") 142 | stream = f"test-{str(uuid.uuid4())}" 143 | 144 | 145 | async def streams(): 146 | async with client.connect() as conn: 147 | # Appending to stream 148 | for i in range(10): 149 | append_result = await conn.streams.append( 150 | stream=stream, 151 | event_type="test_event", 152 | data={"i": i, "ts": 
datetime.datetime.utcnow().isoformat()}, 153 | ) 154 | 155 | # Read up to 10 events 156 | async for result in conn.streams.read(stream=stream, count=10): 157 | print(result.data) 158 | 159 | # Read up to 10 events, backwards 160 | async for result in conn.streams.read(stream=stream, count=10, backwards=True): 161 | print(result.data) 162 | 163 | # Read up to 10 events, starting from 5th event 164 | async for result in conn.streams.read(stream=stream, count=10, revision=5): 165 | print(result.data) 166 | 167 | # Read up to 10 events backwards, starting from 5th event 168 | async for result in conn.streams.read(stream=stream, count=10, backwards=True, revision=5): 169 | print(result.data) 170 | 171 | # Create a catch-up subscription to a stream 172 | async for result in conn.streams.read(stream=stream, subscribe=True): 173 | print(result.data) 174 | 175 | 176 | asyncio.run(streams()) 177 | ``` 178 | 179 | ### Batch append 180 | 181 | ```py 182 | import asyncio 183 | import uuid 184 | 185 | from esdb import ESClient 186 | from esdb.streams import Message 187 | 188 | 189 | async def batch_append(): 190 | # Append multiple events in as a single batch 191 | # Batch append is not supported on EventStore < v21.10 192 | stream = str(uuid.uuid4()) 193 | messages: list[Message] = [ 194 | Message(event_type="one", data={"item": 1}), 195 | Message(event_type="one", data={"item": 2}), 196 | Message(event_type="one", data={"item": 3}), 197 | Message(event_type="two", data={"item": 1}), 198 | Message(event_type="two", data={"item": 2}), 199 | Message(event_type="two", data={"item": 3}), 200 | ] 201 | async with ESClient("esdb+discover://admin:changeit@localhost:2111").connect() as conn: 202 | response = await conn.streams.batch_append(stream=stream, messages=messages) 203 | assert response.current_revision == 5 204 | events = [e async for e in conn.streams.read(stream=stream, count=50)] 205 | assert len(events) == 6 206 | 207 | 208 | asyncio.run(batch_append()) 209 | ``` 210 | 211 
| ### Catch-up subscription to all events with filtering 212 | 213 | ```py 214 | import uuid 215 | import asyncio 216 | 217 | from esdb import ESClient 218 | from esdb.shared import Filter 219 | 220 | 221 | async def filters(): 222 | async with ESClient("esdb+discover://admin:changeit@localhost:2111").connect() as conn: 223 | # Append 10 events with the same prefix to random streams 224 | for i in range(10): 225 | await conn.streams.append(stream=str(uuid.uuid4()), event_type=f"prefix-{i}", data=b"") 226 | # subscribe to events from all streams, filtering by event type 227 | async for event in conn.streams.read_all( 228 | subscribe=True, # subscribe will wait for events, use count= to read events and stop 229 | filter_by=Filter( 230 | kind=Filter.Kind.EVENT_TYPE, 231 | regex="^prefix-", 232 | # Checkpoint only required when subscribe=True, it's not needed when using count= 233 | checkpoint_interval_multiplier=1000, 234 | ), 235 | ): 236 | print(event) 237 | 238 | 239 | asyncio.run(filters()) 240 | ``` 241 | 242 | ### Persistent subscriptions 243 | 244 | ```python 245 | import asyncio 246 | from esdb import ESClient 247 | from esdb.shared import Filter 248 | from esdb.subscriptions import SubscriptionSettings, NackAction 249 | 250 | client = ESClient("esdb+discover://admin:changeit@localhost:2111") 251 | 252 | stream = "stream-foo" 253 | group = "group-bar" 254 | 255 | 256 | async def persistent(): 257 | async with client.connect() as conn: 258 | # emit some events to the same stream 259 | for i in range(50): 260 | await conn.streams.append(stream, "foobar", {"i": i}) 261 | 262 | # create a stream subscription 263 | await conn.subscriptions.create_stream_subscription( 264 | stream=stream, 265 | group_name=group, 266 | settings=SubscriptionSettings( 267 | max_subscriber_count=50, 268 | read_batch_size=5, 269 | live_buffer_size=10, 270 | history_buffer_size=10, 271 | consumer_strategy=SubscriptionSettings.ConsumerStrategy.ROUND_ROBIN, 272 | checkpoint_ms=10000, 273 | 
), 274 | ) 275 | 276 | # create subscription to all events with filtering 277 | # Only supported on EventStore v21.10+ 278 | await conn.subscriptions.create_all_subscription( 279 | group_name="subscription_group", 280 | filter_by=Filter(kind=Filter.Kind.EVENT_TYPE, regex="^some_type$", checkpoint_interval_multiplier=200), 281 | settings=SubscriptionSettings( 282 | read_batch_size=50, 283 | live_buffer_size=100, 284 | history_buffer_size=100, 285 | max_retry_count=2, 286 | checkpoint_ms=20000, 287 | ), 288 | ) 289 | 290 | # read from a subscription 291 | async with client.connect() as conn: 292 | sub = conn.subscriptions.subscribe(stream=stream, group_name=group, buffer_size=5) 293 | async for event in sub: 294 | try: 295 | # do work with event 296 | print(event) 297 | await sub.ack([event]) 298 | except Exception as err: 299 | await sub.nack([event], NackAction.RETRY, reason=str(err)) 300 | 301 | # get subscription info 302 | info = await conn.subscriptions.get_info(group, stream) 303 | assert info.group_name == group 304 | 305 | # delete subscription 306 | await conn.subscriptions.delete(group, stream) 307 | 308 | # list subscriptions 309 | subs = await conn.subscriptions.list() 310 | for sub in subs: 311 | print(sub.total_items) 312 | 313 | 314 | asyncio.run(persistent()) 315 | ``` 316 | -------------------------------------------------------------------------------- /esdb/client.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import asyncio 4 | import base64 5 | import contextlib 6 | import enum 7 | import itertools 8 | import logging 9 | import random 10 | import urllib.parse 11 | from collections.abc import AsyncIterator 12 | from dataclasses import dataclass 13 | from functools import cmp_to_key 14 | from typing import Optional 15 | 16 | import grpc 17 | 18 | from esdb.exceptions import DiscoveryError 19 | from esdb.generated.gossip_pb2_grpc import GossipStub 20 | from 
esdb.generated.persistent_pb2_grpc import PersistentSubscriptionsStub 21 | from esdb.generated.streams_pb2_grpc import StreamsStub 22 | from esdb.gossip import Gossip, Member, State 23 | from esdb.streams import Streams 24 | from esdb.subscriptions import PersistentSubscriptions 25 | 26 | logger = logging.getLogger(__name__) 27 | 28 | 29 | class Preference(enum.Enum): 30 | LEADER = enum.auto() 31 | FOLLOWER = enum.auto() 32 | READ_ONLY_REPLICA = enum.auto() 33 | 34 | @classmethod 35 | def from_string(cls, s: str) -> Preference: 36 | return { 37 | "follower": cls.FOLLOWER, 38 | "leader": cls.LEADER, 39 | "readonlyreplica": cls.READ_ONLY_REPLICA, 40 | }[s.lower()] 41 | 42 | 43 | @dataclass 44 | class Configuration: 45 | address: Optional[Member.Endpoint] = None 46 | gossip_seed: Optional[list[Member.Endpoint]] = None 47 | username: Optional[str] = None 48 | password: Optional[str] = None 49 | disable_tls: bool = False 50 | node_preference: Preference = Preference.LEADER 51 | verify_cert: bool = True 52 | root_cert: Optional[bytes] = None 53 | max_discovery_attempts: int = 10 54 | discovery_interval: int = 100 55 | gossip_timeout: int = 5 56 | dns_discover: bool = False 57 | keep_alive_interval: int = 10 58 | keep_alive_timeout: int = 10 59 | default_deadline: int = 10 60 | 61 | 62 | class BasicAuthPlugin(grpc.AuthMetadataPlugin): 63 | def __init__(self, user: str, password: str) -> None: 64 | self.__auth = base64.b64encode(f"{user}:{password}".encode()) 65 | 66 | def __call__(self, _: grpc.AuthMetadataContext, callback: grpc.AuthMetadataPluginCallback) -> None: 67 | callback((("authorization", b"Basic " + self.__auth),), None) 68 | 69 | 70 | @dataclass 71 | class Connection: 72 | channel: grpc.aio._base_channel.Channel # type: ignore 73 | streams: Streams 74 | subscriptions: PersistentSubscriptions 75 | gossip: Gossip 76 | config: Configuration 77 | 78 | 79 | def pick_node(preference: Preference, members: list[Member]) -> Optional[Member]: 80 | preference_map = { 81 
| Preference.LEADER: [State.Leader], 82 | Preference.FOLLOWER: [State.Follower], 83 | Preference.READ_ONLY_REPLICA: [State.ReadOnlyLeaderless, State.PreReadOnlyReplica, State.ReadOnlyReplica], 84 | } 85 | preferred_states = preference_map[preference] 86 | 87 | def _compare(a: Member, b: Member) -> int: 88 | return (preferred_states.index(b.state) if b.state in preferred_states else -1) - ( 89 | preferred_states.index(a.state) if a.state in preferred_states else -1 90 | ) 91 | 92 | members_ = members.copy() 93 | random.shuffle(members_) 94 | 95 | member: Optional[Member] = next( 96 | ( 97 | m 98 | for m in sorted(members_, key=cmp_to_key(_compare)) 99 | if m.is_alive and m.state in list(itertools.chain(*preference_map.values())) and m.endpoint 100 | ), 101 | None, 102 | ) 103 | if not member or not member.endpoint: 104 | return None 105 | return member 106 | 107 | 108 | def parse_endpoint(s: str) -> Member.Endpoint: 109 | if ":" in s: 110 | items = s.split(":") 111 | if len(items) != 2: 112 | raise ValueError("Too many colons in a host") 113 | host, port_str = items 114 | try: 115 | port = int(port_str) 116 | except ValueError: 117 | raise ValueError(f"{port_str} port is not a number") 118 | return Member.Endpoint(host, port) 119 | 120 | return Member.Endpoint(s, 2113) 121 | 122 | 123 | def parse_settings(query: str, c: Configuration) -> None: 124 | def _str_to_bool(s: str) -> bool: 125 | return {"true": True, "false": False}[s.lower()] 126 | 127 | for k, v in urllib.parse.parse_qs(query, strict_parsing=True).items(): 128 | if len(v) != 1: 129 | raise ValueError(f"Too many values for {k}") 130 | key = k.lower() 131 | [val] = v 132 | if key == "discoveryinterval": 133 | c.discovery_interval = int(val) 134 | elif key == "gossiptimeout": 135 | c.gossip_timeout = int(val) 136 | elif key == "maxdiscoverattempts": 137 | c.max_discovery_attempts = int(val) 138 | elif key == "nodepreference": 139 | c.node_preference = Preference.from_string(val) 140 | elif key == 
"keepaliveinterval": 141 | c.keep_alive_interval = int(val) 142 | elif key == "keepalivetimeout": 143 | c.keep_alive_timeout = int(val) 144 | elif key == "tls": 145 | c.disable_tls = not _str_to_bool(val) 146 | elif key == "tlscafile": 147 | with open(val, "rb") as fh: 148 | c.root_cert = fh.read() 149 | elif key == "tlsverifycert": 150 | c.verify_cert = _str_to_bool(val) 151 | elif key == "defaultdeadline": 152 | c.default_deadline = int(val) 153 | else: 154 | raise ValueError(f"Invalid option {k}") 155 | 156 | 157 | def parse_connection_string(connection_string: str) -> Configuration: 158 | config = Configuration() 159 | scheme, rest = connection_string.split("://") 160 | if scheme not in ("esdb", "esdb+discover"): 161 | raise ValueError("esdb:// or esdb+discover:// scheme is required") 162 | 163 | config.dns_discover = scheme == "esdb+discover" 164 | 165 | if "@" in rest: 166 | user_info, rest = rest.split("@") 167 | user_info_items = user_info.split(":") 168 | if len(user_info_items) != 2: 169 | raise ValueError("Invalid user credentials") 170 | user, password = user_info_items 171 | if not user: 172 | raise ValueError("Username is required") 173 | if not password: 174 | raise ValueError("Password is required") 175 | config.username, config.password = user, password 176 | 177 | hosts, *queries = rest.split("?") 178 | endpoints = [] 179 | for host in hosts.split(","): 180 | endpoints.append(parse_endpoint(host)) 181 | if len(endpoints) == 1: 182 | config.address = endpoints[0] 183 | else: 184 | config.gossip_seed = endpoints 185 | 186 | if queries: 187 | [settings_query] = queries 188 | parse_settings(settings_query, config) 189 | 190 | return config 191 | 192 | 193 | class ESClient: 194 | def __init__( 195 | self, 196 | connection_string: str, 197 | root_certificates: Optional[bytes] = None, 198 | private_key: Optional[bytes] = None, 199 | certificate_chain: Optional[bytes] = None, 200 | ) -> None: 201 | self.config = parse_connection_string(connection_string) 
202 | self.options = [ 203 | ("grpc.keepalive_time_ms", self.config.keep_alive_interval * 1000), 204 | ("grpc.keepalive_timeout_ms", self.config.keep_alive_timeout * 1000), 205 | ] 206 | 207 | if not self.config.disable_tls: 208 | self.channel_credentials = grpc.ssl_channel_credentials( 209 | root_certificates=root_certificates or self.config.root_cert, 210 | private_key=private_key, 211 | certificate_chain=certificate_chain, 212 | ) 213 | 214 | if self.config.username and self.config.password: 215 | self.call_credentials = grpc.metadata_call_credentials( 216 | BasicAuthPlugin(self.config.username, self.config.password), name="auth" 217 | ) 218 | 219 | def _create_channel(self, endpoint: Member.Endpoint) -> grpc.aio.Channel: # type: ignore 220 | if self.config.disable_tls: 221 | return grpc.aio.insecure_channel( # type: ignore 222 | f"{endpoint.address}:{endpoint.port}", options=self.options 223 | ) 224 | assert self.channel_credentials 225 | credentials = ( 226 | grpc.composite_channel_credentials(self.channel_credentials, self.call_credentials) 227 | if self.call_credentials 228 | else self.channel_credentials 229 | ) 230 | return grpc.aio.secure_channel( # type: ignore 231 | f"{endpoint.address}:{endpoint.port}", credentials, self.options 232 | ) 233 | 234 | @contextlib.asynccontextmanager 235 | async def connect(self) -> AsyncIterator[Connection]: 236 | if self.config.dns_discover: 237 | endpoint = await self.discover_endpoint() 238 | else: 239 | assert self.config.address 240 | endpoint = self.config.address 241 | 242 | async with self._create_channel(endpoint) as channel: 243 | yield Connection( 244 | channel=channel, 245 | streams=Streams(StreamsStub(channel)), 246 | subscriptions=PersistentSubscriptions(PersistentSubscriptionsStub(channel)), 247 | gossip=Gossip(GossipStub(channel)), 248 | config=self.config, 249 | ) 250 | 251 | async def discover_endpoint(self) -> Member.Endpoint: 252 | for attempt in range(1, self.config.max_discovery_attempts + 1): 253 | 
candidates = ( 254 | self.config.gossip_seed.copy() if self.config.gossip_seed else [self.config.address] # type: ignore 255 | ) 256 | random.shuffle(candidates) 257 | logger.info( 258 | "Starting discovery attempt %s on %s", attempt, ",".join(f"{c.address}:{c.port}" for c in candidates) 259 | ) 260 | for candidate in candidates: 261 | async with self._create_channel(candidate) as chan: 262 | gossip = Gossip(GossipStub(chan)) 263 | members = await gossip.get_members(self.config.gossip_timeout) 264 | if pick := pick_node(self.config.node_preference, members): 265 | assert pick.endpoint 266 | logger.info( 267 | "Discovered %s node at %s (Preference: %s)", 268 | pick.state.name, 269 | f"{pick.endpoint.address}:{pick.endpoint.port}", 270 | self.config.node_preference.name, 271 | ) 272 | return pick.endpoint 273 | 274 | await asyncio.sleep(self.config.discovery_interval) 275 | raise DiscoveryError(f"Discovery failed after {self.config.max_discovery_attempts} attempt(s)") 276 | -------------------------------------------------------------------------------- /esdb/streams/streams.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import uuid 3 | from typing import AsyncIterable, Iterable, Optional, Union 4 | 5 | import grpc 6 | from google.protobuf.duration_pb2 import Duration 7 | from google.protobuf.empty_pb2 import Empty as GEmpty 8 | 9 | from esdb.exceptions import ClientException, StreamNotFound, WrongExpectedVersion 10 | from esdb.generated.shared_pb2 import UUID, Empty, StreamIdentifier 11 | from esdb.generated.streams_pb2 import ( 12 | AppendReq, 13 | AppendResp, 14 | BatchAppendReq, 15 | DeleteReq, 16 | DeleteResp, 17 | ReadReq, 18 | ReadResp, 19 | TombstoneReq, 20 | TombstoneResp, 21 | ) 22 | from esdb.generated.streams_pb2_grpc import StreamsStub 23 | from esdb.shared import Filter 24 | from esdb.streams.types import ( 25 | AppendResult, 26 | BatchAppendResult, 27 | Checkpoint, 28 | DeleteResult, 29 | 
Message, 30 | ReadEvent, 31 | StreamState, 32 | SubscriptionConfirmed, 33 | TombstoneResult, 34 | ) 35 | 36 | logger = logging.getLogger(__name__) 37 | 38 | 39 | class Streams: 40 | def __init__(self, stub: StreamsStub) -> None: 41 | self._stub = stub 42 | 43 | async def append( 44 | self, 45 | stream: str, 46 | event_type: str, 47 | data: Union[dict, bytes], 48 | stream_state: StreamState = StreamState.ANY, 49 | revision: Optional[int] = None, 50 | custom_metadata: Optional[dict] = None, 51 | ) -> AppendResult: 52 | options = AppendReq.Options( 53 | stream_identifier=StreamIdentifier(stream_name=stream.encode()), 54 | **{stream_state.value: Empty()}, # type: ignore 55 | ) 56 | if revision is not None: 57 | # Append at specified revision 58 | options.revision = revision 59 | requests = [ 60 | AppendReq(options=options), 61 | AppendReq( 62 | proposed_message=Message( 63 | event_type=event_type, 64 | data=data, 65 | custom_metadata=custom_metadata, 66 | ).to_protobuf(AppendReq.ProposedMessage) 67 | ), 68 | ] 69 | response: AppendResp = await self._stub.Append(requests) 70 | if response.WhichOneof("result") == "wrong_expected_version": 71 | raise WrongExpectedVersion(response.wrong_expected_version) 72 | return AppendResult.from_response(response) 73 | 74 | async def _get_events(self, responses: AsyncIterable[ReadResp]) -> AsyncIterable[ReadEvent]: 75 | """Filter out the responses from read requests, since we only care about read events.""" 76 | async for response in responses: 77 | result = self._process_read_response(response) 78 | if isinstance(result, ReadEvent): 79 | yield result 80 | elif isinstance(result, SubscriptionConfirmed): 81 | logger.info(f"Subscription {result.subscription_id} confirmed") 82 | elif isinstance(result, Checkpoint): 83 | logger.info( 84 | f"Checkpoint commit position: {result.commit_position}, " 85 | f"prepare position: {result.prepare_position}" 86 | ) 87 | 88 | @staticmethod 89 | def _process_read_response(response: ReadResp) -> 
Union[ReadEvent, SubscriptionConfirmed, Checkpoint]: 90 | content = response.WhichOneof("content") 91 | if content == "event": 92 | return ReadEvent.from_response(response) 93 | if content == "confirmation": 94 | return SubscriptionConfirmed(response.confirmation.subscription_id) 95 | if content == "checkpoint": 96 | return Checkpoint( 97 | commit_position=response.checkpoint.commit_position, 98 | prepare_position=response.checkpoint.prepare_position, 99 | ) 100 | if content == "stream_not_found": 101 | raise StreamNotFound(response.stream_not_found) 102 | raise ClientException(f"Got unexpected response {content}: {getattr(response, content)}") # type: ignore 103 | 104 | async def read( 105 | self, 106 | stream: str, 107 | count: Optional[int] = None, 108 | backwards: bool = False, 109 | revision: Optional[int] = None, 110 | subscribe: bool = False, 111 | ) -> AsyncIterable[ReadEvent]: 112 | assert (count is not None) ^ subscribe, "count or subscribe is required" 113 | 114 | stream_options = ReadReq.Options.StreamOptions( 115 | stream_identifier=StreamIdentifier(stream_name=stream.encode()), 116 | start=None if backwards else Empty(), 117 | end=Empty() if backwards else None, 118 | ) 119 | if revision is not None: 120 | stream_options.revision = revision 121 | 122 | options = ReadReq.Options( 123 | stream=stream_options, 124 | all=None, 125 | read_direction=ReadReq.Options.Backwards if backwards else ReadReq.Options.Forwards, 126 | resolve_links=True, # Resolve to actual data instead of a link when reading from projection 127 | subscription=ReadReq.Options.SubscriptionOptions() if subscribe else None, 128 | filter=None, 129 | no_filter=Empty(), 130 | uuid_option=ReadReq.Options.UUIDOption(structured=Empty(), string=Empty()), 131 | ) 132 | if count is not None: 133 | options.count = count 134 | 135 | async for response in self._get_events(self._stub.Read(ReadReq(options=options))): 136 | yield response 137 | 138 | async def read_all( 139 | self, count: Optional[int] 
= None, backwards=False, subscribe=False, filter_by: Optional[Filter] = None 140 | ) -> AsyncIterable[ReadEvent]: 141 | assert subscribe ^ (count is not None), "subscribe and count are mutually exclusive arguments" 142 | request = ReadReq( 143 | options=ReadReq.Options( 144 | stream=None, 145 | all=ReadReq.Options.AllOptions( 146 | start=None if backwards else Empty(), 147 | end=Empty() if backwards else None, 148 | # position=ReadReq.Options.Position() # TODO: implement position 149 | ), 150 | read_direction=ReadReq.Options.Backwards if backwards else ReadReq.Options.Forwards, 151 | resolve_links=True, 152 | subscription=ReadReq.Options.SubscriptionOptions() if subscribe else None, 153 | filter=filter_by.to_protobuf(ReadReq.Options.FilterOptions) if filter_by else None, 154 | no_filter=None if filter_by else Empty(), 155 | uuid_option=ReadReq.Options.UUIDOption(structured=Empty(), string=Empty()), 156 | ) 157 | ) 158 | if count is not None: 159 | request.options.count = count 160 | if subscribe and filter_by: 161 | assert ( 162 | filter_by.checkpoint_interval_multiplier is not None 163 | ), "checkpoint_interval_multiplier is required when subscribing" 164 | async for response in self._get_events(self._stub.Read(request)): 165 | yield response 166 | 167 | async def delete( 168 | self, stream: str, stream_state: StreamState = StreamState.ANY, revision: Optional[int] = None 169 | ) -> Optional[DeleteResult]: 170 | request = DeleteReq( 171 | options=DeleteReq.Options( 172 | stream_identifier=StreamIdentifier(stream_name=stream.encode()), 173 | **{stream_state.value: Empty()}, # type: ignore 174 | ) 175 | ) 176 | if revision is not None: 177 | request.options.revision = revision 178 | try: 179 | response: DeleteResp = await self._stub.Delete(request) 180 | except grpc.aio._call.AioRpcError as err: # type: ignore 181 | raise ClientException(f"Delete failed: {err.details()}") from err 182 | has_position = response.WhichOneof("position_option") == "position" 183 | return 
DeleteResult.from_response(response) if has_position else None 184 | 185 | async def tombstone( 186 | self, stream: str, stream_state: StreamState = StreamState.ANY, revision: Optional[int] = None 187 | ) -> Optional[TombstoneResult]: 188 | request = TombstoneReq( 189 | options=TombstoneReq.Options( 190 | stream_identifier=StreamIdentifier(stream_name=stream.encode()), 191 | **{stream_state.value: Empty()}, # type: ignore 192 | ) 193 | ) 194 | if revision is not None: 195 | request.options.revision = revision 196 | response: TombstoneResp = await self._stub.Tombstone(request) 197 | has_position = response.WhichOneof("position_option") == "position" 198 | return TombstoneResult.from_response(response) if has_position else None 199 | 200 | async def batch_append( 201 | self, 202 | stream: str, 203 | messages: Iterable[Message], 204 | stream_state: Optional[StreamState] = None, 205 | correlation_id: Optional[uuid.UUID] = None, 206 | deadline_ms: Optional[int] = None, 207 | stream_position: Optional[int] = None, 208 | ) -> BatchAppendResult: 209 | if stream_position is not None and stream_state is not None: 210 | raise ValueError("stream_position can't be used with stream_state") 211 | if stream_position is None and stream_state is None: 212 | stream_state = StreamState.ANY 213 | correlation_id_ = UUID(string=str(correlation_id or uuid.uuid4())) 214 | stream_opts = {stream_state.value: GEmpty()} if stream_state else {} 215 | options_req = BatchAppendReq( 216 | correlation_id=correlation_id_, 217 | options=BatchAppendReq.Options( 218 | stream_identifier=StreamIdentifier(stream_name=stream.encode()), 219 | deadline=Duration(nanos=deadline_ms * 1000000) if deadline_ms is not None else None, 220 | **stream_opts, # type: ignore 221 | ), 222 | is_final=False, 223 | ) 224 | if stream_position is not None: 225 | options_req.options.stream_position = stream_position 226 | 227 | requests = [ 228 | options_req, 229 | BatchAppendReq( 230 | correlation_id=correlation_id_, 231 | 
proposed_messages=(m.to_protobuf(BatchAppendReq.ProposedMessage) for m in messages), 232 | is_final=True, 233 | ), 234 | ] 235 | result = None 236 | async for response in self._stub.BatchAppend(requests): 237 | if response.WhichOneof("result") == "error": 238 | # For some reason ES uses google.rpc.Status here instead of more meaningful error. 239 | raise ClientException(f"Append failed with {response.error.message} and code {response.error.code}") 240 | result = BatchAppendResult.from_response(response) 241 | # TODO: Handle streaming of multiple batches and responses 242 | break 243 | assert result 244 | return result 245 | --------------------------------------------------------------------------------