├── __init__.py ├── kubemq ├── __init__.py ├── grpc │ ├── __init__.py │ └── client.py ├── common │ ├── __init__.py │ ├── subscribe_type.py │ ├── cancellation_token.py │ ├── helpers.py │ ├── exceptions.py │ ├── requests.py │ └── channel_stats.py ├── transport │ ├── __init__.py │ ├── tls_config.py │ ├── server_info.py │ ├── keep_alive.py │ ├── connection.py │ ├── interceptors.py │ ├── transport.py │ └── channel_manager.py ├── queues │ ├── __init__.py │ ├── queues_poll_request.py │ ├── queues_send_result.py │ ├── queues_message.py │ ├── upstream_sender.py │ └── downstream_receiver.py ├── cq │ ├── __init__.py │ ├── command_message_received.py │ ├── command_message.py │ ├── command_response_message.py │ ├── query_message_received.py │ ├── commands_subscription.py │ ├── query_message.py │ ├── queries_subscription.py │ └── query_response_message.py └── pubsub │ ├── __init__.py │ ├── event_send_result.py │ ├── event_message_received.py │ ├── event_store_message_received.py │ ├── event_message.py │ ├── event_store_message.py │ ├── events_subscription.py │ ├── events_store_subscription.py │ └── event_sender.py ├── examples ├── cq │ ├── __init__.py │ ├── list.py │ ├── create.py │ ├── delete.py │ ├── queries.py │ ├── async_simple.py │ ├── commands.py │ └── async.py ├── __init__.py ├── client │ ├── __init__.py │ ├── basic_client.py │ ├── tls_client.py │ └── auth_client.py ├── pubsub │ ├── __init__.py │ ├── list.py │ ├── create.py │ ├── delete.py │ ├── events.py │ ├── events_store.py │ ├── async_simple.py │ ├── async.py │ └── run.py ├── queues │ ├── __init__.py │ ├── list.py │ ├── create.py │ ├── delete.py │ ├── waiting_pulled.py │ ├── queues_workers.py │ └── async.py └── client_test.py ├── requirements.grpc.txt ├── requirements.txt ├── scripts └── gen-proto.bat ├── .gitignore ├── Taskfile.yaml ├── .github ├── dependabot.yml └── workflows │ └── deploy.yml ├── setup.py ├── LICENSE ├── README.md └── protos └── kubemq └── grpc └── kubemq.proto /__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /kubemq/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /examples/cq/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /examples/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /examples/client/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /examples/pubsub/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /examples/queues/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /kubemq/grpc/__init__.py: -------------------------------------------------------------------------------- 1 | from .kubemq_pb2 import * 2 | from .kubemq_pb2_grpc import * 3 | -------------------------------------------------------------------------------- /requirements.grpc.txt: -------------------------------------------------------------------------------- 1 | grpcio-tools>=1.71.0,<2.0.0 2 | googleapis-common-protos>=1.58.0,<2.0.0 -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | grpcio>=1.71.0,<2.0.0 2 | protobuf>=4.21.0,<7.0.0 3 | setuptools>=65.5.0 4 | PyJWT>=2.6.0,<3.0.0 5 | pydantic>=2.0.0,<3.0.0 6 | -------------------------------------------------------------------------------- /scripts/gen-proto.bat: -------------------------------------------------------------------------------- 1 | python -m grpc_tools.protoc -I./protos --python_out=. --pyi_out=. --grpc_python_out=. ./protos/kubemq/grpc/kubemq.proto 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | /dist/ 3 | /*.egg-info 4 | /venv/ 5 | /build/ 6 | .vscode/settings.json 7 | .vscode/launch.json 8 | .idea/ 9 | .pypirc 10 | .Taskfile.yaml -------------------------------------------------------------------------------- /kubemq/common/__init__.py: -------------------------------------------------------------------------------- 1 | from .cancellation_token import CancellationToken 2 | from .subscribe_type import SubscribeType 3 | from .exceptions import * 4 | from .requests import create_channel_request 5 | from .channel_stats import * 6 | from .helpers import * 7 | -------------------------------------------------------------------------------- /kubemq/transport/__init__.py: -------------------------------------------------------------------------------- 1 | from .transport import Transport 2 | from .server_info import ServerInfo 3 | from .connection import Connection 4 | from .keep_alive import KeepAliveConfig 5 | from .tls_config import TlsConfig 6 | from kubemq.common.cancellation_token import CancellationToken 7 | -------------------------------------------------------------------------------- /Taskfile.yaml: -------------------------------------------------------------------------------- 1 | 2 | version: '3' 3 | 4 | 5 | tasks: 6 | publish: 7 | cmds: 8 | - pip install wheel 9 | - python -m pip install --upgrade build 10 | - python -m build 11 | - python -m pip install --upgrade twine 12 | - python -m twine upload --repository pypi dist/* 13 | -------------------------------------------------------------------------------- /examples/client/basic_client.py: -------------------------------------------------------------------------------- 1 | from kubemq.pubsub import Client 2 | 3 | 4 | def main(): 5 | try: 6 | client = Client(address="localhost:50000") 7 | server_info = client.ping() 8 | print(server_info) 9 | except Exception as e: 10 | print(e) 11 | return 12 | 13 | 14 | if __name__ == "__main__": 15 | main() 16 | -------------------------------------------------------------------------------- /kubemq/queues/__init__.py: -------------------------------------------------------------------------------- 1 | from .queues_poll_request import * 2 | from .queues_message import * 3 | from .queues_message_received import * 4 | from .queues_poll_request import * 5 | from .queues_poll_response import * 6 | from .downstream_receiver import * 7 | from .queues_send_result import * 8 | from .upstream_sender import * 9 | from .queues_messages_waiting_pulled import * 10 | from .client import * 11 | -------------------------------------------------------------------------------- /kubemq/common/subscribe_type.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | 4 | class SubscribeType(Enum): 5 | """Type of subscription operation pattern""" 6 | 7 | SubscribeTypeUndefined = 0 8 | """Default""" 9 | 10 | Events = 1 11 | """PubSub event""" 12 | 13 | EventsStore = 2 14 | """PubSub event with persistence""" 15 | 16 | Commands = 3 17 | """ReqRep perform action""" 18 | 19 | Queries = 4 20 | """ReqRep return data""" 21 | -------------------------------------------------------------------------------- /examples/queues/list.py: -------------------------------------------------------------------------------- 1 | from kubemq.queues import * 2 | 3 | 4 | def list_queues_channels(search_pattern=""): 5 | try: 6 | with Client(address="localhost:50000", client_id="list_example") as client: 7 | list = client.list_queues_channels(search_pattern) 8 | print(f"Queues channels list: {list}") 9 | except Exception as e: 10 | print(f"Error listing Queues Channels: {e}") 11 | 12 | 13 | if __name__ == "__main__": 14 | list_queues_channels() 15 | -------------------------------------------------------------------------------- /examples/queues/create.py: -------------------------------------------------------------------------------- 1 | from kubemq.queues import * 2 | 3 | 4 | def create_queues_channel(channelName): 5 | try: 6 | with Client(address="localhost:50000") as client: 7 | client.create_queues_channel(channelName) 8 | print(f"Queues channel: {channelName} created successfully.") 9 | except Exception as e: 10 | print(f"Error creating Queues Channel: {channelName}, {e}") 11 | 12 | 13 | if __name__ == "__main__": 14 | create_queues_channel("new_queues_channel") 15 | -------------------------------------------------------------------------------- /examples/queues/delete.py: -------------------------------------------------------------------------------- 1 | from kubemq.queues import * 2 | 3 | 4 | def delete_queues_channel(channelName): 5 | try: 6 | with Client(address="localhost:50000", client_id="delete_example") as client: 7 | client.delete_queues_channel(channelName) 8 | print(f"Queues channel: {channelName} deleted successfully.") 9 | except Exception as e: 10 | print(f"Error deleting Queues Channel: {channelName}, {e}") 11 | 12 | 13 | if __name__ == "__main__": 14 | delete_queues_channel("new_queues_channel") 15 | -------------------------------------------------------------------------------- /kubemq/cq/__init__.py: -------------------------------------------------------------------------------- 1 | from .command_message import CommandMessage 2 | from .command_message_received import CommandMessageReceived 3 | from .command_response_message import CommandResponseMessage 4 | from .commands_subscription import CommandsSubscription 5 | from .query_message import QueryMessage 6 | from .query_message_received import QueryMessageReceived 7 | from .query_response_message import QueryResponseMessage 8 | from .queries_subscription import QueriesSubscription 9 | from kubemq.common import CancellationToken 10 | from .client import Client 11 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package manifests are located. 3 | # Please see the documentation for all configuration options: 4 | # https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 5 | 6 | version: 2 7 | updates: 8 | - package-ecosystem: "pip" # See documentation for possible values 9 | directory: "/" # Location of package manifests 10 | schedule: 11 | interval: "daily" 12 | -------------------------------------------------------------------------------- /kubemq/pubsub/__init__.py: -------------------------------------------------------------------------------- 1 | from .event_message import EventMessage 2 | from .event_message_received import EventMessageReceived 3 | from .event_send_result import EventSendResult 4 | from .events_subscription import EventsSubscription 5 | from .event_store_message import EventStoreMessage 6 | from .event_store_message_received import EventStoreMessageReceived 7 | from .events_store_subscription import EventsStoreSubscription, EventsStoreType 8 | from .event_sender import EventSender 9 | from kubemq.common import CancellationToken 10 | from .client import Client 11 | -------------------------------------------------------------------------------- /examples/client/tls_client.py: -------------------------------------------------------------------------------- 1 | from kubemq.pubsub import Client 2 | 3 | 4 | def main(): 5 | try: 6 | client = Client( 7 | address="localhost:50000", 8 | client_id="client-id", 9 | tls=True, 10 | tls_ca_file="ca.pem", 11 | tls_cert_file="cert.pem", 12 | tls_key_file="key.pem", 13 | ) 14 | server_info = client.ping() 15 | print(server_info) 16 | except Exception as e: 17 | print(e) 18 | return 19 | 20 | 21 | if __name__ == "__main__": 22 | main() 23 | -------------------------------------------------------------------------------- /kubemq/pubsub/event_send_result.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | from pydantic import BaseModel 3 | from kubemq.grpc import Result 4 | 5 | 6 | class EventSendResult(BaseModel): 7 | id: Optional[str] = None 8 | sent: bool = False 9 | error: Optional[str] = None 10 | 11 | @classmethod 12 | def decode(cls, result: Result) -> "EventSendResult": 13 | return cls(id=result.EventID, sent=result.Sent, error=result.Error) 14 | 15 | def model_dump_json(self, **kwargs): 16 | return super().model_dump_json(exclude_none=True, **kwargs) 17 | 18 | class Config: 19 | validate_assignment = True 20 | -------------------------------------------------------------------------------- /.github/workflows/deploy.yml: -------------------------------------------------------------------------------- 1 | name: Deploy 2 | 3 | on: 4 | push: 5 | tags: 6 | - '*' 7 | 8 | jobs: 9 | deploy: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/checkout@v3 13 | - name: Set up Python 14 | uses: actions/setup-python@v4 15 | with: 16 | python-version: '3.x' 17 | - name: Install dependencies 18 | run: | 19 | python -m pip install --upgrade pip 20 | pip install build 21 | - name: Build package 22 | run: python -m build 23 | - name: Publish package 24 | uses: pypa/gh-action-pypi-publish@v1.8.5 25 | with: 26 | user: __token__ 27 | password: ${{ secrets.PYPI_API_TOKEN }} 28 | -------------------------------------------------------------------------------- /kubemq/common/cancellation_token.py: -------------------------------------------------------------------------------- 1 | import threading 2 | 3 | 4 | class CancellationToken: 5 | """ 6 | A class representing a cancellation token. 7 | 8 | This class provides a simple mechanism to cancel an operation or check if 9 | cancellation has been requested. 10 | 11 | Attributes: 12 | event (threading.Event): The event used to signal cancellation. 13 | 14 | Methods: 15 | cancel: Set the cancellation event. 16 | is_set: Check if cancellation has been requested. 17 | """ 18 | 19 | def __init__(self): 20 | self.event = threading.Event() 21 | 22 | def cancel(self): 23 | self.event.set() 24 | 25 | def is_set(self): 26 | return self.event.is_set() 27 | -------------------------------------------------------------------------------- /examples/cq/list.py: -------------------------------------------------------------------------------- 1 | from kubemq.cq import * 2 | 3 | 4 | def list_commands_channels(search_pattern=""): 5 | try: 6 | with Client(address="localhost:50000", client_id="list_example") as client: 7 | list = client.list_commands_channels(search_pattern) 8 | print(f"Commands channels list: {list}") 9 | except Exception as e: 10 | print(f"Error listing Commands Channels: {e}") 11 | 12 | 13 | def list_queries_channels(search_pattern=""): 14 | try: 15 | with Client(address="localhost:50000", client_id="list_example") as client: 16 | list = client.list_queries_channels(search_pattern) 17 | print(f"Queries channels list: {list}") 18 | except Exception as e: 19 | print(f"Error listing Queries Channels: {e}") 20 | 21 | 22 | if __name__ == "__main__": 23 | list_commands_channels() 24 | list_queries_channels() 25 | -------------------------------------------------------------------------------- /examples/client/auth_client.py: -------------------------------------------------------------------------------- 1 | from kubemq.pubsub import Client, EventMessage 2 | 3 | 4 | def main(): 5 | try: 6 | client = Client( 7 | address="localhost:50000", 8 | client_id="client-id", 9 | auth_token="eyJhbGciOiJSUzUxMiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE3NTY2MTU3NzR9.cfFW3QD6oAFtEh3AFucxTHd7vmt6VIXSWVHTAlIVFnbXJfe6YLybQnjRixKkVa3kgbBFZTo-NcFupU-cdk7vlsSy8as6SRVZ1xWRp59uXgY2P9MorHLv3NFDX7abl6SLZdmSi_PCq5Dakflz13MySxCrD5noEmI6DiWKgRs-4Gxi-linJimt3xgnb34mO5uwh_AK1mQbRf_aO_MdKokss0gT2n-HSGajU45DmIEyFA9zPSzvITu7-mJ1qdFvVo-9JphFlO0nfUvXvJtyn96r_whqAAGt5C1PEoHEx4dp8QTiKbm2Gxf4hsuhH4ShdgUvGCB-UqWVJX4aw3EWxXiAxw", 10 | ) 11 | client.send_events_message(EventMessage(channel="e1", body=b"hello kubemq")) 12 | print("Event sent") 13 | except Exception as e: 14 | print(e) 15 | return 16 | 17 | 18 | if __name__ == "__main__": 19 | main() 20 | -------------------------------------------------------------------------------- /examples/pubsub/list.py: -------------------------------------------------------------------------------- 1 | from kubemq.pubsub import * 2 | 3 | 4 | def list_events_channels(search_pattern=""): 5 | try: 6 | with Client(address="localhost:50000", client_id="list_example") as client: 7 | list = client.list_events_channels(search_pattern) 8 | print(f"Events channels list: {list}") 9 | except Exception as e: 10 | print(f"Error listing Events Channels: {e}") 11 | 12 | 13 | def list_events_store_channels(search_pattern=""): 14 | try: 15 | with Client(address="localhost:50000", client_id="list_example") as client: 16 | list = client.list_events_store_channels(search_pattern) 17 | print(f"Events Store channels list: {list}") 18 | except Exception as e: 19 | print(f"Error listing Events Store Channels: {e}") 20 | 21 | 22 | if __name__ == "__main__": 23 | list_events_channels() 24 | list_events_store_channels() 25 | -------------------------------------------------------------------------------- /examples/cq/create.py: -------------------------------------------------------------------------------- 1 | from kubemq.cq import * 2 | 3 | 4 | def create_commands_channel(channelName): 5 | try: 6 | with Client(address="localhost:50000") as client: 7 | client.create_commands_channel(channelName) 8 | print(f"Commands channel: {channelName} created successfully.") 9 | except Exception as e: 10 | print(f"Error creating Commands Channel: {channelName}, {e}") 11 | 12 | 13 | def create_queries_channel(channelName): 14 | try: 15 | with Client(address="localhost:50000") as client: 16 | client.create_queries_channel(channelName) 17 | print(f"Queries channel: {channelName} created successfully.") 18 | except Exception as e: 19 | print(f"Error creating Queries Channel: {channelName}, {e}") 20 | 21 | 22 | if __name__ == "__main__": 23 | create_commands_channel("new_commands_channel") 24 | create_queries_channel("new_queries_channel") 25 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import pathlib 2 | from setuptools import setup, find_packages # Corrected import statement 3 | 4 | # The directory containing this file 5 | HERE = pathlib.Path(__file__).parent 6 | 7 | # The text of the README file 8 | README = (HERE / "README.md").read_text() 9 | 10 | setup( 11 | name="kubemq", 12 | version="3.6.0", 13 | description="KubeMQ SDK for Python", 14 | long_description=README, 15 | long_description_content_type="text/markdown", 16 | url="https://github.com/kubemq-io/kubemq-Python", 17 | author="KubeMQ", 18 | author_email="info@kubemq.io", 19 | license="MIT", 20 | python_requires=">=3.9", 21 | packages=find_packages(), # Corrected function call 22 | install_requires=[ 23 | "grpcio>=1.71.0,<2.0.0", 24 | "protobuf>=4.21.0,<7.0.0", 25 | "setuptools>=65.5.0", 26 | "PyJWT>=2.6.0,<3.0.0", 27 | "pydantic>=2.0.0,<3.0.0", 28 | ], 29 | zip_safe=False, 30 | ) 31 | -------------------------------------------------------------------------------- /examples/pubsub/create.py: -------------------------------------------------------------------------------- 1 | from kubemq.pubsub import * 2 | 3 | 4 | def create_events_channel(channelName): 5 | try: 6 | with Client(address="localhost:50000") as client: 7 | client.create_events_channel(channelName) 8 | print(f"Events channel: {channelName} created successfully.") 9 | except Exception as e: 10 | print(f"Error creating Events Channel: {channelName}, {e}") 11 | 12 | 13 | def create_events_store_channel(channelName): 14 | try: 15 | with Client(address="localhost:50000") as client: 16 | client.create_events_store_channel(channelName) 17 | print(f"Events Store channel: {channelName} created successfully.") 18 | except Exception as e: 19 | print(f"Error creating Events Store Channel: {channelName}, {e}") 20 | 21 | 22 | if __name__ == "__main__": 23 | create_events_channel("new_events_channel") 24 | create_events_store_channel("new_events_store_channel") 25 | -------------------------------------------------------------------------------- /examples/cq/delete.py: -------------------------------------------------------------------------------- 1 | from kubemq.cq import * 2 | 3 | 4 | def delete_commands_channel(channelName): 5 | try: 6 | with Client(address="localhost:50000", client_id="delete_example") as client: 7 | client.delete_commands_channel(channelName) 8 | print(f"Commands channel: {channelName} deleted successfully.") 9 | except Exception as e: 10 | print(f"Error deleting Commands Channel: {channelName}, {e}") 11 | 12 | 13 | def delete_queries_channel(channelName): 14 | try: 15 | with Client(address="localhost:50000", client_id="delete_example") as client: 16 | client.delete_queries_channel(channelName) 17 | print(f"Queries channel: {channelName} deleted successfully.") 18 | except Exception as e: 19 | print(f"Error creating Queries Channel: {channelName}, {e}") 20 | 21 | 22 | if __name__ == "__main__": 23 | delete_commands_channel("new_commands_channel") 24 | delete_queries_channel("new_queries_channel") 25 | -------------------------------------------------------------------------------- /examples/pubsub/delete.py: -------------------------------------------------------------------------------- 1 | from kubemq.pubsub import * 2 | 3 | 4 | def delete_events_channel(channelName): 5 | try: 6 | with Client(address="localhost:50000", client_id="delete_example") as client: 7 | client.delete_events_channel(channelName) 8 | print(f"Events channel: {channelName} deleted successfully.") 9 | except Exception as e: 10 | print(f"Error deleting Events Channel: {channelName}, {e}") 11 | 12 | 13 | def delete_events_store_channel(channelName): 14 | try: 15 | with Client(address="localhost:50000", client_id="delete_example") as client: 16 | client.delete_events_store_channel(channelName) 17 | print(f"Events Store channel: {channelName} deleted successfully.") 18 | except Exception as e: 19 | print(f"Error creating Events Store Channel: {channelName}, {e}") 20 | 21 | 22 | if __name__ == "__main__": 23 | delete_events_channel("new_events_channel") 24 | delete_events_store_channel("new_events_store_channel") 25 | -------------------------------------------------------------------------------- /examples/queues/waiting_pulled.py: -------------------------------------------------------------------------------- 1 | from kubemq.queues import * 2 | 3 | 4 | def main(): 5 | with Client(address="localhost:50000") as client: 6 | send_result = client.send_queues_message( 7 | QueueMessage( 8 | channel="q1", 9 | body=b"some-simple_queue-queue-message", 10 | metadata="some-metadata", 11 | ) 12 | ) 13 | print(f"Queue Message Sent: {send_result}") 14 | 15 | waitingResult = client.waiting("q1", 1, 10) 16 | 17 | if waitingResult.is_error: 18 | print(f"{waitingResult.error}") 19 | return 20 | for message in waitingResult.messages: 21 | print(f"Id:{message.id}, Body:{message.body.decode('utf-8')}") 22 | 23 | pullResult = client.pull("q1", 1, 10) 24 | if pullResult.is_error: 25 | print(f"{pullResult.error}") 26 | return 27 | for message in pullResult.messages: 28 | print(f"Id:{message.id}, Body:{message.body.decode('utf-8')}") 29 | 30 | 31 | if __name__ == "__main__": 32 | main() 33 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018 KubeMQ 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /examples/pubsub/events.py: -------------------------------------------------------------------------------- 1 | import time 2 | from kubemq.pubsub import * 3 | 4 | 5 | def main(): 6 | try: 7 | client = Client(address="localhost:50000", client_id="events_example") 8 | 9 | def on_receive_event(event: EventMessageReceived): 10 | print( 11 | f"Id:{event.id}, Timestamp:{event.timestamp} From: {event.from_client_id}, Body:{event.body.decode('utf-8')}" 12 | ) 13 | 14 | def on_error_handler(err: str): 15 | print(f"{err}") 16 | 17 | client.subscribe_to_events( 18 | subscription=EventsSubscription( 19 | channel="e1", 20 | group="", 21 | on_receive_event_callback=on_receive_event, 22 | on_error_callback=on_error_handler, 23 | ), 24 | cancel=CancellationToken(), 25 | ) 26 | time.sleep(1) 27 | client.send_events_message(EventMessage(channel="e1", body=b"hello kubemq")) 28 | time.sleep(1) 29 | except Exception as e: 30 | print(e) 31 | return 32 | 33 | 34 | if __name__ == "__main__": 35 | main() 36 | -------------------------------------------------------------------------------- /examples/client_test.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import time 3 | from kubemq.client import Client 4 | 5 | 6 | def main(): 7 | try: 8 | client = Client( 9 | address="localhost:50000", 10 | client_id="events_example", 11 | log_level=logging.DEBUG, 12 | ) 13 | # client._run_events_upstream_sender(CancellationToken()) 14 | 15 | # def on_receive_event(event: EventMessageReceived): 16 | # print( 17 | # f"Id:{event.id}, Timestamp:{event.timestamp} From: {event.from_client_id}, Body:{event.body.decode('utf-8')}") 18 | # 19 | # def on_error_handler(err: str): 20 | # print(f"Error: {err}") 21 | # client.subscribe( 22 | # subscription=EventsSubscription( 23 | # channel="e1", 24 | # group="", 25 | # on_receive_event_callback=on_receive_event, 26 | # on_error_callback=on_error_handler, 27 | # ) 28 | # , cancellation_token=CancellationToken()) 29 | # time.sleep(1) 30 | # client.send(EventMessage( 31 | # channel="e1", 32 | # body=b"hello kubemq" 33 | # )) 34 | time.sleep(10) 35 | except Exception as e: 36 | print(e) 37 | return 38 | 39 | 40 | if __name__ == "__main__": 41 | main() 42 | -------------------------------------------------------------------------------- /kubemq/pubsub/event_message_received.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | from typing import Dict 3 | from pydantic import BaseModel, Field 4 | from kubemq.grpc import EventReceive as pbEventReceive 5 | 6 | 7 | class EventMessageReceived(BaseModel): 8 | id: str = "" 9 | from_client_id: str = "" 10 | timestamp: datetime = Field(default_factory=datetime.now) 11 | channel: str = "" 12 | metadata: str = "" 13 | body: bytes = b"" 14 | tags: Dict[str, str] = Field(default_factory=dict) 15 | 16 | @classmethod 17 | def decode(cls, event_receive: pbEventReceive) -> "EventMessageReceived": 18 | from_client_id = ( 19 | event_receive.Tags.get("x-kubemq-client-id", "") 20 | if event_receive.Tags 21 | else "" 22 | ) 23 | tags = dict(event_receive.Tags) if event_receive.Tags else {} 24 | 25 | return cls( 26 | id=event_receive.EventID, 27 | from_client_id=from_client_id, 28 | channel=event_receive.Channel, 29 | metadata=event_receive.Metadata, 30 | body=event_receive.Body, 31 | tags=tags, 32 | ) 33 | 34 | class Config: 35 | arbitrary_types_allowed = True 36 | 37 | def model_dump(self, **kwargs): 38 | dump = super().model_dump(**kwargs) 39 | dump["timestamp"] = self.timestamp.isoformat() 40 | return dump 41 | -------------------------------------------------------------------------------- /kubemq/transport/tls_config.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel, Field, field_validator, model_validator 2 | import os 3 | 4 | 5 | class TlsConfig(BaseModel): 6 | """ 7 | Handles the configuration settings for TLS (Transport Layer Security) encryption. 8 | """ 9 | 10 | enabled: bool = Field( 11 | default=False, description="Indicates whether TLS is enabled or not" 12 | ) 13 | cert_file: str = Field(default="", description="Path to the TLS certificate file") 14 | key_file: str = Field(default="", description="Path to the TLS private key file") 15 | ca_file: str = Field( 16 | default="", description="Path to the TLS CA (Certificate Authority) file" 17 | ) 18 | 19 | @field_validator("cert_file", "key_file", "ca_file") 20 | def validate_file_path(cls, v: str) -> str: 21 | if v and not os.path.isfile(v): 22 | raise FileNotFoundError(f"The file was not found: {v}") 23 | return v 24 | 25 | @model_validator(mode="after") 26 | def validate_enabled_config(self) -> "TlsConfig": 27 | if self.enabled: 28 | if not self.cert_file: 29 | raise ValueError( 30 | "Certificate file must be specified when TLS is enabled" 31 | ) 32 | if not self.key_file: 33 | raise ValueError("Key file must be specified when TLS is enabled") 34 | return self 35 | -------------------------------------------------------------------------------- /examples/pubsub/events_store.py: -------------------------------------------------------------------------------- 1 | import time 2 | from kubemq.pubsub import * 3 | 4 | 5 | def main(): 6 | try: 7 | with Client( 8 | address="localhost:50000", client_id="events_store_example" 9 | ) as client: 10 | 11 | def on_receive_event(event: EventStoreMessageReceived): 12 | print( 13 | f"Id:{event.id}, Timestamp:{event.timestamp} From: {event.from_client_id}, Body:{event.body.decode('utf-8')}" 14 | ) 15 | 16 | def on_error_handler(err: str): 17 | print(f"{err}") 18 | 19 | client.subscribe_to_events_store( 20 | subscription=EventsStoreSubscription( 21 | channel="es1", 22 | group="", 23 | on_receive_event_callback=on_receive_event, 24 | on_error_callback=on_error_handler, 25 | events_store_type=EventsStoreType.StartNewOnly, 26 | ), 27 | cancel=CancellationToken(), 28 | ) 29 | 30 | time.sleep(1) 31 | result = client.send_events_store_message( 32 | EventStoreMessage(channel="es1", body=b"hello kubemq") 33 | ) 34 | print(f"send result:{result}") 35 | time.sleep(1000) 36 | except Exception as e: 37 | print(e) 38 | return 39 | 40 | 41 | if __name__ == "__main__": 42 | main() 43 | -------------------------------------------------------------------------------- /kubemq/transport/server_info.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel, Field, field_validator 2 | 3 | 4 | class ServerInfo(BaseModel): 5 | """ 6 | Represents information about a server. 7 | 8 | Attributes: 9 | host (str): The host of the server. 10 | version (str): The version of the server. 11 | server_start_time (int): The start time of the server (in seconds). 12 | server_up_time_seconds (int): The uptime of the server (in seconds). 13 | """ 14 | 15 | host: str = Field(..., description="The host of the server") 16 | version: str = Field(..., description="The version of the server") 17 | server_start_time: int = Field( 18 | ..., description="The start time of the server (in seconds)" 19 | ) 20 | server_up_time_seconds: int = Field( 21 | ..., description="The uptime of the server (in seconds)" 22 | ) 23 | 24 | @field_validator("server_start_time", "server_up_time_seconds") 25 | def validate_positive_time(cls, v: int) -> int: 26 | if v < 0: 27 | raise ValueError("Time values must be non-negative") 28 | return v 29 | 30 | def __str__(self) -> str: 31 | return ( 32 | f"ServerInfo(host={self.host}, version={self.version}, " 33 | f"server_start_time={self.server_start_time}, server_up_time_seconds={self.server_up_time_seconds})" 34 | ) 35 | 36 | class Config: 37 | frozen = True # This makes the model immutable 38 | -------------------------------------------------------------------------------- /kubemq/transport/keep_alive.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel, Field, field_validator, model_validator 2 | 3 | 4 | class KeepAliveConfig(BaseModel): 5 | """ 6 | Represents the configuration for keep alive functionality. 7 | """ 8 | 9 | enabled: bool = Field( 10 | default=False, description="Specifies if keep alive is enabled" 11 | ) 12 | ping_interval_in_seconds: int = Field( 13 | default=0, description="The interval at which ping requests are sent in seconds" 14 | ) 15 | ping_timeout_in_seconds: int = Field( 16 | default=0, description="The timeout for ping requests in seconds" 17 | ) 18 | 19 | @field_validator("ping_interval_in_seconds", "ping_timeout_in_seconds") 20 | def validate_positive_values(cls, v: int, info) -> int: 21 | if v < 0: 22 | raise ValueError(f"{info.field_name} must be greater than or equal to 0") 23 | return v 24 | 25 | @model_validator(mode="after") 26 | def validate_enabled_config(self) -> "KeepAliveConfig": 27 | if self.enabled: 28 | if self.ping_interval_in_seconds <= 0: 29 | raise ValueError( 30 | "Keep alive ping interval must be greater than 0 when enabled" 31 | ) 32 | if self.ping_timeout_in_seconds <= 0: 33 | raise ValueError( 34 | "Keep alive ping timeout must be greater than 0 when enabled" 35 | ) 36 | return self 37 | -------------------------------------------------------------------------------- /kubemq/cq/command_message_received.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel, Field 2 | from typing import Dict 3 | from datetime import datetime 4 | from kubemq.grpc import Request as pbRequest 5 | 6 | 7 | class CommandMessageReceived(BaseModel): 8 | id: str = Field(default="") 9 | from_client_id: str = Field(default="") 10 | timestamp: datetime = Field(default_factory=datetime.now) 11 | channel: str = Field(default="") 12 | metadata: str = Field(default="") 13 | body: bytes = Field(default=b"") 14 | reply_channel: str = Field(default="") 15 | tags: Dict[str, str] = Field(default_factory=dict) 16 | 17 | model_config = {"arbitrary_types_allowed": True} 18 | 19 | @classmethod 20 | def decode(cls, command_receive: pbRequest) -> "CommandMessageReceived": 21 | return cls( 22 | id=command_receive.RequestID, 23 | from_client_id=command_receive.ClientID, 24 | timestamp=datetime.now(), 25 | channel=command_receive.Channel, 26 | metadata=command_receive.Metadata, 27 | body=command_receive.Body, 28 | reply_channel=command_receive.ReplyChannel, 29 | tags=dict(command_receive.Tags), 30 | ) 31 | 32 | def __repr__(self) -> str: 33 | return ( 34 | f"CommandMessageReceived: id={self.id}, channel={self.channel}, " 35 | f"metadata={self.metadata}, body={self.body}, " 36 | f"from_client_id={self.from_client_id}, timestamp={self.timestamp}, " 37 | f"reply_channel={self.reply_channel}, tags={self.tags}" 38 | ) 39 | -------------------------------------------------------------------------------- /examples/cq/queries.py: -------------------------------------------------------------------------------- 1 | import time 2 | from kubemq.cq import * 3 | 4 | 5 | def main(): 6 | # try: 7 | client = Client(address="localhost:50000") 8 | 9 | def on_receive_query(request: QueryMessageReceived): 10 | try: 11 | print(f"Id:{request.id}, Body:{request.body.decode('utf-8')}") 12 | response = QueryResponseMessage( 13 | query_received=request, 14 | is_executed=True, 15 | body=b"hello kubemq, I'm replying to you!", 16 | ) 17 | client.send_response_message(response) 18 | except Exception as e: 19 | print(e) 20 | 21 | def on_error_handler(err: str): 22 | print(f"Error: {err}") 23 | 24 | client.subscribe_to_queries( 25 | subscription=QueriesSubscription( 26 | channel="q1", 27 | group="", 28 | on_receive_query_callback=on_receive_query, 29 | on_error_callback=on_error_handler, 30 | ), 31 | cancel=CancellationToken(), 32 | ) 33 | time.sleep(1) 34 | response: QueryResponseMessage = client.send_query_request( 35 | QueryMessage( 36 | channel="q1", 37 | body=b"hello kubemq, please reply to me!", 38 | timeout_in_seconds=10, 39 | ) 40 | ) 41 | print( 42 | f"Request Execution: {response.is_executed}, Body: {response.body} Executed at: {response.timestamp}, Error: {response.error}" 43 | ) 44 | 45 | 46 | # except Exception as e: 47 | # print(e) 48 | # return 49 | 50 | 51 | if __name__ == "__main__": 52 | main() 53 | -------------------------------------------------------------------------------- /examples/cq/async_simple.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from kubemq.cq import * 3 | 4 | 5 | async def simple_cq_example(): 6 | """Simple async CQ example.""" 7 | print("=== Simple Async CQ Example ===\n") 8 | 9 | client = Client(address="localhost:50000", client_id="async_cq_simple") 10 | 11 | try: 12 | # Test ping 13 | print("1. Testing async ping...") 14 | result = await client.ping_async() 15 | print(f" Server version: {result.version}, Host: {result.host}\n") 16 | 17 | # List channels concurrently 18 | print("2. Listing channels concurrently...") 19 | cmd_channels, query_channels = await asyncio.gather( 20 | client.list_commands_channels_async(""), 21 | client.list_queries_channels_async("") 22 | ) 23 | print(f" Found {len(cmd_channels)} command channels") 24 | print(f" Found {len(query_channels)} query channels\n") 25 | 26 | # Create channels 27 | print("3. Creating channels...") 28 | await client.create_commands_channel_async("async_test_cmd") 29 | await client.create_queries_channel_async("async_test_query") 30 | print(" Channels created!\n") 31 | 32 | # Note: Actual send/receive would require subscribers 33 | print("=== All tests passed! ===") 34 | 35 | except Exception as e: 36 | print(f"Error: {e}") 37 | finally: 38 | # Manual cleanup 39 | try: 40 | client.shutdown_event.set() 41 | except: 42 | pass 43 | 44 | 45 | if __name__ == "__main__": 46 | asyncio.run(simple_cq_example()) 47 | -------------------------------------------------------------------------------- /examples/cq/commands.py: -------------------------------------------------------------------------------- 1 | import time 2 | from kubemq.cq import * 3 | 4 | 5 | def main(): 6 | try: 7 | client = Client(address="localhost:50000") 8 | 9 | def on_receive_command(request: CommandMessageReceived): 10 | try: 11 | print(f"Id:{request.id}, Body:{request.body.decode('utf-8')}") 12 | response = CommandResponseMessage( 13 | command_received=request, 14 | is_executed=True, 15 | ) 16 | client.send_response_message(response) 17 | except Exception as e: 18 | print(e) 19 | 20 | def on_error_handler(err: str): 21 | print(f"{err}") 22 | 23 | client.subscribe_to_commands( 24 | subscription=CommandsSubscription( 25 | channel="c1", 26 | group="", 27 | on_receive_command_callback=on_receive_command, 28 | on_error_callback=on_error_handler, 29 | ), 30 | cancel=CancellationToken(), 31 | ) 32 | time.sleep(1) 33 | response = client.send_command_request( 34 | CommandMessage( 35 | channel="c1", 36 | body=b"hello kubemq, please reply to me!", 37 | timeout_in_seconds=10, 38 | ) 39 | ) 40 | print( 41 | f"Request Execution: {response.is_executed}, Executed at: {response.timestamp}, Error: {response.error}" 42 | ) 43 | except Exception as e: 44 | print(e) 45 | return 46 | 47 | 48 | if __name__ == "__main__": 49 | main() 50 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # KubeMQ Python SDK 2 | 3 | KubeMQ is an enterprise-grade message queue and broker for containers, designed for any workload and architecture running in Kubernetes. 4 | This library is Python implementation of KubeMQ client connection. 5 | 6 | ## Install KubeMQ Community Edition 7 | Please visit [KubeMQ Community](https://github.com/kubemq-io/kubemq-community) for intallation steps. 8 | 9 | ## Examples - Cookbook Recipes 10 | Please visit our cookbook [repository](https://github.com/kubemq-io/python-sdk-cookbook) 11 | 12 | ## Install Python SDK 13 | ### Prerequisites 14 | 15 | KubeMQ-SDK-Python works with **Python 3.2** or newer. 16 | 17 | ### Installing 18 | 19 | The recommended way to use the SDK for Python in your project is to consume it from pip. 20 | 21 | ``` 22 | pip install kubemq 23 | ``` 24 | 25 | This package uses setuptools for the installation if needed please run: 26 | ``` 27 | python3 -m pip install --upgrade pip setuptools wheel 28 | ``` 29 | 30 | ### Building from source 31 | 32 | Once you check out the code from GitHub, you can install the package locally with: 33 | 34 | ``` 35 | $ pip install . 36 | ``` 37 | 38 | You can also install the package with a symlink, 39 | so that changes to the source files will be immediately available: 40 | 41 | ``` 42 | $ pip install -e . 43 | ``` 44 | 45 | Installation: 46 | $ pip install kubemq 47 | 48 | ## Learn KubeMQ 49 | Visit our [Extensive KubeMQ Documentation](https://docs.kubemq.io/). 50 | 51 | ## Support 52 | if you encounter any issues, please open an issue here, 53 | In addition, you can reach us for support by: 54 | - [**Email**](mailto://support@kubemq.io) 55 | - [**Slack**](https://kubmq.slack.com) 56 | -------------------------------------------------------------------------------- /kubemq/pubsub/event_store_message_received.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | from typing import Dict 3 | from pydantic import BaseModel, Field 4 | from kubemq.grpc import EventReceive as pbEventReceive 5 | 6 | 7 | class EventStoreMessageReceived(BaseModel): 8 | id: str = "" 9 | from_client_id: str = "" 10 | timestamp: datetime = Field(default_factory=lambda: datetime.fromtimestamp(0)) 11 | channel: str = "" 12 | metadata: str = "" 13 | body: bytes = b"" 14 | sequence: int = 0 15 | tags: Dict[str, str] = Field(default_factory=dict) 16 | 17 | @classmethod 18 | def decode(cls, event_receive: pbEventReceive) -> "EventStoreMessageReceived": 19 | from_client_id = ( 20 | event_receive.Tags.get("x-kubemq-client-id", "") 21 | if event_receive.Tags 22 | else "" 23 | ) 24 | tags = dict(event_receive.Tags) if event_receive.Tags else {} 25 | 26 | return cls( 27 | id=event_receive.EventID, 28 | from_client_id=from_client_id, 29 | timestamp=datetime.fromtimestamp(event_receive.Timestamp / 1e9), 30 | channel=event_receive.Channel, 31 | metadata=event_receive.Metadata, 32 | body=event_receive.Body, 33 | sequence=event_receive.Sequence, 34 | tags=tags, 35 | ) 36 | 37 | class Config: 38 | arbitrary_types_allowed = True 39 | 40 | def model_dump(self, **kwargs): 41 | dump = super().model_dump(**kwargs) 42 | dump["timestamp"] = self.timestamp.isoformat() 43 | dump["body"] = ( 44 | self.body.hex() 45 | ) # Convert bytes to hex string for better readability 46 | return dump 47 | -------------------------------------------------------------------------------- /kubemq/pubsub/event_message.py: -------------------------------------------------------------------------------- 1 | from typing import Dict, Optional 2 | from uuid import uuid4 3 | from pydantic import BaseModel, Field, field_validator 4 | from kubemq.grpc import Event as pbEvent 5 | 6 | 7 | class EventMessage(BaseModel): 8 | id: Optional[str] = None 9 | channel: str 10 | metadata: Optional[str] = None 11 | body: bytes = Field(default=b"") 12 | tags: Dict[str, str] = Field(default_factory=dict) 13 | 14 | @field_validator("channel") 15 | def channel_must_exist(cls, v): 16 | if not v: 17 | raise ValueError("Event message must have a channel.") 18 | return v 19 | 20 | @field_validator("metadata", "body", "tags") 21 | def at_least_one_must_exist(cls, v, info): 22 | if ( 23 | info.data.get("metadata") is None 24 | and info.data.get("body") == b"" 25 | and not info.data.get("tags") 26 | ): 27 | raise ValueError( 28 | "Event message must have at least one of the following: metadata, body, or tags." 29 | ) 30 | return v 31 | 32 | def encode(self, client_id: str) -> pbEvent: 33 | tags = self.tags.copy() 34 | tags["x-kubemq-client-id"] = client_id 35 | 36 | pb_event = pbEvent() 37 | pb_event.EventID = self.id or str(uuid4()) 38 | pb_event.ClientID = client_id 39 | pb_event.Channel = self.channel 40 | pb_event.Metadata = self.metadata or "" 41 | pb_event.Body = self.body 42 | pb_event.Store = False 43 | pb_event.Tags.update(tags) 44 | return pb_event 45 | 46 | def model_post_init(self, __context) -> None: 47 | if self.id is None: 48 | self.id = str(uuid4()) 49 | 50 | class Config: 51 | arbitrary_types_allowed = True 52 | -------------------------------------------------------------------------------- /examples/queues/queues_workers.py: -------------------------------------------------------------------------------- 1 | import time 2 | import threading 3 | from random import random 4 | 5 | from kubemq.queues import * 6 | 7 | 8 | def worker(message: QueueMessageReceived): 9 | print(f"Worker received message: {message.body.decode('utf-8')}\n") 10 | time.sleep(random()) # Simulate processing time 11 | message.re_queue("q2") 12 | 13 | 14 | def main(): 15 | with Client(address="localhost:50000") as client: 16 | # Send 10 messages 17 | for i in range(10): 18 | send_result = client.send_queues_message( 19 | QueueMessage( 20 | channel="q1", 21 | body=f"Message {i + 1}".encode("utf-8"), 22 | metadata="some-metadata", 23 | ) 24 | ) 25 | print(f"Queue Message Sent: {send_result}") 26 | 27 | # Wait for 1 second 28 | time.sleep(1) 29 | 30 | # Receive 10 messages 31 | result = client.receive_queues_messages( 32 | channel="q1", 33 | max_messages=10, 34 | wait_timeout_in_seconds=10, 35 | auto_ack=False, 36 | ) 37 | if result.is_error: 38 | print(f"{result.error}") 39 | return 40 | 41 | # Create 10 threads, each processing one message 42 | threads = [] 43 | for message in result.messages: 44 | thread = threading.Thread(target=worker, args=(message,)) 45 | threads.append(thread) 46 | 47 | # Start all threads simultaneously 48 | for thread in threads: 49 | thread.start() 50 | 51 | # Wait for all threads to complete 52 | for thread in threads: 53 | thread.join() 54 | time.sleep(1) 55 | 56 | 57 | if __name__ == "__main__": 58 | main() 59 | -------------------------------------------------------------------------------- /examples/queues/async.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | import os 4 | from logging.config import dictConfig 5 | 6 | 7 | 8 | from kubemq.queues import * 9 | 10 | async def example_send_receive(): 11 | with Client( 12 | address="localhost:50000", 13 | ) as client: 14 | async def _send(): 15 | while True: 16 | send_result = await client.send_queues_message_async( 17 | QueueMessage( 18 | channel="send_receive", 19 | body=b"message", 20 | ), 21 | ) 22 | if send_result.is_error: 23 | print(f"Error sending message: {send_result.error}") 24 | await asyncio.sleep(1) 25 | continue 26 | print(f"Queue Message Sent: {send_result}") 27 | await asyncio.sleep(1) 28 | 29 | async def _receive(): 30 | while True: 31 | send_receive_result = await client.receive_queues_messages_async( 32 | channel="send_receive", 33 | max_messages=1, 34 | wait_timeout_in_seconds=1, 35 | ) 36 | if send_receive_result.is_error: 37 | print(f"Error receiving message: {send_receive_result.error}") 38 | await asyncio.sleep(1) 39 | continue 40 | for message in send_receive_result.messages: 41 | print(f"Id:{message.id}, Body:{message.body.decode('utf-8')}") 42 | message.ack() 43 | 44 | await asyncio.sleep(1) 45 | 46 | task1 = asyncio.create_task(_send()) 47 | task2 = asyncio.create_task(_receive()) 48 | tasks = [task1, task2] 49 | await asyncio.gather(*tasks) 50 | 51 | 52 | if __name__ == "__main__": 53 | asyncio.run(example_send_receive()) 54 | 55 | -------------------------------------------------------------------------------- /kubemq/cq/command_message.py: -------------------------------------------------------------------------------- 1 | from typing import Dict, Optional 2 | from pydantic import BaseModel, Field, field_validator, model_validator 3 | import uuid 4 | from kubemq.grpc import Request as pbCommand 5 | 6 | 7 | class CommandMessage(BaseModel): 8 | id: Optional[str] = Field(default_factory=lambda: str(uuid.uuid4())) 9 | channel: str 10 | metadata: Optional[str] = None 11 | body: bytes = Field(default=b"") 12 | tags: Dict[str, str] = Field(default_factory=dict) 13 | timeout_in_seconds: int = Field(gt=0) 14 | 15 | model_config = {"arbitrary_types_allowed": True} 16 | 17 | @field_validator("channel") 18 | def channel_must_exist(cls, v: str) -> str: 19 | if not v: 20 | raise ValueError("Command message must have a channel.") 21 | return v 22 | 23 | @model_validator(mode="after") 24 | def at_least_one_must_exist(self) -> "CommandMessage": 25 | if not self.metadata and not self.body and not self.tags: 26 | raise ValueError( 27 | "Command message must have at least one of the following: metadata, body, or tags." 28 | ) 29 | return self 30 | 31 | def encode(self, client_id: str) -> pbCommand: 32 | pb_command = pbCommand() 33 | pb_command.RequestID = self.id 34 | pb_command.ClientID = client_id 35 | pb_command.Channel = self.channel 36 | pb_command.Metadata = self.metadata or "" 37 | pb_command.Body = self.body 38 | pb_command.Timeout = self.timeout_in_seconds * 1000 39 | pb_command.RequestTypeData = pbCommand.RequestType.Command 40 | for key, value in self.tags.items(): 41 | pb_command.Tags[key] = value 42 | return pb_command 43 | 44 | def __repr__(self) -> str: 45 | return ( 46 | f"CommandMessage: id={self.id}, channel={self.channel}, " 47 | f"metadata={self.metadata}, body={self.body}, tags={self.tags}, " 48 | f"timeout_in_seconds={self.timeout_in_seconds}" 49 | ) 50 | -------------------------------------------------------------------------------- /kubemq/pubsub/event_store_message.py: -------------------------------------------------------------------------------- 1 | from uuid import uuid4 2 | from typing import Dict, Optional 3 | from pydantic import BaseModel, Field, field_validator 4 | from kubemq.grpc import Event as pbEvent 5 | 6 | 7 | class EventStoreMessage(BaseModel): 8 | id: Optional[str] = None 9 | channel: str 10 | metadata: Optional[str] = None 11 | body: bytes = Field(default=b"") 12 | tags: Dict[str, str] = Field(default_factory=dict) 13 | 14 | @field_validator("channel") 15 | def channel_must_exist(cls, v): 16 | if not v: 17 | raise ValueError("Event Store message must have a channel.") 18 | return v 19 | 20 | @field_validator("metadata", "body", "tags") 21 | def at_least_one_must_exist(cls, v, info): 22 | if ( 23 | info.data.get("metadata") is None 24 | and info.data.get("body") == b"" 25 | and not info.data.get("tags") 26 | ): 27 | raise ValueError( 28 | "Event Store message must have at least one of the following: metadata, body, or tags." 29 | ) 30 | return v 31 | 32 | def encode(self, client_id: str) -> pbEvent: 33 | tags = self.tags.copy() 34 | tags["x-kubemq-client-id"] = client_id 35 | 36 | pb_event = pbEvent() 37 | pb_event.EventID = self.id or str(uuid4()) 38 | pb_event.ClientID = client_id 39 | pb_event.Channel = self.channel 40 | pb_event.Metadata = self.metadata or "" 41 | pb_event.Body = self.body 42 | pb_event.Store = True 43 | pb_event.Tags.update(tags) 44 | return pb_event 45 | 46 | def model_post_init(self, __context) -> None: 47 | if self.id is None: 48 | self.id = str(uuid4()) 49 | 50 | class Config: 51 | arbitrary_types_allowed = True 52 | 53 | def model_dump(self, **kwargs): 54 | dump = super().model_dump(**kwargs) 55 | dump["body"] = ( 56 | self.body.hex() 57 | ) # Convert bytes to hex string for better readability 58 | return dump 59 | -------------------------------------------------------------------------------- /examples/pubsub/async_simple.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from kubemq.pubsub import * 3 | 4 | 5 | async def simple_events_example(): 6 | """Simple async events example.""" 7 | print("=== Simple Async Events Example ===\n") 8 | 9 | client = Client(address="localhost:50000", client_id="async_simple_example") 10 | 11 | try: 12 | # Test ping 13 | print("1. Testing async ping...") 14 | result = await client.ping_async() 15 | print(f" Server version: {result.version}, Host: {result.host}\n") 16 | 17 | # Send an event 18 | print("2. Sending async event...") 19 | await client.send_events_message_async( 20 | EventMessage( 21 | channel="test_channel", 22 | body=b"Hello from async!" 23 | ) 24 | ) 25 | print(" Event sent successfully!\n") 26 | 27 | # Send event store message 28 | print("3. Sending async event store message...") 29 | result = await client.send_events_store_message_async( 30 | EventStoreMessage( 31 | channel="test_store", 32 | body=b"Stored event from async!" 33 | ) 34 | ) 35 | print(f" Event stored! ID: {result.id}, Sent: {result.sent}\n") 36 | 37 | # Test concurrent operations 38 | print("4. Testing concurrent operations...") 39 | channels = ["ch1", "ch2", "ch3"] 40 | results = await asyncio.gather(*[ 41 | client.send_events_message_async( 42 | EventMessage(channel=ch, body=f"Message to {ch}".encode()) 43 | ) 44 | for ch in channels 45 | ]) 46 | print(f" Sent {len(results)} messages concurrently!\n") 47 | 48 | print("=== All tests passed! ===") 49 | 50 | except Exception as e: 51 | print(f"Error: {e}") 52 | finally: 53 | # Manual cleanup to avoid event loop issues 54 | try: 55 | client.shutdown_event.set() 56 | except: 57 | pass 58 | 59 | 60 | if __name__ == "__main__": 61 | asyncio.run(simple_events_example()) 62 | -------------------------------------------------------------------------------- /kubemq/cq/command_response_message.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel, Field, field_validator 2 | from datetime import datetime 3 | from typing import Optional 4 | from kubemq.cq.command_message_received import CommandMessageReceived 5 | from kubemq.grpc import Response as pbResponse 6 | 7 | 8 | class CommandResponseMessage(BaseModel): 9 | command_received: Optional[CommandMessageReceived] = None 10 | client_id: str = Field(default="") 11 | request_id: str = Field(default="") 12 | is_executed: bool = Field(default=False) 13 | timestamp: datetime = Field(default_factory=datetime.now) 14 | error: str = Field(default="") 15 | 16 | model_config = {"arbitrary_types_allowed": True} 17 | 18 | @field_validator("command_received") 19 | def validate_command_received( 20 | cls, v: Optional[CommandMessageReceived] 21 | ) -> Optional[CommandMessageReceived]: 22 | if v is None: 23 | raise ValueError("Command response must have a command request.") 24 | if v.reply_channel == "": 25 | raise ValueError("Command response must have a reply channel.") 26 | return v 27 | 28 | @classmethod 29 | def decode(cls, pb_response: pbResponse) -> "CommandResponseMessage": 30 | return cls( 31 | client_id=pb_response.ClientID, 32 | request_id=pb_response.RequestID, 33 | is_executed=pb_response.Executed, 34 | error=pb_response.Error, 35 | timestamp=datetime.fromtimestamp(pb_response.Timestamp / 1e9), 36 | ) 37 | 38 | def encode(self, client_id: str) -> pbResponse: 39 | if not self.command_received: 40 | raise ValueError("Command received is required for encoding.") 41 | pb_response = pbResponse() 42 | pb_response.ClientID = client_id 43 | pb_response.RequestID = self.command_received.id 44 | pb_response.ReplyChannel = self.command_received.reply_channel 45 | pb_response.Executed = self.is_executed 46 | pb_response.Error = self.error 47 | pb_response.Timestamp = int(self.timestamp.timestamp() * 1e9) 48 | return pb_response 49 | 50 | def __repr__(self) -> str: 51 | return ( 52 | f"CommandResponseMessage: client_id={self.client_id}, " 53 | f"request_id={self.request_id}, is_executed={self.is_executed}, " 54 | f"error={self.error}, timestamp={self.timestamp}" 55 | ) 56 | -------------------------------------------------------------------------------- /kubemq/cq/query_message_received.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | from typing import Dict 3 | from pydantic import BaseModel, Field 4 | from kubemq.grpc import Request as pbRequest 5 | 6 | 7 | class QueryMessageReceived(BaseModel): 8 | """ 9 | Class representing a query message received. 10 | 11 | Attributes: 12 | id (str): The unique ID of the query message. 13 | from_client_id (str): The ID of the client that sent the query message. 14 | timestamp (datetime): The timestamp when the query message was received. 15 | channel (str): The channel through which the query message was received. 16 | metadata (str): Additional metadata associated with the query message. 17 | body (bytes): The body of the query message. 18 | reply_channel (str): The channel through which the reply for the query message should be sent. 19 | tags (Dict[str, str]): A dictionary containing tags associated with the query message. 20 | """ 21 | 22 | id: str = Field(default="") 23 | from_client_id: str = Field(default="") 24 | timestamp: datetime = Field(default_factory=datetime.now) 25 | channel: str = Field(default="") 26 | metadata: str = Field(default="") 27 | body: bytes = Field(default=b"") 28 | reply_channel: str = Field(default="") 29 | tags: Dict[str, str] = Field(default_factory=dict) 30 | 31 | model_config = {"arbitrary_types_allowed": True} 32 | 33 | @classmethod 34 | def decode(cls, query_receive: pbRequest) -> "QueryMessageReceived": 35 | """ 36 | Decodes a protobuf request object and returns a QueryMessageReceived instance. 37 | """ 38 | return cls( 39 | id=query_receive.RequestID, 40 | from_client_id=query_receive.ClientID, 41 | timestamp=datetime.now(), 42 | channel=query_receive.Channel, 43 | metadata=query_receive.Metadata, 44 | body=query_receive.Body, 45 | reply_channel=query_receive.ReplyChannel, 46 | tags=dict(query_receive.Tags), 47 | ) 48 | 49 | def __repr__(self) -> str: 50 | """ 51 | Returns a string representation of the QueryMessageReceived object. 52 | """ 53 | return ( 54 | f"QueryMessageReceived: id={self.id}, channel={self.channel}, " 55 | f"metadata={self.metadata}, body={self.body}, " 56 | f"from_client_id={self.from_client_id}, timestamp={self.timestamp}, " 57 | f"reply_channel={self.reply_channel}, tags={self.tags}" 58 | ) 59 | -------------------------------------------------------------------------------- /kubemq/pubsub/events_subscription.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from typing import Callable, Optional 3 | from pydantic import BaseModel, field_validator 4 | from kubemq.grpc import Subscribe 5 | from kubemq.common.subscribe_type import SubscribeType 6 | from kubemq.pubsub import EventMessageReceived 7 | 8 | 9 | class EventsSubscription(BaseModel): 10 | channel: str 11 | group: Optional[str] = None 12 | on_receive_event_callback: Callable[[EventMessageReceived], None] 13 | on_error_callback: Optional[Callable[[str], None]] = None 14 | 15 | @field_validator("channel") 16 | def channel_must_exist(cls, v): 17 | if not v: 18 | raise ValueError("Event subscription must have a channel.") 19 | return v 20 | 21 | def raise_on_receive_message(self, received_event: EventMessageReceived): 22 | if self.on_receive_event_callback: 23 | self.on_receive_event_callback(received_event) 24 | 25 | async def raise_on_receive_message_async(self, received_event: EventMessageReceived): 26 | """Async-aware version that supports both sync and async callbacks.""" 27 | if self.on_receive_event_callback: 28 | if asyncio.iscoroutinefunction(self.on_receive_event_callback): 29 | await self.on_receive_event_callback(received_event) 30 | else: 31 | self.on_receive_event_callback(received_event) 32 | 33 | def raise_on_error(self, msg: str): 34 | if self.on_error_callback: 35 | self.on_error_callback(msg) 36 | 37 | async def raise_on_error_async(self, msg: str): 38 | """Async-aware version that supports both sync and async callbacks.""" 39 | if self.on_error_callback: 40 | if asyncio.iscoroutinefunction(self.on_error_callback): 41 | await self.on_error_callback(msg) 42 | else: 43 | self.on_error_callback(msg) 44 | 45 | def encode(self, client_id: str = "") -> Subscribe: 46 | request = Subscribe() 47 | request.Channel = self.channel 48 | request.Group = self.group or "" 49 | request.ClientID = client_id 50 | request.SubscribeTypeData = SubscribeType.Events.value 51 | return request 52 | 53 | class Config: 54 | arbitrary_types_allowed = True 55 | 56 | def model_dump(self, **kwargs): 57 | dump = super().model_dump(**kwargs) 58 | # Remove callback functions from the dump 59 | dump.pop("on_receive_event_callback", None) 60 | dump.pop("on_error_callback", None) 61 | return dump 62 | -------------------------------------------------------------------------------- /kubemq/transport/connection.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel, Field, field_validator, model_validator, ConfigDict 2 | from typing import Optional, ClassVar 3 | from kubemq.transport.keep_alive import KeepAliveConfig 4 | from kubemq.transport.tls_config import TlsConfig 5 | 6 | 7 | class Connection(BaseModel): 8 | DEFAULT_MAX_SEND_SIZE: ClassVar[int] = 1024 * 1024 * 100 # 100MB 9 | DEFAULT_MAX_RCV_SIZE: ClassVar[int] = 1024 * 1024 * 100 # 100MB 10 | DEFAULT_RECONNECT_INTERVAL_SECONDS: ClassVar[int] = 1 11 | 12 | address: str = Field(default="") 13 | client_id: str = Field(default="") 14 | auth_token: str = Field(default="") 15 | max_send_size: int = Field(default=DEFAULT_MAX_SEND_SIZE) 16 | max_receive_size: int = Field(default=DEFAULT_MAX_RCV_SIZE) 17 | disable_auto_reconnect: bool = Field(default=False) 18 | reconnect_interval_seconds: int = Field(default=DEFAULT_RECONNECT_INTERVAL_SECONDS) 19 | tls: TlsConfig = Field(default_factory=TlsConfig) 20 | keep_alive: KeepAliveConfig = Field(default_factory=KeepAliveConfig) 21 | log_level: Optional[int] = Field(default=None) 22 | 23 | model_config = ConfigDict(validate_assignment=True, arbitrary_types_allowed=True) 24 | 25 | @field_validator("address") 26 | def validate_address(cls, v: str) -> str: 27 | if not v: 28 | raise ValueError("Connection must have an address") 29 | return v 30 | 31 | @field_validator("client_id") 32 | def validate_client_id(cls, v: str) -> str: 33 | if not v: 34 | raise ValueError("Connection must have a client_id") 35 | return v 36 | 37 | @field_validator("max_send_size", "max_receive_size", "reconnect_interval_seconds") 38 | def validate_positive_values(cls, v: int, info) -> int: 39 | if v < 0: 40 | raise ValueError(f"{info.field_name} must be greater than or equal to 0") 41 | return v 42 | 43 | @model_validator(mode="after") 44 | def validate_tls_and_keep_alive(self) -> "Connection": 45 | # TLS and KeepAlive configs are automatically validated by Pydantic 46 | return self 47 | 48 | def get_reconnect_delay(self) -> int: 49 | return self.reconnect_interval_seconds 50 | 51 | def set_log_level(self, value: int) -> "Connection": 52 | self.log_level = value 53 | return self 54 | 55 | def complete(self) -> "Connection": 56 | if self.max_send_size == 0: 57 | self.max_send_size = self.DEFAULT_MAX_SEND_SIZE 58 | if self.max_receive_size == 0: 59 | self.max_receive_size = self.DEFAULT_MAX_RCV_SIZE 60 | if self.reconnect_interval_seconds == 0: 61 | self.reconnect_interval_seconds = self.DEFAULT_RECONNECT_INTERVAL_SECONDS 62 | return self 63 | -------------------------------------------------------------------------------- /kubemq/common/helpers.py: -------------------------------------------------------------------------------- 1 | import grpc 2 | 3 | 4 | def decode_grpc_error(error: grpc.RpcError) -> str: 5 | """ 6 | Decodes the error message from a gRPC error. 7 | 8 | Args: 9 | error (grpc.RpcError): The gRPC error to decode. 10 | 11 | Returns: 12 | str: The decoded error message. 13 | """ 14 | if hasattr(error, "code") and error.code() == grpc.StatusCode.UNAVAILABLE: 15 | return "Connection Error: Server is unavailable" 16 | elif hasattr(error, "code") and error.code() == grpc.StatusCode.DEADLINE_EXCEEDED: 17 | return "Timeout Error: Request has timed out" 18 | elif hasattr(error, "code") and error.code() == grpc.StatusCode.UNAUTHENTICATED: 19 | return "Authentication Error: Invalid authentication token" 20 | elif hasattr(error, "code") and error.code() == grpc.StatusCode.PERMISSION_DENIED: 21 | return "Permission Error: Permission denied" 22 | elif hasattr(error, "code") and error.code() == grpc.StatusCode.UNIMPLEMENTED: 23 | return "Unimplemented Error: The requested operation is not implemented" 24 | elif hasattr(error, "code") and error.code() == grpc.StatusCode.INTERNAL: 25 | return "Internal Error: An internal error has occurred" 26 | elif hasattr(error, "code") and error.code() == grpc.StatusCode.UNKNOWN: 27 | return "Unknown Error: An unknown error has occurred" 28 | elif hasattr(error, "details") and error.details(): 29 | return error.details() 30 | else: 31 | return str(error) 32 | 33 | 34 | def is_channel_error(exception: Exception) -> bool: 35 | """ 36 | Determines if an exception is related to channel connectivity issues. 37 | 38 | Args: 39 | exception (Exception): The exception to check 40 | 41 | Returns: 42 | bool: True if the exception is a channel error, False otherwise 43 | """ 44 | # Check for common gRPC connectivity errors 45 | if isinstance(exception, grpc.RpcError): 46 | if hasattr(exception, "code"): 47 | # Check for common gRPC status codes that indicate connectivity issues 48 | if exception.code() in [ 49 | grpc.StatusCode.UNAVAILABLE, 50 | grpc.StatusCode.UNKNOWN, 51 | grpc.StatusCode.DEADLINE_EXCEEDED, 52 | grpc.StatusCode.CANCELLED, 53 | ]: 54 | return True 55 | return "Connection" in str(exception) or "channel" in str(exception).lower() 56 | 57 | # Check for other exception types that might be wrapping channel errors 58 | error_str = str(exception).lower() 59 | channel_error_phrases = [ 60 | "channel closed", 61 | "cannot invoke rpc", 62 | "connection refused", 63 | "socket closed", 64 | "connection reset", 65 | "connection error", 66 | "not connected", 67 | "broken pipe", 68 | "transport failure", 69 | ] 70 | 71 | for phrase in channel_error_phrases: 72 | if phrase in error_str: 73 | return True 74 | 75 | return False 76 | -------------------------------------------------------------------------------- /kubemq/cq/commands_subscription.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from pydantic import BaseModel, field_validator, ValidationError 3 | from typing import Callable, Optional 4 | from kubemq.grpc import Subscribe 5 | from kubemq.common.subscribe_type import SubscribeType 6 | from kubemq.cq.command_message_received import CommandMessageReceived 7 | 8 | 9 | class CommandsSubscription(BaseModel): 10 | channel: str 11 | group: Optional[str] = None 12 | on_receive_command_callback: Callable[[CommandMessageReceived], None] 13 | on_error_callback: Optional[Callable[[str], None]] = None 14 | 15 | @field_validator("channel") 16 | def channel_must_exist(cls, v: str) -> str: 17 | if not v: 18 | raise ValueError("command subscription must have a channel.") 19 | return v 20 | 21 | @field_validator("on_receive_command_callback") 22 | def callback_must_exist(cls, v: Callable) -> Callable: 23 | if not callable(v): 24 | raise ValueError( 25 | "command subscription must have a on_receive_command_callback function." 26 | ) 27 | return v 28 | 29 | def raise_on_receive_message( 30 | self, received_command: CommandMessageReceived 31 | ) -> None: 32 | self.on_receive_command_callback(received_command) 33 | 34 | async def raise_on_receive_message_async( 35 | self, received_command: CommandMessageReceived 36 | ) -> None: 37 | """Async-aware version that supports both sync and async callbacks.""" 38 | if asyncio.iscoroutinefunction(self.on_receive_command_callback): 39 | await self.on_receive_command_callback(received_command) 40 | else: 41 | self.on_receive_command_callback(received_command) 42 | 43 | def raise_on_error(self, msg: str) -> None: 44 | if self.on_error_callback: 45 | self.on_error_callback(msg) 46 | 47 | async def raise_on_error_async(self, msg: str) -> None: 48 | """Async-aware version that supports both sync and async callbacks.""" 49 | if self.on_error_callback: 50 | if asyncio.iscoroutinefunction(self.on_error_callback): 51 | await self.on_error_callback(msg) 52 | else: 53 | self.on_error_callback(msg) 54 | 55 | def decode(self, client_id: str = "") -> Subscribe: 56 | request = Subscribe() 57 | request.Channel = self.channel 58 | request.Group = self.group or "" 59 | request.ClientID = client_id 60 | request.SubscribeTypeData = SubscribeType.Commands.value 61 | return request 62 | 63 | def __repr__(self) -> str: 64 | return f"CommandsSubscription: channel={self.channel}, group={self.group}" 65 | 66 | model_config = {"arbitrary_types_allowed": True} 67 | 68 | @classmethod 69 | def create( 70 | cls, 71 | channel: str, 72 | group: Optional[str] = None, 73 | on_receive_command_callback: Callable[[CommandMessageReceived], None] = None, 74 | on_error_callback: Optional[Callable[[str], None]] = None, 75 | ) -> "CommandsSubscription": 76 | try: 77 | return cls( 78 | channel=channel, 79 | group=group, 80 | on_receive_command_callback=on_receive_command_callback, 81 | on_error_callback=on_error_callback, 82 | ) 83 | except ValidationError as e: 84 | raise ValueError(str(e)) 85 | -------------------------------------------------------------------------------- /kubemq/transport/interceptors.py: -------------------------------------------------------------------------------- 1 | import grpc 2 | from typing import Callable, Any 3 | from grpc import ClientCallDetails 4 | 5 | 6 | class AuthInterceptors( 7 | grpc.UnaryUnaryClientInterceptor, 8 | grpc.StreamUnaryClientInterceptor, 9 | grpc.UnaryStreamClientInterceptor, 10 | grpc.StreamStreamClientInterceptor, 11 | ): 12 | def __init__(self, auth_token: str): 13 | self.auth_token = auth_token 14 | 15 | def _intercept_call( 16 | self, 17 | continuation: Callable[[ClientCallDetails, Any], Any], 18 | client_call_details: ClientCallDetails, 19 | request_or_iterator: Any, 20 | ) -> Any: 21 | metadata = [] 22 | if client_call_details.metadata is not None: 23 | metadata = list(client_call_details.metadata) 24 | 25 | if self.auth_token and self.auth_token.strip(): 26 | metadata.append(("authorization", f"{self.auth_token}")) 27 | 28 | new_client_call_details = client_call_details._replace(metadata=metadata) 29 | response = continuation(new_client_call_details, request_or_iterator) 30 | return response 31 | 32 | def intercept_unary_unary(self, continuation, client_call_details, request): 33 | return self._intercept_call(continuation, client_call_details, request) 34 | 35 | def intercept_unary_stream(self, continuation, client_call_details, request): 36 | return self._intercept_call(continuation, client_call_details, request) 37 | 38 | def intercept_stream_stream( 39 | self, continuation, client_call_details, request_iterator 40 | ): 41 | return self._intercept_call(continuation, client_call_details, request_iterator) 42 | 43 | def intercept_stream_unary( 44 | self, continuation, client_call_details, request_iterator 45 | ): 46 | return self._intercept_call(continuation, client_call_details, request_iterator) 47 | 48 | 49 | class AuthInterceptorsAsync( 50 | grpc.aio.UnaryUnaryClientInterceptor, 51 | grpc.aio.StreamStreamClientInterceptor, 52 | grpc.aio.UnaryStreamClientInterceptor, 53 | grpc.aio.StreamUnaryClientInterceptor, 54 | ): 55 | def __init__(self, auth_token: str): 56 | self.auth_token = auth_token 57 | 58 | async def _intercept_call( 59 | self, 60 | continuation: Callable[[ClientCallDetails, Any], Any], 61 | client_call_details: ClientCallDetails, 62 | request_or_iterator: Any, 63 | ) -> Any: 64 | metadata = [] 65 | if client_call_details.metadata is not None: 66 | metadata = list(client_call_details.metadata) 67 | 68 | if self.auth_token and self.auth_token.strip(): 69 | metadata.append(("authorization", f"{self.auth_token}")) 70 | 71 | new_client_call_details = client_call_details._replace(metadata=metadata) 72 | response = await continuation(new_client_call_details, request_or_iterator) 73 | return response 74 | 75 | async def intercept_unary_unary(self, continuation, client_call_details, request): 76 | return await self._intercept_call(continuation, client_call_details, request) 77 | 78 | async def intercept_unary_stream(self, continuation, client_call_details, request): 79 | return await self._intercept_call(continuation, client_call_details, request) 80 | 81 | async def intercept_stream_stream( 82 | self, continuation, client_call_details, request_iterator 83 | ): 84 | return await self._intercept_call( 85 | continuation, client_call_details, request_iterator 86 | ) 87 | 88 | async def intercept_stream_unary( 89 | self, continuation, client_call_details, request_iterator 90 | ): 91 | return await self._intercept_call( 92 | continuation, client_call_details, request_iterator 93 | ) 94 | -------------------------------------------------------------------------------- /kubemq/cq/query_message.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | from typing import Dict, Optional 3 | from pydantic import BaseModel, Field, field_validator, model_validator 4 | from kubemq.grpc import Request as pbQuery 5 | 6 | 7 | class QueryMessage(BaseModel): 8 | """ 9 | Class representing a query message. 10 | 11 | Attributes: 12 | id (Optional[str]): The ID of the query message. 13 | channel (str): The channel of the query message. 14 | metadata (Optional[str]): The metadata of the query message. 15 | body (bytes): The body of the query message. 16 | tags (Dict[str, str]): The tags of the query message. 17 | timeout_in_seconds (int): The timeout of the query message in seconds. 18 | cache_key (str): The cache key of the query message. 19 | cache_ttl_int_seconds (int): The cache TTL of the query message in seconds. 20 | """ 21 | 22 | id: Optional[str] = Field(default_factory=lambda: str(uuid.uuid4())) 23 | channel: str 24 | metadata: Optional[str] = None 25 | body: bytes = Field(default=b"") 26 | tags: Dict[str, str] = Field(default_factory=dict) 27 | timeout_in_seconds: int = Field(gt=0) 28 | cache_key: str = "" 29 | cache_ttl_int_seconds: int = 0 30 | 31 | model_config = {"arbitrary_types_allowed": True} 32 | 33 | @field_validator("channel") 34 | def channel_must_exist(cls, v: str) -> str: 35 | if not v: 36 | raise ValueError("Query message must have a channel.") 37 | return v 38 | 39 | @model_validator(mode="after") 40 | def check_metadata_body_tags(self) -> "QueryMessage": 41 | if not self.metadata and not self.body and not self.tags: 42 | raise ValueError( 43 | "Query message must have at least one of the following: metadata, body, or tags." 44 | ) 45 | return self 46 | 47 | def encode(self, client_id: str) -> pbQuery: 48 | pb_query = pbQuery() 49 | pb_query.RequestID = self.id 50 | pb_query.ClientID = client_id 51 | pb_query.Channel = self.channel 52 | pb_query.Metadata = self.metadata or "" 53 | pb_query.Body = self.body 54 | pb_query.Timeout = self.timeout_in_seconds * 1000 55 | pb_query.RequestTypeData = pbQuery.RequestType.Query 56 | for key, value in self.tags.items(): 57 | pb_query.Tags[key] = value 58 | pb_query.CacheKey = self.cache_key 59 | pb_query.CacheTTL = self.cache_ttl_int_seconds * 1000 60 | return pb_query 61 | 62 | def __repr__(self) -> str: 63 | return ( 64 | f"QueryMessage: id={self.id}, channel={self.channel}, " 65 | f"metadata={self.metadata}, body={self.body}, tags={self.tags}, " 66 | f"timeout_in_seconds={self.timeout_in_seconds}, " 67 | f"cache_key={self.cache_key}, " 68 | f"cache_ttl_int_seconds={self.cache_ttl_int_seconds}" 69 | ) 70 | 71 | @classmethod 72 | def create( 73 | cls, 74 | id: Optional[str] = None, 75 | channel: Optional[str] = None, 76 | metadata: Optional[str] = None, 77 | body: bytes = b"", 78 | tags: Optional[Dict[str, str]] = None, 79 | timeout_in_seconds: int = 0, 80 | cache_key: str = "", 81 | cache_ttl_int_seconds: int = 0, 82 | ) -> "QueryMessage": 83 | """ 84 | Creates a new QueryMessage instance. 85 | 86 | This method provides backwards compatibility with the original constructor. 87 | """ 88 | return cls( 89 | id=id, 90 | channel=channel, 91 | metadata=metadata, 92 | body=body, 93 | tags=tags, 94 | timeout_in_seconds=timeout_in_seconds, 95 | cache_key=cache_key, 96 | cache_ttl_int_seconds=cache_ttl_int_seconds, 97 | ) 98 | -------------------------------------------------------------------------------- /kubemq/cq/queries_subscription.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from pydantic import BaseModel, field_validator 3 | from typing import Callable, Optional 4 | from kubemq.grpc import Subscribe 5 | from kubemq.common.subscribe_type import SubscribeType 6 | from kubemq.cq.query_message_received import QueryMessageReceived 7 | 8 | 9 | class QueriesSubscription(BaseModel): 10 | """ 11 | QueriesSubscription class represents a subscription to receive query messages from a channel. 12 | 13 | Attributes: 14 | channel (str): The name of the channel to subscribe to. 15 | group (Optional[str]): The optional name of the group to subscribe to. 16 | on_receive_query_callback (Callable[[QueryMessageReceived], None]): The callback function to be called when a query message is received. 17 | on_error_callback (Optional[Callable[[str], None]]): The callback function to be called when an error occurs. 18 | """ 19 | 20 | channel: str 21 | group: Optional[str] = None 22 | on_receive_query_callback: Callable[[QueryMessageReceived], None] 23 | on_error_callback: Optional[Callable[[str], None]] = None 24 | 25 | model_config = {"arbitrary_types_allowed": True} 26 | 27 | @field_validator("channel") 28 | def channel_must_exist(cls, v: str) -> str: 29 | if not v: 30 | raise ValueError("query subscription must have a channel.") 31 | return v 32 | 33 | @field_validator("on_receive_query_callback") 34 | def callback_must_exist(cls, v: Callable) -> Callable: 35 | if not callable(v): 36 | raise ValueError( 37 | "query subscription must have a on_receive_query_callback function." 38 | ) 39 | return v 40 | 41 | def raise_on_receive_message(self, received_query: QueryMessageReceived) -> None: 42 | """Raises the on_receive_query_callback with the received query message.""" 43 | self.on_receive_query_callback(received_query) 44 | 45 | async def raise_on_receive_message_async(self, received_query: QueryMessageReceived) -> None: 46 | """Async-aware version that supports both sync and async callbacks.""" 47 | if asyncio.iscoroutinefunction(self.on_receive_query_callback): 48 | await self.on_receive_query_callback(received_query) 49 | else: 50 | self.on_receive_query_callback(received_query) 51 | 52 | def raise_on_error(self, msg: str) -> None: 53 | """Raises the on_error_callback with the specified error message.""" 54 | if self.on_error_callback: 55 | self.on_error_callback(msg) 56 | 57 | async def raise_on_error_async(self, msg: str) -> None: 58 | """Async-aware version that supports both sync and async callbacks.""" 59 | if self.on_error_callback: 60 | if asyncio.iscoroutinefunction(self.on_error_callback): 61 | await self.on_error_callback(msg) 62 | else: 63 | self.on_error_callback(msg) 64 | 65 | def encode(self, client_id: str = "") -> Subscribe: 66 | """Encodes the query subscription into a Subscribe message.""" 67 | request = Subscribe() 68 | request.Channel = self.channel 69 | request.Group = self.group or "" 70 | request.ClientID = client_id 71 | request.SubscribeTypeData = SubscribeType.Queries.value 72 | return request 73 | 74 | def __repr__(self) -> str: 75 | """Returns a string representation of the QueriesSubscription object.""" 76 | return f"QueriesSubscription: channel={self.channel}, group={self.group}" 77 | 78 | @classmethod 79 | def create( 80 | cls, 81 | channel: str, 82 | group: Optional[str] = None, 83 | on_receive_query_callback: Callable[[QueryMessageReceived], None] = None, 84 | on_error_callback: Optional[Callable[[str], None]] = None, 85 | ) -> "QueriesSubscription": 86 | """ 87 | Creates a new QueriesSubscription instance. 88 | 89 | This method provides backwards compatibility with the original constructor. 90 | """ 91 | return cls( 92 | channel=channel, 93 | group=group, 94 | on_receive_query_callback=on_receive_query_callback, 95 | on_error_callback=on_error_callback, 96 | ) 97 | -------------------------------------------------------------------------------- /kubemq/cq/query_response_message.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | from typing import Dict, Optional 3 | from pydantic import BaseModel, Field, field_validator 4 | from kubemq.cq.query_message_received import QueryMessageReceived 5 | from kubemq.grpc import Response as pbResponse 6 | 7 | 8 | class QueryResponseMessage(BaseModel): 9 | """ 10 | Class for representing a query response message. 11 | 12 | Attributes: 13 | query_received (Optional[QueryMessageReceived]): The received query message. 14 | client_id (str): The client ID. 15 | request_id (str): The request ID. 16 | is_executed (bool): Indicates if the query has been executed. 17 | timestamp (datetime): The timestamp of the query response. 18 | error (str): The error message, if any. 19 | metadata (Optional[str]): The metadata associated with the query response. 20 | body (bytes): The body of the query response. 21 | tags (Dict[str, str]): The tags associated with the query response. 22 | """ 23 | 24 | query_received: Optional[QueryMessageReceived] = None 25 | client_id: str = Field(default="") 26 | request_id: str = Field(default="") 27 | is_executed: bool = Field(default=False) 28 | timestamp: datetime = Field(default_factory=datetime.now) 29 | error: str = Field(default="") 30 | metadata: Optional[str] = None 31 | body: bytes = Field(default=b"") 32 | tags: Dict[str, str] = Field(default_factory=dict) 33 | 34 | model_config = {"arbitrary_types_allowed": True} 35 | 36 | @field_validator("query_received") 37 | def validate_query_received(cls, v): 38 | if v is None: 39 | raise ValueError("Query response must have a query request.") 40 | if v.reply_channel == "": 41 | raise ValueError("Query response must have a reply channel.") 42 | return v 43 | 44 | @classmethod 45 | def decode(cls, pb_response: pbResponse) -> "QueryResponseMessage": 46 | """ 47 | Decodes the protocol buffer response and creates a new QueryResponseMessage instance. 48 | """ 49 | return cls( 50 | client_id=pb_response.ClientID, 51 | request_id=pb_response.RequestID, 52 | is_executed=pb_response.Executed, 53 | error=pb_response.Error, 54 | timestamp=datetime.fromtimestamp(pb_response.Timestamp / 1e9), 55 | metadata=pb_response.Metadata, 56 | body=pb_response.Body, 57 | tags=dict(pb_response.Tags), 58 | ) 59 | 60 | def encode(self, client_id: str) -> pbResponse: 61 | """ 62 | Encodes the query response message into a protocol buffer response. 63 | """ 64 | if not self.query_received: 65 | raise ValueError("Query received is required for encoding.") 66 | pb_response = pbResponse() 67 | pb_response.ClientID = client_id 68 | pb_response.RequestID = self.query_received.id 69 | pb_response.ReplyChannel = self.query_received.reply_channel 70 | pb_response.Executed = self.is_executed 71 | pb_response.Error = self.error 72 | pb_response.Timestamp = int(self.timestamp.timestamp() * 1e9) 73 | pb_response.Metadata = self.metadata or "" 74 | pb_response.Body = self.body or b"" 75 | for key, value in self.tags.items(): 76 | pb_response.Tags[key] = value 77 | return pb_response 78 | 79 | def __repr__(self) -> str: 80 | return ( 81 | f"QueryResponseMessage: client_id={self.client_id}, " 82 | f"request_id={self.request_id}, is_executed={self.is_executed}, " 83 | f"error={self.error}, timestamp={self.timestamp}" 84 | ) 85 | 86 | @classmethod 87 | def create( 88 | cls, 89 | query_received: Optional[QueryMessageReceived] = None, 90 | metadata: Optional[str] = None, 91 | body: bytes = b"", 92 | tags: Optional[Dict[str, str]] = None, 93 | is_executed: bool = False, 94 | error: str = "", 95 | timestamp: Optional[datetime] = None, 96 | ) -> "QueryResponseMessage": 97 | """ 98 | Creates a new QueryResponseMessage instance. 99 | 100 | This method provides backwards compatibility with the original constructor. 101 | """ 102 | return cls( 103 | query_received=query_received, 104 | metadata=metadata, 105 | body=body, 106 | tags=tags, 107 | is_executed=is_executed, 108 | error=error, 109 | timestamp=timestamp or datetime.now(), 110 | ) 111 | -------------------------------------------------------------------------------- /kubemq/grpc/client.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | from datetime import datetime 3 | 4 | import grpc 5 | import threading 6 | import time 7 | import queue 8 | from kubemq_pb2_grpc import kubemqStub 9 | from kubemq_pb2 import Event, Result 10 | 11 | 12 | def sender_thread(sender, thread_id): 13 | for _ in range(100000): 14 | event = Event( 15 | EventID=str(uuid.uuid4()), 16 | Channel="es4", 17 | Metadata=f"Thread {thread_id} - Message {_}", 18 | ClientID="python-sdk", 19 | Store=True, 20 | ) 21 | response = sender.send(event=event) 22 | # Uncomment the next line if you need to print the response or handle it. 23 | # print(f"Response from Thread {thread_id}: {response}") 24 | # time.sleep(0.1) # 0.1 second interval 25 | 26 | 27 | def generate_and_send_messages(sender): 28 | threads = [] 29 | for i in range(50): # 50 threads 30 | thread = threading.Thread(target=sender_thread, args=(sender, i)) 31 | thread.start() 32 | threads.append(thread) 33 | 34 | # Wait for all threads to complete 35 | for thread in threads: 36 | thread.join() 37 | 38 | 39 | def run(): 40 | with grpc.insecure_channel("localhost:50000") as channel: 41 | stub = kubemqStub(channel) 42 | shutdown_event = threading.Event() 43 | sender = EventSender(stub, shutdown_event) 44 | # Start the send_message function in a separate thread 45 | 46 | time.sleep(2) 47 | # Example of sending a single message and waiting for the response 48 | print("Starting to send messages", datetime.now()) 49 | generate_and_send_messages(sender) 50 | print("All messages sent", datetime.now()) 51 | try: 52 | # Keep the main thread running, unless interrupted 53 | while not shutdown_event.is_set(): 54 | time.sleep(0.1) 55 | except KeyboardInterrupt: 56 | print("Received shutdown signal") 57 | shutdown_event.set() 58 | 59 | 60 | class EventSender: 61 | def __init__(self, client_stub: kubemqStub, shutdown_event: threading.Event): 62 | self._clientStub = client_stub 63 | self._shutdown_event = shutdown_event 64 | self._lock = threading.Lock() 65 | self._response_tracking = {} 66 | self._sending_queue = queue.Queue() 67 | self._allow_new_messages = True 68 | threading.Thread(target=self._send_events_stream, args=(), daemon=True).start() 69 | 70 | def send(self, event: Event) -> [Result, None]: 71 | if not event.Store: 72 | self._sending_queue.put(event) 73 | return None 74 | response_event = threading.Event() 75 | response_container = {} 76 | 77 | with self._lock: 78 | self._response_tracking[event.EventID] = ( 79 | response_container, 80 | response_event, 81 | ) 82 | self._sending_queue.put(event) 83 | response_event.wait() 84 | response = response_container.get("response") 85 | with self._lock: 86 | del self._response_tracking[event.EventID] 87 | return response 88 | 89 | def _send_events_stream(self): 90 | def send_requests(): 91 | while not self._shutdown_event.is_set(): 92 | try: 93 | msg = self._sending_queue.get( 94 | timeout=1 95 | ) # timeout to check for shutdown event periodically 96 | yield msg 97 | except queue.Empty: 98 | continue 99 | 100 | while not self._shutdown_event.is_set(): 101 | try: 102 | responses = self._clientStub.SendEventsStream(send_requests()) 103 | print("Connecting to the server...") 104 | for response in responses: 105 | if self._shutdown_event.is_set(): 106 | break 107 | response_event_id = response.EventID 108 | with self._lock: 109 | if response_event_id in self._response_tracking: 110 | response_container, response_event = ( 111 | self._response_tracking[response_event_id] 112 | ) 113 | response_container["response"] = response 114 | response_event.set() 115 | except grpc.RpcError as e: 116 | print(f"GRPC Error: {e}. Retrying in 5 seconds...") 117 | time.sleep(5) 118 | continue 119 | 120 | 121 | if __name__ == "__main__": 122 | run() 123 | -------------------------------------------------------------------------------- /kubemq/pubsub/events_store_subscription.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from datetime import datetime 3 | from typing import Callable, Optional 4 | from enum import Enum 5 | from pydantic import BaseModel, field_validator 6 | from kubemq.grpc import Subscribe 7 | from kubemq.common.subscribe_type import SubscribeType 8 | from kubemq.pubsub import EventStoreMessageReceived 9 | 10 | 11 | class EventsStoreType(Enum): 12 | Undefined = 0 13 | StartNewOnly = 1 14 | StartFromFirst = 2 15 | StartFromLast = 3 16 | StartAtSequence = 4 17 | StartAtTime = 5 18 | StartAtTimeDelta = 6 19 | 20 | 21 | class EventsStoreSubscription(BaseModel): 22 | channel: str 23 | group: Optional[str] = None 24 | events_store_type: EventsStoreType = EventsStoreType.Undefined 25 | events_store_sequence_value: int = 0 26 | events_store_start_time: Optional[datetime] = None 27 | on_receive_event_callback: Callable[[EventStoreMessageReceived], None] 28 | on_error_callback: Optional[Callable[[str], None]] = None 29 | 30 | @field_validator("channel") 31 | def channel_must_exist(cls, v): 32 | if not v: 33 | raise ValueError("Event Store subscription must have a channel.") 34 | return v 35 | 36 | @field_validator("events_store_type") 37 | def events_store_type_must_be_defined(cls, v): 38 | if v == EventsStoreType.Undefined: 39 | raise ValueError("Event Store subscription must have an events store type.") 40 | return v 41 | 42 | @field_validator("events_store_sequence_value") 43 | def validate_sequence_value(cls, v, values): 44 | if ( 45 | "events_store_type" in values 46 | and values["events_store_type"] == EventsStoreType.StartAtSequence 47 | and v == 0 48 | ): 49 | raise ValueError( 50 | "Event Store subscription with StartAtSequence events store type must have a sequence value." 51 | ) 52 | return v 53 | 54 | @field_validator("events_store_start_time") 55 | def validate_start_time(cls, v, values): 56 | if ( 57 | "events_store_type" in values 58 | and values["events_store_type"] == EventsStoreType.StartAtTime 59 | and v is None 60 | ): 61 | raise ValueError( 62 | "Event Store subscription with StartAtTime events store type must have a start time." 63 | ) 64 | return v 65 | 66 | def raise_on_receive_message(self, received_event: EventStoreMessageReceived): 67 | if self.on_receive_event_callback: 68 | self.on_receive_event_callback(received_event) 69 | 70 | async def raise_on_receive_message_async(self, received_event: EventStoreMessageReceived): 71 | """Async-aware version that supports both sync and async callbacks.""" 72 | if self.on_receive_event_callback: 73 | if asyncio.iscoroutinefunction(self.on_receive_event_callback): 74 | await self.on_receive_event_callback(received_event) 75 | else: 76 | self.on_receive_event_callback(received_event) 77 | 78 | def raise_on_error(self, msg: str): 79 | if self.on_error_callback: 80 | self.on_error_callback(msg) 81 | 82 | async def raise_on_error_async(self, msg: str): 83 | """Async-aware version that supports both sync and async callbacks.""" 84 | if self.on_error_callback: 85 | if asyncio.iscoroutinefunction(self.on_error_callback): 86 | await self.on_error_callback(msg) 87 | else: 88 | self.on_error_callback(msg) 89 | 90 | def encode(self, client_id: str = "") -> Subscribe: 91 | request = Subscribe() 92 | request.Channel = self.channel 93 | request.Group = self.group or "" 94 | request.EventsStoreTypeData = self.events_store_type.value 95 | 96 | if self.events_store_type == EventsStoreType.StartAtSequence: 97 | request.EventsStoreTypeValue = self.events_store_sequence_value 98 | elif self.events_store_type == EventsStoreType.StartAtTime: 99 | request.EventsStoreTypeValue = int(self.events_store_start_time.timestamp()) 100 | 101 | request.ClientID = client_id 102 | request.SubscribeTypeData = SubscribeType.EventsStore.value 103 | return request 104 | 105 | class Config: 106 | arbitrary_types_allowed = True 107 | 108 | def model_dump(self, **kwargs): 109 | dump = super().model_dump(**kwargs) 110 | dump["events_store_type"] = self.events_store_type.name 111 | if self.events_store_start_time: 112 | dump["events_store_start_time"] = self.events_store_start_time.isoformat() 113 | # Remove callback functions from the dump 114 | dump.pop("on_receive_event_callback", None) 115 | dump.pop("on_error_callback", None) 116 | return dump 117 | -------------------------------------------------------------------------------- /examples/pubsub/async.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from kubemq.pubsub import * 3 | 4 | 5 | async def example_events(): 6 | """Async events example with subscription and sending.""" 7 | try: 8 | async with Client(address="localhost:50000", client_id="async_events_example") as client: 9 | # Define async callback (you can also use sync callbacks for backward compatibility) 10 | async def on_receive_event(event: EventMessageReceived): 11 | print(f"[Event] Id:{event.id}, From:{event.from_client_id}, Body:{event.body.decode('utf-8')}") 12 | # Async callbacks can use await for async operations 13 | await asyncio.sleep(0.01) # Simulate async processing 14 | 15 | async def on_error(error: str): 16 | print(f"[Error] {error}") 17 | 18 | # Subscribe to events 19 | subscription = EventsSubscription( 20 | channel="async_events", 21 | on_receive_event_callback=on_receive_event, 22 | on_error_callback=on_error, 23 | ) 24 | task = client.subscribe_to_events_async(subscription) 25 | 26 | # Wait for subscription to be ready 27 | await asyncio.sleep(0.5) 28 | 29 | # Send events asynchronously 30 | for i in range(5): 31 | await client.send_events_message_async( 32 | EventMessage(channel="async_events", body=f"Async event {i}".encode()) 33 | ) 34 | await asyncio.sleep(0.2) 35 | 36 | # Wait for events to be processed 37 | await asyncio.sleep(1) 38 | 39 | except Exception as e: 40 | print(f"Error: {e}") 41 | 42 | 43 | async def example_events_store(): 44 | """Async events store example with subscription and sending.""" 45 | try: 46 | async with Client(address="localhost:50000", client_id="async_events_store_example") as client: 47 | async def on_receive_event(event: EventStoreMessageReceived): 48 | print(f"[EventStore] Id:{event.id}, Sequence:{event.sequence}, Body:{event.body.decode('utf-8')}") 49 | 50 | async def on_error(error: str): 51 | print(f"[Error] {error}") 52 | 53 | # Subscribe to events store 54 | subscription = EventsStoreSubscription( 55 | channel="async_events_store", 56 | on_receive_event_callback=on_receive_event, 57 | on_error_callback=on_error, 58 | events_store_type=EventsStoreType.StartNewOnly, 59 | ) 60 | task = client.subscribe_to_events_store_async(subscription) 61 | 62 | await asyncio.sleep(0.5) 63 | 64 | # Send events store messages and get results 65 | for i in range(3): 66 | result = await client.send_events_store_message_async( 67 | EventStoreMessage(channel="async_events_store", body=f"Stored event {i}".encode()) 68 | ) 69 | print(f"[Sent] Id:{result.id}, Sent:{result.sent}") 70 | await asyncio.sleep(0.2) 71 | 72 | await asyncio.sleep(1) 73 | 74 | except Exception as e: 75 | print(f"Error: {e}") 76 | 77 | 78 | async def example_concurrent_operations(): 79 | """Demonstrate concurrent async operations using asyncio.gather().""" 80 | try: 81 | async with Client(address="localhost:50000", client_id="concurrent_pubsub") as client: 82 | # Send to multiple channels concurrently 83 | channels = ["channel_1", "channel_2", "channel_3"] 84 | 85 | # Send messages to all channels in parallel 86 | await asyncio.gather(*[ 87 | client.send_events_message_async( 88 | EventMessage(channel=ch, body=f"Message to {ch}".encode()) 89 | ) 90 | for ch in channels 91 | ]) 92 | print(f"Sent messages to {len(channels)} channels concurrently") 93 | 94 | # Ping and list channels concurrently 95 | server_info, channel_list = await asyncio.gather( 96 | client.ping_async(), 97 | client.list_events_channels_async("") 98 | ) 99 | print(f"Server version: {server_info.version}") 100 | print(f"Found {len(channel_list)} channels") 101 | 102 | except Exception as e: 103 | print(f"Error: {e}") 104 | 105 | 106 | async def main(): 107 | """Run all async examples.""" 108 | print("=== Async PubSub Examples ===\n") 109 | 110 | print("1. Events Example:") 111 | await example_events() 112 | 113 | print("\n2. Events Store Example:") 114 | await example_events_store() 115 | 116 | print("\n3. Concurrent Operations Example:") 117 | await example_concurrent_operations() 118 | 119 | print("\n=== Done ===") 120 | 121 | 122 | if __name__ == "__main__": 123 | asyncio.run(main()) 124 | -------------------------------------------------------------------------------- /examples/pubsub/run.py: -------------------------------------------------------------------------------- 1 | import time 2 | import concurrent.futures 3 | from kubemq.pubsub import * 4 | import asyncio 5 | 6 | 7 | def events_run(address, channelName, itr, one_mb_message): 8 | try: 9 | loop = asyncio.new_event_loop() 10 | asyncio.set_event_loop(loop) 11 | with Client(address=address, client_id="events_example") as client: 12 | print( 13 | f"Starting events run with address: {address}, channel: {channelName}, itr: {itr}" 14 | ) 15 | received = 0 16 | errors = 0 17 | 18 | def on_receive_event(event: EventMessageReceived): 19 | nonlocal received 20 | received += 1 21 | 22 | def on_error_handler(err: str): 23 | nonlocal errors 24 | errors += 1 25 | 26 | client.subscribe_to_events( 27 | subscription=EventsSubscription( 28 | channel=channelName, 29 | group="", 30 | on_receive_event_callback=on_receive_event, 31 | on_error_callback=on_error_handler, 32 | ), 33 | cancel=CancellationToken(), 34 | ) 35 | time.sleep(1) 36 | 37 | for _ in range(itr): 38 | result = client.send_events_message( 39 | EventMessage(channel=channelName, body=one_mb_message) 40 | ) 41 | time.sleep(1) 42 | print( 43 | f"Completed events_run with address: {address}, channel: {channelName}, itr: {itr}, received: {received}, errors: {errors}" 44 | ) 45 | loop.close() 46 | except Exception as e: 47 | print(f"Events Run: {e}") 48 | os.exit(1) 49 | 50 | 51 | def events_store_run(address, channelName, itr, one_mb_message): 52 | try: 53 | loop = asyncio.new_event_loop() 54 | asyncio.set_event_loop(loop) 55 | with Client(address=address, client_id="events_store_example") as client: 56 | print( 57 | f"Starting events store run with address: {address}, channel: {channelName}, itr: {itr}" 58 | ) 59 | received = 0 60 | errors = 0 61 | 62 | def on_receive_event(event: EventStoreMessageReceived): 63 | nonlocal received 64 | received += 1 65 | 66 | def on_error_handler(err: str): 67 | nonlocal errors 68 | errors += 1 69 | 70 | client.subscribe_to_events_store( 71 | subscription=EventsStoreSubscription( 72 | channel=channelName, 73 | group="", 74 | on_receive_event_callback=on_receive_event, 75 | on_error_callback=on_error_handler, 76 | events_store_type=EventsStoreType.StartNewOnly, 77 | ), 78 | cancel=CancellationToken(), 79 | ) 80 | time.sleep(1) 81 | 82 | for _ in range(itr): 83 | result = client.send_events_store_message( 84 | EventStoreMessage(channel=channelName, body=one_mb_message) 85 | ) 86 | time.sleep(1) 87 | print( 88 | f"Completed events_store_run with address: {address}, channel: {channelName}, itr: {itr}, received: {received}, errors: {errors}" 89 | ) 90 | loop.close() 91 | except Exception as e: 92 | print(f"Events Store Run: {e}") 93 | os.exit(1) 94 | 95 | 96 | def main(): 97 | one_mb_message = b"x" * int(1e4) 98 | itr = 200 99 | repeat = 1 100 | channels_events_store_ports = [ 101 | ("localhost:50000", "es1"), 102 | ("localhost:50000", "es2"), 103 | ("localhost:50000", "es3"), 104 | ] 105 | channels_events_ports = [ 106 | ("localhost:50000", "e1"), 107 | ("localhost:50000", "e2"), 108 | ("localhost:50000", "e3"), 109 | ] 110 | print(f"Starting events_store_run and events_run with itr: {itr}, repeat: {repeat}") 111 | for _ in range(repeat): 112 | with concurrent.futures.ThreadPoolExecutor() as executor: 113 | futures_es = [ 114 | executor.submit( 115 | events_store_run, address, channelName, itr, one_mb_message 116 | ) 117 | for address, channelName in channels_events_store_ports 118 | ] 119 | futures_e = [ 120 | executor.submit(events_run, address, channelName, itr, one_mb_message) 121 | for address, channelName in channels_events_ports 122 | ] 123 | concurrent.futures.wait(futures_es) 124 | concurrent.futures.wait(futures_e) 125 | print( 126 | f"Completed events_store_run and events_run with itr: {itr}, repeat: {repeat}" 127 | ) 128 | 129 | 130 | if __name__ == "__main__": 131 | main() 132 | -------------------------------------------------------------------------------- /kubemq/common/exceptions.py: -------------------------------------------------------------------------------- 1 | class BaseError(Exception): 2 | """ 3 | 4 | The `BaseError` class is a custom exception class that serves as the base class for all other custom exception classes in this software application. 5 | 6 | Attributes: 7 | None 8 | 9 | Methods: 10 | None 11 | 12 | Usage: 13 | The `BaseError` class should be inherited by any custom exception class that needs to be raised within the application. 14 | 15 | Example: 16 | 17 | ``` 18 | class CustomError(BaseError): 19 | def __init__(self, message): 20 | self.message = message 21 | 22 | try: 23 | raise CustomError("This is a custom error.") 24 | except CustomError as e: 25 | print(e.message) 26 | ``` 27 | 28 | """ 29 | 30 | pass 31 | 32 | 33 | class ValidationError(BaseError): 34 | """ 35 | ValidationError Class 36 | 37 | Subclass of BaseError used for representing validation errors. 38 | 39 | Attributes: 40 | message (str): The error message associated with the validation error. 41 | 42 | Methods: 43 | __init__(self, message: str) -> None: Constructor method that initializes the ValidationError instance with the provided error message. 44 | 45 | """ 46 | 47 | def __init__(self, message: str) -> None: 48 | self.message = f"Validation Error: {message}" 49 | super().__init__(self.message) 50 | 51 | 52 | class ConnectionError(BaseError): 53 | """ 54 | Initializes a new instance of the ConnectionError class with the specified error message. 55 | 56 | Args: 57 | message (str): The error message. 58 | 59 | Returns: 60 | None 61 | """ 62 | 63 | def __init__(self, message: str) -> None: 64 | self.message = f"Connection Error: {message}" 65 | super().__init__(self.message) 66 | 67 | 68 | class SendEventError(BaseError): 69 | """ 70 | The `SendEventError` class is a subclass of the `BaseError` class. It represents an error that occurs when sending an event. 71 | 72 | Attributes: 73 | message (str): The error message. 74 | 75 | Methods: 76 | __init__(self, message: str) -> None: Initializes a new instance of the `SendEventError` class with the specified message. 77 | 78 | Example usage: 79 | error = SendEventError("Failed to send event") 80 | """ 81 | 82 | def __init__(self, message: str) -> None: 83 | self.message = f"Send Event Error: {message}" 84 | super().__init__(self.message) 85 | 86 | 87 | class DeleteChannelError(BaseError): 88 | """ 89 | 90 | DeleteChannelError 91 | 92 | This class represents an error that occurs when attempting to delete a channel. 93 | 94 | Attributes: 95 | message (str): An error message describing the specific cause of the error. 96 | 97 | Methods: 98 | __init__(self, message: str) -> None: 99 | Initializes a new instance of the DeleteChannelError class. 100 | 101 | Parameters: 102 | message (str): An error message describing the specific cause of the error. 103 | 104 | Returns: 105 | None 106 | 107 | """ 108 | 109 | def __init__(self, message: str) -> None: 110 | self.message = f"Delete Channel Error: {message}" 111 | super().__init__(self.message) 112 | 113 | 114 | class CreateChannelError(BaseError): 115 | """ 116 | 117 | This class represents an error that occurs when attempting to create a channel. 118 | 119 | """ 120 | 121 | def __init__(self, message: str) -> None: 122 | self.message = f"Create Channel Error: {message}" 123 | super().__init__(self.message) 124 | 125 | 126 | class ListChannelsError(BaseError): 127 | """ """ 128 | 129 | def __init__(self, message: str) -> None: 130 | self.message = f"List Channels Error: {message}" 131 | super().__init__(self.message) 132 | 133 | 134 | class GRPCError(Exception): 135 | """ 136 | A custom exception class for handling GRPC errors. 137 | 138 | Args: 139 | exc (Exception): The exception object that occurred. 140 | 141 | Attributes: 142 | message (str): The exception message. 143 | 144 | Raises: 145 | None 146 | 147 | """ 148 | 149 | def __init__(self, exc): 150 | # Initialize the base exception message in case neither status nor details are found 151 | self.message = str(exc) 152 | 153 | # Check if the exception has 'code' (status) and 'details' methods 154 | if ( 155 | hasattr(exc, "code") 156 | and callable(exc.code) 157 | and hasattr(exc, "details") 158 | and callable(exc.details) 159 | ): 160 | status = exc.code() 161 | details = exc.details() 162 | 163 | # Ensure that status and details are not None 164 | if status is not None and details is not None: 165 | self.message = ( 166 | f"KubeMQ Connection Error - Status: {status} Details: {details}" 167 | ) 168 | 169 | super().__init__(self.message) 170 | -------------------------------------------------------------------------------- /kubemq/pubsub/event_sender.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import time 3 | import threading 4 | import queue 5 | import grpc 6 | from kubemq.transport import Transport, Connection 7 | from kubemq.grpc import Event, Result 8 | from kubemq.common import * 9 | 10 | 11 | class EventSender: 12 | """ 13 | EventSender is a class that is responsible for sending events to a server using a transport, tracking the response of each event, and handling disconnections. 14 | 15 | Attributes: 16 | - clientStub: A client stub object for communicating with the server. 17 | - connection: A Connection object containing connection information. 18 | - shutdown_event: A threading.Event object to signal whether the sender should shutdown. 19 | - logger: A logging.Logger object for logging messages. 20 | - lock: A threading.Lock object for thread safety. 21 | - response_tracking: A dictionary to track the response of each event. 22 | - sending_queue: A queue.Queue object for storing events to be sent. 23 | - allow_new_messages: A flag indicating whether new messages are allowed to be sent. 24 | 25 | Methods: 26 | - __init__(transport: Transport, shutdown_event: threading.Event, logger: logging.Logger, 27 | connection: Connection): Initializes the EventSender object with the given transport, shutdown event, logger, and connection. Starts a new thread to send events. 28 | - send(event: Event) -> Optional[Result]: Sends an event to the server. If the event is not set to be stored, it queues the event. If it is set to be stored, it waits for the response 29 | * and returns it. Raises a ConnectionError if the client is not connected. 30 | - handle_disconnection(): Handles the disconnection from the server. Clears the sending queue and sets an error on all response containers. 31 | - send_events_stream(): Continuously sends events from the sending queue to the server. Handles disconnections and tracks responses. 32 | """ 33 | 34 | def __init__( 35 | self, 36 | transport: Transport, 37 | shutdown_event: threading.Event, 38 | logger: logging.Logger, 39 | connection: Connection, 40 | ): 41 | self.clientStub = transport.kubemq_client() 42 | self.connection = connection 43 | self.shutdown_event = shutdown_event 44 | self.logger = logger 45 | self.lock = threading.Lock() 46 | self.response_tracking = {} 47 | self.sending_queue = queue.Queue() 48 | self.allow_new_messages = True 49 | threading.Thread(target=self.send_events_stream, args=(), daemon=True).start() 50 | 51 | def send(self, event: Event) -> [Result, None]: 52 | if not self.allow_new_messages: 53 | raise ConnectionError( 54 | "Client is not connected to the server and cannot send messages." 55 | ) 56 | 57 | if not event.Store: 58 | self.sending_queue.put(event) 59 | return None 60 | response_event = threading.Event() 61 | response_container = {} 62 | 63 | with self.lock: 64 | self.response_tracking[event.EventID] = (response_container, response_event) 65 | self.sending_queue.put(event) 66 | response_event.wait() 67 | response = response_container.get("response") 68 | with self.lock: 69 | del self.response_tracking[event.EventID] 70 | return response 71 | 72 | def handle_disconnection(self): 73 | with self.lock: 74 | self.allow_new_messages = False 75 | while not self.sending_queue.empty(): 76 | try: 77 | self.sending_queue.get_nowait() # Clear the queue 78 | except queue.Empty: 79 | continue 80 | 81 | # Set error on all response containers 82 | for event_id, ( 83 | response_container, 84 | response_event, 85 | ) in self.response_tracking.items(): 86 | response_container["response"] = Result( 87 | EventID=event_id, 88 | Sent=False, 89 | Error="Error: Disconnected from server", 90 | ) 91 | response_event.set() # Signal that the response has been processed 92 | self.response_tracking.clear() 93 | 94 | def send_events_stream(self): 95 | def send_requests(): 96 | while not self.shutdown_event.is_set(): 97 | try: 98 | msg = self.sending_queue.get( 99 | timeout=1 100 | ) # timeout to check for shutdown event periodically 101 | yield msg 102 | except queue.Empty: 103 | continue 104 | 105 | while not self.shutdown_event.is_set(): 106 | try: 107 | with self.lock: 108 | self.allow_new_messages = True 109 | responses = self.clientStub.SendEventsStream(send_requests()) 110 | for response in responses: 111 | if self.shutdown_event.is_set(): 112 | break 113 | response_event_id = response.EventID 114 | with self.lock: 115 | if response_event_id in self.response_tracking: 116 | response_container, response_event = self.response_tracking[ 117 | response_event_id 118 | ] 119 | response_container["response"] = response 120 | response_event.set() 121 | except grpc.RpcError as e: 122 | self.logger.debug(decode_grpc_error(e)) 123 | self.handle_disconnection() 124 | time.sleep(self.connection.reconnect_interval_seconds) 125 | continue 126 | except Exception as e: 127 | self.logger.debug(f"Error: {str(e)}") 128 | self.handle_disconnection() 129 | time.sleep(self.connection.reconnect_interval_seconds) 130 | continue 131 | -------------------------------------------------------------------------------- /examples/cq/async.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from kubemq.cq import * 3 | 4 | 5 | async def example_commands(): 6 | """Async commands example with subscription and request/response.""" 7 | try: 8 | async with Client(address="localhost:50000", client_id="async_commands_example") as client: 9 | # Define async callback (you can also use sync callbacks for backward compatibility) 10 | async def on_receive_command(command: CommandMessageReceived): 11 | print(f"[Command Received] Id:{command.id}, Body:{command.body.decode('utf-8')}") 12 | 13 | # Async processing 14 | await asyncio.sleep(0.01) 15 | 16 | # Send response asynchronously 17 | response = CommandResponseMessage( 18 | command_received=command, 19 | is_executed=True, 20 | ) 21 | await client.send_response_message_async(response) 22 | print(f"[Command Response] Sent for Id:{command.id}") 23 | 24 | async def on_error(error: str): 25 | print(f"[Error] {error}") 26 | 27 | # Subscribe to commands 28 | subscription = CommandsSubscription( 29 | channel="async_commands", 30 | on_receive_command_callback=on_receive_command, 31 | on_error_callback=on_error, 32 | ) 33 | task = client.subscribe_to_commands_async(subscription) 34 | 35 | # Wait for subscription to be ready 36 | await asyncio.sleep(0.5) 37 | 38 | # Send command requests asynchronously 39 | for i in range(3): 40 | response = await client.send_command_request_async( 41 | CommandMessage( 42 | channel="async_commands", 43 | body=f"Async command {i}".encode(), 44 | timeout_in_seconds=10, 45 | ) 46 | ) 47 | print(f"[Command Request] Response - Executed:{response.is_executed}, Timestamp:{response.timestamp}") 48 | await asyncio.sleep(0.3) 49 | 50 | await asyncio.sleep(1) 51 | 52 | except Exception as e: 53 | print(f"Error: {e}") 54 | 55 | 56 | async def example_queries(): 57 | """Async queries example with subscription and request/response.""" 58 | try: 59 | async with Client(address="localhost:50000", client_id="async_queries_example") as client: 60 | async def on_receive_query(query: QueryMessageReceived): 61 | print(f"[Query Received] Id:{query.id}, Body:{query.body.decode('utf-8')}") 62 | 63 | # Async processing 64 | await asyncio.sleep(0.01) 65 | 66 | # Send response with data 67 | response = QueryResponseMessage( 68 | query_received=query, 69 | is_executed=True, 70 | body=f"Response to query {query.id}".encode(), 71 | ) 72 | await client.send_response_message_async(response) 73 | print(f"[Query Response] Sent for Id:{query.id}") 74 | 75 | async def on_error(error: str): 76 | print(f"[Error] {error}") 77 | 78 | # Subscribe to queries 79 | subscription = QueriesSubscription( 80 | channel="async_queries", 81 | on_receive_query_callback=on_receive_query, 82 | on_error_callback=on_error, 83 | ) 84 | task = client.subscribe_to_queries_async(subscription) 85 | 86 | await asyncio.sleep(0.5) 87 | 88 | # Send query requests asynchronously 89 | for i in range(3): 90 | response = await client.send_query_request_async( 91 | QueryMessage( 92 | channel="async_queries", 93 | body=f"Async query {i}".encode(), 94 | timeout_in_seconds=10, 95 | ) 96 | ) 97 | body_text = response.body.decode('utf-8') if response.body else "No response" 98 | print(f"[Query Request] Response - Executed:{response.is_executed}, Body:{body_text}") 99 | await asyncio.sleep(0.3) 100 | 101 | await asyncio.sleep(1) 102 | 103 | except Exception as e: 104 | print(f"Error: {e}") 105 | 106 | 107 | async def example_concurrent_operations(): 108 | """Demonstrate concurrent async operations using asyncio.gather().""" 109 | try: 110 | async with Client(address="localhost:50000", client_id="concurrent_cq") as client: 111 | # Ping server and list channels concurrently 112 | server_info, cmd_channels, query_channels = await asyncio.gather( 113 | client.ping_async(), 114 | client.list_commands_channels_async(""), 115 | client.list_queries_channels_async("") 116 | ) 117 | print(f"Server version: {server_info.version}") 118 | print(f"Commands channels: {len(cmd_channels)}, Queries channels: {len(query_channels)}") 119 | 120 | # Send multiple command requests concurrently 121 | # Note: This requires subscribers to be running on those channels 122 | channels = ["cmd_1", "cmd_2"] 123 | 124 | async def send_command(channel: str, msg: str): 125 | try: 126 | return await client.send_command_request_async( 127 | CommandMessage( 128 | channel=channel, 129 | body=msg.encode(), 130 | timeout_in_seconds=2, 131 | ) 132 | ) 133 | except Exception as e: 134 | print(f"Command to {channel} failed: {e}") 135 | return None 136 | 137 | # Send to multiple channels in parallel 138 | results = await asyncio.gather(*[ 139 | send_command(ch, f"Concurrent command to {ch}") 140 | for ch in channels 141 | ]) 142 | successful = sum(1 for r in results if r and r.is_executed) 143 | print(f"Sent {len(channels)} concurrent commands, {successful} successful") 144 | 145 | except Exception as e: 146 | print(f"Error: {e}") 147 | 148 | 149 | async def main(): 150 | """Run all async examples.""" 151 | print("=== Async CQ Examples ===\n") 152 | 153 | print("1. Commands Example:") 154 | await example_commands() 155 | 156 | print("\n2. Queries Example:") 157 | await example_queries() 158 | 159 | print("\n3. Concurrent Operations Example:") 160 | await example_concurrent_operations() 161 | 162 | print("\n=== Done ===") 163 | 164 | 165 | if __name__ == "__main__": 166 | asyncio.run(main()) 167 | -------------------------------------------------------------------------------- /kubemq/queues/queues_poll_request.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from pydantic import BaseModel, Field, field_validator 3 | from typing import Optional 4 | import uuid 5 | from kubemq.grpc import QueuesDownstreamRequest, QueuesDownstreamRequestType 6 | 7 | 8 | class QueuesPollRequest(BaseModel): 9 | """ 10 | Class representing a request to poll messages from a queue. 11 | 12 | This class encapsulates the parameters needed to poll messages from a KubeMQ queue. 13 | It provides methods for validation and encoding to a protobuf request. 14 | 15 | Attributes: 16 | channel: The channel (queue name) to poll messages from. 17 | poll_max_messages: The maximum number of messages to poll in a single request. 18 | poll_wait_timeout_in_seconds: The maximum time to wait for messages in seconds. 19 | auto_ack_messages: Whether to automatically acknowledge received messages. 20 | visibility_seconds: The visibility time in seconds for the messages. 21 | 22 | Examples: 23 | ```python 24 | # Create a basic poll request 25 | request = QueuesPollRequest( 26 | channel="my-queue", 27 | poll_max_messages=10, 28 | poll_wait_timeout_in_seconds=30 29 | ) 30 | 31 | # Create a poll request with auto acknowledgment 32 | request = QueuesPollRequest( 33 | channel="my-queue", 34 | poll_max_messages=5, 35 | poll_wait_timeout_in_seconds=10, 36 | auto_ack_messages=True 37 | ) 38 | 39 | # Create a poll request with visibility timeout 40 | request = QueuesPollRequest( 41 | channel="my-queue", 42 | poll_max_messages=1, 43 | poll_wait_timeout_in_seconds=5, 44 | visibility_seconds=60 # Message will be invisible to other consumers for 60 seconds 45 | ) 46 | 47 | # Encode the request to protobuf 48 | pb_request = request.encode("client-1") 49 | ``` 50 | """ 51 | 52 | # Pydantic configuration 53 | class Config: 54 | arbitrary_types_allowed = True 55 | frozen = True # Make instances immutable 56 | 57 | # Instance attributes 58 | channel: Optional[str] = Field( 59 | default=None, description="The channel (queue name) to poll messages from" 60 | ) 61 | poll_max_messages: int = Field( 62 | default=1, 63 | ge=1, 64 | description="The maximum number of messages to poll in a single request", 65 | ) 66 | poll_wait_timeout_in_seconds: int = Field( 67 | default=60, 68 | ge=1, 69 | description="The maximum time to wait for messages in seconds" 70 | ) 71 | auto_ack_messages: bool = Field( 72 | default=False, 73 | description="Whether to automatically acknowledge received messages", 74 | ) 75 | visibility_seconds: int = Field( 76 | default=0, 77 | ge=0, 78 | description="The visibility time in seconds for the messages", 79 | ) 80 | 81 | # Validators 82 | @field_validator("channel") 83 | def channel_must_not_be_empty(cls, v: Optional[str]) -> str: 84 | """ 85 | Validate that the channel is not empty. 86 | 87 | Args: 88 | v: The channel value to validate 89 | 90 | Returns: 91 | The validated channel value 92 | 93 | Raises: 94 | ValueError: If the channel is empty 95 | """ 96 | if not v: 97 | raise ValueError("Queue poll request must have a channel. Please provide a valid queue name.") 98 | return v 99 | 100 | @field_validator("visibility_seconds") 101 | def validate_visibility_seconds(cls, v: int) -> int: 102 | """ 103 | Validate that the visibility seconds is within acceptable limits. 104 | 105 | Args: 106 | v: The visibility seconds value to validate 107 | 108 | Returns: 109 | The validated visibility seconds value 110 | 111 | Raises: 112 | ValueError: If the visibility seconds is negative 113 | """ 114 | if v < 0: 115 | raise ValueError("Visibility seconds cannot be negative") 116 | return v 117 | 118 | # Utility methods 119 | def with_updates(self, **kwargs) -> QueuesPollRequest: 120 | """ 121 | Create a new poll request with updated values. 122 | 123 | Since instances are immutable, this method creates a new 124 | instance with the specified updates. 125 | 126 | Args: 127 | **kwargs: The fields to update and their new values 128 | 129 | Returns: 130 | A new instance with the updated values 131 | """ 132 | data = self.model_dump() 133 | data.update(kwargs) 134 | return self.__class__(**data) 135 | 136 | # Encoding methods 137 | def encode(self, client_id: str = "") -> QueuesDownstreamRequest: 138 | """ 139 | Encode the poll request to a QueuesDownstreamRequest protobuf object. 140 | 141 | This method is used when sending a poll request to the KubeMQ server. 142 | 143 | Args: 144 | client_id: The client ID to use for the request 145 | 146 | Returns: 147 | A QueuesDownstreamRequest protobuf object containing the encoded request 148 | """ 149 | request = QueuesDownstreamRequest() 150 | request.RequestID = str(uuid.uuid4()) 151 | request.ClientID = client_id 152 | request.Channel = self.channel 153 | request.MaxItems = self.poll_max_messages 154 | request.WaitTimeout = self.poll_wait_timeout_in_seconds * 1000 155 | request.AutoAck = self.auto_ack_messages 156 | request.RequestTypeData = QueuesDownstreamRequestType.Get 157 | return request 158 | 159 | # String representations 160 | def __str__(self) -> str: 161 | """ 162 | Get a string representation of the poll request. 163 | 164 | Returns: 165 | A string representation of the poll request 166 | """ 167 | return ( 168 | f"QueuesPollRequest: channel={self.channel}, " 169 | f"poll_max_messages={self.poll_max_messages}, " 170 | f"poll_wait_timeout_in_seconds={self.poll_wait_timeout_in_seconds}, " 171 | f"auto_ack_messages={self.auto_ack_messages}, " 172 | f"visibility_seconds={self.visibility_seconds}" 173 | ) 174 | 175 | def __repr__(self) -> str: 176 | """ 177 | Get a detailed representation of the poll request. 178 | 179 | Returns: 180 | A detailed representation of the poll request 181 | """ 182 | return ( 183 | f"QueuesPollRequest(channel={self.channel!r}, " 184 | f"poll_max_messages={self.poll_max_messages}, " 185 | f"poll_wait_timeout_in_seconds={self.poll_wait_timeout_in_seconds}, " 186 | f"auto_ack_messages={self.auto_ack_messages}, " 187 | f"visibility_seconds={self.visibility_seconds})" 188 | ) 189 | -------------------------------------------------------------------------------- /kubemq/queues/queues_send_result.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from pydantic import BaseModel, Field 3 | from typing import Optional, ClassVar 4 | from datetime import datetime 5 | from kubemq.grpc import SendQueueMessageResult 6 | 7 | 8 | class QueueSendResult(BaseModel): 9 | """ 10 | Represents the result of sending a message to a queue. 11 | 12 | This class encapsulates the result of sending a message to a KubeMQ queue. 13 | It provides information about the message's ID, timestamps, and any errors 14 | that occurred during the send operation. 15 | 16 | Attributes: 17 | id: The unique identifier of the message. 18 | sent_at: The timestamp when the message was sent. 19 | expired_at: The timestamp when the message will expire. 20 | delayed_to: The timestamp when the message will be delivered. 21 | is_error: Indicates if there was an error while sending the message. 22 | error: The error message if `is_error` is True. 23 | 24 | Examples: 25 | ```python 26 | # Send a message and check the result 27 | result = client.send_queues_message(message) 28 | 29 | if result.is_error: 30 | print(f"Error sending message: {result.error}") 31 | else: 32 | print(f"Message sent successfully with ID: {result.id}") 33 | 34 | # Check if the message is delayed 35 | if result.is_delayed(): 36 | print(f"Message will be delivered at: {result.delayed_to}") 37 | 38 | # Check if the message has an expiration 39 | if result.has_expiration(): 40 | print(f"Message will expire at: {result.expired_at}") 41 | ``` 42 | """ 43 | 44 | # Pydantic configuration 45 | class Config: 46 | arbitrary_types_allowed = True 47 | frozen = True # Make instances immutable 48 | 49 | # Class attributes 50 | EPOCH: ClassVar[datetime] = datetime.fromtimestamp(0) 51 | 52 | # Instance attributes 53 | id: Optional[str] = Field( 54 | default=None, description="The unique identifier of the message" 55 | ) 56 | sent_at: Optional[datetime] = Field( 57 | default=None, description="The timestamp when the message was sent" 58 | ) 59 | expired_at: Optional[datetime] = Field( 60 | default=None, description="The timestamp when the message will expire" 61 | ) 62 | delayed_to: Optional[datetime] = Field( 63 | default=None, description="The timestamp when the message will be delivered" 64 | ) 65 | is_error: bool = Field( 66 | default=False, 67 | description="Indicates if there was an error while sending the message", 68 | ) 69 | error: Optional[str] = Field( 70 | default=None, description="The error message if `is_error` is True" 71 | ) 72 | 73 | # Utility methods 74 | def is_successful(self) -> bool: 75 | """ 76 | Check if the message was sent successfully. 77 | 78 | Returns: 79 | bool: True if the message was sent successfully, False otherwise. 80 | """ 81 | return not self.is_error and self.id is not None 82 | 83 | def is_delayed(self) -> bool: 84 | """ 85 | Check if the message is delayed. 86 | 87 | Returns: 88 | bool: True if the message is delayed, False otherwise. 89 | """ 90 | return self.delayed_to is not None and self.delayed_to > datetime.now() 91 | 92 | def has_expiration(self) -> bool: 93 | """ 94 | Check if the message has an expiration time. 95 | 96 | Returns: 97 | bool: True if the message has an expiration time, False otherwise. 98 | """ 99 | return self.expired_at is not None 100 | 101 | def get_delay_seconds(self) -> float: 102 | """ 103 | Get the number of seconds until the message is delivered. 104 | 105 | Returns: 106 | float: The number of seconds until the message is delivered, or 0 if not delayed. 107 | """ 108 | if not self.is_delayed(): 109 | return 0 110 | 111 | return max(0, (self.delayed_to - datetime.now()).total_seconds()) 112 | 113 | def with_updates(self, **kwargs) -> QueueSendResult: 114 | """ 115 | Create a new result with updated values. 116 | 117 | Since instances are immutable, this method creates a new 118 | instance with the specified updates. 119 | 120 | Args: 121 | **kwargs: The fields to update and their new values 122 | 123 | Returns: 124 | A new instance with the updated values 125 | """ 126 | data = self.model_dump() 127 | data.update(kwargs) 128 | return self.__class__(**data) 129 | 130 | # Decoding methods 131 | @classmethod 132 | def decode(cls, result: SendQueueMessageResult) -> QueueSendResult: 133 | """ 134 | Create a QueueSendResult from a protobuf SendQueueMessageResult. 135 | 136 | Args: 137 | result: The protobuf result to decode 138 | 139 | Returns: 140 | A new QueueSendResult instance 141 | 142 | Raises: 143 | ValueError: If the result is invalid 144 | """ 145 | if not result: 146 | raise ValueError("Cannot decode None result") 147 | 148 | try: 149 | return cls( 150 | id=result.MessageID if result.MessageID else None, 151 | sent_at=( 152 | datetime.fromtimestamp(result.SentAt / 1e9) 153 | if result.SentAt > 0 154 | else None 155 | ), 156 | expired_at=( 157 | datetime.fromtimestamp(result.ExpirationAt / 1e9) 158 | if result.ExpirationAt > 0 159 | else None 160 | ), 161 | delayed_to=( 162 | datetime.fromtimestamp(result.DelayedTo / 1e9) 163 | if result.DelayedTo > 0 164 | else None 165 | ), 166 | is_error=result.IsError if result.IsError else False, 167 | error=result.Error if result.Error else None, 168 | ) 169 | except Exception as e: 170 | raise ValueError(f"Failed to decode result: {str(e)}") 171 | 172 | # String representations 173 | def __str__(self) -> str: 174 | """ 175 | Get a string representation of the send result. 176 | 177 | Returns: 178 | A string representation of the send result 179 | """ 180 | try: 181 | status = "ERROR" if self.is_error else "SUCCESS" 182 | return ( 183 | f"QueueSendResult: status={status}, id={self.id}, " 184 | f"sent_at={self.sent_at}, " 185 | f"expired_at={self.expired_at}, " 186 | f"delayed_to={self.delayed_to}, " 187 | f"error={self.error if self.is_error else 'None'}" 188 | ) 189 | except Exception as e: 190 | return f"QueueSendResult: [Error displaying result: {str(e)}]" 191 | 192 | def __repr__(self) -> str: 193 | """ 194 | Get a detailed representation of the send result. 195 | 196 | Returns: 197 | A detailed representation of the send result 198 | """ 199 | return ( 200 | f"QueueSendResult(id={self.id!r}, " 201 | f"sent_at={self.sent_at!r}, " 202 | f"expired_at={self.expired_at!r}, " 203 | f"delayed_to={self.delayed_to!r}, " 204 | f"is_error={self.is_error}, " 205 | f"error={self.error!r})" 206 | ) 207 | -------------------------------------------------------------------------------- /kubemq/transport/transport.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import threading 3 | 4 | import grpc 5 | from grpc import Channel 6 | from grpc._cython.cygrpc import ChannelCredentials 7 | import kubemq.grpc.kubemq_pb2_grpc as kubemq_pb2_grpc 8 | from typing import Sequence 9 | from kubemq.transport.connection import Connection 10 | from kubemq.transport.tls_config import TlsConfig 11 | from kubemq.transport.keep_alive import KeepAliveConfig 12 | from kubemq.transport.interceptors import AuthInterceptorsAsync 13 | from kubemq.transport.server_info import ServerInfo 14 | from kubemq.grpc import Empty 15 | import logging 16 | from kubemq.transport.channel_manager import ChannelManager 17 | 18 | 19 | def _get_ssl_credentials(tls_config: TlsConfig) -> ChannelCredentials: 20 | certificate_chain = ( 21 | _read_file(tls_config.cert_file) if tls_config.cert_file else None 22 | ) 23 | private_key = _read_file(tls_config.key_file) if tls_config.key_file else None 24 | root_certificates = _read_file(tls_config.ca_file) if tls_config.ca_file else None 25 | return grpc.ssl_channel_credentials( 26 | root_certificates, private_key, certificate_chain 27 | ) 28 | 29 | 30 | def _read_file(file_path): 31 | with open(file_path, "rb") as f: 32 | return f.read() 33 | 34 | 35 | def _get_call_options(connection: Connection) -> Sequence: 36 | options = [ 37 | ("grpc.max_send_message_length", connection.max_send_size), 38 | ("grpc.max_receive_message_length", connection.max_receive_size), 39 | ] 40 | if ( 41 | connection.keep_alive 42 | and isinstance(connection.keep_alive, KeepAliveConfig) 43 | and connection.keep_alive.enabled 44 | ): 45 | options.append( 46 | ( 47 | "grpc.keepalive_time_ms", 48 | connection.keep_alive.ping_timeout_in_seconds * 1000, 49 | ) 50 | ) 51 | options.append( 52 | ( 53 | "grpc.keepalive_timeout_ms", 54 | connection.keep_alive.ping_interval_in_seconds * 1000, 55 | ) 56 | ) 57 | options.append(("grpc.keepalive_permit_without_calls", 1)) 58 | options.append( 59 | ( 60 | "grpc.http2.min_time_between_pings_ms", 61 | connection.keep_alive.ping_timeout_in_seconds * 1000, 62 | ) 63 | ) 64 | options.append( 65 | ( 66 | "grpc.http2.min_ping_interval_without_data_ms", 67 | connection.keep_alive.ping_interval_in_seconds * 1000, 68 | ) 69 | ) 70 | return options 71 | 72 | 73 | class Transport: 74 | def __init__(self, connection: Connection) -> None: 75 | self._opts: Connection = connection.complete() 76 | self._channel: Channel = None 77 | self._client: kubemq_pb2_grpc.kubemqStub = None 78 | self._async_channel: Channel = None 79 | self._async_client: kubemq_pb2_grpc.kubemqStub = None 80 | self._is_connected_lock = threading.Lock() 81 | self._is_connected: bool = False 82 | self._logger = logging.getLogger("KubeMQ") 83 | self._channel_manager = None 84 | 85 | def initialize(self) -> "Transport": 86 | try: 87 | # Initialize the channel manager 88 | self._channel_manager = ChannelManager(self._opts, self._logger) 89 | self._client = self._channel_manager.get_client() 90 | with self._is_connected_lock: 91 | self._is_connected = True 92 | self._initialize_async() 93 | except Exception as ex: 94 | with self._is_connected_lock: 95 | self._is_connected = False 96 | raise ex 97 | return self 98 | 99 | def _initialize_async(self) -> None: 100 | auth_interceptor_async: AuthInterceptorsAsync = AuthInterceptorsAsync( 101 | self._opts.auth_token 102 | ) 103 | interceptors_async: Sequence[grpc.aio.ClientInterceptor] = [ 104 | auth_interceptor_async 105 | ] 106 | if self._opts.tls.enabled: 107 | try: 108 | credentials: ChannelCredentials = _get_ssl_credentials(self._opts.tls) 109 | self._async_channel = grpc.aio.secure_channel( 110 | self._opts.address, 111 | credentials, 112 | options=_get_call_options(self._opts), 113 | interceptors=interceptors_async, 114 | ) 115 | except Exception as e: 116 | raise e 117 | else: 118 | self._async_channel = grpc.aio.insecure_channel( 119 | self._opts.address, 120 | options=_get_call_options(self._opts), 121 | interceptors=interceptors_async, 122 | ) 123 | 124 | self._async_client = kubemq_pb2_grpc.kubemqStub(self._async_channel) 125 | 126 | def ping(self) -> ServerInfo: 127 | response = self._client.Ping(Empty()) 128 | return ServerInfo( 129 | host=response.Host, 130 | version=response.Version, 131 | server_start_time=response.ServerStartTime, 132 | server_up_time_seconds=response.ServerUpTimeSeconds, 133 | ) 134 | 135 | def kubemq_client(self) -> kubemq_pb2_grpc.kubemqStub: 136 | if self._channel_manager: 137 | return self._channel_manager.get_client() 138 | return self._client 139 | 140 | def kubemq_async_client(self) -> kubemq_pb2_grpc.kubemqStub: 141 | return self._async_client 142 | 143 | def is_connected(self) -> bool: 144 | if self._channel_manager: 145 | return self._channel_manager.connection_state.is_accepting_requests() 146 | 147 | with self._is_connected_lock: 148 | return self._is_connected 149 | 150 | def recreate_channel(self) -> kubemq_pb2_grpc.kubemqStub: 151 | """ 152 | Recreates the gRPC channel and client after a connection failure. 153 | 154 | Returns: 155 | kubemq_pb2_grpc.kubemqStub: New client instance 156 | """ 157 | if self._channel_manager: 158 | return self._channel_manager.recreate_channel() 159 | 160 | # This should never be reached with the new architecture 161 | self._logger.error("Channel manager not initialized, cannot recreate channel") 162 | raise ConnectionError( 163 | "Channel manager not initialized, cannot recreate channel" 164 | ) 165 | 166 | async def close_async(self) -> None: 167 | """Close the transport asynchronously.""" 168 | if self._channel_manager: 169 | self._channel_manager.close() 170 | with self._is_connected_lock: 171 | self._is_connected = False 172 | 173 | if self._async_channel is not None: 174 | await self._async_channel.close() 175 | self._async_channel = None 176 | self._async_client = None 177 | 178 | def close(self) -> None: 179 | """Close the transport synchronously.""" 180 | if self._channel_manager: 181 | self._channel_manager.close() 182 | with self._is_connected_lock: 183 | self._is_connected = False 184 | 185 | if self._async_channel is not None: 186 | try: 187 | # Check if we're running in an async context 188 | asyncio.get_running_loop() 189 | # In async context - just nullify and let garbage collection handle cleanup 190 | self._async_channel = None 191 | self._async_client = None 192 | except RuntimeError: 193 | # Not in async context - safe to use run_until_complete 194 | asyncio.get_event_loop().run_until_complete(self._async_channel.close()) 195 | self._async_channel = None 196 | self._async_client = None 197 | 198 | -------------------------------------------------------------------------------- /kubemq/transport/channel_manager.py: -------------------------------------------------------------------------------- 1 | import threading 2 | import logging 3 | import time 4 | import grpc 5 | from kubemq.transport.connection import Connection 6 | from kubemq.transport.interceptors import AuthInterceptors 7 | from kubemq.transport.tls_config import TlsConfig 8 | import kubemq.grpc.kubemq_pb2_grpc as kubemq_pb2_grpc 9 | from kubemq.grpc import Empty 10 | 11 | 12 | def _get_ssl_credentials(tls_config: TlsConfig): 13 | from kubemq.transport.transport import _get_ssl_credentials as get_creds 14 | 15 | return get_creds(tls_config) 16 | 17 | 18 | def _get_call_options(connection: Connection): 19 | from kubemq.transport.transport import _get_call_options as get_opts 20 | 21 | return get_opts(connection) 22 | 23 | 24 | class ConnectionState: 25 | """ 26 | Class for managing the shared connection state across components. 27 | """ 28 | 29 | def __init__(self): 30 | self.lock = threading.Lock() 31 | self.is_connected = True 32 | 33 | def set_connected(self, value: bool): 34 | with self.lock: 35 | old_state = self.is_connected 36 | self.is_connected = value 37 | return old_state != value # Return True if state changed 38 | 39 | def is_accepting_requests(self) -> bool: 40 | with self.lock: 41 | return self.is_connected 42 | 43 | 44 | class ChannelManager: 45 | """ 46 | Centralized manager for gRPC channel creation and reconnection. 47 | 48 | This class coordinates channel access and recreation across multiple components. 49 | """ 50 | 51 | def __init__(self, connection: Connection, logger: logging.Logger): 52 | self._opts = connection.complete() 53 | self._connection = connection 54 | self._channel = None 55 | self._client = None 56 | self._channel_lock = threading.Lock() 57 | self.connection_state = ConnectionState() 58 | self.logger = logger 59 | self._registered_clients = [] 60 | self._initialize_channel() 61 | 62 | def _initialize_channel(self): 63 | """Initialize the gRPC channel and client stub""" 64 | with self._channel_lock: 65 | try: 66 | auth_interceptor = AuthInterceptors(self._opts.auth_token) 67 | interceptors = [auth_interceptor] 68 | credentials = ( 69 | _get_ssl_credentials(self._opts.tls) 70 | if self._opts.tls.enabled 71 | else None 72 | ) 73 | self._channel = ( 74 | grpc.secure_channel( 75 | self._opts.address, 76 | credentials, 77 | options=_get_call_options(self._opts), 78 | ) 79 | if credentials 80 | else grpc.insecure_channel( 81 | self._opts.address, options=_get_call_options(self._opts) 82 | ) 83 | ) 84 | self._channel = grpc.intercept_channel(self._channel, *interceptors) 85 | self._client = kubemq_pb2_grpc.kubemqStub(self._channel) 86 | self._test_connection() 87 | self.connection_state.set_connected(True) 88 | except Exception as ex: 89 | self.logger.error(f"Failed to initialize channel: {str(ex)}") 90 | self.connection_state.set_connected(False) 91 | raise ex 92 | 93 | def _test_connection(self): 94 | """Test the connection to the server""" 95 | try: 96 | self._client.Ping(Empty()) 97 | return True 98 | except Exception as e: 99 | self.logger.error(f"Connection test failed: {str(e)}") 100 | return False 101 | 102 | def register_client(self, client_ref): 103 | """Register a client to be notified of channel updates""" 104 | self._registered_clients.append(client_ref) 105 | 106 | def get_client(self) -> kubemq_pb2_grpc.kubemqStub: 107 | """Get the current gRPC client stub""" 108 | with self._channel_lock: 109 | return self._client 110 | 111 | def is_channel_healthy(self) -> bool: 112 | """Check if the channel is in a healthy state""" 113 | try: 114 | return self._test_connection() 115 | except Exception: 116 | return False 117 | 118 | def recreate_channel(self) -> kubemq_pb2_grpc.kubemqStub: 119 | """ 120 | Recreate the gRPC channel and client after a connection failure 121 | 122 | Returns: 123 | kubemq_pb2_grpc.kubemqStub: New client instance 124 | """ 125 | with self._channel_lock: 126 | self.logger.info("Starting channel recreation process") 127 | self.connection_state.set_connected(False) 128 | 129 | # Check if auto-reconnect is disabled 130 | if self._connection.disable_auto_reconnect: 131 | self.logger.warning( 132 | "Auto-reconnect is disabled, not attempting to recreate channel" 133 | ) 134 | raise ConnectionError("Auto-reconnect is disabled by configuration") 135 | 136 | # Close existing channel if exists 137 | if self._channel is not None: 138 | try: 139 | self._channel.close() 140 | except Exception as e: 141 | self.logger.warning(f"Error closing existing channel: {str(e)}") 142 | self._channel = None 143 | self._client = None 144 | 145 | reconnect_seconds = self._connection.reconnect_interval_seconds 146 | 147 | self.logger.info(f"Waiting {reconnect_seconds} seconds before reconnection") 148 | time.sleep(reconnect_seconds) 149 | 150 | # Recreate channel with existing credentials and options 151 | try: 152 | auth_interceptor = AuthInterceptors(self._opts.auth_token) 153 | interceptors = [auth_interceptor] 154 | credentials = ( 155 | _get_ssl_credentials(self._opts.tls) 156 | if self._opts.tls.enabled 157 | else None 158 | ) 159 | 160 | self._channel = ( 161 | grpc.secure_channel( 162 | self._opts.address, 163 | credentials, 164 | options=_get_call_options(self._opts), 165 | ) 166 | if credentials 167 | else grpc.insecure_channel( 168 | self._opts.address, options=_get_call_options(self._opts) 169 | ) 170 | ) 171 | self._channel = grpc.intercept_channel(self._channel, *interceptors) 172 | self._client = kubemq_pb2_grpc.kubemqStub(self._channel) 173 | 174 | # Test the connection 175 | if self._test_connection(): 176 | self.logger.info( 177 | "Successfully recreated gRPC channel and verified connection" 178 | ) 179 | self.connection_state.set_connected(True) 180 | else: 181 | self.logger.warning("Channel recreated but connection test failed") 182 | # We'll keep the connected state as False 183 | except Exception as e: 184 | self.logger.error(f"Failed to recreate channel: {str(e)}") 185 | raise e 186 | 187 | return self._client 188 | 189 | def close(self): 190 | """Close the channel and clean up resources""" 191 | with self._channel_lock: 192 | if self._channel is not None: 193 | try: 194 | self._channel.close() 195 | except Exception as e: 196 | self.logger.warning(f"Error while closing channel: {str(e)}") 197 | self._channel = None 198 | self._client = None 199 | self.connection_state.set_connected(False) 200 | -------------------------------------------------------------------------------- /kubemq/common/requests.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | import grpc 3 | from kubemq.common.exceptions import * 4 | from kubemq.common.helpers import decode_grpc_error 5 | from kubemq.grpc import Request 6 | from kubemq.common.channel_stats import * 7 | 8 | requests_channel = "kubemq.cluster.internal.requests" 9 | 10 | 11 | def create_channel_request( 12 | transport, client_id, channel_name, channel_type 13 | ) -> [bool, None]: 14 | """ 15 | 16 | This method creates a request to create a channel in the Kubemq server. 17 | 18 | Parameters: 19 | - transport: The transport object that handles the communication with the Kubemq server. 20 | - client_id: The ID of the client. 21 | - channel_name: The name of the channel to create. 22 | - channel_type: The type of the channel. 23 | 24 | Returns: 25 | - bool: True if the channel creation was successful. 26 | - None: If an error occurred during the channel creation. 27 | 28 | Raises: 29 | - CreateChannelError: If the channel creation request was executed but with an error response from the server. 30 | - GRPCError: If there was an error with the gRPC communication with the Kubemq server. 31 | 32 | """ 33 | try: 34 | request = Request( 35 | RequestID=str(uuid.uuid4()), 36 | RequestTypeData=2, 37 | Metadata="create-channel", 38 | Channel=requests_channel, 39 | ClientID=client_id, 40 | Tags={ 41 | "channel_type": channel_type, 42 | "channel": channel_name, 43 | "client_id": client_id, 44 | }, 45 | Timeout=10 * 1000, 46 | ) 47 | response = transport.kubemq_client().SendRequest(request) 48 | if response: 49 | if response.Executed: 50 | return True 51 | else: 52 | raise CreateChannelError(response.Error) 53 | except grpc.RpcError as e: 54 | raise GRPCError(decode_grpc_error(e)) 55 | 56 | 57 | def delete_channel_request( 58 | transport, client_id, channel_name, channel_type 59 | ) -> [bool, None]: 60 | """ 61 | 62 | This method is used to send a delete channel request to the Kubemq server. It deletes a channel with the specified name and type. 63 | 64 | Parameters: 65 | - transport: The transport object used for communication with the Kubemq server. 66 | - client_id: The client ID associated with the request. 67 | - channel_name: The name of the channel to be deleted. 68 | - channel_type: The type of the channel to be deleted. 69 | 70 | Returns: 71 | - If the delete channel request is executed successfully, it returns True. 72 | - If there is an error during the execution of the delete channel request, it raises a DeleteChannelError with the corresponding error message. 73 | 74 | Raises: 75 | - GRPCError: If there is a GRPC error during the process. 76 | 77 | """ 78 | try: 79 | request = Request( 80 | RequestID=str(uuid.uuid4()), 81 | RequestTypeData=2, 82 | Metadata="delete-channel", 83 | Channel=requests_channel, 84 | ClientID=client_id, 85 | Tags={ 86 | "channel_type": channel_type, 87 | "channel": channel_name, 88 | "client_id": client_id, 89 | }, 90 | Timeout=10 * 1000, 91 | ) 92 | response = transport.kubemq_client().SendRequest(request) 93 | if response: 94 | if response.Executed: 95 | return True 96 | else: 97 | raise DeleteChannelError(response.Error) 98 | except grpc.RpcError as e: 99 | raise GRPCError(decode_grpc_error(e)) 100 | 101 | 102 | def list_queues_channels(transport, client_id, channel_search) -> List[QueuesChannel]: 103 | """ 104 | 105 | List Queues Channels 106 | 107 | This method is used to list the queues channels from the Kubemq server. It takes the following parameters: 108 | 109 | Parameters: 110 | - `transport` : The transport object used to communicate with the Kubemq server. 111 | - `client_id` : The client ID of the client making the request. 112 | - `channel_search` : The search query string to filter the channels. (optional) 113 | 114 | Returns: 115 | - `List[QueuesChannel]` : A list of QueuesChannel objects representing the queues channels. 116 | 117 | Raises: 118 | - `ListChannelsError` : If the client fails to list channels, this error will be raised. 119 | - `GRPCError` : If there is an error while communicating with the Kubemq server, this error will be raised. 120 | 121 | Example Usage: 122 | 123 | ```python 124 | transport = Transport() 125 | client_id = "my_client_id" 126 | 127 | # List all queues channels 128 | queues_channels = list_queues_channels(transport, client_id) 129 | 130 | # List queues channels with a specific search query 131 | search_query = "search query" 132 | queues_channels = list_queues_channels(transport, client_id, search_query) 133 | ``` 134 | """ 135 | try: 136 | request = Request( 137 | RequestID=str(uuid.uuid4()), 138 | RequestTypeData=2, 139 | Metadata="list-channels", 140 | Channel=requests_channel, 141 | ClientID=client_id, 142 | Tags={"channel_type": "queues", "channel_search": channel_search}, 143 | Timeout=10 * 1000, 144 | ) 145 | response = transport.kubemq_client().SendRequest(request) 146 | if response: 147 | if response.Executed: 148 | return decode_queues_channel_list(response.Body) 149 | else: 150 | self.logger.error( 151 | f"Client failed to list {channel_type} channels, error: {response.Error}" 152 | ) 153 | raise ListChannelsError(response.Error) 154 | except grpc.RpcError as e: 155 | raise GRPCError(decode_grpc_error(e)) 156 | 157 | 158 | def list_pubsub_channels( 159 | transport, client_id, channel_type: str, channel_search 160 | ) -> List[PubSubChannel]: 161 | """ 162 | 163 | This method is used to retrieve a list of PubSub channels based on the specified parameters. 164 | 165 | Parameters: 166 | - transport: The transport object used for communication. 167 | - client_id: The ID of the client making the request. 168 | - channel_type (str): The type of PubSub channel to filter the list by. 169 | - channel_search: A search parameter to further filter the list of channels. 170 | 171 | Returns: 172 | - List[PubSubChannel]: A list of PubSubChannel objects representing the channels matching the specified parameters. 173 | 174 | Raises: 175 | - ListChannelsError: If the request fails or is not executed successfully. 176 | - GRPCError: If a gRPC error occurs during the execution of the request. 177 | 178 | """ 179 | try: 180 | request = Request( 181 | RequestID=str(uuid.uuid4()), 182 | RequestTypeData=2, 183 | Metadata="list-channels", 184 | Channel=requests_channel, 185 | ClientID=client_id, 186 | Tags={"channel_type": channel_type, "channel_search": channel_search}, 187 | Timeout=10 * 1000, 188 | ) 189 | response = transport.kubemq_client().SendRequest(request) 190 | if response: 191 | if response.Executed: 192 | return decode_pub_sub_channel_list(response.Body) 193 | else: 194 | self.logger.error( 195 | f"Client failed to list {channel_type} channels, error: {response.Error}" 196 | ) 197 | raise ListChannelsError(response.Error) 198 | except grpc.RpcError as e: 199 | raise GRPCError(decode_grpc_error(e)) 200 | 201 | 202 | def list_cq_channels( 203 | transport, client_id, channel_type: str, channel_search 204 | ) -> List[CQChannel]: 205 | """ 206 | 207 | Method: list_cq_channels 208 | 209 | Parameters: 210 | - transport: The transport object used for communication with the Kubemq server. 211 | - client_id: The ID of the client making the request. 212 | - channel_type (str): The type of channel to list. 213 | - channel_search: The search keyword for filtering the channels. 214 | 215 | Returns: 216 | - List[CQChannel]: A list of CQChannel objects representing the channels that match the search criteria. 217 | 218 | Raises: 219 | - ListChannelsError: If the client fails to list the channels. 220 | - GRPCError: If a gRPC error occurs during the request. 221 | 222 | """ 223 | try: 224 | request = Request( 225 | RequestID=str(uuid.uuid4()), 226 | RequestTypeData=2, 227 | Metadata="list-channels", 228 | Channel=requests_channel, 229 | ClientID=client_id, 230 | Tags={"channel_type": channel_type, "channel_search": channel_search}, 231 | Timeout=10 * 1000, 232 | ) 233 | response = transport.kubemq_client().SendRequest(request) 234 | if response: 235 | if response.Executed: 236 | return decode_cq_channel_list(response.Body) 237 | else: 238 | self.logger.error( 239 | f"Client failed to list {channel_type} channels, error: {response.Error}" 240 | ) 241 | raise ListChannelsError(response.Error) 242 | except grpc.RpcError as e: 243 | raise GRPCError(decode_grpc_error(e)) 244 | -------------------------------------------------------------------------------- /protos/kubemq/grpc/kubemq.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | option csharp_namespace = "KubeMQ.Client"; // Namespace for c# generated classes; 3 | package kubemq; 4 | service kubemq { 5 | rpc SendEvent (Event) returns (Result) {} 6 | rpc SendEventsStream (stream Event) returns (stream Result) {} 7 | rpc SubscribeToEvents (Subscribe) returns (stream EventReceive) {} 8 | rpc SubscribeToRequests (Subscribe) returns (stream Request) {} 9 | rpc SendRequest (Request) returns (Response) {} 10 | rpc SendResponse(Response) returns (Empty) {} 11 | rpc SendQueueMessage(QueueMessage) returns (SendQueueMessageResult) {} 12 | rpc SendQueueMessagesBatch(QueueMessagesBatchRequest) returns (QueueMessagesBatchResponse) {} 13 | rpc ReceiveQueueMessages(ReceiveQueueMessagesRequest) returns (ReceiveQueueMessagesResponse) {} 14 | rpc StreamQueueMessage(stream StreamQueueMessagesRequest) returns (stream StreamQueueMessagesResponse) {} 15 | rpc AckAllQueueMessages(AckAllQueueMessagesRequest) returns (AckAllQueueMessagesResponse) {} 16 | rpc Ping (Empty) returns (PingResult) {} 17 | rpc QueuesDownstream(stream QueuesDownstreamRequest) returns (stream QueuesDownstreamResponse) {} 18 | rpc QueuesUpstream(stream QueuesUpstreamRequest) returns (stream QueuesUpstreamResponse) {} 19 | rpc QueuesInfo(QueuesInfoRequest) returns (QueuesInfoResponse) {} 20 | } 21 | message PingResult { 22 | string Host =1; 23 | string Version =2; 24 | int64 ServerStartTime =3; 25 | int64 ServerUpTimeSeconds =4; 26 | } 27 | message Empty {} 28 | 29 | message Result { 30 | string EventID =1; 31 | bool Sent =2; 32 | string Error =3; 33 | } 34 | 35 | message Event { 36 | string EventID =1; 37 | string ClientID =2; 38 | string Channel =3; 39 | string Metadata =4; 40 | bytes Body =5; 41 | bool Store =6; 42 | map Tags =7; 43 | } 44 | 45 | message EventReceive { 46 | string EventID =1; 47 | string Channel =2; 48 | string Metadata =3; 49 | bytes Body =4; 50 | int64 Timestamp =5; 51 | uint64 Sequence =6; 52 | map Tags =7; 53 | } 54 | 55 | message Subscribe { 56 | enum SubscribeType { 57 | SubscribeTypeUndefined = 0; 58 | Events =1; 59 | EventsStore =2; 60 | Commands =3; 61 | Queries =4; 62 | 63 | } 64 | SubscribeType SubscribeTypeData =1; 65 | string ClientID =2; 66 | string Channel =3; 67 | string Group =4; 68 | enum EventsStoreType { 69 | EventsStoreTypeUndefined =0; 70 | StartNewOnly =1; 71 | StartFromFirst =2; 72 | StartFromLast =3; 73 | StartAtSequence =4; 74 | StartAtTime =5; 75 | StartAtTimeDelta =6; 76 | } 77 | EventsStoreType EventsStoreTypeData =5; 78 | int64 EventsStoreTypeValue =6; 79 | } 80 | 81 | message Request { 82 | string RequestID =1; 83 | enum RequestType { 84 | RequestTypeUnknown =0; 85 | Command =1; 86 | Query =2; 87 | } 88 | RequestType RequestTypeData =2; 89 | string ClientID =3; 90 | string Channel =4; 91 | string Metadata =5; 92 | bytes Body =6; 93 | string ReplyChannel =7; 94 | int32 Timeout =8; 95 | string CacheKey =9; 96 | int32 CacheTTL =10; 97 | bytes Span =11; 98 | map Tags =12; 99 | } 100 | message Response { 101 | string ClientID =1; 102 | string RequestID =2; 103 | string ReplyChannel =3; 104 | string Metadata =4; 105 | bytes Body =5; 106 | bool CacheHit =6; 107 | int64 Timestamp =7; 108 | bool Executed =8; 109 | string Error =9; 110 | bytes Span =10; 111 | map Tags =11; 112 | } 113 | 114 | message QueueMessage { 115 | string MessageID =1; 116 | string ClientID =2; 117 | string Channel =3; 118 | string Metadata =4; 119 | bytes Body =5; 120 | map Tags =6; 121 | QueueMessageAttributes Attributes =7; 122 | QueueMessagePolicy Policy =8; 123 | string Topic =9; 124 | int32 Partition =10; 125 | string PartitionKey =11; 126 | } 127 | message QueueMessagesBatchRequest { 128 | string BatchID =1; 129 | repeated QueueMessage Messages =2; 130 | } 131 | message QueueMessagesBatchResponse { 132 | string BatchID =1; 133 | repeated SendQueueMessageResult Results =2; 134 | bool HaveErrors =3; 135 | } 136 | 137 | message QueueMessageAttributes { 138 | int64 Timestamp =1; 139 | uint64 Sequence =2; 140 | string MD5OfBody =3; 141 | int32 ReceiveCount =4; 142 | bool ReRouted =5; 143 | string ReRoutedFromQueue =6; 144 | int64 ExpirationAt =7; 145 | int64 DelayedTo =8; 146 | } 147 | 148 | message QueueMessagePolicy { 149 | int32 ExpirationSeconds =1; 150 | int32 DelaySeconds =2; 151 | int32 MaxReceiveCount =3; 152 | string MaxReceiveQueue =4; 153 | 154 | } 155 | message SendQueueMessageResult { 156 | string MessageID =1; 157 | int64 SentAt =2; 158 | int64 ExpirationAt =3; 159 | int64 DelayedTo =4; 160 | bool IsError =5; 161 | string Error =6; 162 | string RefChannel =7; 163 | string RefTopic =8; 164 | int32 RefPartition =9; 165 | string RefHash =10; 166 | } 167 | 168 | message ReceiveQueueMessagesRequest { 169 | string RequestID =1; 170 | string ClientID =2; 171 | string Channel =3; 172 | int32 MaxNumberOfMessages =4; 173 | int32 WaitTimeSeconds =5; 174 | bool IsPeak =6; 175 | } 176 | message ReceiveQueueMessagesResponse { 177 | string RequestID =1; 178 | repeated QueueMessage Messages =2; 179 | int32 MessagesReceived =3; 180 | int32 MessagesExpired =4; 181 | bool IsPeak =5; 182 | bool IsError =6; 183 | string Error =7; 184 | } 185 | 186 | 187 | message AckAllQueueMessagesRequest { 188 | string RequestID =1; 189 | string ClientID =2; 190 | string Channel =3; 191 | int32 WaitTimeSeconds =4; 192 | } 193 | message AckAllQueueMessagesResponse { 194 | string RequestID =1; 195 | uint64 AffectedMessages =2; 196 | bool IsError =3; 197 | string Error =4; 198 | } 199 | 200 | 201 | enum StreamRequestType { 202 | StreamRequestTypeUnknown =0; 203 | ReceiveMessage =1; 204 | AckMessage =2; 205 | RejectMessage =3; 206 | ModifyVisibility =4; 207 | ResendMessage =5; 208 | SendModifiedMessage =6; 209 | } 210 | message StreamQueueMessagesRequest { 211 | string RequestID =1; 212 | string ClientID =2; 213 | StreamRequestType StreamRequestTypeData =3; 214 | string Channel =4; 215 | int32 VisibilitySeconds =5; 216 | int32 WaitTimeSeconds =6; 217 | uint64 RefSequence =7; 218 | QueueMessage ModifiedMessage =8; 219 | } 220 | 221 | 222 | message StreamQueueMessagesResponse { 223 | string RequestID =1; 224 | StreamRequestType StreamRequestTypeData =2; 225 | QueueMessage Message =3; 226 | bool IsError =4; 227 | string Error =5; 228 | } 229 | 230 | 231 | message QueuesUpstreamRequest { 232 | string RequestID =1; 233 | repeated QueueMessage Messages =2; 234 | } 235 | message QueuesUpstreamResponse { 236 | string RefRequestID =1; 237 | repeated SendQueueMessageResult Results =2; 238 | bool IsError =3; 239 | string Error =4; 240 | } 241 | 242 | 243 | enum QueuesDownstreamRequestType { 244 | PollRequestTypeUnknown =0; 245 | Get =1; 246 | AckAll =2; 247 | AckRange =3; 248 | NAckAll =4; 249 | NAckRange =5; 250 | ReQueueAll =6; 251 | ReQueueRange =7; 252 | ActiveOffsets =8; 253 | TransactionStatus =9; 254 | CloseByClient =10; 255 | CloseByServer =11; 256 | } 257 | 258 | message QueuesDownstreamRequest { 259 | string RequestID =1; 260 | string ClientID =2; 261 | QueuesDownstreamRequestType RequestTypeData =3; 262 | string Channel =4; 263 | int32 MaxItems =5; 264 | int32 WaitTimeout =6; 265 | bool AutoAck =7; 266 | string ReQueueChannel =8; 267 | repeated int64 SequenceRange =9; 268 | string RefTransactionId =10; 269 | map Metadata =12; 270 | } 271 | 272 | message QueuesDownstreamResponse { 273 | string TransactionId =1; 274 | string RefRequestId =2; 275 | QueuesDownstreamRequestType RequestTypeData =3; 276 | repeated QueueMessage Messages = 4; 277 | repeated int64 ActiveOffsets = 5; 278 | bool IsError =6; 279 | string Error =7; 280 | bool TransactionComplete =8; 281 | map Metadata =9; 282 | } 283 | 284 | message QueueInfo { 285 | string Name =1; 286 | int64 Messages =2; 287 | int64 Bytes =3; 288 | int64 FirstSequence =4; 289 | int64 LastSequence =5; 290 | int64 Sent =6; 291 | int64 Delivered =7; 292 | int64 Waiting =8; 293 | int64 Subscribers =9; 294 | } 295 | message QueuesInfo { 296 | int32 TotalQueue =1; 297 | int64 Sent =2; 298 | int64 Delivered =3; 299 | int64 Waiting =4; 300 | repeated QueueInfo Queues =5; 301 | } 302 | message QueuesInfoRequest { 303 | string RequestID =1; 304 | string QueueName =2; 305 | } 306 | 307 | message QueuesInfoResponse { 308 | string RefRequestID =1; 309 | QueuesInfo Info =2; 310 | 311 | } 312 | -------------------------------------------------------------------------------- /kubemq/queues/queues_message.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from pydantic import BaseModel, Field, field_validator, model_validator 3 | from typing import Dict, Optional, ClassVar 4 | import uuid 5 | from datetime import datetime 6 | from kubemq.grpc import QueueMessage as pbQueueMessage 7 | from kubemq.grpc import QueuesUpstreamRequest as pbQueuesUpstreamRequest 8 | 9 | 10 | class QueueMessage(BaseModel): 11 | """ 12 | A class representing a message in a KubeMQ queue. 13 | 14 | This class encapsulates all the properties of a message that can be sent to a KubeMQ queue. 15 | It provides methods for validation, encoding to protobuf format, and creating messages 16 | from protobuf format. 17 | 18 | Attributes: 19 | id: The unique identifier for the message. If not provided, a UUID will be generated. 20 | channel: The channel (queue name) where the message will be sent. Required. 21 | metadata: Optional metadata associated with the message. 22 | body: The binary payload of the message. 23 | tags: Key-value pairs for additional message metadata. 24 | delay_in_seconds: Time in seconds to delay the message before it becomes available. 25 | expiration_in_seconds: Time in seconds after which the message expires. 26 | attempts_before_dead_letter_queue: Maximum number of receive attempts before moving to DLQ. 27 | dead_letter_queue: The queue where messages are moved after max receive attempts. 28 | 29 | Examples: 30 | ```python 31 | # Create a simple message 32 | message = QueueMessage( 33 | channel="my-queue", 34 | body=b"Hello, World!" 35 | ) 36 | 37 | # Create a message with metadata and tags 38 | message = QueueMessage( 39 | channel="my-queue", 40 | metadata="Message metadata", 41 | body=b"Hello, World!", 42 | tags={"key1": "value1", "key2": "value2"} 43 | ) 44 | 45 | # Create a message with delay and expiration 46 | message = QueueMessage( 47 | channel="my-queue", 48 | body=b"Hello, World!", 49 | delay_in_seconds=60, # Delay for 1 minute 50 | expiration_in_seconds=3600 # Expire after 1 hour 51 | ) 52 | 53 | # Create a message with dead letter queue configuration 54 | message = QueueMessage( 55 | channel="my-queue", 56 | body=b"Hello, World!", 57 | attempts_before_dead_letter_queue=3, 58 | dead_letter_queue="my-dlq" 59 | ) 60 | ``` 61 | """ 62 | 63 | # Pydantic configuration 64 | class Config: 65 | arbitrary_types_allowed = True 66 | frozen = True # Make instances immutable 67 | 68 | # Class attributes 69 | MAX_DELAY_SECONDS: ClassVar[int] = 43200 # 12 hours 70 | MAX_EXPIRATION_SECONDS: ClassVar[int] = 43200 # 12 hours 71 | 72 | # Instance attributes 73 | id: Optional[str] = Field( 74 | default=None, description="The unique identifier for the message" 75 | ) 76 | channel: str = Field(..., description="The channel (queue name) where the message will be sent") 77 | metadata: Optional[str] = Field( 78 | default=None, description="The metadata associated with the message" 79 | ) 80 | body: bytes = Field(default=b"", description="The binary payload of the message") 81 | tags: Dict[str, str] = Field( 82 | default_factory=dict, description="Key-value pairs for additional message metadata" 83 | ) 84 | delay_in_seconds: int = Field( 85 | default=0, 86 | ge=0, 87 | description="Time in seconds to delay the message before it becomes available", 88 | ) 89 | expiration_in_seconds: int = Field( 90 | default=0, 91 | ge=0, 92 | description="Time in seconds after which the message expires" 93 | ) 94 | attempts_before_dead_letter_queue: int = Field( 95 | default=0, 96 | ge=0, 97 | description="Maximum number of receive attempts before moving to DLQ", 98 | ) 99 | dead_letter_queue: str = Field( 100 | default="", 101 | description="The queue where messages are moved after max receive attempts", 102 | ) 103 | 104 | # Validators 105 | @field_validator("channel") 106 | def channel_must_not_be_empty(cls, v: str) -> str: 107 | """ 108 | Validate that the channel is not empty. 109 | 110 | Args: 111 | v: The channel value to validate 112 | 113 | Returns: 114 | The validated channel value 115 | 116 | Raises: 117 | ValueError: If the channel is empty 118 | """ 119 | if not v: 120 | raise ValueError("Channel cannot be empty. Please provide a valid queue name.") 121 | return v 122 | 123 | @field_validator("delay_in_seconds") 124 | def validate_delay(cls, v: int) -> int: 125 | """ 126 | Validate that the delay is within acceptable limits. 127 | 128 | Args: 129 | v: The delay value to validate 130 | 131 | Returns: 132 | The validated delay value 133 | 134 | Raises: 135 | ValueError: If the delay exceeds the maximum allowed value 136 | """ 137 | if v > cls.MAX_DELAY_SECONDS: 138 | raise ValueError(f"Delay cannot exceed {cls.MAX_DELAY_SECONDS} seconds (12 hours)") 139 | return v 140 | 141 | @field_validator("expiration_in_seconds") 142 | def validate_expiration(cls, v: int) -> int: 143 | """ 144 | Validate that the expiration is within acceptable limits. 145 | 146 | Args: 147 | v: The expiration value to validate 148 | 149 | Returns: 150 | The validated expiration value 151 | 152 | Raises: 153 | ValueError: If the expiration exceeds the maximum allowed value 154 | """ 155 | if v > cls.MAX_EXPIRATION_SECONDS: 156 | raise ValueError(f"Expiration cannot exceed {cls.MAX_EXPIRATION_SECONDS} seconds (12 hours)") 157 | return v 158 | 159 | @model_validator(mode="after") 160 | def check_message_content(self) -> QueueMessage: 161 | """ 162 | Validate that the message has at least one of metadata, body, or tags. 163 | 164 | Returns: 165 | The validated message 166 | 167 | Raises: 168 | ValueError: If the message has no content 169 | """ 170 | if not self.metadata and not self.body and not self.tags: 171 | raise ValueError( 172 | "Message must have at least one of the following: metadata, body, or tags. " 173 | "Empty messages are not allowed." 174 | ) 175 | 176 | # Validate dead letter queue configuration 177 | if self.attempts_before_dead_letter_queue > 0 and not self.dead_letter_queue: 178 | raise ValueError( 179 | "When specifying attempts_before_dead_letter_queue, " 180 | "you must also provide a dead_letter_queue." 181 | ) 182 | 183 | return self 184 | 185 | # Encoding methods 186 | def encode(self, client_id: str) -> pbQueuesUpstreamRequest: 187 | """ 188 | Encode the message to a QueuesUpstreamRequest protobuf object. 189 | 190 | This method is used when sending a message to the KubeMQ server. 191 | 192 | Args: 193 | client_id: The client ID to use for the message 194 | 195 | Returns: 196 | A QueuesUpstreamRequest protobuf object containing the encoded message 197 | """ 198 | pb_queue_stream = pbQueuesUpstreamRequest() 199 | pb_queue_stream.RequestID = str(uuid.uuid4()) 200 | pb_message = self.encode_message(client_id) 201 | pb_queue_stream.Messages.append(pb_message) 202 | return pb_queue_stream 203 | 204 | def encode_message(self, client_id: str) -> pbQueueMessage: 205 | """ 206 | Encode the message to a QueueMessage protobuf object. 207 | 208 | Args: 209 | client_id: The client ID to use for the message 210 | 211 | Returns: 212 | A QueueMessage protobuf object containing the encoded message 213 | """ 214 | pb_queue = pbQueueMessage() 215 | pb_queue.MessageID = self.id or str(uuid.uuid4()) 216 | pb_queue.ClientID = client_id 217 | pb_queue.Channel = self.channel 218 | pb_queue.Metadata = self.metadata or "" 219 | pb_queue.Body = self.body 220 | pb_queue.Tags.update(self.tags) 221 | pb_queue.Policy.DelaySeconds = self.delay_in_seconds 222 | pb_queue.Policy.ExpirationSeconds = self.expiration_in_seconds 223 | pb_queue.Policy.MaxReceiveCount = self.attempts_before_dead_letter_queue 224 | pb_queue.Policy.MaxReceiveQueue = self.dead_letter_queue 225 | return pb_queue 226 | 227 | # Decoding methods 228 | @classmethod 229 | def decode(cls, pb_message: pbQueueMessage) -> QueueMessage: 230 | """ 231 | Create a QueueMessage from a protobuf QueueMessage. 232 | 233 | Args: 234 | pb_message: The protobuf QueueMessage to decode 235 | 236 | Returns: 237 | A new QueueMessage instance 238 | """ 239 | tags = {key: pb_message.Tags[key] for key in pb_message.Tags} 240 | 241 | return cls( 242 | id=pb_message.MessageID, 243 | channel=pb_message.Channel, 244 | metadata=pb_message.Metadata if pb_message.Metadata else None, 245 | body=pb_message.Body, 246 | tags=tags, 247 | delay_in_seconds=pb_message.Policy.DelaySeconds if pb_message.Policy else 0, 248 | expiration_in_seconds=pb_message.Policy.ExpirationSeconds if pb_message.Policy else 0, 249 | attempts_before_dead_letter_queue=pb_message.Policy.MaxReceiveCount if pb_message.Policy else 0, 250 | dead_letter_queue=pb_message.Policy.MaxReceiveQueue if pb_message.Policy else "", 251 | ) 252 | 253 | # Utility methods 254 | def with_updates(self, **kwargs) -> QueueMessage: 255 | """ 256 | Create a new QueueMessage with updated values. 257 | 258 | Since QueueMessage instances are immutable, this method creates a new 259 | instance with the specified updates. 260 | 261 | Args: 262 | **kwargs: The fields to update and their new values 263 | 264 | Returns: 265 | A new QueueMessage instance with the updated values 266 | """ 267 | data = self.model_dump() 268 | data.update(kwargs) 269 | return self.__class__(**data) 270 | 271 | def __str__(self) -> str: 272 | """ 273 | Get a string representation of the message. 274 | 275 | Returns: 276 | A string representation of the message 277 | """ 278 | body_preview = self.body[:20].decode('utf-8', errors='replace') if self.body else "" 279 | if len(self.body) > 20: 280 | body_preview += "..." 281 | 282 | return ( 283 | f"QueueMessage(id={self.id}, channel={self.channel}, " 284 | f"metadata={self.metadata}, body_preview='{body_preview}', " 285 | f"tags={self.tags}, delay={self.delay_in_seconds}s, " 286 | f"expiration={self.expiration_in_seconds}s)" 287 | ) 288 | 289 | def __repr__(self) -> str: 290 | """ 291 | Get a detailed representation of the message. 292 | 293 | Returns: 294 | A detailed representation of the message 295 | """ 296 | return ( 297 | f"QueueMessage(id={self.id!r}, channel={self.channel!r}, " 298 | f"metadata={self.metadata!r}, body={self.body!r}, tags={self.tags!r}, " 299 | f"delay_in_seconds={self.delay_in_seconds}, " 300 | f"expiration_in_seconds={self.expiration_in_seconds}, " 301 | f"attempts_before_dead_letter_queue={self.attempts_before_dead_letter_queue}, " 302 | f"dead_letter_queue={self.dead_letter_queue!r})" 303 | ) 304 | -------------------------------------------------------------------------------- /kubemq/common/channel_stats.py: -------------------------------------------------------------------------------- 1 | import json 2 | from typing import List 3 | 4 | 5 | class QueuesStats: 6 | """ 7 | A class that represents statistics for a queue. 8 | 9 | Attributes: 10 | messages (int): The number of messages in the queue. 11 | volume (int): The total volume of the messages in the queue. 12 | waiting (int): The number of messages waiting in the queue. 13 | expired (int): The number of messages that have expired. 14 | delayed (int): The number of delayed messages. 15 | 16 | Methods: 17 | __init__(messages: int, volume: int, waiting: int, expired: int, delayed: int, **kwargs) 18 | Initializes the QueueStats object with the provided statistics. 19 | 20 | __repr__() 21 | Returns a string representation of the QueueStats object. 22 | """ 23 | 24 | def __init__( 25 | self, 26 | messages: int, 27 | volume: int, 28 | waiting: int, 29 | expired: int, 30 | delayed: int, 31 | **kwargs, 32 | ): 33 | self.messages = messages 34 | self.volume = volume 35 | self.waiting = waiting 36 | self.expired = expired 37 | self.delayed = delayed 38 | 39 | def __repr__(self): 40 | return f"Stats: messages={self.messages}, volume={self.volume}, waiting={self.waiting}, expired={self.expired}, delayed={self.delayed}" 41 | 42 | 43 | class QueuesChannel: 44 | """Represents a channel in a queueing system. 45 | 46 | Args: 47 | name (str): The name of the channel. 48 | type (str): The type of the channel. 49 | last_activity (int): The timestamp of the last activity on the channel. 50 | is_active (bool): Indicates whether the channel is currently active or not. 51 | incoming (QueuesStats): The statistics of incoming messages on the channel. 52 | outgoing (QueuesStats): The statistics of outgoing messages on the channel. 53 | **kwargs: Additional keyword arguments can be provided. 54 | 55 | Attributes: 56 | name (str): The name of the channel. 57 | type (str): The type of the channel. 58 | last_activity (int): The timestamp of the last activity on the channel. 59 | is_active (bool): Indicates whether the channel is currently active or not. 60 | incoming (QueuesStats): The statistics of incoming messages on the channel. 61 | outgoing (QueuesStats): The statistics of outgoing messages on the channel. 62 | 63 | Returns: 64 | str: A string representation of the QueuesChannel object. 65 | 66 | Example: 67 | channel = QueuesChannel("channel1", "type1", 1622014799, True, incoming_stats, outgoing_stats) 68 | print(channel) 69 | """ 70 | 71 | def __init__( 72 | self, 73 | name: str, 74 | type: str, 75 | last_activity: int, 76 | is_active: bool, 77 | incoming: QueuesStats, 78 | outgoing: QueuesStats, 79 | **kwargs, 80 | ): 81 | self.name = name 82 | self.type = type 83 | self.last_activity = last_activity 84 | self.is_active = is_active 85 | self.incoming = incoming 86 | self.outgoing = outgoing 87 | 88 | def __repr__(self): 89 | return f"Channel: name={self.name}, type={self.type}, last_activity={self.last_activity}, is_active={self.is_active}, incoming={self.incoming}, outgoing={self.outgoing}" 90 | 91 | 92 | class PubSubStats: 93 | """ 94 | Initialize the PubSubStats object with the number of messages and volume. 95 | 96 | Args: 97 | messages (int): The number of messages. 98 | volume (int): The volume of the messages. 99 | 100 | Returns: 101 | None 102 | """ 103 | 104 | def __init__(self, messages: int, volume: int, **kwargs): 105 | self.messages = messages 106 | self.volume = volume 107 | 108 | def __repr__(self): 109 | return f"Stats: messages={self.messages}, volume={self.volume}" 110 | 111 | 112 | class PubSubChannel: 113 | """ 114 | The PubSubChannel class represents a communication channel used in a publish-subscribe system. It stores information about the channel's name, type, last activity, active status, incoming 115 | * statistics, and outgoing statistics. 116 | 117 | Attributes: 118 | name (str): The name of the channel. 119 | type (str): The type of the channel. 120 | last_activity (int): The timestamp of the last activity on the channel. 121 | is_active (bool): Indicates whether the channel is currently active. 122 | incoming (PubSubStats): The statistics related to incoming messages on the channel. 123 | outgoing (PubSubStats): The statistics related to outgoing messages on the channel. 124 | 125 | Methods: 126 | __repr__(): Returns a string representation of the PubSubChannel object. 127 | 128 | Note: The `PubSubStats` class is not defined here, but it is assumed to be a separate class that represents statistics related to message traffic on the channel. 129 | """ 130 | 131 | def __init__( 132 | self, 133 | name: str, 134 | type: str, 135 | last_activity: int, 136 | is_active: bool, 137 | incoming: PubSubStats, 138 | outgoing: PubSubStats, 139 | **kwargs, 140 | ): 141 | self.name = name 142 | self.type = type 143 | self.last_activity = last_activity 144 | self.is_active = is_active 145 | self.incoming = incoming 146 | self.outgoing = outgoing 147 | 148 | def __repr__(self): 149 | return f"Channel: name={self.name}, type={self.type}, last_activity={self.last_activity}, is_active={self.is_active}, incoming={self.incoming}, outgoing={self.outgoing}" 150 | 151 | 152 | class CQStats: 153 | """ 154 | Class representing statistics for a conversation queue. 155 | 156 | Attributes: 157 | messages (int): The number of messages in the queue. 158 | volume (int): The volume of the queue. 159 | responses (int): The number of responses in the queue. 160 | 161 | Methods: 162 | __init__(messages: int, volume: int, responses: int, **kwargs): 163 | Initializes a new instance of the CQStats class. 164 | __repr__(): 165 | Returns a string representation of the CQStats object. 166 | 167 | """ 168 | 169 | def __init__(self, messages: int, volume: int, responses: int, **kwargs): 170 | self.messages = messages 171 | self.volume = volume 172 | self.responses = responses 173 | 174 | def __repr__(self): 175 | return f"Stats: messages={self.messages}, volume={self.volume}, responses={self.responses}" 176 | 177 | 178 | class CQChannel: 179 | """ 180 | 181 | The CQChannel class represents a channel in a communication system. It stores information about the channel's name, type, last activity timestamp, active status, incoming statistics 182 | *, and outgoing statistics. 183 | 184 | Attributes: 185 | - name: A string representing the name of the channel. 186 | - type: A string representing the type of the channel. 187 | - last_activity: An integer representing the timestamp of the last activity on the channel. 188 | - is_active: A boolean indicating whether the channel is active or not. 189 | - incoming: An instance of the CQStats class representing the incoming statistics of the channel. 190 | - outgoing: An instance of the CQStats class representing the outgoing statistics of the channel. 191 | 192 | Methods: 193 | - __init__(self, name: str, type: str, last_activity: int, is_active: bool, incoming: CQStats, outgoing: CQStats, **kwargs): 194 | Initializes a new instance of the CQChannel class with the specified parameters. 195 | 196 | - __repr__(self): 197 | Returns a string representation of the CQChannel object. 198 | 199 | """ 200 | 201 | def __init__( 202 | self, 203 | name: str, 204 | type: str, 205 | last_activity: int, 206 | is_active: bool, 207 | incoming: CQStats, 208 | outgoing: CQStats, 209 | **kwargs, 210 | ): 211 | self.name = name 212 | self.type = type 213 | self.last_activity = last_activity 214 | self.is_active = is_active 215 | self.incoming = incoming 216 | self.outgoing = outgoing 217 | 218 | def __repr__(self): 219 | return f"Channel: name={self.name}, type={self.type}, last_activity={self.last_activity}, is_active={self.is_active}, incoming={self.incoming}, outgoing={self.outgoing}" 220 | 221 | 222 | def decode_pub_sub_channel_list(data_bytes: bytes) -> List[PubSubChannel]: 223 | """ 224 | 225 | Decodes the given data bytes into a list of PubSubChannel objects. 226 | 227 | Parameters: 228 | - data_bytes (bytes): The data bytes to decode. 229 | 230 | Returns: 231 | - List[PubSubChannel]: A list of PubSubChannel objects. 232 | 233 | """ 234 | # Decode bytes to string and parse JSON 235 | data_str = data_bytes.decode("utf-8") 236 | channels_data = json.loads(data_str) 237 | 238 | channels = [] 239 | for item in channels_data: 240 | # Extracting incoming and outgoing as Stats objects 241 | incoming = PubSubStats(**item["incoming"]) 242 | outgoing = PubSubStats(**item["outgoing"]) 243 | 244 | # Creating a Channel instance with the Stats objects 245 | channel = PubSubChannel( 246 | name=item["name"], 247 | type=item["type"], 248 | last_activity=item["lastActivity"], 249 | is_active=item["isActive"], 250 | incoming=incoming, 251 | outgoing=outgoing, 252 | ) 253 | channels.append(channel) 254 | 255 | return channels 256 | 257 | 258 | def decode_queues_channel_list(data_bytes: bytes) -> List[QueuesChannel]: 259 | """ 260 | Decodes a byte string into a list of QueuesChannel objects. 261 | 262 | Parameters: 263 | - data_bytes (bytes): The byte string to be decoded. 264 | 265 | Returns: 266 | - List[QueuesChannel]: A list of QueuesChannel objects. 267 | 268 | Note: 269 | - This method assumes that the byte string is encoded in 'utf-8' format. 270 | - The byte string should represent a valid JSON object. 271 | - The JSON object should contain the necessary fields ('name', 'type', 'lastActivity', 'isActive', 'incoming', 'outgoing') for creating QueuesChannel objects. 272 | - The 'incoming' and 'outgoing' fields should contain valid JSON objects that can be parsed into QueuesStats objects. 273 | """ 274 | # Decode bytes to string and parse JSON 275 | data_str = data_bytes.decode("utf-8") 276 | channels_data = json.loads(data_str) 277 | 278 | channels = [] 279 | for item in channels_data: 280 | # Extracting incoming and outgoing as Stats objects 281 | incoming = QueuesStats(**item["incoming"]) 282 | outgoing = QueuesStats(**item["outgoing"]) 283 | 284 | # Creating a Channel instance with the Stats objects 285 | channel = QueuesChannel( 286 | name=item["name"], 287 | type=item["type"], 288 | last_activity=item["lastActivity"], 289 | is_active=item["isActive"], 290 | incoming=incoming, 291 | outgoing=outgoing, 292 | ) 293 | channels.append(channel) 294 | 295 | return channels 296 | 297 | 298 | def decode_cq_channel_list(data_bytes: bytes) -> List[CQChannel]: 299 | """ 300 | Decodes the given byte array into a list of CQChannel objects. 301 | 302 | Parameters: 303 | - data_bytes (bytes): The byte array to decode. 304 | 305 | Returns: 306 | - List[CQChannel]: The list of CQChannel objects decoded from the byte array. 307 | """ 308 | # Decode bytes to string and parse JSON 309 | data_str = data_bytes.decode("utf-8") 310 | channels_data = json.loads(data_str) 311 | 312 | channels = [] 313 | for item in channels_data: 314 | # Extracting incoming and outgoing as Stats objects 315 | incoming = CQStats(**item["incoming"]) 316 | outgoing = CQStats(**item["outgoing"]) 317 | 318 | # Creating a Channel instance with the Stats objects 319 | channel = CQChannel( 320 | name=item["name"], 321 | type=item["type"], 322 | last_activity=item["lastActivity"], 323 | is_active=item["isActive"], 324 | incoming=incoming, 325 | outgoing=outgoing, 326 | ) 327 | channels.append(channel) 328 | 329 | return channels 330 | -------------------------------------------------------------------------------- /kubemq/queues/upstream_sender.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import threading 3 | import queue 4 | import time 5 | from time import sleep 6 | from typing import Optional, Iterator, Generator 7 | 8 | import grpc 9 | from kubemq.transport import Transport, Connection 10 | from kubemq.grpc import ( 11 | QueuesUpstreamRequest, 12 | QueuesUpstreamResponse, 13 | SendQueueMessageResult, 14 | QueueMessage, 15 | ) 16 | from kubemq.common import * 17 | from kubemq.queues import * 18 | from kubemq.common.helpers import decode_grpc_error, is_channel_error 19 | 20 | 21 | class UpstreamSender: 22 | """ 23 | Class representing an upstream sender for sending messages to a KubeMQ server. 24 | 25 | This class manages a continuous stream of messages to the server and processes 26 | responses asynchronously using a background thread. It provides methods for 27 | sending messages and handling responses. 28 | 29 | Thread Safety: 30 | - All shared state is protected by locks to ensure thread safety 31 | - The background thread is started in __init__ and runs until close() is called 32 | - Response tracking is managed through a thread-safe dictionary 33 | 34 | Error Handling: 35 | - Connection errors trigger automatic reconnection attempts 36 | - Errors in send() are returned as error responses 37 | 38 | Attributes: 39 | transport (Transport): The transport object for channel management. 40 | clientStub (Transport): The transport client stub. 41 | connection (Connection): The connection to the server. 42 | shutdown_event (threading.Event): The event used to indicate shutdown. 43 | logger (logging.Logger): The logger for logging messages. 44 | lock (threading.Lock): The lock used for thread safety. 45 | response_tracking (dict): A dictionary for tracking response containers and result events. 46 | sending_queue (queue.Queue): The queue used for storing messages to be sent. 47 | allow_new_messages (bool): Flag indicating whether new messages are allowed. 48 | queue_timeout (float): Timeout in seconds for queue polling. 49 | request_sleep_interval (float): Sleep interval in seconds between requests. 50 | send_timeout (float): Timeout in seconds for waiting for a send response. 51 | """ 52 | 53 | def __init__( 54 | self, 55 | transport: Transport, 56 | logger: logging.Logger, 57 | connection: Connection, 58 | send_timeout: float = 2.0, 59 | ): 60 | """Initialize a new UpstreamSender. 61 | 62 | Args: 63 | transport: The transport object for channel management 64 | logger: The logger for logging messages 65 | connection: The connection to the server 66 | queue_size: Maximum size of the sending queue (0 for unlimited) 67 | queue_timeout: Timeout in seconds for queue polling 68 | request_sleep_interval: Sleep interval in seconds between requests (0 to disable) 69 | send_timeout: Timeout in seconds for waiting for a send response 70 | """ 71 | self.transport = transport 72 | self.clientStub = transport.kubemq_client() 73 | self.connection = connection 74 | self.shutdown_event = threading.Event() 75 | self.logger = logger 76 | self.lock = threading.Lock() 77 | self.response_tracking = {} 78 | self.sending_queue = queue.Queue() 79 | self.allow_new_messages = True 80 | self.send_timeout = send_timeout 81 | threading.Thread(target=self._send_queue_stream, args=(), daemon=True).start() 82 | 83 | def send(self, message: QueueMessage) -> Optional[QueueSendResult]: 84 | """Send a message to the server. 85 | 86 | Args: 87 | message: The message to send to the server 88 | 89 | Returns: 90 | The result of the send operation, or None if an exception occurred 91 | 92 | Raises: 93 | ConnectionError: If the client is not connected or not ready to accept messages 94 | """ 95 | try: 96 | if not self.transport.is_connected(): 97 | raise ConnectionError( 98 | "Client is not connected to the server and cannot send messages." 99 | ) 100 | if not self.allow_new_messages: 101 | raise ConnectionError("Sender is not ready to accept new messages.") 102 | 103 | response_result = threading.Event() 104 | response_container = {} 105 | message_id = message.id 106 | queue_upstream_request = message.encode(self.connection.client_id) 107 | with self.lock: 108 | self.response_tracking[queue_upstream_request.RequestID] = ( 109 | response_container, 110 | response_result, 111 | message_id, 112 | ) 113 | self.sending_queue.put(queue_upstream_request) 114 | response_result.wait(self.send_timeout) 115 | response: QueuesUpstreamResponse = response_container.get("response") 116 | with self.lock: 117 | if self.response_tracking.get(queue_upstream_request.RequestID): 118 | del self.response_tracking[queue_upstream_request.RequestID] 119 | if response is None: 120 | return QueueSendResult( 121 | id=message_id, 122 | is_error=True, 123 | error="Error: Timeout waiting for response", 124 | ) 125 | send_result = response.Results[0] 126 | return QueueSendResult().decode(send_result) 127 | except Exception as e: 128 | self.logger.error(f"Error sending message: {str(e)}") 129 | return QueueSendResult(id=message.id, is_error=True, error=str(e)) 130 | 131 | def _handle_disconnection(self) -> None: 132 | """Handle disconnection from the server. 133 | 134 | Sets error responses for all pending requests and clears the tracking dictionary. 135 | """ 136 | with self.lock: 137 | self.allow_new_messages = False 138 | for request_id, ( 139 | response_container, 140 | response_result, 141 | message_id, 142 | ) in self.response_tracking.items(): 143 | response_container["response"] = QueuesUpstreamResponse( 144 | RefRequestID=request_id, 145 | Results=[ 146 | SendQueueMessageResult( 147 | MessageID=message_id, 148 | IsError=True, 149 | Error="Error: Disconnected from server", 150 | ) 151 | ], 152 | ) 153 | response_result.set() # Signal that the response has been processed 154 | self.response_tracking.clear() 155 | 156 | def _generate_requests(self) -> Generator[QueuesUpstreamRequest, None, None]: 157 | """Generate requests from the queue to send to the server. 158 | 159 | Yields: 160 | The next request to send 161 | """ 162 | while not self.shutdown_event.is_set(): 163 | try: 164 | msg = self.sending_queue.get() 165 | yield msg 166 | except queue.Empty: 167 | continue 168 | 169 | 170 | def _process_responses(self, responses: Iterator[QueuesUpstreamResponse]) -> None: 171 | """Process responses from the server. 172 | 173 | Args: 174 | responses: Iterator of responses from the server 175 | """ 176 | for response in responses: 177 | if self.shutdown_event.is_set(): 178 | break 179 | response_request_id = response.RefRequestID 180 | with self.lock: 181 | self.allow_new_messages = True 182 | if response_request_id in self.response_tracking: 183 | response_container, response_result, message_id = ( 184 | self.response_tracking[response_request_id] 185 | ) 186 | response_container["response"] = response 187 | response_result.set() 188 | 189 | def _recreate_channel(self) -> bool: 190 | """Attempt to recreate the gRPC channel after a connection failure. 191 | 192 | Returns: 193 | True if channel recreation was successful, False otherwise 194 | """ 195 | try: 196 | self.clientStub = self.transport.recreate_channel() 197 | self.logger.info("Successfully recreated gRPC channel") 198 | with self.lock: 199 | self.allow_new_messages = True 200 | return True 201 | except ConnectionError as conn_ex: 202 | if self.connection.disable_auto_reconnect: 203 | self.logger.warning( 204 | "Auto-reconnect is disabled, not attempting to reconnect" 205 | ) 206 | with self.lock: 207 | self.allow_new_messages = False 208 | return False 209 | else: 210 | self.logger.error(f"Connection error: {str(conn_ex)}") 211 | return False 212 | except Exception as channel_ex: 213 | self.logger.error( 214 | f"Failed to recreate channel: {str(channel_ex)}, type: {type(channel_ex).__name__}" 215 | ) 216 | return False 217 | 218 | def _handle_error(self, error: Exception, is_grpc_error: bool = False) -> bool: 219 | """Handle connection errors and attempt recovery. 220 | 221 | Args: 222 | error: The exception that occurred 223 | is_grpc_error: Whether the error is a gRPC-specific error 224 | 225 | Returns: 226 | True if processing should continue, False if the thread should exit 227 | """ 228 | if is_grpc_error: 229 | error_details = decode_grpc_error(error) 230 | self.logger.error( 231 | f"gRPC error type: {type(error).__name__}, error: {error_details}" 232 | ) 233 | if hasattr(error, "code"): 234 | self.logger.error( 235 | f"gRPC error details: code={error.code()}, details={error.details() if hasattr(error, 'details') else 'N/A'}" 236 | ) 237 | elif is_channel_error(error): 238 | self.logger.error(f"Channel error: {str(error)}") 239 | self.logger.info("Detected channel error in generic exception handler") 240 | else: 241 | self.logger.error( 242 | f"Generic exception type: {type(error).__name__}, error: {str(error)}" 243 | ) 244 | 245 | self._handle_disconnection() 246 | 247 | if is_grpc_error or is_channel_error(error): 248 | if not self._recreate_channel(): 249 | if self.connection.disable_auto_reconnect: 250 | return False # Exit thread 251 | else: 252 | self.logger.error( 253 | "Non-channel error detected, clearing affected messages only" 254 | ) 255 | 256 | time.sleep(self.connection.get_reconnect_delay()) 257 | return True # Continue processing 258 | 259 | def _send_queue_stream(self) -> None: 260 | """Continuously sends messages from the queue to the server and handles responses.""" 261 | while not self.shutdown_event.is_set(): 262 | try: 263 | self.clientStub = self.transport.kubemq_client() 264 | responses = self.clientStub.QueuesUpstream(self._generate_requests()) 265 | self._process_responses(responses) 266 | except grpc.RpcError as e: 267 | if not self._handle_error(e, is_grpc_error=True): 268 | return # Exit thread if handling indicates we should stop 269 | continue 270 | except Exception as e: 271 | if not self._handle_error(e): 272 | return # Exit thread if handling indicates we should stop 273 | continue 274 | 275 | def close(self) -> None: 276 | """Close the sender and release resources. 277 | 278 | This method stops the background thread and cleans up resources. 279 | """ 280 | with self.lock: 281 | self.allow_new_messages = False 282 | self.shutdown_event.set() 283 | self.sending_queue.put(None) 284 | -------------------------------------------------------------------------------- /kubemq/queues/downstream_receiver.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import threading 3 | import queue 4 | import time 5 | from time import sleep 6 | from typing import Optional, Iterator, Generator 7 | 8 | import grpc 9 | from kubemq.transport import Transport, Connection 10 | from kubemq.grpc import ( 11 | QueuesDownstreamRequest, 12 | QueuesDownstreamResponse, 13 | ) 14 | from kubemq.common import * 15 | from kubemq.common.helpers import decode_grpc_error, is_channel_error 16 | 17 | class DownstreamReceiver: 18 | """ 19 | Class representing a downstream receiver for sending requests to a KubeMQ server. 20 | 21 | This class manages a continuous stream of requests to the server and processes 22 | responses asynchronously using a background thread. It provides methods for 23 | sending requests with or without waiting for responses. 24 | 25 | Thread Safety: 26 | - All shared state is protected by locks to ensure thread safety 27 | - The background thread is started in __init__ and runs until close() is called 28 | - Response tracking is managed through a thread-safe dictionary 29 | 30 | Error Handling: 31 | - Connection errors trigger automatic reconnection attempts 32 | - Errors in send() are returned as error responses 33 | - Errors in send_without_response() are raised as exceptions 34 | 35 | Attributes: 36 | transport (Transport): The transport object for channel management. 37 | clientStub (Transport): The transport client stub. 38 | connection (Connection): The connection to the server. 39 | shutdown_event (threading.Event): The event used to indicate shutdown. 40 | logger (logging.Logger): The logger for logging messages. 41 | lock (threading.Lock): The lock used for thread safety. 42 | response_tracking (dict): A dictionary for tracking response containers and result events. 43 | queue (queue.Queue): The queue used for storing requests. 44 | allow_new_requests (bool): Flag indicating whether new requests are allowed. 45 | queue_timeout (float): Timeout in seconds for queue polling. 46 | request_sleep_interval (float): Sleep interval in seconds between requests. 47 | """ 48 | 49 | def __init__( 50 | self, 51 | transport: Transport, 52 | logger: logging.Logger, 53 | connection: Connection, 54 | timeout_buffer: float = 0.5, 55 | ): 56 | """Initialize a new DownstreamReceiver. 57 | 58 | Args: 59 | transport: The transport object for channel management 60 | logger: The logger for logging messages 61 | connection: The connection to the server 62 | queue_size: Maximum size of the request queue (0 for unlimited) 63 | queue_timeout: Timeout in seconds for queue polling 64 | request_sleep_interval: Sleep interval in seconds between requests (0 to disable) 65 | timeout_buffer: Timeout buffer in seconds for request timeouts 66 | """ 67 | self.transport = transport 68 | self.clientStub = transport.kubemq_client() 69 | self.connection = connection 70 | self.shutdown_event = threading.Event() 71 | self.logger = logger 72 | self.lock = threading.Lock() 73 | self.response_tracking = {} 74 | self.queue = queue.Queue() 75 | self.allow_new_requests = True 76 | self.timeout_buffer = timeout_buffer 77 | threading.Thread(target=self._send_queue_stream, args=(), daemon=True).start() 78 | 79 | def send( 80 | self, request: QueuesDownstreamRequest 81 | ) -> Optional[QueuesDownstreamResponse]: 82 | """Send a request to the server and wait for a response. 83 | 84 | Args: 85 | request: The request to send to the server 86 | 87 | Returns: 88 | The response from the server, or None if an exception occurred 89 | 90 | Raises: 91 | ConnectionError: If the client is not connected or not ready to accept requests 92 | """ 93 | try: 94 | if not self.transport.is_connected(): 95 | raise ConnectionError( 96 | "Client is not connected to the server and cannot accept new requests" 97 | ) 98 | if not self.allow_new_requests: 99 | raise ConnectionError("Receiver is not ready to accept new requests") 100 | 101 | response_result = threading.Event() 102 | response_container = {} 103 | with self.lock: 104 | self.response_tracking[request.RequestID] = ( 105 | response_container, 106 | response_result, 107 | ) 108 | self.queue.put(request) 109 | request_wait_timeout = (request.WaitTimeout / 1000) + self.timeout_buffer 110 | response_result.wait(request_wait_timeout) 111 | response: QueuesDownstreamResponse = response_container.get("response") 112 | with self.lock: 113 | if self.response_tracking.get(request.RequestID): 114 | del self.response_tracking[request.RequestID] 115 | if response is None: 116 | return QueuesDownstreamResponse( 117 | RefRequestId=request.RequestID, 118 | IsError=True, 119 | Error="Error: Timeout waiting for response", 120 | ) 121 | return response 122 | except Exception as e: 123 | return QueuesDownstreamResponse( 124 | RefRequestId=request.RequestID, IsError=True, Error=str(e) 125 | ) 126 | 127 | def send_without_response(self, request: QueuesDownstreamRequest) -> None: 128 | """Send a request to the server without waiting for a response. 129 | 130 | Args: 131 | request: The request to send to the server 132 | 133 | Raises: 134 | ConnectionError: If the client is not connected or not ready to accept requests 135 | """ 136 | if not self.transport.is_connected(): 137 | self.logger.error( 138 | "Client is not connected to the server and cannot accept new requests" 139 | ) 140 | raise ConnectionError( 141 | "Client is not connected to the server and cannot accept new requests" 142 | ) 143 | if not self.allow_new_requests: 144 | self.logger.error("Receiver is not ready to accept new requests") 145 | raise ConnectionError("Receiver is not ready to accept new requests") 146 | self.queue.put(request) 147 | 148 | def _handle_disconnection(self) -> None: 149 | """Handle disconnection from the server. 150 | 151 | Sets error responses for all pending requests and clears the tracking dictionary. 152 | """ 153 | with self.lock: 154 | self.allow_new_requests = False 155 | for request_id, ( 156 | response_container, 157 | response_result, 158 | ) in self.response_tracking.items(): 159 | response_container["response"] = QueuesDownstreamResponse( 160 | RefRequestId=request_id, 161 | IsError=True, 162 | Error="Error: Disconnected from server", 163 | ) 164 | response_result.set() # Signal that the response has been processed 165 | self.response_tracking.clear() 166 | 167 | def _generate_requests(self) -> Generator[QueuesDownstreamRequest, None, None]: 168 | """Generate requests from the queue to send to the server. 169 | 170 | Yields: 171 | The next request to send 172 | """ 173 | while not self.shutdown_event.is_set(): 174 | try: 175 | req = self.queue.get() 176 | yield req 177 | except queue.Empty: 178 | continue 179 | 180 | 181 | def _process_responses(self, responses: Iterator[QueuesDownstreamResponse]) -> None: 182 | """Process responses from the server. 183 | 184 | Args: 185 | responses: Iterator of responses from the server 186 | """ 187 | for response in responses: 188 | if self.shutdown_event.is_set(): 189 | break 190 | response_request_id = response.RefRequestId 191 | with self.lock: 192 | self.allow_new_requests = True 193 | if response_request_id in self.response_tracking: 194 | response_container, response_result = self.response_tracking[ 195 | response_request_id 196 | ] 197 | response_container["response"] = response 198 | response_result.set() 199 | 200 | def _recreate_channel(self) -> bool: 201 | """Attempt to recreate the gRPC channel after a connection failure. 202 | 203 | Returns: 204 | True if channel recreation was successful, False otherwise 205 | """ 206 | try: 207 | self.clientStub = self.transport.recreate_channel() 208 | self.logger.info("Successfully recreated gRPC channel") 209 | with self.lock: 210 | self.allow_new_requests = True 211 | return True 212 | except ConnectionError as conn_ex: 213 | if self.connection.disable_auto_reconnect: 214 | self.logger.warning( 215 | "Auto-reconnect is disabled, not attempting to reconnect" 216 | ) 217 | with self.lock: 218 | self.allow_new_requests = False 219 | return False 220 | else: 221 | self.logger.error(f"Connection error: {str(conn_ex)}") 222 | return False 223 | except Exception as channel_ex: 224 | self.logger.error( 225 | f"Failed to recreate channel: {str(channel_ex)}, type: {type(channel_ex).__name__}" 226 | ) 227 | return False 228 | 229 | def _handle_error(self, error: Exception, is_grpc_error: bool = False) -> bool: 230 | """Handle connection errors and attempt recovery. 231 | 232 | Args: 233 | error: The exception that occurred 234 | is_grpc_error: Whether the error is a gRPC-specific error 235 | 236 | Returns: 237 | True if processing should continue, False if the thread should exit 238 | """ 239 | if is_grpc_error: 240 | error_details = decode_grpc_error(error) 241 | self.logger.error( 242 | f"gRPC error type: {type(error).__name__}, error: {error_details}" 243 | ) 244 | if hasattr(error, "code"): 245 | self.logger.error( 246 | f"gRPC error details: code={error.code()}, details={error.details() if hasattr(error, 'details') else 'N/A'}" 247 | ) 248 | elif is_channel_error(error): 249 | self.logger.error(f"Channel error: {str(error)}") 250 | else: 251 | self.logger.error( 252 | f"Generic exception type: {type(error).__name__}, error: {str(error)}" 253 | ) 254 | 255 | self._handle_disconnection() 256 | 257 | if is_grpc_error or is_channel_error(error): 258 | if not self._recreate_channel(): 259 | if self.connection.disable_auto_reconnect: 260 | return False # Exit thread 261 | else: 262 | self.logger.error( 263 | "Non-channel error detected, clearing affected requests only" 264 | ) 265 | 266 | time.sleep(self.connection.get_reconnect_delay()) 267 | return True # Continue processing 268 | 269 | def _send_queue_stream(self) -> None: 270 | """Continuously send requests from the queue to the server and handle responses.""" 271 | while not self.shutdown_event.is_set(): 272 | try: 273 | self.clientStub = self.transport.kubemq_client() 274 | responses = self.clientStub.QueuesDownstream(self._generate_requests()) 275 | self._process_responses(responses) 276 | except grpc.RpcError as e: 277 | if not self._handle_error(e, is_grpc_error=True): 278 | return # Exit thread if handling indicates we should stop 279 | continue 280 | except Exception as e: 281 | if not self._handle_error(e): 282 | return # Exit thread if handling indicates we should stop 283 | continue 284 | 285 | def close(self) -> None: 286 | """Close the receiver and release resources. 287 | 288 | This method stops the background thread and cleans up resources. 289 | """ 290 | with self.lock: 291 | self.allow_new_requests = False 292 | self.shutdown_event.set() 293 | self.queue.put(None) 294 | self.logger.debug("Downstream receiver shutdown") 295 | --------------------------------------------------------------------------------