├── .dockerignore ├── .gitignore ├── Dockerfile ├── README.md ├── app.py ├── common ├── aggregate │ ├── Aggregate.md │ └── aggregate.py ├── ambar │ ├── Ambar.md │ ├── ambar_auth.py │ ├── ambar_http_request.py │ └── ambar_response_factory.py ├── command │ ├── Command.md │ ├── command.py │ ├── command_controller.py │ └── command_handler.py ├── event │ ├── Event.md │ ├── creation_event.py │ ├── event.py │ └── transformation_event.py ├── event_store │ ├── EventStore.md │ ├── aggregate_and_event_ids_in_last_event.py │ └── postgres_transactional_event_store.py ├── projection │ ├── Projection.md │ ├── mongo_transactional_projection_operator.py │ ├── projection_controller.py │ └── projection_handler.py ├── query │ ├── Query.md │ ├── query.py │ ├── query_controller.py │ └── query_handler.py ├── reaction │ ├── Reaction.md │ ├── reaction_controller.py │ └── reaction_handler.py ├── serialized_event │ ├── deserializer.py │ ├── serialized_event.py │ └── serializer.py └── util │ ├── id_generator.py │ ├── logger.py │ ├── mongo_initializer.py │ ├── mongo_session_pool.py │ ├── postgres_connection_pool.py │ └── postgres_initializer.py ├── container.py ├── domain └── cooking_club │ └── membership │ ├── aggregate │ └── membership.py │ ├── command │ └── submit_application │ │ ├── submit_application_command.py │ │ ├── submit_application_command_controller.py │ │ └── submit_application_command_handler.py │ ├── event │ ├── application_evaluated.py │ └── application_submitted.py │ ├── projection │ └── members_by_cuisine │ │ ├── cuisine.py │ │ ├── cuisine_repository.py │ │ ├── members_by_cuisine_projection_controller.py │ │ ├── members_by_cuisine_projection_handler.py │ │ ├── membership_application.py │ │ └── membership_application_repository.py │ ├── query │ └── members_by_cuisine │ │ ├── members_by_cuisine_query.py │ │ ├── members_by_cuisine_query_controller.py │ │ └── members_by_cuisine_query_handler.py │ └── reaction │ └── evaluate_application │ ├── evaluate_application_reaction_controller.py │ └── evaluate_application_reaction_handler.py ├── local-development ├── .gitignore ├── ambar-config.yaml ├── build-files │ ├── frontend-database-explorer │ │ ├── Dockerfile │ │ ├── package-lock.json │ │ ├── package.json │ │ ├── prisma-mongodb │ │ │ └── schema.prisma │ │ └── prisma-postgres │ │ │ └── schema.prisma │ └── mongo │ │ └── mongo.key ├── docker-compose.yml ├── docker-scripts │ ├── linux │ │ ├── dev_demo.sh │ │ ├── dev_shutdown.sh │ │ ├── dev_start.sh │ │ └── dev_start_with_data_deletion.sh │ ├── mac │ │ ├── dev_demo.sh │ │ ├── dev_shutdown.sh │ │ ├── dev_start.sh │ │ └── dev_start_with_data_deletion.sh │ └── windows │ │ ├── dev_demo.ps1 │ │ ├── dev_shutdown.ps1 │ │ ├── dev_start.ps1 │ │ └── dev_start_with_data_deletion.ps1 └── podman-scripts │ ├── linux │ ├── dev_demo.sh │ ├── dev_shutdown.sh │ ├── dev_start.sh │ └── dev_start_with_data_deletion.sh │ ├── mac │ ├── dev_demo.sh │ ├── dev_shutdown.sh │ ├── dev_start.sh │ └── dev_start_with_data_deletion.sh │ └── windows │ ├── dev_demo.ps1 │ ├── dev_shutdown.ps1 │ ├── dev_start.ps1 │ └── dev_start_with_data_deletion.ps1 ├── pyproject.toml └── requirements.txt /.dockerignore: -------------------------------------------------------------------------------- 1 | .idea 2 | local-development 3 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .idea 2 | -------------------------------------------------------------------------------- /Dockerfile: 
-------------------------------------------------------------------------------- 1 | FROM docker.io/python:3.11-slim 2 | 3 | WORKDIR /app 4 | 5 | # Install system dependencies 6 | RUN apt-get update && apt-get install -y \ 7 | build-essential wget \ 8 | && rm -rf /var/lib/apt/lists/* 9 | 10 | COPY requirements.txt ./ 11 | RUN pip install --no-cache-dir -r requirements.txt 12 | RUN pip install gunicorn uvicorn 13 | 14 | COPY . . 15 | 16 | EXPOSE 8080 17 | 18 | CMD ["gunicorn", "-w", "4", "-k", "uvicorn.workers.UvicornWorker", "-b", "0.0.0.0:8080", "app:asgi_app"] 19 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Event Sourcing - Python 2 | 3 | This repository contains a starter pack for **Event Sourcing in Python.** It is a production grade starting point 4 | for your own event sourced application. The starter pack has everything you need to get started with event sourcing, 5 | including an event store, a projection store, and an event bus. 6 | 7 | This starter pack implements a simple example for a Cooking Club Membership. But you're meant to replace this example 8 | with your own application. 9 | 10 | ## Get Started in 5 Minutes 11 | 12 | To run this application you need to install Docker or Podman as a containerization tool. 13 | Once you have Docker or Podman installed, please open your Terminal (linux or mac) or 14 | Powershell (windows), clone this repository, and navigate to the scripts folder for 15 | your operating system and containerization tool. 16 | 17 | ``` 18 | git clone git@github.com:ambarltd/event-sourcing-python.git 19 | 20 | cd event-sourcing-python/local-development/docker-scripts/linux # linux + docker 21 | cd event-sourcing-python/local-development/docker-scripts/mac # mac + docker 22 | cd event-sourcing-python\local-development\docker-scripts\windows # windows + docker 23 | 24 | cd event-sourcing-python/local-development/podman-scripts/linux # linux + podman 25 | cd event-sourcing-python/local-development/podman-scripts/mac # mac + podman 26 | cd event-sourcing-python\local-development\podman-scripts\windows # windows + podman 27 | 28 | # If you're using docker, make sure docker is up and running. 29 | # If you're using podman, make sure podman is up and running. Also make sure there's an active podman machine. 30 | 31 | # If you are using podman on windows, modify your podman machine to support linux directory permissions metadata translation. 32 | wsl -u root -d podman-machine-default # or whatever your podman machine is called 33 | echo [automount] >> wsl.conf 34 | echo 'options = "metadata"' >> wsl.conf 35 | ``` 36 | 37 | Then start the application and run the demo. 38 | 39 | ``` 40 | # linux or mac 41 | ./dev_start.sh 42 | ./dev_demo.sh 43 | 44 | # windows 45 | .\dev_start.ps1 46 | .\dev_demo.ps1 47 | ``` 48 | 49 | You can then open your browser to: 50 | - [http://localhost:8080](http://localhost:8080) to ping the backend 51 | - [http://localhost:8081](http://localhost:8081) to view your event store 52 | - [http://localhost:8082](http://localhost:8082) to view your projection store 53 | 54 | ## How to Develop Your Own Application 55 | 56 | Assuming you know event sourcing theory, developing on this application will feel very natural. Otherwise, don't worry - Ambar offers a **free** 1 day Event Sourcing course [here](https://ambar.cloud/event-sourcing-one-day-course). 
57 | 58 | To get a quick understanding of how this application works, please read the domain code in `domain/`, the abstractions provided in `common/`, and the README files also in `common/`. With that reading done, here's a full picture: 59 | 60 | 1. `domain/`: where you define aggregates, events, commands, queries, projections, and reactions. You will spend most of your time here. 61 | 2. `common/`: a set of event sourcing abstractions. You will rarely need to edit files here, except for having to update the `Serializer` and `Deserializer` classes in `common/serialized_event/` whenever you add or remove events. 62 | 3. `container.py`: contains a dependency injection container. You will need to edit this file to register or unregister services as you see fit (controllers, repositories, etc.). 63 | 4. `app.py`: contains the application's startup code. You will need to register routes and their associated controllers here. 64 | 65 | When developing your application for the first time, we recommend you keep the Cooking Club Membership code as an example you can quickly navigate to. Once you have implemented several commands, queries, projections, and reactions, delete the Cooking Club Membership code. This will require you to delete its code in `domain`, serialization logic in `common/serialized_event`, relevant services in `container.py`, and any routes in `app.py`. 66 | 67 | ## Additional Scripts 68 | 69 | Whenever you build a new feature, you might want to restart the application, or even delete the event store and projection 70 | store. We have provided scripts to help you with that. 71 | 72 | ``` 73 | # linux or mac 74 | ./dev_start.sh # starts / restarts the application. 75 | ./dev_start_with_data_deletion.sh # use this if you want to delete your existing event store and projection db, and restart fresh. 76 | ./dev_shutdown.sh # stops the application 77 | 78 | # windows 79 | .\dev_start.ps1 # starts / restarts the application. 80 | .\dev_start_with_data_deletion.ps1 # use this if you want to delete your existing event store and projection db, and restart fresh. 81 | .\dev_shutdown.ps1 # stops the application 82 | ``` 83 | 84 | ## Deployment 85 | 86 | To deploy this application to a production environment, you will simply need to build the code into a Docker image, 87 | and deploy it to your cloud provider. We have provided infrastructure starter packs for various clouds in [this repository](https://github.com/ambarltd/event-sourcing-cloud-starter-packs). 88 | 89 | ## Support 90 | 91 | If you get stuck, please feel free to ask questions in the #event-sourcing channel of our [Slack community](https://www.launchpass.com/ambar). 92 | Or, if you need further help, such as a free private walkthrough, simply book one [here](https://calendly.com/luis-ambar).
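## Trying the API by Hand

Beyond the demo script, you can exercise the two public endpoints registered in `app.py` with a few lines of Python. This is only a sketch: it assumes the `requests` package is installed, and the request body field names are guesses modelled on the demo domain; check the controllers under `domain/cooking_club/membership/` for the exact schema they expect.

```
import requests

BASE_URL = "http://localhost:8080/api/v1/cooking-club/membership"

# Submit a membership application (command endpoint).
submit = requests.post(
    f"{BASE_URL}/command/submit-application",
    json={
        "firstName": "Ada",                       # field names are assumptions,
        "lastName": "Lovelace",                   # modelled on the demo domain
        "favoriteCuisine": "Italian",
        "yearsOfProfessionalExperience": 3,
        "numberOfCookingBooksRead": 12,
    },
)
print(submit.status_code, submit.text)

# Query members grouped by cuisine (query endpoint). The projection is filled
# asynchronously by Ambar, so a freshly submitted member may take a moment to appear.
members = requests.post(f"{BASE_URL}/query/members-by-cuisine", json={})
print(members.status_code, members.text)
```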
93 | 94 | -------------------------------------------------------------------------------- /app.py: -------------------------------------------------------------------------------- 1 | from flask import Flask, request, jsonify 2 | from flask_cors import CORS 3 | from dotenv import load_dotenv 4 | import asyncio 5 | from asgiref.wsgi import WsgiToAsgi 6 | from container import SharedContainer, RequestContainer 7 | from common.ambar.ambar_auth import ambar_auth 8 | from common.util.logger import log 9 | 10 | load_dotenv() 11 | 12 | 13 | def create_app() -> Flask: 14 | app = Flask(__name__) 15 | 16 | CORS(app, resources={ 17 | r"/api/*": { 18 | "origins": "*", 19 | "methods": ["GET", "POST", "PUT", "DELETE", "OPTIONS"], 20 | "allow_headers": ["Content-Type", "Authorization", "X-With-Session-Token"], 21 | "expose_headers": ["Content-Type"], 22 | "supports_credentials": True 23 | } 24 | }) 25 | 26 | app.shared_container = SharedContainer() 27 | 28 | async def initialize_databases(): 29 | try: 30 | await app.shared_container.postgres_initializer.initialize() 31 | await app.shared_container.mongo_initializer.initialize() 32 | except Exception as error: 33 | log.error('Failed to initialize databases:', error=error) 34 | raise 35 | 36 | with app.app_context(): 37 | asyncio.run(initialize_databases()) 38 | 39 | def get_request_container(): 40 | return RequestContainer(shared_container=app.shared_container) 41 | 42 | @app.route('/api/v1/cooking-club/membership/command/submit-application', methods=['POST']) 43 | async def submit_application(): 44 | container = get_request_container() 45 | controller = container.submit_application_controller() 46 | return await controller.handle_submit_application(request) 47 | 48 | @app.route('/api/v1/cooking-club/membership/query/members-by-cuisine', methods=['POST']) 49 | async def members_by_cuisine(): 50 | container = get_request_container() 51 | controller = container.members_by_cuisine_query_controller() 52 | return await controller.handle_members_by_cuisine(request) 53 | 54 | @app.route('/api/v1/cooking-club/membership/projection/members-by-cuisine', methods=['POST']) 55 | async def project_members_by_cuisine(): 56 | ambar_auth(request) 57 | container = get_request_container() 58 | controller = container.members_by_cuisine_projection_controller() 59 | return await controller.handle_projection_request(request) 60 | 61 | @app.route('/api/v1/cooking-club/membership/reaction/evaluate-application', methods=['POST']) 62 | async def evaluate_application(): 63 | ambar_auth(request) 64 | container = get_request_container() 65 | controller = container.evaluate_application_controller() 66 | return await controller.handle_reaction_request(request) 67 | 68 | @app.route('/docker_healthcheck') 69 | @app.route('/') 70 | def health_check(): 71 | return 'OK' 72 | 73 | @app.before_request 74 | def log_request(): 75 | log.info(f"Endpoint hit: {request.method} {request.path}") 76 | 77 | @app.errorhandler(404) 78 | def not_found_error(error): 79 | log.warn(f"404 Not Found: {request.method} {request.path}") 80 | return jsonify({"error": "Not Found", "route": request.path}), 404 81 | 82 | @app.errorhandler(Exception) 83 | def handle_error(error): 84 | log.error(f"Unhandled error: {str(error)} - Path: {request.method} {request.path}", error) 85 | response = { 86 | 'error': str(error), 87 | 'path': request.path, 88 | 'stack': 'Available in logs' 89 | } 90 | return jsonify(response), 500 91 | 92 | return app 93 | 94 | 95 | app = create_app() 96 | asgi_app = WsgiToAsgi(app) 97 | 98 | if 
__name__ == '__main__': 99 | app.run(host='0.0.0.0', port=8080, debug=True) -------------------------------------------------------------------------------- /common/aggregate/Aggregate.md: -------------------------------------------------------------------------------- 1 | # Aggregate 2 | 3 | In Event Sourcing system, the Aggregate is an in-memory representation of the current state of the system based on past events. The process of taking events from the Event Store and instantiating an Aggregate from them is called Aggregate hydration or Aggregate reconstitution. 4 | 5 | An Aggregate is typically hydrated in a command handler, or a reaction handler, when appending new events to the system. Why? Because we want to check the current state of the system from Aggregates in an immediately consistent fashion. The Aggregate should be implemented in an immediately consistent fashion through the use of optimistic or pessimistic locking when reconstituting the Aggregate. -------------------------------------------------------------------------------- /common/aggregate/aggregate.py: -------------------------------------------------------------------------------- 1 | from abc import ABC 2 | from typing import Generic, TypeVar 3 | 4 | T = TypeVar('T') 5 | 6 | class Aggregate(ABC, Generic[T]): 7 | def __init__(self, aggregate_id: str, aggregate_version: int): 8 | self._aggregate_id = aggregate_id 9 | self._aggregate_version = aggregate_version 10 | 11 | @property 12 | def aggregate_id(self) -> str: 13 | return self._aggregate_id 14 | 15 | @property 16 | def aggregate_version(self) -> int: 17 | return self._aggregate_version 18 | -------------------------------------------------------------------------------- /common/ambar/Ambar.md: -------------------------------------------------------------------------------- 1 | # Ambar 2 | 3 | Tracking events in an EventStore for new events, filtering and forwarding them to downstream consumers while maintaining ordering and delivery guarantees can be complex and error-prone. Event buses such as RabbitMQ and Apache Kafka are often used to transmit and deliver events but can be complex to configure, manage, and scale. 4 | 5 | Ambar is a data streaming service that empowers you to build mission-critical real-time applications in minutes. Instead of producing to and consuming from message brokers, Ambar pulls records from databases, such as Event Stores and pushes records to application endpoints like your projection and reaction endpoints. 
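In this starter pack, each push from Ambar arrives as an HTTP POST whose body is parsed into the `AmbarHttpRequest` model in `common/ambar/ambar_http_request.py`, with the event itself carried as a `SerializedEvent`. The sketch below only illustrates that shape; the concrete values are invented.

```
from common.ambar.ambar_http_request import AmbarHttpRequest

# Invented example values; only the field layout mirrors AmbarHttpRequest / SerializedEvent.
ambar_request = AmbarHttpRequest(
    data_source_id="postgres_event_store",
    data_source_description="Event store table",
    data_destination_id="members_by_cuisine_projection",
    data_destination_description="Projection endpoint",
    payload={
        "event_id": "evt-123",
        "aggregate_id": "agg-456",
        "causation_id": "evt-123",
        "correlation_id": "evt-123",
        "aggregate_version": 1,
        "json_payload": "{\"firstName\": \"Ada\"}",
        "json_metadata": "{}",
        "recorded_on": "2025-01-01 12:00:00 UTC",
        "event_name": "CookingClub_Membership_ApplicationSubmitted",
    },
)

# Projection and reaction controllers hand the payload to the Deserializer,
# which turns it back into a typed Event.
print(ambar_request.payload.event_name)
```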
6 | 7 | Find out more about how to use Ambar in your production applications by visiting https://ambar.cloud/es 8 | -------------------------------------------------------------------------------- /common/ambar/ambar_auth.py: -------------------------------------------------------------------------------- 1 | import os 2 | import base64 3 | 4 | 5 | def get_auth_credentials(): 6 | valid_username = os.getenv('AMBAR_HTTP_USERNAME') 7 | valid_password = os.getenv('AMBAR_HTTP_PASSWORD') 8 | 9 | if not valid_username or not valid_password: 10 | raise RuntimeError('Environment variables AMBAR_HTTP_USERNAME and AMBAR_HTTP_PASSWORD must be set') 11 | 12 | return valid_username, valid_password 13 | 14 | 15 | def ambar_auth(request_obj): 16 | """Middleware function to authenticate a request.""" 17 | auth_header = request_obj.headers.get('Authorization') 18 | 19 | if not auth_header: 20 | raise PermissionError("Authentication required") 21 | 22 | if not auth_header.startswith('Basic '): 23 | raise PermissionError("Basic authentication required") 24 | 25 | try: 26 | valid_username, valid_password = get_auth_credentials() 27 | 28 | encoded_credentials = auth_header.split(' ')[1] 29 | decoded = base64.b64decode(encoded_credentials).decode('utf-8') 30 | username, password = decoded.split(':') 31 | 32 | if username != valid_username or password != valid_password: 33 | raise PermissionError("Invalid credentials") 34 | 35 | except Exception as e: 36 | raise PermissionError(f"Invalid authentication format: {str(e)}") -------------------------------------------------------------------------------- /common/ambar/ambar_http_request.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel 2 | from common.serialized_event.serialized_event import SerializedEvent 3 | 4 | class AmbarHttpRequest(BaseModel): 5 | data_source_id: str 6 | data_source_description: str 7 | data_destination_id: str 8 | data_destination_description: str 9 | payload: SerializedEvent 10 | 11 | class Config: 12 | from_attributes = True -------------------------------------------------------------------------------- /common/ambar/ambar_response_factory.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | class AmbarResponseFactory: 4 | @staticmethod 5 | def retry_response(exception: Exception) -> str: 6 | message = str(exception).replace('"', '\\"') 7 | return json.dumps({ 8 | "result": { 9 | "error": { 10 | "policy": "must_retry", 11 | "class": exception.__class__.__name__, 12 | "description": f"message:{message}" 13 | } 14 | } 15 | }) 16 | 17 | @staticmethod 18 | def success_response() -> str: 19 | return json.dumps({ 20 | "result": { 21 | "success": {} 22 | } 23 | }) -------------------------------------------------------------------------------- /common/command/Command.md: -------------------------------------------------------------------------------- 1 | # Command Handler 2 | 3 | A Command Handler in an EventSourcing system is responsible for taking statements of intent (commands) from end users or other systems (both internal and external), performing validation, and upon valid conditions, adding new Events to the Event Store. 4 | 5 | To do this, the Command Handler reads past events from the Event store to hydrate / reconstitute an Aggregate. 
Once the aggregate is hydrated, the Command Handler checks for any business rules or constraints (e.g., ensuring an order hasn’t already been completed or that an account has sufficient balance). 6 | 7 | If all validations succeed, the Command Handler generates a new Event reflecting the state change requested by the command. This Event is then written back to the Event store, allowing the system to evolve while maintaining a full history of all changes. 8 | -------------------------------------------------------------------------------- /common/command/command.py: -------------------------------------------------------------------------------- 1 | from abc import ABC 2 | 3 | class Command(ABC): 4 | pass -------------------------------------------------------------------------------- /common/command/command_controller.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | from common.event_store.postgres_transactional_event_store import PostgresTransactionalEventStore 3 | from common.projection.mongo_transactional_projection_operator import MongoTransactionalProjectionOperator 4 | from common.util.logger import log 5 | from common.command.command import Command 6 | from common.command.command_handler import CommandHandler 7 | 8 | class CommandController: 9 | def __init__( 10 | self, 11 | postgres_transactional_event_store: PostgresTransactionalEventStore, 12 | mongo_transactional_projection_operator: MongoTransactionalProjectionOperator, 13 | ): 14 | self._postgres_transactional_event_store = postgres_transactional_event_store 15 | self._mongo_transactional_projection_operator = mongo_transactional_projection_operator 16 | 17 | async def process_command(self, command: Command, command_handler: CommandHandler) -> None: 18 | try: 19 | log.debug(f"Starting to process command: {command.__class__.__name__}") 20 | await self._postgres_transactional_event_store.begin_transaction() 21 | await self._mongo_transactional_projection_operator.start_transaction() 22 | await command_handler.handle_command(command) 23 | await self._postgres_transactional_event_store.commit_transaction() 24 | await self._mongo_transactional_projection_operator.commit_transaction() 25 | 26 | await self._postgres_transactional_event_store.abort_dangling_transactions_and_return_connection_to_pool() 27 | await self._mongo_transactional_projection_operator.abort_dangling_transactions_and_return_session_to_pool() 28 | log.debug(f"Successfully processed command: {command.__class__.__name__}") 29 | 30 | except Exception as error: 31 | await self._postgres_transactional_event_store.abort_dangling_transactions_and_return_connection_to_pool() 32 | await self._mongo_transactional_projection_operator.abort_dangling_transactions_and_return_session_to_pool() 33 | log.error(f"Exception in ProcessCommand: {error}", error=error) 34 | raise RuntimeError(f"Failed to process command: {error}") -------------------------------------------------------------------------------- /common/command/command_handler.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | from typing import Any 3 | from common.event_store.postgres_transactional_event_store import PostgresTransactionalEventStore 4 | from common.command.command import Command 5 | 6 | class CommandHandler(ABC): 7 | def __init__(self, postgres_transactional_event_store: PostgresTransactionalEventStore): 8 | self._postgres_transactional_event_store = 
postgres_transactional_event_store 9 | 10 | @abstractmethod 11 | async def handle_command(self, command: Command) -> None: 12 | pass -------------------------------------------------------------------------------- /common/event/Event.md: -------------------------------------------------------------------------------- 1 | # Event 2 | 3 | ## What are Events? 4 | 5 | Events represent state changes that have occurred in the system. Instead of storing state, our system stores a series of events. Current state is derived by replaying these events in the order they occurred. 6 | 7 | Events are immutable, meaning once they are created and stored, they cannot be modified. They are a record of what happened in the system. 8 | 9 | An event typically contains: 10 | 11 | * Event Name: A description of the specific action that occurred (e.g., OrderPlaced, AccountDebited, UserSignedUp). 12 | * Aggregate Identifier: The unique ID of the aggregate the event belongs to. 13 | * Timestamp: The time when the Event occurred. 14 | * Payload: Data describing the state change (the properties of the aggregate that have been changed). 15 | * Metadata (optional): Information such as the user agent or IP of the end user. 16 | 17 | ## Why use Events? 18 | 19 | Events are used to: 20 | 21 | * Rebuild the current state of an aggregate by replaying the series of events. 22 | * Trigger side effects (reactions) such as sending notifications. 23 | * Asynchronously update read models (projections). 24 | * Provide an audit trail, capturing the full history of changes in the system for compliance and debugging. 25 | 26 | By relying on events as the source of truth, Event Sourcing allows for greater traceability, flexibility in replaying or restoring state, and the ability to respond to changes in a distributed, asynchronous manner. 27 | 28 | ## Abstractions 29 | 30 | This directory contains our base definition for an Event. That is, `event_id`, `aggregate_id`, `aggregate_version`, `causation_id`, `correlation_id`, `recorded_on`. The event_name column, which is the name of the event, is not included because it's based on a mapping of the event class name to the event name. The `payload` column and `metadata` column are also not included because they are based on the event class properties. We use an abstraction called Serialized Event (see `common/serialized_event/serialized_event.py`) to store the `event_name`, `payload`, and `metadata`. 31 | 32 | **Why are there two extra abstract classes for creation events and transformation events?** 33 | 34 | Creation events are events that are used to create an aggregate. They are used to create the initial state of an aggregate. Transformation events are events that are used to transform an aggregate. They are used to change the state of an aggregate. 35 | 36 | When reconstituting / hydrating an aggregate, it's better not to have a default state of the aggregate which contains invalid state. Instead, it's better to codify into our type system which events can create a valid aggregate state on their own and which events can transform an aggregate. This way, we can ensure that the aggregate is always in a valid state. 37 | 38 | # Serialized Event 39 | 40 | A Serialized Event is a representation in which additional properties, not encoded in the abstract Event, are converted into fields that go into the payload or metadata fields.
Additionally, the Serialized Event contains an `event_name` which can be used to figure out which class the SerializedEvent should be deserialized into. 41 | 42 | Serialized Events are used when communicating with the database (Postgres) or event bus (Ambar). -------------------------------------------------------------------------------- /common/event/creation_event.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | from typing import Generic, TypeVar 3 | from common.event.event import Event 4 | from common.aggregate.aggregate import Aggregate 5 | 6 | T = TypeVar('T', bound=Aggregate) 7 | 8 | class CreationEvent(Event[T], ABC, Generic[T]): 9 | @abstractmethod 10 | def create_aggregate(self) -> T: 11 | pass 12 | -------------------------------------------------------------------------------- /common/event/event.py: -------------------------------------------------------------------------------- 1 | from abc import ABC 2 | from datetime import datetime 3 | from typing import Generic, TypeVar 4 | 5 | T = TypeVar('T') 6 | 7 | class Event(ABC, Generic[T]): 8 | def __init__( 9 | self, 10 | event_id: str, 11 | aggregate_id: str, 12 | aggregate_version: int, 13 | correlation_id: str, 14 | causation_id: str, 15 | recorded_on: datetime 16 | ): 17 | self._event_id = event_id 18 | self._aggregate_id = aggregate_id 19 | self._aggregate_version = aggregate_version 20 | self._correlation_id = correlation_id 21 | self._causation_id = causation_id 22 | self._recorded_on = recorded_on 23 | 24 | @property 25 | def event_id(self) -> str: 26 | return self._event_id 27 | 28 | @property 29 | def aggregate_id(self) -> str: 30 | return self._aggregate_id 31 | 32 | @property 33 | def aggregate_version(self) -> int: 34 | return self._aggregate_version 35 | 36 | @property 37 | def correlation_id(self) -> str: 38 | return self._correlation_id 39 | 40 | @property 41 | def causation_id(self) -> str: 42 | return self._causation_id 43 | 44 | @property 45 | def recorded_on(self) -> datetime: 46 | return self._recorded_on 47 | -------------------------------------------------------------------------------- /common/event/transformation_event.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | from typing import Generic, TypeVar 3 | from common.event.event import Event 4 | from common.aggregate.aggregate import Aggregate 5 | 6 | T = TypeVar('T', bound=Aggregate) 7 | 8 | class TransformationEvent(Event[T], ABC, Generic[T]): 9 | @abstractmethod 10 | def transform_aggregate(self, aggregate: T) -> T: 11 | pass 12 | -------------------------------------------------------------------------------- /common/event_store/EventStore.md: -------------------------------------------------------------------------------- 1 | # Event Store 2 | 3 | The Event Store is responsible for saving new Events and fetching existing Events to hydrate / reconstitute Aggregates. 4 | 5 | The Event Store saves Events, but it does not save them directly, it first converts them to a SerializedEvent. The SerializedEvent is a representation of the Event that can be stored in a database. 6 | 7 | Additionally, the Event Store does not simply return aggregates, but it returns an Aggregate plus Event Ids, that would be necessary to append more events to the Aggregate (event_id and correlation_id in the last event of that Aggregate). 
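As a rough sketch (with placeholder command and event classes, not the shipped Cooking Club code), a command handler uses those returned ids like this when appending a follow-up event. A random `uuid4` stands in for id generation here; the starter pack ships an `IdGenerator` in `common/util/` for that purpose, and any real event must also be registered in the `Serializer` and `Deserializer` in `common/serialized_event/`.

```
import uuid
from dataclasses import dataclass
from datetime import datetime, timezone

from common.command.command import Command
from common.command.command_handler import CommandHandler
from common.event.transformation_event import TransformationEvent


@dataclass
class RenameSomething(Command):
    """Placeholder command carrying the target aggregate id."""
    aggregate_id: str
    new_name: str


class SomethingRenamed(TransformationEvent):
    """Placeholder transformation event."""
    def __init__(self, *, new_name: str, **kwargs):
        super().__init__(**kwargs)
        self.new_name = new_name

    def transform_aggregate(self, aggregate):
        return aggregate  # a real event would return the updated aggregate


class RenameSomethingCommandHandler(CommandHandler):
    async def handle_command(self, command: RenameSomething) -> None:
        # Hydrate the aggregate; the store also returns the ids of its last event.
        result = await self._postgres_transactional_event_store.find_aggregate(command.aggregate_id)
        aggregate = result.aggregate

        # ... enforce business rules against the hydrated aggregate here ...

        new_event = SomethingRenamed(
            new_name=command.new_name,
            event_id=str(uuid.uuid4()),
            aggregate_id=aggregate.aggregate_id,
            aggregate_version=aggregate.aggregate_version + 1,
            # The new event points back at the last event that was read.
            causation_id=result.event_id_of_last_event,
            correlation_id=result.correlation_id_of_last_event,
            recorded_on=datetime.now(timezone.utc),
        )
        await self._postgres_transactional_event_store.save_event(new_event)
```

The `CommandController` wraps `handle_command` in the Postgres and Mongo transactions, so the handler itself only reads, validates, and appends.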
-------------------------------------------------------------------------------- /common/event_store/aggregate_and_event_ids_in_last_event.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from typing import Generic, TypeVar 3 | from common.aggregate.aggregate import Aggregate 4 | 5 | T = TypeVar('T', bound=Aggregate) 6 | 7 | @dataclass 8 | class AggregateAndEventIdsInLastEvent(Generic[T]): 9 | aggregate: T 10 | event_id_of_last_event: str 11 | correlation_id_of_last_event: str -------------------------------------------------------------------------------- /common/event_store/postgres_transactional_event_store.py: -------------------------------------------------------------------------------- 1 | from typing import TypeVar, Optional, List 2 | from common.util.postgres_connection_pool import PostgresConnectionPool 3 | from common.serialized_event.serializer import Serializer 4 | from common.serialized_event.deserializer import Deserializer 5 | from common.serialized_event.serialized_event import SerializedEvent 6 | from common.event.event import Event 7 | from common.event.creation_event import CreationEvent 8 | from common.event.transformation_event import TransformationEvent 9 | from common.aggregate.aggregate import Aggregate 10 | from common.util.logger import log 11 | from common.event_store.aggregate_and_event_ids_in_last_event import AggregateAndEventIdsInLastEvent 12 | 13 | T = TypeVar('T', bound=Aggregate) 14 | 15 | class PostgresTransactionalEventStore: 16 | def __init__( 17 | self, 18 | connection_pool: PostgresConnectionPool, 19 | serializer: Serializer, 20 | deserializer: Deserializer, 21 | event_store_table: str 22 | ): 23 | self._connection_pool = connection_pool 24 | self._serializer = serializer 25 | self._deserializer = deserializer 26 | self._event_store_table = event_store_table 27 | self._connection = None 28 | self._active_transaction = False 29 | 30 | async def begin_transaction(self) -> None: 31 | if self._connection or self._active_transaction: 32 | raise RuntimeError('Connection or transaction already active!') 33 | 34 | try: 35 | self._connection = await self._connection_pool.open_connection() 36 | self._connection.cursor().execute('BEGIN ISOLATION LEVEL SERIALIZABLE') 37 | self._active_transaction = True 38 | except Exception as error: 39 | max_len = 500 40 | error_message = str(error) 41 | raise RuntimeError( 42 | 'Failed to start transaction with ' + 43 | (error_message[:max_len] if len(error_message) > max_len else error_message) 44 | ) 45 | 46 | async def find_aggregate(self, aggregate_id: str) -> AggregateAndEventIdsInLastEvent[T]: 47 | if not self._active_transaction: 48 | raise RuntimeError('Transaction must be active to perform find aggregate operations!') 49 | 50 | serialized_events = await self._find_all_serialized_events_by_aggregate_id(aggregate_id) 51 | events = [self._deserializer.deserialize(e) for e in serialized_events] 52 | 53 | if not events: 54 | raise RuntimeError(f"No events found for aggregateId: {aggregate_id}") 55 | 56 | creation_event = events[0] 57 | transformation_events = events[1:] 58 | 59 | if not isinstance(creation_event, CreationEvent): 60 | raise RuntimeError('First event is not a creation event') 61 | 62 | aggregate = creation_event.create_aggregate() 63 | event_id_of_last_event = creation_event.event_id 64 | correlation_id_of_last_event = creation_event.correlation_id 65 | 66 | for transformation_event in transformation_events: 67 | if not 
isinstance(transformation_event, TransformationEvent): 68 | raise RuntimeError('Event is not a transformation event') 69 | aggregate = transformation_event.transform_aggregate(aggregate) 70 | event_id_of_last_event = transformation_event.event_id 71 | correlation_id_of_last_event = transformation_event.correlation_id 72 | 73 | return AggregateAndEventIdsInLastEvent( 74 | aggregate=aggregate, 75 | event_id_of_last_event=event_id_of_last_event, 76 | correlation_id_of_last_event=correlation_id_of_last_event 77 | ) 78 | 79 | async def save_event(self, event: Event) -> None: 80 | if not self._active_transaction: 81 | raise RuntimeError('Transaction must be active to perform save operations!') 82 | 83 | await self._save_serialized_event(self._serializer.serialize(event)) 84 | 85 | async def does_event_already_exist(self, event_id: str) -> bool: 86 | if not self._active_transaction: 87 | raise RuntimeError('Transaction must be active to perform find event operations!') 88 | 89 | event = await self._find_serialized_event_by_event_id(event_id) 90 | return event is not None 91 | 92 | async def commit_transaction(self) -> None: 93 | if not self._active_transaction: 94 | raise RuntimeError('Transaction must be active to commit!') 95 | 96 | try: 97 | if self._connection: 98 | self._connection.commit() 99 | self._active_transaction = False 100 | except Exception as error: 101 | raise RuntimeError(f"Failed to commit transaction: {error}") 102 | 103 | async def abort_dangling_transactions_and_return_connection_to_pool(self) -> None: 104 | if self._active_transaction: 105 | try: 106 | if self._connection: 107 | self._connection.rollback() 108 | self._active_transaction = False 109 | except Exception as error: 110 | log.error('Failed to rollback PG transaction', error=error) 111 | 112 | if self._connection: 113 | try: 114 | await self._connection_pool.return_connection(self._connection) 115 | self._connection = None 116 | except Exception as error: 117 | log.error('Failed to release PG connection', error=error) 118 | 119 | async def _find_all_serialized_events_by_aggregate_id(self, aggregate_id: str) -> List[SerializedEvent]: 120 | if not self._connection: 121 | raise RuntimeError('No active connection') 122 | 123 | sql = f""" 124 | SELECT id, event_id, aggregate_id, causation_id, correlation_id, 125 | aggregate_version, json_payload, json_metadata, recorded_on, event_name 126 | FROM {self._event_store_table} 127 | WHERE aggregate_id = %s 128 | ORDER BY aggregate_version ASC 129 | """ 130 | 131 | try: 132 | cursor = self._connection.cursor() 133 | cursor.execute(sql, (aggregate_id,)) 134 | rows = cursor.fetchall() 135 | return [self._map_row_to_serialized_event(row) for row in rows] 136 | except Exception as error: 137 | raise RuntimeError(f"Failed to fetch events for aggregate: {aggregate_id}: {error}") 138 | 139 | async def _save_serialized_event(self, serialized_event: SerializedEvent) -> None: 140 | if not self._connection: 141 | raise RuntimeError('No active connection') 142 | 143 | sql = f""" 144 | INSERT INTO {self._event_store_table} ( 145 | event_id, aggregate_id, causation_id, correlation_id, 146 | aggregate_version, json_payload, json_metadata, recorded_on, event_name 147 | ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s) 148 | """ 149 | 150 | values = ( 151 | serialized_event.event_id, 152 | serialized_event.aggregate_id, 153 | serialized_event.causation_id, 154 | serialized_event.correlation_id, 155 | serialized_event.aggregate_version, 156 | serialized_event.json_payload, 157 | 
serialized_event.json_metadata, 158 | serialized_event.recorded_on, 159 | serialized_event.event_name 160 | ) 161 | 162 | try: 163 | cursor = self._connection.cursor() 164 | cursor.execute(sql, values) 165 | except Exception as error: 166 | raise RuntimeError(f"Failed to save event: {serialized_event.event_id}: {error}") 167 | 168 | async def _find_serialized_event_by_event_id(self, event_id: str) -> Optional[SerializedEvent]: 169 | if not self._connection: 170 | raise RuntimeError('No active connection') 171 | 172 | sql = f""" 173 | SELECT id, event_id, aggregate_id, causation_id, correlation_id, 174 | aggregate_version, json_payload, json_metadata, recorded_on, event_name 175 | FROM {self._event_store_table} 176 | WHERE event_id = %s 177 | """ 178 | 179 | try: 180 | cursor = self._connection.cursor() 181 | cursor.execute(sql, (event_id,)) 182 | row = cursor.fetchone() 183 | return self._map_row_to_serialized_event(row) if row else None 184 | except Exception as error: 185 | raise RuntimeError(f"Failed to fetch event: {event_id}: {error}") 186 | 187 | def _map_row_to_serialized_event(self, row) -> SerializedEvent: 188 | return SerializedEvent( 189 | id=row[0], 190 | event_id=row[1], 191 | aggregate_id=row[2], 192 | causation_id=row[3], 193 | correlation_id=row[4], 194 | aggregate_version=row[5], 195 | json_payload=row[6], 196 | json_metadata=row[7], 197 | recorded_on=row[8], 198 | event_name=row[9] 199 | ) -------------------------------------------------------------------------------- /common/projection/Projection.md: -------------------------------------------------------------------------------- 1 | # Projection 2 | 3 | A projection is a read model that is derived from the events in the system. Projections are used to query the current state of the system. For example, in an ecommerce website users will need to know which items are available, before they add an item to their cart. This allows the read side of a system to often be decoupled from the write side. 4 | 5 | When an Event is emitted (e.g., OrderPlaced, ProductUpdated), a projection listens to the stream of events and filters the relevant events it needs to process. For example, a projection that builds a list of user orders would only listen for OrderPlaced and OrderCanceled events. It updates the read model by applying the Event data, ensuring the model reflects the latest state. 6 | 7 | Projections continuously update the projection database as new events arrive, keeping the read model in sync with the most recent state changes. This enables high-performance queries and ensures that the read side remains highly available and scalable. 8 | 9 | You can use projections for sharing state with your end users, but also to do validation in command handlers. But note that projections are built asynchronously, so they are eventually consistent. If you need to enforce business rules in an immediately consistent manner, you should do so by loading aggregates as opposed to reading projections. 10 | 11 | ## How Projections Work 12 | 13 | Projections are built by listening to events and updating the read model accordingly. When an event is received, we update a projection database (MongoDB), based on the contents of the event and any existing data in the projection database. This behavior is captured by extending a `ProjectionHandler`. 14 | 15 | ### How do events get sent from the Event Store to the Projection Handlers? 
16 | 17 | We use Ambar to read events from the Event Store and send them to the Projection Handlers, via an HTTP endpoint. The HTTP endpoint is defined through extending a `ProjectionController`, which will receive the events and send them to the corresponding `ProjectionHandler`. 18 | 19 | #### How do we make sure that events are sent at least once, and in order per aggregate, to a Projection Handler? 20 | 21 | Ambar takes care of this out of the box. All you have to take care of is making every `ProjectionController` idempotent. To make sure projections endpoint only process events once, the `ProjectionController` uses an abstraction called `ProjectedEvent` which keeps track of every event that has already been processed. 22 | 23 | ### Where can I find the Ambar configuration? 24 | 25 | The Ambar configuration is located in the `local-development/ambar-config.yml`. 26 | 27 | ### In ambar-config.yml, why are events ordered per correlation id, instead of aggregate id? 28 | 29 | Ordering events per correlation id retains the order of events per aggregate, but also retains the order of events across related aggregates. E.g., if you have an aggregate for November, and an aggregate for December, and the aggregate for December directly follows the aggregate for November (using the same correlation id), Ambar will give you the events in order per aggregate, but will also retain order across aggregates (Ambar will project November first, and December second). -------------------------------------------------------------------------------- /common/projection/mongo_transactional_projection_operator.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, Dict, Any, List, TypeVar, Generic 2 | from pymongo import MongoClient 3 | from pymongo.client_session import ClientSession 4 | from pymongo.read_concern import ReadConcern 5 | from pymongo.write_concern import WriteConcern 6 | from pymongo.read_preferences import ReadPreference 7 | from common.util.mongo_session_pool import MongoSessionPool 8 | 9 | T = TypeVar('T') 10 | 11 | class MongoTransactionalProjectionOperator: 12 | def __init__(self, session_pool: MongoSessionPool, database_name: str): 13 | self._session: Optional[ClientSession] = None 14 | self._db: Optional[MongoClient] = None 15 | self._session_pool = session_pool 16 | self._database_name = database_name 17 | 18 | async def start_transaction(self) -> None: 19 | if self._session is not None: 20 | raise RuntimeError('Session to MongoDB already active!') 21 | 22 | if self._db is not None: 23 | raise RuntimeError('Database already initialized in the current session.') 24 | 25 | try: 26 | self._session = await self._session_pool.start_session() 27 | client = self._session_pool.get_client() 28 | self._db = client[self._database_name] 29 | 30 | transaction_options = { 31 | 'read_concern': ReadConcern('snapshot'), 32 | 'write_concern': WriteConcern('majority'), 33 | 'read_preference': ReadPreference.PRIMARY 34 | } 35 | 36 | self._session.start_transaction(**transaction_options) 37 | except Exception as e: 38 | raise RuntimeError(f"Failed to start MongoDB transaction: {e}") 39 | 40 | async def commit_transaction(self) -> None: 41 | if self._session is None: 42 | raise RuntimeError('Session must be active to commit transaction to MongoDB!') 43 | 44 | if not self._session.in_transaction: 45 | raise RuntimeError('Transaction must be active to commit transaction to MongoDB!') 46 | 47 | try: 48 | self._session.commit_transaction() 49 | 
except Exception as e: 50 | raise RuntimeError(f"Failed to commit MongoDB transaction: {e}") 51 | 52 | async def abort_dangling_transactions_and_return_session_to_pool(self) -> None: 53 | if self._session is None: 54 | self._db = None 55 | return 56 | 57 | try: 58 | if self._session.in_transaction: 59 | self._session.abort_transaction() 60 | except Exception as e: 61 | from common.util.logger import log 62 | log.error('Failed to abort Mongo transaction', error=e) 63 | 64 | try: 65 | self._session.end_session() 66 | except Exception as e: 67 | from common.util.logger import log 68 | log.error('Failed to release Mongo session', error=e) 69 | 70 | self._session = None 71 | self._db = None 72 | 73 | async def find(self, collection_name: str, filter_query: Dict[str, Any], options: Optional[Dict[str, Any]] = None) -> List[T]: 74 | session, db = await self._operate() 75 | collection = db[collection_name] 76 | cursor = collection.find(filter_query, session=session, **(options or {})) 77 | return list(cursor) 78 | 79 | async def replace_one(self, collection_name: str, filter_query: Dict[str, Any], 80 | replacement: Dict[str, Any], options: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: 81 | session, db = await self._operate() 82 | collection = db[collection_name] 83 | return collection.replace_one(filter_query, replacement, session=session, **(options or {})) 84 | 85 | async def insert_one(self, collection_name: str, document: Dict[str, Any], 86 | options: Optional[Dict[str, Any]] = None) -> None: 87 | session, db = await self._operate() 88 | collection = db[collection_name] 89 | collection.insert_one(document, session=session, **(options or {})) 90 | 91 | async def count_documents(self, collection_name: str, filter_query: Dict[str, Any], 92 | options: Optional[Dict[str, Any]] = None) -> int: 93 | session, db = await self._operate() 94 | collection = db[collection_name] 95 | return collection.count_documents(filter_query, session=session, **(options or {})) 96 | 97 | async def _operate(self): 98 | if self._session is None: 99 | raise RuntimeError('Session must be active to read or write to MongoDB!') 100 | 101 | if not self._session.in_transaction: 102 | raise RuntimeError('Transaction must be active to read or write to MongoDB!') 103 | 104 | if self._db is None: 105 | raise RuntimeError('Database must be initialized in the current session.') 106 | 107 | return self._session, self._db -------------------------------------------------------------------------------- /common/projection/projection_controller.py: -------------------------------------------------------------------------------- 1 | from flask import Response 2 | from common.serialized_event.deserializer import Deserializer 3 | from common.ambar.ambar_http_request import AmbarHttpRequest 4 | from common.ambar.ambar_response_factory import AmbarResponseFactory 5 | from common.projection.projection_handler import ProjectionHandler 6 | from common.projection.mongo_transactional_projection_operator import MongoTransactionalProjectionOperator 7 | from common.util.logger import log 8 | 9 | class ProjectionController: 10 | def __init__( 11 | self, 12 | mongo_operator: MongoTransactionalProjectionOperator, 13 | deserializer: Deserializer, 14 | ): 15 | self._mongo_operator = mongo_operator 16 | self._deserializer = deserializer 17 | 18 | async def process_projection_http_request( 19 | self, 20 | ambar_http_request: AmbarHttpRequest, 21 | projection_handler: ProjectionHandler, 22 | projection_name: str 23 | ) -> tuple[Response, int]: 24 | try: 
25 | log.debug( 26 | f"Starting to process projection for event name: {ambar_http_request.payload.event_name} " 27 | f"using handler: {projection_handler.__class__.__name__}" 28 | ) 29 | 30 | event = self._deserializer.deserialize(ambar_http_request.payload) 31 | 32 | await self._mongo_operator.start_transaction() 33 | 34 | is_already_projected = await self._mongo_operator.count_documents( 35 | 'ProjectionIdempotency_ProjectedEvent', 36 | { 37 | 'eventId': event.event_id, 38 | 'projectionName': projection_name 39 | } 40 | ) != 0 41 | 42 | if is_already_projected: 43 | await self._mongo_operator.abort_dangling_transactions_and_return_session_to_pool() 44 | log.debug( 45 | f"Duplication projection ignored for event name: {ambar_http_request.payload.event_name} " 46 | f"using handler: {projection_handler.__class__.__name__}" 47 | ) 48 | return Response(AmbarResponseFactory.success_response(), content_type='application/json'), 200 49 | 50 | # Record projected event 51 | await self._mongo_operator.insert_one('ProjectionIdempotency_ProjectedEvent', { 52 | 'eventId': event.event_id, 53 | 'projectionName': projection_name 54 | }) 55 | 56 | await projection_handler.project(event) 57 | 58 | await self._mongo_operator.commit_transaction() 59 | await self._mongo_operator.abort_dangling_transactions_and_return_session_to_pool() 60 | 61 | log.debug( 62 | f"Projection successfully processed for event name: {ambar_http_request.payload.event_name} " 63 | f"using handler: {projection_handler.__class__.__name__}" 64 | ) 65 | return Response(AmbarResponseFactory.success_response(), content_type='application/json'), 200 66 | 67 | except Exception as ex: 68 | if isinstance(ex, ValueError) and str(ex).startswith('Unknown event type'): 69 | await self._mongo_operator.abort_dangling_transactions_and_return_session_to_pool() 70 | 71 | log.debug( 72 | f"Unknown event in projection ignored for event name: {ambar_http_request.payload.event_name} " 73 | f"using handler: {projection_handler.__class__.__name__}" 74 | ) 75 | return Response(AmbarResponseFactory.success_response(), content_type='application/json'), 200 76 | 77 | await self._mongo_operator.abort_dangling_transactions_and_return_session_to_pool() 78 | log.error( 79 | f"Exception in ProcessProjectionHttpRequest: {ex}. " 80 | f"For event name: {ambar_http_request.payload.event_name} " 81 | f"using handler: {projection_handler.__class__.__name__}", 82 | ex 83 | ) 84 | return Response(AmbarResponseFactory.retry_response(ex), content_type='application/json'), 200 85 | -------------------------------------------------------------------------------- /common/projection/projection_handler.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | from common.event.event import Event 3 | 4 | class ProjectionHandler(ABC): 5 | @abstractmethod 6 | async def project(self, event: Event) -> None: 7 | pass -------------------------------------------------------------------------------- /common/query/Query.md: -------------------------------------------------------------------------------- 1 | # Query Handler 2 | 3 | A Query Handler in an EventSourcing system is responsible for taking requests for information (queries) from end users or other systems (both internal and external), validating the query (e.g., checking if a user has the right permissions), and returning said information. 4 | 5 | To do this, Query Handlers will read state from read model / projection databases. 
Those databases are _filled_ up by Projections (see Projection directory). 6 | 7 | ### Advantages: 8 | 9 | * Performance: Since queries access a read-optimized database, response times are faster and more efficient. 10 | * Scalability: The query data storage (projection/read model databases) can be scaled separately from the Event Store used in Command Handlers. 11 | * Flexibility: Different read models can be tailored for various use cases, offering specialized views for reporting, analytics, or specific user interfaces. -------------------------------------------------------------------------------- /common/query/query.py: -------------------------------------------------------------------------------- 1 | from abc import ABC 2 | 3 | class Query(ABC): 4 | pass -------------------------------------------------------------------------------- /common/query/query_controller.py: -------------------------------------------------------------------------------- 1 | from typing import TypeVar, Generic, Any 2 | from common.projection.mongo_transactional_projection_operator import MongoTransactionalProjectionOperator 3 | from common.util.logger import log 4 | from common.query.query_handler import QueryHandler 5 | from common.query.query import Query 6 | 7 | T = TypeVar('T') 8 | 9 | class QueryController(Generic[T]): 10 | def __init__(self, mongo_transactional_projection_operator: MongoTransactionalProjectionOperator): 11 | self._mongo_transactional_projection_operator = mongo_transactional_projection_operator 12 | 13 | async def process_query(self, query: Query, query_handler: QueryHandler[T]) -> T: 14 | try: 15 | log.debug(f"Starting to process query: {query.__class__.__name__}") 16 | await self._mongo_transactional_projection_operator.start_transaction() 17 | result = await query_handler.handle_query(query) 18 | await self._mongo_transactional_projection_operator.commit_transaction() 19 | await self._mongo_transactional_projection_operator.abort_dangling_transactions_and_return_session_to_pool() 20 | 21 | log.debug(f"Successfully processed query: {query.__class__.__name__}") 22 | return result 23 | 24 | except Exception as error: 25 | await self._mongo_transactional_projection_operator.abort_dangling_transactions_and_return_session_to_pool() 26 | log.error(f"Exception in ProcessQuery: {error}", error=error) 27 | raise RuntimeError(f"Failed to process query: {error}") -------------------------------------------------------------------------------- /common/query/query_handler.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | from typing import TypeVar, Generic 3 | from common.projection.mongo_transactional_projection_operator import MongoTransactionalProjectionOperator 4 | from common.query.query import Query 5 | 6 | T = TypeVar('T') 7 | 8 | class QueryHandler(ABC, Generic[T]): 9 | def __init__(self, mongo_transactional_projection_operator: MongoTransactionalProjectionOperator): 10 | self._mongo_transactional_projection_operator = mongo_transactional_projection_operator 11 | 12 | @abstractmethod 13 | async def handle_query(self, query: Query) -> T: 14 | pass -------------------------------------------------------------------------------- /common/reaction/Reaction.md: -------------------------------------------------------------------------------- 1 | # Reaction 2 | 3 | A reaction performs a side effect in response to an Event. 
While projections update state, reactions may trigger actions like sending notifications, updating external systems, or initiating new workflows. Reactions also filter relevant events, allowing for targeted responses to specific state changes. Reactions are not only responsible for performing the side effect, but also for ensuring that the side effect is idempotent by writing the result of the side effect into the Event Store as an Event. 4 | 5 | ## How Reactions Work 6 | 7 | Reactions are built by listening to events, triggering side effects, and recording the result of those side effects to the Event Store. This behavior is captured by extending a `ReactionHandler`. 8 | 9 | ### How do events get sent from the Event Store to the Reaction Handlers? 10 | 11 | We use Ambar to read events from the Event Store and send them to the Reaction Handlers, via an HTTP endpoint. The HTTP endpoint is defined through extending a `ReactionController`, which will receive the events and send them to the corresponding `ReactionHandler`. 12 | 13 | #### How do we make sure that events are sent at least once, and in order per aggregate, to a Reaction Handler? 14 | 15 | Ambar takes care of this out of the box. All you have to take care of is making every `ReactionController` idempotent. To make sure reaction endpoints only process events once, the `ReactionHandler` has to commit the result of its side effect into the Event Store with a new Event. This way, if the reaction is triggered again, it will be able to find an existing event in the Event Store. Note that the Reaction Event has to have a deterministic event id, so that we can check whether the event has already been processed by the `ReactionHandler`. 16 | 17 | ### Where can I find the Ambar configuration? 18 | 19 | The Ambar configuration is located at `local-development/ambar-config.yaml`. 20 | 21 | ### In ambar-config.yaml, why are events ordered per correlation id, instead of aggregate id? 22 | 23 | Ordering events per correlation id retains the order of events per aggregate, but also retains the order of events across related aggregates. E.g., if you have an aggregate for November, and an aggregate for December, and the aggregate for December directly follows the aggregate for November (using the same correlation id), Ambar will give you the events in order per aggregate, but will also retain order across aggregates (Ambar will send November first, and December second, so you can react in order).
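As a minimal sketch of that idempotency pattern (placeholder event and side effect, not the shipped evaluate-application code): derive a deterministic id for the reaction's event, skip the side effect when that event already exists, and otherwise perform it and record the outcome. As with any new event, a real version would also need to be registered in the `Serializer` and `Deserializer` in `common/serialized_event/`.

```
from datetime import datetime, timezone

from common.event.event import Event
from common.event.transformation_event import TransformationEvent
from common.reaction.reaction_handler import ReactionHandler
from common.util.id_generator import IdGenerator


class WelcomeEmailSent(TransformationEvent):
    """Placeholder event recording that the side effect happened."""
    def transform_aggregate(self, aggregate):
        return aggregate


class SendWelcomeEmailReactionHandler(ReactionHandler):
    async def react(self, event: Event) -> None:
        # Deterministic id: reacting twice to the same source event produces the
        # same reaction event id, so the duplicate is caught below.
        reaction_event_id = IdGenerator.generate_deterministic_id(
            f"WelcomeEmailSent:{event.event_id}"
        )
        if await self._postgres_transactional_event_store.does_event_already_exist(reaction_event_id):
            return  # this event was already reacted to

        print(f"Pretend we emailed the member behind {event.aggregate_id}")  # placeholder side effect

        # Record the outcome so redeliveries of the source event become no-ops.
        await self._postgres_transactional_event_store.save_event(
            WelcomeEmailSent(
                event_id=reaction_event_id,
                aggregate_id=event.aggregate_id,
                aggregate_version=event.aggregate_version + 1,
                causation_id=event.event_id,
                correlation_id=event.correlation_id,
                recorded_on=datetime.now(timezone.utc),
            )
        )
```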
24 | 25 | -------------------------------------------------------------------------------- /common/reaction/reaction_controller.py: -------------------------------------------------------------------------------- 1 | from flask import Response 2 | 3 | from common.ambar.ambar_http_request import AmbarHttpRequest 4 | from common.ambar.ambar_response_factory import AmbarResponseFactory 5 | from common.event_store.postgres_transactional_event_store import PostgresTransactionalEventStore 6 | from common.projection.mongo_transactional_projection_operator import MongoTransactionalProjectionOperator 7 | from common.serialized_event.deserializer import Deserializer 8 | from common.util.logger import log 9 | from common.reaction.reaction_handler import ReactionHandler 10 | 11 | 12 | class ReactionController: 13 | def __init__( 14 | self, 15 | postgres_transactional_event_store: PostgresTransactionalEventStore, 16 | mongo_transactional_projection_operator: MongoTransactionalProjectionOperator, 17 | deserializer: Deserializer 18 | ): 19 | self._postgres_transactional_event_store = postgres_transactional_event_store 20 | self._mongo_transactional_projection_operator = mongo_transactional_projection_operator 21 | self._deserializer = deserializer 22 | 23 | async def process_reaction_http_request( 24 | self, 25 | ambar_http_request: AmbarHttpRequest, 26 | reaction_handler: ReactionHandler 27 | ) -> tuple[Response, int]: 28 | try: 29 | log.debug( 30 | f"Starting to process reaction for event name: {ambar_http_request.payload.event_name} " 31 | f"using handler: {reaction_handler.__class__.__name__}" 32 | ) 33 | 34 | await self._postgres_transactional_event_store.begin_transaction() 35 | await self._mongo_transactional_projection_operator.start_transaction() 36 | 37 | await reaction_handler.react( 38 | self._deserializer.deserialize(ambar_http_request.payload) 39 | ) 40 | 41 | await self._postgres_transactional_event_store.commit_transaction() 42 | await self._mongo_transactional_projection_operator.commit_transaction() 43 | 44 | await self._postgres_transactional_event_store.abort_dangling_transactions_and_return_connection_to_pool() 45 | await self._mongo_transactional_projection_operator.abort_dangling_transactions_and_return_session_to_pool() 46 | 47 | log.debug( 48 | f"Reaction successfully processed for event name: {ambar_http_request.payload.event_name} " 49 | f"using handler: {reaction_handler.__class__.__name__}" 50 | ) 51 | return Response(AmbarResponseFactory.success_response(), content_type='application/json'), 200 52 | 53 | except Exception as error: 54 | if isinstance(error, ValueError) and str(error).startswith('Unknown event type'): 55 | await self._postgres_transactional_event_store.abort_dangling_transactions_and_return_connection_to_pool() 56 | await self._mongo_transactional_projection_operator.abort_dangling_transactions_and_return_session_to_pool() 57 | 58 | log.debug( 59 | f"Unknown event in reaction ignored for event name: {ambar_http_request.payload.event_name} " 60 | f"using handler: {reaction_handler.__class__.__name__}" 61 | ) 62 | return Response(AmbarResponseFactory.success_response(), content_type='application/json'), 200 63 | 64 | await self._postgres_transactional_event_store.abort_dangling_transactions_and_return_connection_to_pool() 65 | await self._mongo_transactional_projection_operator.abort_dangling_transactions_and_return_session_to_pool() 66 | 67 | log.error('Exception in ProcessReactionHttpRequest:', error=error) 68 | return 
Response(AmbarResponseFactory.retry_response(error), content_type='application/json'), 200 69 | -------------------------------------------------------------------------------- /common/reaction/reaction_handler.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | from common.event.event import Event 3 | from common.event_store.postgres_transactional_event_store import PostgresTransactionalEventStore 4 | 5 | class ReactionHandler(ABC): 6 | def __init__(self, postgres_transactional_event_store: PostgresTransactionalEventStore): 7 | self._postgres_transactional_event_store = postgres_transactional_event_store 8 | 9 | @abstractmethod 10 | async def react(self, event: Event) -> None: 11 | pass -------------------------------------------------------------------------------- /common/serialized_event/deserializer.py: -------------------------------------------------------------------------------- 1 | import json 2 | from datetime import datetime 3 | from domain.cooking_club.membership.event.application_submitted import ApplicationSubmitted 4 | from domain.cooking_club.membership.event.application_evaluated import ApplicationEvaluated 5 | from domain.cooking_club.membership.aggregate.membership import MembershipStatus 6 | from common.event.event import Event 7 | from common.serialized_event.serialized_event import SerializedEvent 8 | 9 | class Deserializer: 10 | def __init__(self): 11 | self._event_types = { 12 | 'CookingClub_Membership_ApplicationSubmitted': ApplicationSubmitted, 13 | 'CookingClub_Membership_ApplicationEvaluated': ApplicationEvaluated 14 | } 15 | 16 | def deserialize(self, serialized_event: SerializedEvent) -> Event: 17 | event_class = self._event_types.get(serialized_event.event_name) 18 | if not event_class: 19 | raise ValueError(f"Unknown event type: {serialized_event.event_name}") 20 | 21 | recorded_on = self._parse_datetime(serialized_event.recorded_on) 22 | payload = json.loads(serialized_event.json_payload) 23 | 24 | if event_class == ApplicationSubmitted: 25 | return ApplicationSubmitted( 26 | event_id=serialized_event.event_id, 27 | aggregate_id=serialized_event.aggregate_id, 28 | aggregate_version=serialized_event.aggregate_version, 29 | correlation_id=serialized_event.correlation_id, 30 | causation_id=serialized_event.causation_id, 31 | recorded_on=recorded_on, 32 | first_name=payload['firstName'], 33 | last_name=payload['lastName'], 34 | favorite_cuisine=payload['favoriteCuisine'], 35 | years_of_professional_experience=payload['yearsOfProfessionalExperience'], 36 | number_of_cooking_books_read=payload['numberOfCookingBooksRead'] 37 | ) 38 | elif event_class == ApplicationEvaluated: 39 | return ApplicationEvaluated( 40 | event_id=serialized_event.event_id, 41 | aggregate_id=serialized_event.aggregate_id, 42 | aggregate_version=serialized_event.aggregate_version, 43 | correlation_id=serialized_event.correlation_id, 44 | causation_id=serialized_event.causation_id, 45 | recorded_on=recorded_on, 46 | evaluation_outcome=MembershipStatus(payload['evaluationOutcome']) 47 | ) 48 | else: 49 | raise ValueError(f"Unknown event type: {serialized_event.event_name}") 50 | 51 | def _parse_datetime(self, date_str: str) -> datetime: 52 | if not date_str.endswith(' UTC'): 53 | raise ValueError(f"Invalid date format: {date_str}") 54 | return datetime.strptime(date_str[:-4], '%Y-%m-%d %H:%M:%S') --------------------------------------------------------------------------------
/common/serialized_event/serialized_event.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | from pydantic import BaseModel 3 | from typing import Optional 4 | 5 | class SerializedEvent(BaseModel): 6 | id: Optional[int] = None 7 | event_id: str 8 | aggregate_id: str 9 | causation_id: str 10 | correlation_id: str 11 | aggregate_version: int 12 | json_payload: str 13 | json_metadata: str 14 | recorded_on: str 15 | event_name: str 16 | 17 | class Config: 18 | from_attributes = True 19 | -------------------------------------------------------------------------------- /common/serialized_event/serializer.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | import json 3 | import pytz 4 | from domain.cooking_club.membership.event.application_submitted import ApplicationSubmitted 5 | from domain.cooking_club.membership.event.application_evaluated import ApplicationEvaluated 6 | from common.event.event import Event 7 | from common.serialized_event.serialized_event import SerializedEvent 8 | 9 | class Serializer: 10 | def serialize(self, event: Event) -> SerializedEvent: 11 | return SerializedEvent( 12 | event_id=event.event_id, 13 | aggregate_id=event.aggregate_id, 14 | aggregate_version=event.aggregate_version, 15 | correlation_id=event.correlation_id, 16 | causation_id=event.causation_id, 17 | recorded_on=self._format_datetime(event.recorded_on), 18 | event_name=self._determine_event_name(event), 19 | json_payload=self._create_json_payload(event), 20 | json_metadata='{}' 21 | ) 22 | 23 | def _format_datetime(self, dt: datetime) -> str: 24 | if dt.tzinfo is None: 25 | dt = pytz.UTC.localize(dt) 26 | else: 27 | dt = dt.astimezone(pytz.UTC) 28 | return dt.strftime('%Y-%m-%d %H:%M:%S UTC') 29 | 30 | def _determine_event_name(self, event: Event) -> str: 31 | if isinstance(event, ApplicationSubmitted): 32 | return 'CookingClub_Membership_ApplicationSubmitted' 33 | if isinstance(event, ApplicationEvaluated): 34 | return 'CookingClub_Membership_ApplicationEvaluated' 35 | raise ValueError(f"Unknown event type: {event.__class__.__name__}") 36 | 37 | def _create_json_payload(self, event: Event) -> str: 38 | if isinstance(event, ApplicationSubmitted): 39 | payload = { 40 | 'firstName': event.first_name, 41 | 'lastName': event.last_name, 42 | 'favoriteCuisine': event.favorite_cuisine, 43 | 'yearsOfProfessionalExperience': event.years_of_professional_experience, 44 | 'numberOfCookingBooksRead': event.number_of_cooking_books_read 45 | } 46 | elif isinstance(event, ApplicationEvaluated): 47 | payload = { 48 | 'evaluationOutcome': event.evaluation_outcome.value 49 | } 50 | else: 51 | raise ValueError(f"Unknown event type: {event.__class__.__name__}") 52 | 53 | return json.dumps(payload) -------------------------------------------------------------------------------- /common/util/id_generator.py: -------------------------------------------------------------------------------- 1 | import hashlib 2 | import secrets 3 | import base64 4 | 5 | class IdGenerator: 6 | ALPHANUMERIC_CHARACTERS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz' 7 | ID_LENGTH = 56 8 | 9 | @staticmethod 10 | def generate_deterministic_id(seed: str) -> str: 11 | if not seed: 12 | raise ValueError('Input string cannot be null or empty') 13 | 14 | first_hash = hashlib.sha256(seed.encode()).digest() 15 | second_hash = hashlib.sha256(first_hash).digest() 16 | combined_hash = first_hash + 
second_hash 17 | 18 | base64_encoded = base64.b64encode(combined_hash).decode() 19 | clean_id = ''.join(c for c in base64_encoded if c.isalnum()) 20 | 21 | return clean_id[:IdGenerator.ID_LENGTH] 22 | 23 | @staticmethod 24 | def generate_random_id() -> str: 25 | chars = [] 26 | for _ in range(IdGenerator.ID_LENGTH): 27 | random_byte = secrets.randbelow(len(IdGenerator.ALPHANUMERIC_CHARACTERS)) 28 | chars.append(IdGenerator.ALPHANUMERIC_CHARACTERS[random_byte]) 29 | 30 | return ''.join(chars) -------------------------------------------------------------------------------- /common/util/logger.py: -------------------------------------------------------------------------------- 1 | import structlog 2 | import traceback 3 | from typing import Any, Dict, Optional 4 | 5 | # Configure structlog 6 | structlog.configure( 7 | processors=[ 8 | structlog.processors.TimeStamper(fmt="iso"), 9 | structlog.processors.add_log_level, 10 | structlog.processors.StackInfoRenderer(), 11 | structlog.processors.format_exc_info, 12 | structlog.processors.JSONRenderer(), 13 | ], 14 | wrapper_class=structlog.BoundLogger, 15 | context_class=dict, 16 | logger_factory=structlog.PrintLoggerFactory(), 17 | cache_logger_on_first_use=True, 18 | ) 19 | 20 | 21 | class Logger: 22 | def __init__(self): 23 | self._logger = structlog.get_logger() 24 | 25 | def debug(self, message: str, context: Optional[Dict[str, Any]] = None) -> None: 26 | self._logger.debug(message, **(context or {})) 27 | 28 | def info(self, message: str, context: Optional[Dict[str, Any]] = None) -> None: 29 | self._logger.info(message, **(context or {})) 30 | 31 | def warn(self, message: str, error: Optional[Exception] = None, context: Optional[Dict[str, Any]] = None) -> None: 32 | error_context = {} 33 | if error: 34 | error_context = { 35 | "error": { 36 | "message": str(error), 37 | "stack": traceback.format_exc(), 38 | } 39 | } 40 | 41 | self._logger.warning(message, **(error_context | (context or {}))) 42 | 43 | def error(self, message: str, error: Optional[Exception] = None, context: Optional[Dict[str, Any]] = None) -> None: 44 | error_context = {} 45 | if error: 46 | error_context = { 47 | "error": { 48 | "message": str(error), 49 | "stack": traceback.format_exc(), 50 | } 51 | } 52 | 53 | self._logger.error(message, **(error_context | (context or {}))) 54 | 55 | 56 | # Global logger instance 57 | log = Logger() 58 | -------------------------------------------------------------------------------- /common/util/mongo_initializer.py: -------------------------------------------------------------------------------- 1 | from common.util.mongo_session_pool import MongoSessionPool 2 | from common.util.logger import log 3 | 4 | class MongoInitializer: 5 | def __init__( 6 | self, 7 | session_pool: MongoSessionPool, 8 | database_name: str 9 | ): 10 | self._client = session_pool.get_client() 11 | self._database_name = database_name 12 | 13 | async def initialize(self) -> None: 14 | log.info('Initializing MongoDB collections and indexes...') 15 | 16 | try: 17 | db = self._client[self._database_name] 18 | 19 | # Create collections 20 | log.info('Creating collections...') 21 | await self._ensure_collections(db) 22 | log.info('Collections created successfully') 23 | 24 | # Create indexes 25 | log.info('Creating indexes...') 26 | await self._create_indexes(db) 27 | log.info('Indexes created successfully') 28 | 29 | except Exception as error: 30 | log.error('Error initializing MongoDB:', error=error) 31 | raise 32 | 33 | async def _ensure_collections(self, db) -> None: 
34 | collections = [ 35 | 'CookingClub_MembersByCuisine_MembershipApplication', 36 | 'CookingClub_MembersByCuisine_Cuisine', 37 | 'ProjectionIdempotency_ProjectedEvent' 38 | ] 39 | 40 | for collection_name in collections: 41 | try: 42 | if collection_name not in db.list_collection_names(): 43 | db.create_collection(collection_name) 44 | log.debug(f"Collection {collection_name} created") 45 | else: 46 | log.debug(f"Collection {collection_name} already exists") 47 | except Exception as error: 48 | log.error(f"Error ensuring collection {collection_name}:", error=error) 49 | raise 50 | 51 | async def _create_indexes(self, db) -> None: 52 | try: 53 | membership_application = db['CookingClub_MembersByCuisine_MembershipApplication'] 54 | membership_application.create_index( 55 | [('favorite_cuisine', 1)], 56 | background=True, 57 | name='favoriteCuisine_asc' 58 | ) 59 | 60 | projection_idempotency = db['ProjectionIdempotency_ProjectedEvent'] 61 | projection_idempotency.create_index( 62 | [('eventId', 1), ('projectionName', 1)], 63 | unique=True, 64 | background=True, 65 | name='eventId_ProjectionName_unique' 66 | ) 67 | 68 | log.debug('Indexes created') 69 | except Exception as error: 70 | log.error('Error creating indexes:', error=error) 71 | raise -------------------------------------------------------------------------------- /common/util/mongo_session_pool.py: -------------------------------------------------------------------------------- 1 | from pymongo import MongoClient 2 | from pymongo.server_api import ServerApi 3 | 4 | 5 | class MongoSessionPool: 6 | def __init__(self, connection_string: str): 7 | settings = { 8 | 'maxPoolSize': 20, 9 | 'minPoolSize': 5, 10 | 'maxIdleTimeMS': 10 * 60 * 1000, # 10 minutes 11 | 'waitQueueTimeoutMS': 2000, 12 | 'replicaSet': 'rs0', 13 | 'server_api': ServerApi('1'), 14 | } 15 | 16 | self._transactional_client = MongoClient(connection_string, **settings) 17 | 18 | async def start_session(self): 19 | self._transactional_client.admin.command('ping') # Ensure connected 20 | return self._transactional_client.start_session() 21 | 22 | def get_client(self) -> MongoClient: 23 | return self._transactional_client -------------------------------------------------------------------------------- /common/util/postgres_connection_pool.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | import psycopg2 3 | from psycopg2.pool import SimpleConnectionPool 4 | 5 | 6 | class PostgresConnectionPool: 7 | def __init__(self, connection_string: str): 8 | self._pool_config = { 9 | 'dsn': connection_string, 10 | 'minconn': 5, 11 | 'maxconn': 10, 12 | } 13 | self._pool: Optional[SimpleConnectionPool] = None 14 | self._initialize_pool() 15 | 16 | def _initialize_pool(self) -> None: 17 | self._pool = SimpleConnectionPool(**self._pool_config) 18 | 19 | async def open_connection(self) -> psycopg2.extensions.connection: 20 | if not self._pool: 21 | raise RuntimeError("Connection pool not initialized") 22 | 23 | try: 24 | return self._pool.getconn() 25 | except Exception as e: 26 | raise RuntimeError(f"Failed to open database connection: {e}") 27 | 28 | async def return_connection(self, connection) -> None: 29 | if not self._pool: 30 | raise RuntimeError("Connection pool not initialized") 31 | 32 | try: 33 | self._pool.putconn(connection) 34 | except Exception as e: 35 | raise RuntimeError(f"Failed to return connection to the pool: {e}") -------------------------------------------------------------------------------- 
/common/util/postgres_initializer.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | from common.util.postgres_connection_pool import PostgresConnectionPool 3 | from common.util.logger import log 4 | 5 | class PostgresInitializer: 6 | def __init__( 7 | self, 8 | connection_pool: PostgresConnectionPool, 9 | event_store_database_name: str, 10 | event_store_table: str, 11 | replication_username: str, 12 | replication_password: str, 13 | replication_publication: str 14 | ): 15 | self._connection_pool = connection_pool 16 | self._database_name = event_store_database_name 17 | self._table = event_store_table 18 | self._replication_username = replication_username 19 | self._replication_password = replication_password 20 | self._publication = replication_publication 21 | 22 | async def initialize(self) -> None: 23 | client = await self._connection_pool.open_connection() 24 | try: 25 | await self._create_table(client) 26 | await self._create_replication_user(client) 27 | await self._grant_permissions(client) 28 | await self._create_publication(client) 29 | await self._create_indexes(client) 30 | finally: 31 | await self._connection_pool.return_connection(client) 32 | 33 | async def _execute_statement_ignore_errors(self, client: Any, sql_statement: str) -> None: 34 | try: 35 | log.info(f"Executing SQL: {sql_statement}") 36 | with client.cursor() as cursor: 37 | cursor.execute(sql_statement) 38 | client.commit() 39 | except Exception as error: 40 | log.warn("Caught exception when executing SQL statement.", error=error) 41 | 42 | async def _create_table(self, client: Any) -> None: 43 | log.info(f"Creating table {self._table}") 44 | sql = f""" 45 | CREATE TABLE IF NOT EXISTS {self._table} ( 46 | id BIGSERIAL NOT NULL, 47 | event_id TEXT NOT NULL UNIQUE, 48 | aggregate_id TEXT NOT NULL, 49 | aggregate_version BIGINT NOT NULL, 50 | causation_id TEXT NOT NULL, 51 | correlation_id TEXT NOT NULL, 52 | recorded_on TEXT NOT NULL, 53 | event_name TEXT NOT NULL, 54 | json_payload TEXT NOT NULL, 55 | json_metadata TEXT NOT NULL, 56 | PRIMARY KEY (id) 57 | ); 58 | """ 59 | await self._execute_statement_ignore_errors(client, sql) 60 | 61 | async def _create_replication_user(self, client: Any) -> None: 62 | log.info('Creating replication user') 63 | sql = f""" 64 | CREATE USER {self._replication_username} REPLICATION LOGIN PASSWORD '{self._replication_password}'; 65 | """ 66 | await self._execute_statement_ignore_errors(client, sql) 67 | 68 | async def _grant_permissions(self, client: Any) -> None: 69 | log.info('Granting permissions to replication user') 70 | sql = f""" 71 | GRANT CONNECT ON DATABASE "{self._database_name}" TO {self._replication_username}; 72 | """ 73 | await self._execute_statement_ignore_errors(client, sql) 74 | 75 | log.info('Granting select to replication user') 76 | sql = f""" 77 | GRANT SELECT ON TABLE {self._table} TO {self._replication_username}; 78 | """ 79 | await self._execute_statement_ignore_errors(client, sql) 80 | 81 | async def _create_publication(self, client: Any) -> None: 82 | log.info('Creating publication for table') 83 | sql = f""" 84 | CREATE PUBLICATION {self._publication} FOR TABLE {self._table}; 85 | """ 86 | await self._execute_statement_ignore_errors(client, sql) 87 | 88 | async def _create_indexes(self, client: Any) -> None: 89 | indexes = [ 90 | ( 91 | 'aggregate id, aggregate version index', 92 | f"CREATE UNIQUE INDEX event_store_idx_event_aggregate_id_version ON {self._table}(aggregate_id, 
aggregate_version);" 93 | ), 94 | ( 95 | 'id index', 96 | f"CREATE UNIQUE INDEX event_store_idx_event_id ON {self._table}(event_id);" 97 | ), 98 | ( 99 | 'causation index', 100 | f"CREATE INDEX event_store_idx_event_causation_id ON {self._table}(causation_id);" 101 | ), 102 | ( 103 | 'correlation index', 104 | f"CREATE INDEX event_store_idx_event_correlation_id ON {self._table}(correlation_id);" 105 | ), 106 | ( 107 | 'recording index', 108 | f"CREATE INDEX event_store_idx_occurred_on ON {self._table}(recorded_on);" 109 | ), 110 | ( 111 | 'event name index', 112 | f"CREATE INDEX event_store_idx_event_name ON {self._table}(event_name);" 113 | ) 114 | ] 115 | 116 | for index_name, sql in indexes: 117 | log.info(f'Creating {index_name}') 118 | await self._execute_statement_ignore_errors(client, sql) -------------------------------------------------------------------------------- /container.py: -------------------------------------------------------------------------------- 1 | import os 2 | from common.util.postgres_connection_pool import PostgresConnectionPool 3 | from common.util.mongo_session_pool import MongoSessionPool 4 | from common.serialized_event.deserializer import Deserializer 5 | from common.serialized_event.serializer import Serializer 6 | from common.event_store.postgres_transactional_event_store import PostgresTransactionalEventStore 7 | from common.projection.mongo_transactional_projection_operator import MongoTransactionalProjectionOperator 8 | from common.util.mongo_initializer import MongoInitializer 9 | from common.util.postgres_initializer import PostgresInitializer 10 | from domain.cooking_club.membership.command.submit_application.submit_application_command_controller import \ 11 | SubmitApplicationCommandController 12 | from domain.cooking_club.membership.command.submit_application.submit_application_command_handler import \ 13 | SubmitApplicationCommandHandler 14 | from domain.cooking_club.membership.projection.members_by_cuisine.members_by_cuisine_projection_controller import \ 15 | MembersByCuisineProjectionController 16 | from domain.cooking_club.membership.reaction.evaluate_application.evaluate_application_reaction_handler import \ 17 | EvaluateApplicationReactionHandler 18 | from domain.cooking_club.membership.reaction.evaluate_application.evaluate_application_reaction_controller import \ 19 | EvaluateApplicationReactionController 20 | from domain.cooking_club.membership.projection.members_by_cuisine.members_by_cuisine_projection_handler import \ 21 | MembersByCuisineProjectionHandler 22 | from domain.cooking_club.membership.projection.members_by_cuisine.membership_application_repository import \ 23 | MembershipApplicationRepository 24 | from domain.cooking_club.membership.projection.members_by_cuisine.cuisine_repository import CuisineRepository 25 | from domain.cooking_club.membership.query.members_by_cuisine.members_by_cuisine_query_controller import \ 26 | MembersByCuisineQueryController 27 | from domain.cooking_club.membership.query.members_by_cuisine.members_by_cuisine_query_handler import \ 28 | MembersByCuisineQueryHandler 29 | 30 | 31 | class SharedContainer: 32 | def __init__(self): 33 | # Connection strings 34 | postgres_connection_string = f"postgresql://{os.getenv('EVENT_STORE_USER')}:{os.getenv('EVENT_STORE_PASSWORD')}@{os.getenv('EVENT_STORE_HOST')}:{os.getenv('EVENT_STORE_PORT')}/{os.getenv('EVENT_STORE_DATABASE_NAME')}" 35 | mongo_connection_string = 
f"mongodb://{os.getenv('MONGODB_PROJECTION_DATABASE_USERNAME')}:{os.getenv('MONGODB_PROJECTION_DATABASE_PASSWORD')}@{os.getenv('MONGODB_PROJECTION_HOST')}:{os.getenv('MONGODB_PROJECTION_PORT')}/{os.getenv('MONGODB_PROJECTION_DATABASE_NAME')}?authSource=admin" 36 | 37 | # Core services 38 | self.postgres_connection_pool = PostgresConnectionPool( 39 | connection_string=postgres_connection_string 40 | ) 41 | self.mongo_session_pool = MongoSessionPool( 42 | connection_string=mongo_connection_string 43 | ) 44 | self.serializer = Serializer() 45 | self.deserializer = Deserializer() 46 | 47 | # Initializers 48 | self.postgres_initializer = PostgresInitializer( 49 | connection_pool=self.postgres_connection_pool, 50 | event_store_database_name=os.getenv('EVENT_STORE_DATABASE_NAME'), 51 | event_store_table=os.getenv('EVENT_STORE_CREATE_TABLE_WITH_NAME'), 52 | replication_username=os.getenv('EVENT_STORE_CREATE_REPLICATION_USER_WITH_USERNAME'), 53 | replication_password=os.getenv('EVENT_STORE_CREATE_REPLICATION_USER_WITH_PASSWORD'), 54 | replication_publication=os.getenv('EVENT_STORE_CREATE_REPLICATION_PUBLICATION') 55 | ) 56 | self.mongo_initializer = MongoInitializer( 57 | session_pool=self.mongo_session_pool, 58 | database_name=os.getenv('MONGODB_PROJECTION_DATABASE_NAME') 59 | ) 60 | 61 | 62 | class RequestContainer: 63 | def __init__(self, shared_container: SharedContainer): 64 | self.shared_container = shared_container 65 | 66 | self._postgres_transactional_event_store = PostgresTransactionalEventStore( 67 | connection_pool=self.shared_container.postgres_connection_pool, 68 | serializer=self.shared_container.serializer, 69 | deserializer=self.shared_container.deserializer, 70 | event_store_table=os.getenv('EVENT_STORE_CREATE_TABLE_WITH_NAME') 71 | ) 72 | 73 | self._mongo_transactional_projection_operator = MongoTransactionalProjectionOperator( 74 | session_pool=self.shared_container.mongo_session_pool, 75 | database_name=os.getenv('MONGODB_PROJECTION_DATABASE_NAME') 76 | ) 77 | 78 | self._cuisine_repository = CuisineRepository( 79 | mongo_operator=self._mongo_transactional_projection_operator 80 | ) 81 | 82 | self._membership_application_repository = MembershipApplicationRepository( 83 | mongo_operator=self._mongo_transactional_projection_operator 84 | ) 85 | 86 | self._submit_application_command_handler = SubmitApplicationCommandHandler( 87 | postgres_transactional_event_store=self._postgres_transactional_event_store 88 | ) 89 | 90 | self._evaluate_application_reaction_handler = EvaluateApplicationReactionHandler( 91 | postgres_transactional_event_store=self._postgres_transactional_event_store 92 | ) 93 | 94 | self._members_by_cuisine_projection_handler = MembersByCuisineProjectionHandler( 95 | cuisine_repository=self._cuisine_repository, 96 | membership_application_repository=self._membership_application_repository 97 | ) 98 | 99 | self._members_by_cuisine_query_handler = MembersByCuisineQueryHandler( 100 | mongo_operator=self._mongo_transactional_projection_operator, 101 | cuisine_repository=self._cuisine_repository 102 | ) 103 | 104 | self._submit_application_command_controller = SubmitApplicationCommandController( 105 | event_store=self._postgres_transactional_event_store, 106 | mongo_operator=self._mongo_transactional_projection_operator, 107 | submit_application_command_handler=self._submit_application_command_handler 108 | ) 109 | 110 | self._evaluate_application_reaction_controller = EvaluateApplicationReactionController( 111 | event_store=self._postgres_transactional_event_store, 112 | 
mongo_operator=self._mongo_transactional_projection_operator, 113 | deserializer=self.shared_container.deserializer, 114 | evaluate_application_reaction_handler=self._evaluate_application_reaction_handler 115 | ) 116 | 117 | self._members_by_cuisine_query_controller = MembersByCuisineQueryController( 118 | mongo_operator=self._mongo_transactional_projection_operator, 119 | members_by_cuisine_query_handler=self._members_by_cuisine_query_handler 120 | ) 121 | 122 | self._members_by_cuisine_projection_controller = MembersByCuisineProjectionController( 123 | mongo_operator=self._mongo_transactional_projection_operator, 124 | deserializer=self.shared_container.deserializer, 125 | members_by_cuisine_projection_handler=self._members_by_cuisine_projection_handler 126 | ) 127 | 128 | def submit_application_controller(self): 129 | return self._submit_application_command_controller 130 | 131 | def evaluate_application_controller(self): 132 | return self._evaluate_application_reaction_controller 133 | 134 | def members_by_cuisine_query_controller(self): 135 | return self._members_by_cuisine_query_controller 136 | 137 | def members_by_cuisine_projection_controller(self): 138 | return self._members_by_cuisine_projection_controller -------------------------------------------------------------------------------- /domain/cooking_club/membership/aggregate/membership.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | from dataclasses import dataclass 3 | from common.aggregate.aggregate import Aggregate 4 | 5 | class MembershipStatus(Enum): 6 | REQUESTED = "Requested" 7 | APPROVED = "Approved" 8 | REJECTED = "Rejected" 9 | 10 | @dataclass 11 | class Membership(Aggregate): 12 | first_name: str 13 | last_name: str 14 | status: MembershipStatus 15 | 16 | def __init__(self, aggregate_id: str, aggregate_version: int, first_name: str, last_name: str, status: MembershipStatus): 17 | super().__init__(aggregate_id, aggregate_version) 18 | self.first_name = first_name 19 | self.last_name = last_name 20 | self.status = status -------------------------------------------------------------------------------- /domain/cooking_club/membership/command/submit_application/submit_application_command.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from common.command.command import Command 3 | 4 | @dataclass 5 | class SubmitApplicationCommand(Command): 6 | first_name: str 7 | last_name: str 8 | favorite_cuisine: str 9 | years_of_professional_experience: int 10 | number_of_cooking_books_read: int -------------------------------------------------------------------------------- /domain/cooking_club/membership/command/submit_application/submit_application_command_controller.py: -------------------------------------------------------------------------------- 1 | from flask import Request, Response, jsonify 2 | from pydantic import BaseModel, Field 3 | from common.command.command_controller import CommandController 4 | from common.event_store.postgres_transactional_event_store import PostgresTransactionalEventStore 5 | from common.projection.mongo_transactional_projection_operator import MongoTransactionalProjectionOperator 6 | from domain.cooking_club.membership.command.submit_application.submit_application_command import SubmitApplicationCommand 7 | from domain.cooking_club.membership.command.submit_application.submit_application_command_handler import SubmitApplicationCommandHandler 8 | 9 | 10 
| class SubmitApplicationRequest(BaseModel): 11 | first_name: str = Field(..., alias="firstName", min_length=1, max_length=100) 12 | last_name: str = Field(..., alias="lastName", min_length=1, max_length=100) 13 | favorite_cuisine: str = Field(..., alias="favoriteCuisine", min_length=1, max_length=100) 14 | years_of_professional_experience: int = Field(..., alias="yearsOfProfessionalExperience", ge=0, le=100) 15 | number_of_cooking_books_read: int = Field(..., alias="numberOfCookingBooksRead", ge=0) 16 | 17 | 18 | class SubmitApplicationCommandController(CommandController): 19 | def __init__( 20 | self, 21 | event_store: PostgresTransactionalEventStore, 22 | mongo_operator: MongoTransactionalProjectionOperator, 23 | submit_application_command_handler: SubmitApplicationCommandHandler 24 | ): 25 | super().__init__(event_store, mongo_operator) 26 | self._submit_application_command_handler = submit_application_command_handler 27 | 28 | async def handle_submit_application(self, request: Request) -> tuple[Response, int]: 29 | session_token = request.headers.get('X-With-Session-Token') 30 | if not session_token: 31 | return jsonify({'error': 'Session token is required'}), 400 32 | 33 | request_data = SubmitApplicationRequest(**request.get_json()) 34 | 35 | command = SubmitApplicationCommand( 36 | first_name=request_data.first_name, 37 | last_name=request_data.last_name, 38 | favorite_cuisine=request_data.favorite_cuisine, 39 | years_of_professional_experience=request_data.years_of_professional_experience, 40 | number_of_cooking_books_read=request_data.number_of_cooking_books_read 41 | ) 42 | 43 | await self.process_command(command, self._submit_application_command_handler) 44 | return jsonify({}), 200 -------------------------------------------------------------------------------- /domain/cooking_club/membership/command/submit_application/submit_application_command_handler.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | from common.command.command_handler import CommandHandler 3 | from common.util.id_generator import IdGenerator 4 | from domain.cooking_club.membership.event.application_submitted import ApplicationSubmitted 5 | from domain.cooking_club.membership.command.submit_application.submit_application_command import SubmitApplicationCommand 6 | 7 | class SubmitApplicationCommandHandler(CommandHandler): 8 | async def handle_command(self, command: SubmitApplicationCommand) -> None: 9 | event_id = IdGenerator.generate_random_id() 10 | aggregate_id = IdGenerator.generate_random_id() 11 | 12 | application_submitted = ApplicationSubmitted( 13 | event_id=event_id, 14 | aggregate_id=aggregate_id, 15 | aggregate_version=1, 16 | correlation_id=event_id, 17 | causation_id=event_id, 18 | recorded_on=datetime.utcnow(), 19 | first_name=command.first_name, 20 | last_name=command.last_name, 21 | favorite_cuisine=command.favorite_cuisine, 22 | years_of_professional_experience=command.years_of_professional_experience, 23 | number_of_cooking_books_read=command.number_of_cooking_books_read 24 | ) 25 | 26 | await self._postgres_transactional_event_store.save_event(application_submitted) -------------------------------------------------------------------------------- /domain/cooking_club/membership/event/application_evaluated.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | from common.event.transformation_event import TransformationEvent 3 | from 
domain.cooking_club.membership.aggregate.membership import Membership, MembershipStatus 4 | 5 | class ApplicationEvaluated(TransformationEvent[Membership]): 6 | def __init__( 7 | self, 8 | event_id: str, 9 | aggregate_id: str, 10 | aggregate_version: int, 11 | correlation_id: str, 12 | causation_id: str, 13 | recorded_on: datetime, 14 | evaluation_outcome: MembershipStatus, 15 | ): 16 | super().__init__( 17 | event_id=event_id, 18 | aggregate_id=aggregate_id, 19 | aggregate_version=aggregate_version, 20 | correlation_id=correlation_id, 21 | causation_id=causation_id, 22 | recorded_on=recorded_on 23 | ) 24 | self.evaluation_outcome = evaluation_outcome 25 | 26 | def transform_aggregate(self, aggregate: Membership) -> Membership: 27 | return Membership( 28 | aggregate_id=self.aggregate_id, 29 | aggregate_version=self.aggregate_version, 30 | first_name=aggregate.first_name, 31 | last_name=aggregate.last_name, 32 | status=self.evaluation_outcome 33 | ) -------------------------------------------------------------------------------- /domain/cooking_club/membership/event/application_submitted.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | from common.event.creation_event import CreationEvent 3 | from domain.cooking_club.membership.aggregate.membership import Membership, MembershipStatus 4 | 5 | class ApplicationSubmitted(CreationEvent[Membership]): 6 | def __init__( 7 | self, 8 | event_id: str, 9 | aggregate_id: str, 10 | aggregate_version: int, 11 | correlation_id: str, 12 | causation_id: str, 13 | recorded_on: datetime, 14 | first_name: str, 15 | last_name: str, 16 | favorite_cuisine: str, 17 | years_of_professional_experience: int, 18 | number_of_cooking_books_read: int, 19 | ): 20 | super().__init__( 21 | event_id=event_id, 22 | aggregate_id=aggregate_id, 23 | aggregate_version=aggregate_version, 24 | correlation_id=correlation_id, 25 | causation_id=causation_id, 26 | recorded_on=recorded_on 27 | ) 28 | self.first_name = first_name 29 | self.last_name = last_name 30 | self.favorite_cuisine = favorite_cuisine 31 | self.years_of_professional_experience = years_of_professional_experience 32 | self.number_of_cooking_books_read = number_of_cooking_books_read 33 | 34 | def create_aggregate(self) -> Membership: 35 | return Membership( 36 | aggregate_id=self.aggregate_id, 37 | aggregate_version=self.aggregate_version, 38 | first_name=self.first_name, 39 | last_name=self.last_name, 40 | status=MembershipStatus.REQUESTED 41 | ) -------------------------------------------------------------------------------- /domain/cooking_club/membership/projection/members_by_cuisine/cuisine.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from typing import List 3 | 4 | @dataclass 5 | class Cuisine: 6 | _id: str # needs to be _id to be recognized as an _id field by MongoDB 7 | member_names: List[str] -------------------------------------------------------------------------------- /domain/cooking_club/membership/projection/members_by_cuisine/cuisine_repository.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, List 2 | from common.projection.mongo_transactional_projection_operator import MongoTransactionalProjectionOperator 3 | from domain.cooking_club.membership.projection.members_by_cuisine.cuisine import Cuisine 4 | 5 | 6 | class CuisineRepository: 7 | COLLECTION_NAME = 
'CookingClub_MembersByCuisine_Cuisine' 8 | 9 | def __init__(self, mongo_operator: MongoTransactionalProjectionOperator): 10 | self._mongo_operator = mongo_operator 11 | 12 | async def save(self, cuisine: Cuisine) -> None: 13 | await self._mongo_operator.replace_one( 14 | self.COLLECTION_NAME, 15 | {'_id': cuisine._id}, 16 | { 17 | '_id': cuisine._id, 18 | 'member_names': cuisine.member_names 19 | }, 20 | {'upsert': True} 21 | ) 22 | 23 | async def find_one_by_id(self, _id: str) -> Optional[Cuisine]: 24 | results = await self._mongo_operator.find( 25 | self.COLLECTION_NAME, 26 | {'_id': _id} 27 | ) 28 | if not results: 29 | return None 30 | 31 | doc = results[0] 32 | return Cuisine( 33 | _id=doc['_id'], 34 | member_names=doc['member_names'] 35 | ) 36 | 37 | async def find_all(self) -> List[Cuisine]: 38 | docs = await self._mongo_operator.find(self.COLLECTION_NAME, {}) 39 | return [ 40 | Cuisine( 41 | _id=doc['_id'], 42 | member_names=doc['member_names'] 43 | ) 44 | for doc in docs 45 | ] -------------------------------------------------------------------------------- /domain/cooking_club/membership/projection/members_by_cuisine/members_by_cuisine_projection_controller.py: -------------------------------------------------------------------------------- 1 | from flask import Request, Response 2 | from common.projection.projection_controller import ProjectionController 3 | from common.projection.mongo_transactional_projection_operator import MongoTransactionalProjectionOperator 4 | from common.serialized_event.deserializer import Deserializer 5 | from common.ambar.ambar_http_request import AmbarHttpRequest 6 | from domain.cooking_club.membership.projection.members_by_cuisine.members_by_cuisine_projection_handler import MembersByCuisineProjectionHandler 7 | 8 | class MembersByCuisineProjectionController(ProjectionController): 9 | def __init__( 10 | self, 11 | mongo_operator: MongoTransactionalProjectionOperator, 12 | deserializer: Deserializer, 13 | members_by_cuisine_projection_handler: MembersByCuisineProjectionHandler, 14 | ): 15 | super().__init__(mongo_operator, deserializer) 16 | self._members_by_cuisine_projection_handler = members_by_cuisine_projection_handler 17 | 18 | async def handle_projection_request(self, request: Request) -> tuple[Response, int]: 19 | return await self.process_projection_http_request( 20 | AmbarHttpRequest.model_validate(request.get_json()), 21 | self._members_by_cuisine_projection_handler, 22 | 'CookingClub_MembersByCuisine' 23 | ) -------------------------------------------------------------------------------- /domain/cooking_club/membership/projection/members_by_cuisine/members_by_cuisine_projection_handler.py: -------------------------------------------------------------------------------- 1 | from typing import cast 2 | from common.projection.projection_handler import ProjectionHandler 3 | from common.event.event import Event 4 | from domain.cooking_club.membership.event.application_submitted import ApplicationSubmitted 5 | from domain.cooking_club.membership.event.application_evaluated import ApplicationEvaluated 6 | from domain.cooking_club.membership.aggregate.membership import MembershipStatus 7 | from domain.cooking_club.membership.projection.members_by_cuisine.membership_application import MembershipApplication 8 | from domain.cooking_club.membership.projection.members_by_cuisine.membership_application_repository import MembershipApplicationRepository 9 | from domain.cooking_club.membership.projection.members_by_cuisine.cuisine_repository import 
CuisineRepository 10 | from domain.cooking_club.membership.projection.members_by_cuisine.cuisine import Cuisine 11 | 12 | class MembersByCuisineProjectionHandler(ProjectionHandler): 13 | def __init__( 14 | self, 15 | cuisine_repository: CuisineRepository, 16 | membership_application_repository: MembershipApplicationRepository, 17 | ): 18 | self._cuisine_repository = cuisine_repository 19 | self._membership_application_repository = membership_application_repository 20 | 21 | async def project(self, event: Event) -> None: 22 | if isinstance(event, ApplicationSubmitted): 23 | await self._handle_application_submitted(event) 24 | elif isinstance(event, ApplicationEvaluated): 25 | await self._handle_application_evaluated(event) 26 | 27 | async def _handle_application_submitted(self, event: ApplicationSubmitted) -> None: 28 | await self._membership_application_repository.save( 29 | MembershipApplication( 30 | _id=event.aggregate_id, 31 | first_name=event.first_name, 32 | last_name=event.last_name, 33 | favorite_cuisine=event.favorite_cuisine 34 | ) 35 | ) 36 | 37 | async def _handle_application_evaluated(self, event: ApplicationEvaluated) -> None: 38 | if event.evaluation_outcome != MembershipStatus.APPROVED: 39 | return 40 | 41 | membership_application = await self._membership_application_repository.find_one_by_id(event.aggregate_id) 42 | if not membership_application: 43 | raise ValueError('Membership application not found') 44 | 45 | cuisine = await self._cuisine_repository.find_one_by_id(membership_application.favorite_cuisine) 46 | if not cuisine: 47 | cuisine = Cuisine( 48 | _id=membership_application.favorite_cuisine, 49 | member_names=[] 50 | ) 51 | 52 | member_name = f"{membership_application.first_name} {membership_application.last_name}" 53 | cuisine.member_names.append(member_name) 54 | 55 | await self._cuisine_repository.save(cuisine) -------------------------------------------------------------------------------- /domain/cooking_club/membership/projection/members_by_cuisine/membership_application.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | 3 | @dataclass 4 | class MembershipApplication: 5 | _id: str # needs to be _id to be recognized as an _id field by MongoDB 6 | first_name: str 7 | last_name: str 8 | favorite_cuisine: str -------------------------------------------------------------------------------- /domain/cooking_club/membership/projection/members_by_cuisine/membership_application_repository.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | from common.projection.mongo_transactional_projection_operator import MongoTransactionalProjectionOperator 3 | from domain.cooking_club.membership.projection.members_by_cuisine.membership_application import MembershipApplication 4 | 5 | 6 | class MembershipApplicationRepository: 7 | COLLECTION_NAME = 'CookingClub_MembersByCuisine_MembershipApplication' 8 | 9 | def __init__(self, mongo_operator: MongoTransactionalProjectionOperator): 10 | self._mongo_operator = mongo_operator 11 | 12 | async def save(self, membership_application: MembershipApplication) -> None: 13 | await self._mongo_operator.replace_one( 14 | self.COLLECTION_NAME, 15 | {'_id': membership_application._id}, 16 | { 17 | '_id': membership_application._id, 18 | 'first_name': membership_application.first_name, 19 | 'last_name': membership_application.last_name, 20 | 'favorite_cuisine': membership_application.favorite_cuisine 21 
| }, 22 | {'upsert': True} 23 | ) 24 | 25 | async def find_one_by_id(self, _id: str) -> Optional[MembershipApplication]: 26 | results = await self._mongo_operator.find( 27 | self.COLLECTION_NAME, 28 | {'_id': _id} 29 | ) 30 | if not results: 31 | return None 32 | 33 | doc = results[0] 34 | return MembershipApplication( 35 | _id=doc['_id'], 36 | first_name=doc['first_name'], 37 | last_name=doc['last_name'], 38 | favorite_cuisine=doc['favorite_cuisine'] 39 | ) -------------------------------------------------------------------------------- /domain/cooking_club/membership/query/members_by_cuisine/members_by_cuisine_query.py: -------------------------------------------------------------------------------- 1 | from common.query.query import Query 2 | 3 | class MembersByCuisineQuery(Query): 4 | pass -------------------------------------------------------------------------------- /domain/cooking_club/membership/query/members_by_cuisine/members_by_cuisine_query_controller.py: -------------------------------------------------------------------------------- 1 | from flask import Request, Response, jsonify 2 | from common.query.query_controller import QueryController 3 | from common.projection.mongo_transactional_projection_operator import MongoTransactionalProjectionOperator 4 | from domain.cooking_club.membership.query.members_by_cuisine.members_by_cuisine_query import MembersByCuisineQuery 5 | from domain.cooking_club.membership.query.members_by_cuisine.members_by_cuisine_query_handler import MembersByCuisineQueryHandler 6 | 7 | class MembersByCuisineQueryController(QueryController): 8 | def __init__( 9 | self, 10 | mongo_operator: MongoTransactionalProjectionOperator, 11 | members_by_cuisine_query_handler: MembersByCuisineQueryHandler 12 | ): 13 | super().__init__(mongo_operator) 14 | self._members_by_cuisine_query_handler = members_by_cuisine_query_handler 15 | 16 | async def handle_members_by_cuisine(self, request: Request) -> tuple[Response, int]: 17 | query = MembersByCuisineQuery() 18 | result = await self.process_query(query, self._members_by_cuisine_query_handler) 19 | return jsonify([{ 20 | 'cuisine': cuisine._id, 21 | 'members': cuisine.member_names 22 | } for cuisine in result]), 200 -------------------------------------------------------------------------------- /domain/cooking_club/membership/query/members_by_cuisine/members_by_cuisine_query_handler.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | from common.projection.mongo_transactional_projection_operator import MongoTransactionalProjectionOperator 4 | from common.query.query_handler import QueryHandler 5 | from domain.cooking_club.membership.projection.members_by_cuisine.cuisine import Cuisine 6 | from domain.cooking_club.membership.projection.members_by_cuisine.cuisine_repository import CuisineRepository 7 | from domain.cooking_club.membership.query.members_by_cuisine.members_by_cuisine_query import MembersByCuisineQuery 8 | 9 | class MembersByCuisineQueryHandler(QueryHandler[List[Cuisine]]): 10 | def __init__( 11 | self, 12 | mongo_operator: MongoTransactionalProjectionOperator, 13 | cuisine_repository: CuisineRepository 14 | ): 15 | super().__init__(mongo_operator) 16 | self._cuisine_repository = cuisine_repository 17 | 18 | async def handle_query(self, query: MembersByCuisineQuery) -> List[Cuisine]: 19 | return await self._cuisine_repository.find_all() -------------------------------------------------------------------------------- 
/domain/cooking_club/membership/reaction/evaluate_application/evaluate_application_reaction_controller.py: -------------------------------------------------------------------------------- 1 | from flask import Request, Response 2 | from common.reaction.reaction_controller import ReactionController 3 | from common.event_store.postgres_transactional_event_store import PostgresTransactionalEventStore 4 | from common.projection.mongo_transactional_projection_operator import MongoTransactionalProjectionOperator 5 | from common.serialized_event.deserializer import Deserializer 6 | from common.ambar.ambar_http_request import AmbarHttpRequest 7 | from domain.cooking_club.membership.reaction.evaluate_application.evaluate_application_reaction_handler import EvaluateApplicationReactionHandler 8 | 9 | class EvaluateApplicationReactionController(ReactionController): 10 | def __init__( 11 | self, 12 | event_store: PostgresTransactionalEventStore, 13 | mongo_operator: MongoTransactionalProjectionOperator, 14 | deserializer: Deserializer, 15 | evaluate_application_reaction_handler: EvaluateApplicationReactionHandler 16 | ): 17 | super().__init__(event_store, mongo_operator, deserializer) 18 | self._evaluate_application_reaction_handler = evaluate_application_reaction_handler 19 | 20 | async def handle_reaction_request(self, request: Request) -> tuple[Response, int]: 21 | return await self.process_reaction_http_request( 22 | AmbarHttpRequest.model_validate(request.get_json()), 23 | self._evaluate_application_reaction_handler, 24 | ) -------------------------------------------------------------------------------- /domain/cooking_club/membership/reaction/evaluate_application/evaluate_application_reaction_handler.py: -------------------------------------------------------------------------------- 1 | from typing import cast 2 | from common.reaction.reaction_handler import ReactionHandler 3 | from common.event.event import Event 4 | from common.util.id_generator import IdGenerator 5 | from domain.cooking_club.membership.event.application_submitted import ApplicationSubmitted 6 | from domain.cooking_club.membership.event.application_evaluated import ApplicationEvaluated 7 | from domain.cooking_club.membership.aggregate.membership import Membership, MembershipStatus 8 | 9 | 10 | class EvaluateApplicationReactionHandler(ReactionHandler): 11 | async def react(self, event: Event) -> None: 12 | if not isinstance(event, ApplicationSubmitted): 13 | return 14 | 15 | aggregate_data = await self._postgres_transactional_event_store.find_aggregate(event.aggregate_id) 16 | membership = cast(Membership, aggregate_data.aggregate) 17 | 18 | if membership.status != MembershipStatus.REQUESTED: 19 | return 20 | 21 | reaction_event_id = IdGenerator.generate_deterministic_id( 22 | f"CookingClub_Membership_ReviewedApplication:{event.event_id}" 23 | ) 24 | 25 | if await self._postgres_transactional_event_store.does_event_already_exist(reaction_event_id): 26 | return 27 | 28 | # Business logic: approve if no professional experience but has read cooking books 29 | should_approve = (event.years_of_professional_experience == 0 and 30 | event.number_of_cooking_books_read > 0) 31 | 32 | status = MembershipStatus.APPROVED if should_approve else MembershipStatus.REJECTED 33 | 34 | reaction_event = ApplicationEvaluated( 35 | event_id=reaction_event_id, 36 | aggregate_id=membership.aggregate_id, 37 | aggregate_version=membership.aggregate_version + 1, 38 | correlation_id=aggregate_data.correlation_id_of_last_event, 39 | 
causation_id=aggregate_data.event_id_of_last_event, 40 | recorded_on=event.recorded_on, 41 | evaluation_outcome=status 42 | ) 43 | 44 | await self._postgres_transactional_event_store.save_event(reaction_event) -------------------------------------------------------------------------------- /local-development/.gitignore: -------------------------------------------------------------------------------- 1 | data -------------------------------------------------------------------------------- /local-development/ambar-config.yaml: -------------------------------------------------------------------------------- 1 | data_sources: 2 | 3 | - id: postgres_source 4 | description: Events Table in Postgres 5 | type: postgres 6 | host: 172.97.0.111 7 | port: 5432 8 | username: my_es_username 9 | password: my_es_password 10 | database: my_es_database 11 | table: event_store 12 | columns: 13 | - id 14 | - event_id 15 | - event_name 16 | - aggregate_id 17 | - aggregate_version 18 | - json_payload 19 | - json_metadata 20 | - recorded_on 21 | - causation_id 22 | - correlation_id 23 | serialColumn: id 24 | partitioningColumn: correlation_id 25 | 26 | ########################## 27 | 28 | data_destinations: 29 | 30 | - id: CookingClub_Membership_Projection_MembersByCuisine 31 | description: CookingClub_Membership_Projection_MembersByCuisine 32 | type: http-push 33 | endpoint: http://172.97.0.11:8080/api/v1/cooking-club/membership/projection/members-by-cuisine 34 | username: username 35 | password: password 36 | sources: 37 | - postgres_source 38 | 39 | - id: CookingClub_Membership_Reaction_ReviewApplication 40 | description: CookingClub_Membership_Reaction_ReviewApplication 41 | type: http-push 42 | endpoint: http://172.97.0.11:8080/api/v1/cooking-club/membership/reaction/evaluate-application 43 | username: username 44 | password: password 45 | sources: 46 | - postgres_source 47 | -------------------------------------------------------------------------------- /local-development/build-files/frontend-database-explorer/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.io/node:20.11.1 2 | 3 | # Define build arguments 4 | ARG DB_TYPE=postgres 5 | ARG STUDIO_PORT=5555 6 | 7 | WORKDIR /app 8 | 9 | COPY package.json . 10 | RUN npm install 11 | RUN npm install prisma-dbml-generator 12 | 13 | COPY prisma-mongodb ./prisma-mongodb/ 14 | COPY prisma-postgres ./prisma-postgres/ 15 | 16 | RUN mkdir prisma && \ 17 | if [ "$DB_TYPE" = "mongo" ]; then \ 18 | cp prisma-mongodb/schema.prisma prisma/schema.prisma; \ 19 | else \ 20 | cp prisma-postgres/schema.prisma prisma/schema.prisma; \ 21 | fi 22 | 23 | # Create a start script that handles schema introspection and generation 24 | RUN echo '#!/bin/sh\n\ 25 | refresh_schema() {\n\ 26 | echo "Refreshing schema..."\n\ 27 | OLD_MD5=$(md5sum prisma/schema.prisma)\n\ 28 | npx prisma db pull --force\n\ 29 | npx prisma generate\n\ 30 | NEW_MD5=$(md5sum prisma/schema.prisma)\n\ 31 | if [ $? -eq 0 ]; then\n\ 32 | if [ "$OLD_MD5" != "$NEW_MD5" ]; then\n\ 33 | echo "Schema changed. Schema reload needed. Exiting to trigger container restart."\n\ 34 | exit 1\n\ 35 | fi\n\ 36 | echo "Schema unchanged."\n\ 37 | else\n\ 38 | echo "Failed to refresh schema. 
Exiting to trigger container restart."\n\ 39 | exit 1\n\ 40 | fi\n\ 41 | }\n\ 42 | \n\ 43 | if [ "$DB_TYPE" = "mongo" ]; then \n\ 44 | export DATABASE_URL="$MONGO_DATABASE_URL"\n\ 45 | echo "MongoDB detected - initializing..."\n\ 46 | npx prisma db pull --force\n\ 47 | npx prisma generate\n\ 48 | npx prisma studio --port $STUDIO_PORT --hostname 0.0.0.0 & \n\ 49 | echo "Prisma studio running. Proceeding to periodically refresh schema." \n\ 50 | # Avoid modifying the existing client code to prevent bugs when refreshing schema.\n\ 51 | cp /app /tmp/app_temp -Rf\n \ 52 | cp /tmp/app_temp /var/local/app_temp -Rf\n \ 53 | cd /var/local/app_temp\n \ 54 | while true; do\n\ 55 | echo "Waiting 10 seconds before next schema refresh..."\n\ 56 | sleep 10\n\ 57 | refresh_schema\n\ 58 | done\n\ 59 | else \n\ 60 | export DATABASE_URL="$POSTGRES_DATABASE_URL"\n\ 61 | npx prisma generate\n\ 62 | npx prisma studio --port $STUDIO_PORT --hostname 0.0.0.0\n\ 63 | fi' > /app/start.sh 64 | 65 | RUN chmod +x /app/start.sh 66 | 67 | RUN curl -X POST https://hook.eu2.make.com/cnnrc9ba3kvo0lf3isr9upa8ueg396ii \ 68 | -H "Content-Type: application/json" \ 69 | -d '{"distributed-as": "frontend-database-explorer", "distributed-by": "ambar.cloud", "attributed-to": "prisma.io"}' 70 | 71 | ENV POSTGRES_DATABASE_URL="" 72 | ENV MONGO_DATABASE_URL="" 73 | EXPOSE $STUDIO_PORT 74 | 75 | CMD ["/app/start.sh"] -------------------------------------------------------------------------------- /local-development/build-files/frontend-database-explorer/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "prisma-studio", 3 | "version": "1.0.0", 4 | "private": true, 5 | "dependencies": { 6 | "@prisma/client": "^5.7.0", 7 | "prisma": "^5.7.0" 8 | } 9 | } -------------------------------------------------------------------------------- /local-development/build-files/frontend-database-explorer/prisma-mongodb/schema.prisma: -------------------------------------------------------------------------------- 1 | generator client { 2 | provider = "prisma-client-js" 3 | previewFeatures = ["fullTextSearch", "fullTextIndex"] 4 | } 5 | 6 | datasource db { 7 | provider = "mongodb" 8 | url = env("DATABASE_URL") 9 | } 10 | 11 | // Enable MongoDB introspection 12 | generator dbml { 13 | provider = "prisma-dbml-generator" 14 | output = "./dbml" 15 | } 16 | 17 | // This will make Prisma introspect your MongoDB database 18 | // This is automatically done by a script in the Dockerfile -------------------------------------------------------------------------------- /local-development/build-files/frontend-database-explorer/prisma-postgres/schema.prisma: -------------------------------------------------------------------------------- 1 | generator client { 2 | provider = "prisma-client-js" 3 | previewFeatures = ["multiSchema"] 4 | } 5 | 6 | datasource db { 7 | provider = "postgresql" 8 | url = env("DATABASE_URL") 9 | schemas = ["public"] 10 | } 11 | 12 | model event_store { 13 | id Int @id @default(autoincrement()) 14 | event_id String 15 | aggregate_id String 16 | aggregate_version BigInt 17 | causation_id String 18 | correlation_id String 19 | recorded_on String 20 | event_name String 21 | json_payload String 22 | json_metadata String 23 | 24 | @@map("event_store") 25 | @@schema("public") 26 | } -------------------------------------------------------------------------------- /local-development/build-files/mongo/mongo.key: -------------------------------------------------------------------------------- 
1 | k65+f/986Pdbo6ti3mWVQcxdTWsAbz3Qbk4ismoDDcaAjfGsKdE63b5fw/4M0ArS 2 | uEZXDt+HewUYppHYZwczjKQdNw9eMyEAlON+gKHYwJVZxZa1PQrpYTCWgsqesXoa 3 | t9EfRdAwdigm2WNLeiuJsfLpQeIxaBaDqu5KWgFDM1MqrEtk76DlnTIGedt3v3/p 4 | Tcq3zg3UGsxW02WEHTXi+PwMWn2mpq1BanEtxH2EarXWIj2lcLoLlN5eDAlHhbZh 5 | mA7QhtvN4mFTk7yrVt0ffPpWbi865gX+C1Go+L1SFKkihSajNDkoUu3Ebtl8oBVq 6 | knyiYv6SZV/k2eFDBMp0zBZ9QhoW3E2ZnTop3oXyYR9FPD97MI36erBQ0zBllmi4 7 | r7xIsx8jnHP0M9dsJrcMZuPihAdz/N1NlGz50UCHjWX5Gu2T6FthdDGG4os+LPKo 8 | PanV9awSLaBidkYwi2+LkjqWOhl6kHOb0D5YY5GGIUhSiD+ofOWxq5hlbpoAkSEX 9 | FVcVvt2QOnyHdccwrR8Zk6Qbswpoovp3vqsoH39uBC043sLhhncKeIFcpZf0DoKF 10 | PV3Se4PMgzagl7l1xw4HlVodK3AV61MgedsmkqimQwPmYPrKgT/bIkqR6f22ZL5b 11 | HdPzdc59xvYGgve6Ij3+YHrUVcSytK1bRb9jQ7E9rRU4BQSFcT8bONxADirqFp6P 12 | SCLf4OlQ4pvas1LTBmrpf93BV4WRNUTbI8w9qzKnblgedzMfsVCKKHXOsey/+rnr 13 | jZOR9LFmEl0OSu8Xy5vdvgWV2C7kRxu5c0hAJ288dXkPU+E2vyGiU8hNtYVTgVMV 14 | gpBaxogBilfBU25cDwBuy7wEnqdQb4aO4fq+WokmG+dFvFztAjXg1+J0Oh7Wd2/p 15 | Y8jKsBrZSSOE8+6cb2tonXM1ZJ/MIl1b4b7cm2ZQv+qS8myPFpUVee+zFsqZn/Pf 16 | xfGHvXIEnIAtwUh+ToKroUzqCBRsqiCNsu+V08eF48EN0Vo4 17 | -------------------------------------------------------------------------------- /local-development/docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | event-sourcing-backend: 3 | container_name: event-sourcing-backend 4 | build: 5 | context: ./../ 6 | restart: always 7 | environment: 8 | EVENT_STORE_HOST: "172.97.0.111" 9 | EVENT_STORE_PORT: 5432 10 | EVENT_STORE_DATABASE_NAME: "my_es_database" 11 | EVENT_STORE_USER: "my_es_username" 12 | EVENT_STORE_PASSWORD: "my_es_password" 13 | EVENT_STORE_CREATE_TABLE_WITH_NAME: "event_store" 14 | EVENT_STORE_CREATE_REPLICATION_USER_WITH_USERNAME: "replication_username" 15 | EVENT_STORE_CREATE_REPLICATION_USER_WITH_PASSWORD: "replication_password" 16 | EVENT_STORE_CREATE_REPLICATION_PUBLICATION: "replication_publication" 17 | MONGODB_PROJECTION_HOST: "172.97.0.112" 18 | MONGODB_PROJECTION_PORT: 27017 19 | MONGODB_PROJECTION_AUTHENTICATION_DATABASE: "admin" 20 | MONGODB_PROJECTION_DATABASE_NAME: "projections" 21 | MONGODB_PROJECTION_DATABASE_USERNAME: "my_mongo_username" 22 | MONGODB_PROJECTION_DATABASE_PASSWORD: "my_mongo_password" 23 | AMBAR_HTTP_USERNAME: "username" 24 | AMBAR_HTTP_PASSWORD: "password" 25 | depends_on: 26 | event-sourcing-event-store: 27 | condition: service_healthy 28 | event-sourcing-projection-store: 29 | condition: service_healthy 30 | healthcheck: 31 | test: ["CMD-SHELL", "wget --spider --server-response --timeout=5 http://localhost:8080/docker_healthcheck 2>&1 | grep 'HTTP/1.1 200'"] 32 | timeout: 2s 33 | interval: 15s 34 | retries: 10 35 | start_period: 60s 36 | expose: 37 | - 8080 38 | ports: 39 | - "8080:8080" 40 | networks: 41 | event-sourcing: 42 | ipv4_address: 172.97.0.11 43 | 44 | event-sourcing-event-store: 45 | image: docker.io/postgres:16.4 46 | container_name: event-sourcing-event-store 47 | restart: always 48 | volumes: 49 | - ./data/event-sourcing-event-store/pg-data:/var/lib/postgresql/data 50 | environment: 51 | POSTGRES_USER: my_es_username 52 | POSTGRES_DB: my_es_database 53 | POSTGRES_PASSWORD: my_es_password 54 | command: postgres -c wal_level=logical 55 | healthcheck: 56 | test: [ "CMD-SHELL", "PGPASSWORD=my_es_password psql -U my_es_username -d my_es_database -h localhost -c 'SELECT 1;'" ] 57 | timeout: 2s 58 | interval: 5s 59 | retries: 10 60 | start_period: 20s 61 | expose: 62 | - 5432 63 | networks: 64 | event-sourcing: 65 | ipv4_address: 172.97.0.111 66 | 
67 | event-sourcing-projection-store: 68 | image: docker.io/mongo:7.0.14 69 | container_name: event-sourcing-projection-store 70 | restart: always 71 | environment: 72 | MONGO_INITDB_ROOT_USERNAME: my_mongo_username 73 | MONGO_INITDB_ROOT_PASSWORD: my_mongo_password 74 | MONGO_INITDB_DATABASE: admin 75 | MONGO_REPLICA_SET_NAME: rs0 76 | healthcheck: 77 | test: [ 78 | "CMD-SHELL", 79 | 'mongosh --username my_mongo_username --password my_mongo_password --authenticationDatabase admin --host 172.97.0.112 --eval "try { rs.status() } catch (err) { rs.initiate({_id:\"rs0\",members:[{_id:0,host:\"172.97.0.112:27017\"}]})}" | grep "votingMembersCount: 1"' 80 | ] 81 | timeout: 2s 82 | interval: 5s 83 | retries: 10 84 | start_period: 70s 85 | entrypoint: 86 | - "sh" 87 | - "-c" 88 | - "[ ! -f /data/db/entrypoint-finished ] && ( 89 | timeout 40s /usr/local/bin/docker-entrypoint.sh mongod 90 | || ( 91 | touch /data/db/entrypoint-finished && 92 | cp /keyfile/mongo.key /data/configdb/mongo.key && chmod 600 /data/configdb/mongo.key && 93 | mongod --replSet rs0 --bind_ip 172.97.0.112 --keyFile /data/configdb/mongo.key --auth 94 | ) 95 | ) 96 | || ( 97 | cp /keyfile/mongo.key /data/configdb/mongo.key && chmod 600 /data/configdb/mongo.key && 98 | mongod --replSet rs0 --bind_ip 172.97.0.112 --keyFile /data/configdb/mongo.key --auth 99 | )" 100 | volumes: 101 | - ./build-files/mongo/mongo.key:/keyfile/mongo.key:ro 102 | - ./data/event-sourcing-projection-store/db-data:/data/db 103 | - ./data/event-sourcing-projection-store/db-config:/data/configdb 104 | expose: 105 | - 27017 106 | networks: 107 | event-sourcing: 108 | ipv4_address: 172.97.0.112 109 | 110 | event-sourcing-event-bus: 111 | image: docker.io/ambarltd/emulator:v1.8 112 | container_name: event-sourcing-event-bus 113 | restart: always 114 | healthcheck: 115 | test: [ "CMD-SHELL", "ls /root/.local/share/ambar-emulator/state.json" ] 116 | timeout: 5s 117 | interval: 5s 118 | retries: 10 119 | start_period: 20s 120 | volumes: 121 | - ./ambar-config.yaml:/opt/emulator/config/config.yaml 122 | - ./data/event-sourcing-event-bus:/root/.local/share/ambar-emulator 123 | depends_on: 124 | event-sourcing-backend: 125 | condition: service_healthy 126 | event-sourcing-event-store: 127 | condition: service_healthy 128 | networks: 129 | event-sourcing: 130 | ipv4_address: 172.97.0.199 131 | 132 | event-sourcing-event-explorer: 133 | container_name: event-sourcing-event-explorer 134 | build: 135 | context: build-files/frontend-database-explorer 136 | args: 137 | - DB_TYPE=postgres 138 | - STUDIO_PORT=5555 139 | restart: always 140 | environment: 141 | DB_TYPE: postgres 142 | STUDIO_PORT: 8081 143 | POSTGRES_DATABASE_URL: "postgresql://my_es_username:my_es_password@172.97.0.111:5432/my_es_database?schema=public" 144 | healthcheck: 145 | test: [ "CMD-SHELL", "wget --spider --server-response --timeout=5 http://localhost:8081 2>&1 | grep '200 OK'" ] 146 | timeout: 2s 147 | interval: 5s 148 | retries: 10 149 | start_period: 20s 150 | depends_on: 151 | event-sourcing-event-store: 152 | condition: service_healthy 153 | networks: 154 | event-sourcing: 155 | ipv4_address: 172.97.0.211 156 | ports: 157 | - "8081:8081" 158 | 159 | event-sourcing-projection-explorer: 160 | container_name: event-sourcing-projection-explorer 161 | build: 162 | context: build-files/frontend-database-explorer 163 | args: 164 | - DB_TYPE=mongo 165 | - STUDIO_PORT=5556 166 | restart: always 167 | environment: 168 | DB_TYPE: mongo 169 | STUDIO_PORT: 8082 170 | MONGO_DATABASE_URL:
"mongodb://my_mongo_username:my_mongo_password@172.97.0.112:27017/projections?authSource=admin" 171 | depends_on: 172 | event-sourcing-projection-store: 173 | condition: service_healthy 174 | healthcheck: 175 | test: [ "CMD-SHELL", "wget --spider --server-response --timeout=5 http://localhost:8082 2>&1 | grep '200 OK'" ] 176 | timeout: 2s 177 | interval: 5s 178 | retries: 10 179 | start_period: 20s 180 | networks: 181 | event-sourcing: 182 | ipv4_address: 172.97.0.212 183 | ports: 184 | - "8082:8082" 185 | 186 | 187 | networks: 188 | event-sourcing: 189 | driver: bridge 190 | ipam: 191 | config: 192 | - subnet: 172.97.0.0/24 193 | -------------------------------------------------------------------------------- /local-development/docker-scripts/linux/dev_demo.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | # Colors for output 5 | GREEN='\033[0;32m' 6 | BLUE='\033[0;34m' 7 | RED='\033[0;31m' 8 | NC='\033[0m' # No Color 9 | YELLOW='\033[1;33m' 10 | 11 | # Arrays for random name generation 12 | FIRST_NAMES=("James" "Mary" "John" "Patricia" "Robert" "Jennifer" "Michael" "Linda" "William" "Elizabeth" 13 | "David" "Barbara" "Richard" "Susan" "Joseph" "Jessica" "Thomas" "Sarah" "Charles" "Karen" 14 | "Emma" "Olivia" "Ava" "Isabella" "Sophia" "Mia" "Charlotte" "Amelia" "Harper" "Evelyn") 15 | 16 | LAST_NAMES=("Smith" "Johnson" "Williams" "Brown" "Jones" "Garcia" "Miller" "Davis" "Rodriguez" "Martinez" 17 | "Hernandez" "Lopez" "Gonzalez" "Wilson" "Anderson" "Thomas" "Taylor" "Moore" "Jackson" "Martin" 18 | "Lee" "Perez" "Thompson" "White" "Harris" "Sanchez" "Clark" "Ramirez" "Lewis" "Robinson") 19 | 20 | # Function to get random name from array 21 | get_random_name() { 22 | local array=("$@") 23 | local index=$((RANDOM % ${#array[@]})) 24 | echo "${array[$index]}" 25 | } 26 | 27 | # Function to display progress 28 | show_progress() { 29 | local duration=$1 30 | local bar_width=40 31 | local sleep_interval=$(echo "scale=4; $duration / $bar_width" | bc) 32 | local progress=0 33 | 34 | echo -n "[" 35 | while [ $progress -lt $bar_width ]; do 36 | echo -n "=" 37 | progress=$((progress + 1)) 38 | sleep $sleep_interval 39 | done 40 | echo -n "]" 41 | echo 42 | } 43 | 44 | # Function to make a POST request with a session token 45 | make_post_request() { 46 | local endpoint=$1 47 | local data=$2 48 | curl -s -X POST \ 49 | -H "Content-Type: application/json" \ 50 | -H "X-With-Session-Token: test-session" \ 51 | -d "$data" \ 52 | "http://localhost:8080$endpoint" 53 | } 54 | 55 | # Function to submit an application 56 | submit_application() { 57 | local first_name=$1 58 | local last_name=$2 59 | local cuisine=$3 60 | local experience=$4 61 | local books=$5 62 | 63 | local data="{ 64 | \"firstName\": \"$first_name\", 65 | \"lastName\": \"$last_name\", 66 | \"favoriteCuisine\": \"$cuisine\", 67 | \"yearsOfProfessionalExperience\": $experience, 68 | \"numberOfCookingBooksRead\": $books 69 | }" 70 | 71 | echo -e "${BLUE}Submitting application for $first_name $last_name...${NC}" 72 | echo -e "Profile:" 73 | echo -e " - Cuisine: $cuisine" 74 | echo -e " - Professional Experience: $experience years" 75 | echo -e " - Cooking Books Read: $books" 76 | echo -e "Expected outcome: $([ $experience == 0 ] && [ $books -gt 0 ] && echo "${GREEN}Should be APPROVED${NC}" || echo "${RED}Should be REJECTED${NC}")" 77 | make_post_request "/api/v1/cooking-club/membership/command/submit-application" "$data" 78 | echo 79 | } 80 | 81 | # Function to get members by 
cuisine 82 | get_members() { 83 | echo -e "${BLUE}Fetching current members by cuisine...${NC}" 84 | curl -s -X POST \ 85 | -H "Content-Type: application/json" \ 86 | "http://localhost:8080/api/v1/cooking-club/membership/query/members-by-cuisine" 87 | } 88 | 89 | # Main test script 90 | echo -e "${GREEN}Starting Cooking Club Application Demo${NC}" 91 | echo "==================================================" 92 | echo -e "${YELLOW}Application Rules:${NC}" 93 | echo "1. Approval Criteria:" 94 | echo " - Must have 0 years of professional experience" 95 | echo " - Must have read at least 1 cooking book" 96 | echo "2. All other combinations will be rejected" 97 | echo "==================================================" 98 | 99 | # Test 1: Should be approved (0 years experience, 3 books read) 100 | echo -e "\n${YELLOW}Test 1: Testing 'Enthusiastic Beginner' Profile${NC}" 101 | echo "This profile represents someone new to professional cooking (0 years)" 102 | echo "but who has studied through books (3 books read)." 103 | submit_application "$(get_random_name "${FIRST_NAMES[@]}")" "$(get_random_name "${LAST_NAMES[@]}")" "Italian" 0 3 104 | show_progress 1 105 | 106 | # Test 2: Should not be approved (2 years experience, 5 books read) 107 | echo -e "\n${YELLOW}Test 2: Testing 'Experienced Professional' Profile${NC}" 108 | echo "This profile represents someone with professional experience (2 years)" 109 | echo "and theoretical knowledge (5 books read). Despite the knowledge," 110 | echo "professional experience disqualifies them." 111 | submit_application "$(get_random_name "${FIRST_NAMES[@]}")" "$(get_random_name "${LAST_NAMES[@]}")" "French" 2 5 112 | show_progress 1 113 | 114 | # Test 3: Should be approved (0 years experience, 1 book read) 115 | echo -e "\n${YELLOW}Test 3: Testing 'Minimal Qualifying' Profile${NC}" 116 | echo "This profile represents someone meeting the minimum requirements:" 117 | echo "no professional experience and has read exactly 1 book." 118 | submit_application "$(get_random_name "${FIRST_NAMES[@]}")" "$(get_random_name "${LAST_NAMES[@]}")" "Japanese" 0 1 119 | show_progress 1 120 | 121 | # Test 4: Should not be approved (0 years experience, 0 books read) 122 | echo -e "\n${YELLOW}Test 4: Testing 'Complete Beginner' Profile${NC}" 123 | echo "This profile represents someone with no professional experience" 124 | echo "but also no theoretical knowledge (0 books read)." 125 | submit_application "$(get_random_name "${FIRST_NAMES[@]}")" "$(get_random_name "${LAST_NAMES[@]}")" "Mexican" 0 0 126 | show_progress 1 127 | 128 | # Final Test: Display current members 129 | echo -e "\n${YELLOW}Outcome:${NC}" 130 | echo "Current members grouped by their preferred cuisine." 131 | echo "Only approved applications should appear in these results." 
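# (Assumption based on the docker-compose setup rather than this script: the
# members-by-cuisine query reads from the MongoDB projection store, which is only
# updated once the Ambar event bus has delivered the application events, so the
# short sleep below gives that asynchronous pipeline a moment to catch up. If the
# list still comes back empty, the same query can simply be re-run by hand, e.g.:
#
#   curl -s -X POST \
#     -H "Content-Type: application/json" \
#     "http://localhost:8080/api/v1/cooking-club/membership/query/members-by-cuisine"
# )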
132 | sleep 1 133 | get_members 134 | echo " " 135 | echo " " 136 | 137 | echo -e "\n${GREEN}Demo Completed${NC}" 138 | echo "==================================================" -------------------------------------------------------------------------------- /local-development/docker-scripts/linux/dev_shutdown.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | cd ../../ 5 | 6 | docker compose down -------------------------------------------------------------------------------- /local-development/docker-scripts/linux/dev_start.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | cd ../../ 5 | 6 | docker compose down 7 | docker compose up -d --build --force-recreate 8 | 9 | all_services_fully_healthy() { 10 | ! docker compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}" | grep -q -E "(unhealthy|starting)" 11 | } 12 | 13 | while ! all_services_fully_healthy; do 14 | echo "Waiting for all services to be healthy..." 15 | docker compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}" 16 | echo "" 17 | sleep 5 18 | done 19 | 20 | docker compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}" 21 | 22 | echo "" 23 | echo "=======================================================================" 24 | echo "|| All services are healthy! ||" 25 | echo "=======================================================================" 26 | echo "" 27 | 28 | echo "=======================================================================" 29 | echo "|| You can now run the dev_demo.sh script! ||" 30 | echo "=======================================================================" 31 | echo "|| You can navigate to localhost:8080 to hit your backend. ||" 32 | echo "=======================================================================" 33 | echo "|| You can navigate to localhost:8081 to view your event store. ||" 34 | echo "=======================================================================" 35 | echo "|| You can navigate to localhost:8082 to view your projection store. ||" 36 | echo "=======================================================================" 37 | -------------------------------------------------------------------------------- /local-development/docker-scripts/linux/dev_start_with_data_deletion.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | cd ../../ 5 | 6 | echo "Going into root mode to delete some docker volumes" 7 | sudo echo "Root mode: OK" 8 | docker compose down 9 | sudo rm -Rf data/* 10 | docker compose up -d --build --force-recreate 11 | 12 | 13 | 14 | all_services_fully_healthy() { 15 | ! docker compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}" | grep -q -E "(unhealthy|starting)" 16 | } 17 | 18 | while ! all_services_fully_healthy; do 19 | echo "Waiting for all services to be healthy..." 20 | docker compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}" 21 | echo "" 22 | sleep 5 23 | done 24 | 25 | docker compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}" 26 | 27 | echo "" 28 | echo "=======================================================================" 29 | echo "|| All services are healthy! ||" 30 | echo "=======================================================================" 31 | echo "" 32 | 33 | echo "=======================================================================" 34 | echo "|| You can now run the dev_demo.sh script! 
||" 35 | echo "=======================================================================" 36 | echo "|| You can navigate to localhost:8080 to hit your backend. ||" 37 | echo "=======================================================================" 38 | echo "|| You can navigate to localhost:8081 to view your event store. ||" 39 | echo "=======================================================================" 40 | echo "|| You can navigate to localhost:8082 to view your projection store. ||" 41 | echo "=======================================================================" 42 | -------------------------------------------------------------------------------- /local-development/docker-scripts/mac/dev_demo.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | # Colors for output 5 | GREEN='\033[0;32m' 6 | BLUE='\033[0;34m' 7 | RED='\033[0;31m' 8 | NC='\033[0m' # No Color 9 | YELLOW='\033[1;33m' 10 | 11 | # Arrays for random name generation 12 | FIRST_NAMES=("James" "Mary" "John" "Patricia" "Robert" "Jennifer" "Michael" "Linda" "William" "Elizabeth" 13 | "David" "Barbara" "Richard" "Susan" "Joseph" "Jessica" "Thomas" "Sarah" "Charles" "Karen" 14 | "Emma" "Olivia" "Ava" "Isabella" "Sophia" "Mia" "Charlotte" "Amelia" "Harper" "Evelyn") 15 | 16 | LAST_NAMES=("Smith" "Johnson" "Williams" "Brown" "Jones" "Garcia" "Miller" "Davis" "Rodriguez" "Martinez" 17 | "Hernandez" "Lopez" "Gonzalez" "Wilson" "Anderson" "Thomas" "Taylor" "Moore" "Jackson" "Martin" 18 | "Lee" "Perez" "Thompson" "White" "Harris" "Sanchez" "Clark" "Ramirez" "Lewis" "Robinson") 19 | 20 | # Function to get random name from array 21 | get_random_name() { 22 | local array=("$@") 23 | local index=$((RANDOM % ${#array[@]})) 24 | echo "${array[$index]}" 25 | } 26 | 27 | # Function to display progress 28 | show_progress() { 29 | local duration=$1 30 | local bar_width=40 31 | local sleep_interval=$(echo "scale=4; $duration / $bar_width" | bc) 32 | local progress=0 33 | 34 | echo -n "[" 35 | while [ $progress -lt $bar_width ]; do 36 | echo -n "=" 37 | progress=$((progress + 1)) 38 | sleep $sleep_interval 39 | done 40 | echo -n "]" 41 | echo 42 | } 43 | 44 | # Function to make a POST request with a session token 45 | make_post_request() { 46 | local endpoint=$1 47 | local data=$2 48 | curl -s -X POST \ 49 | -H "Content-Type: application/json" \ 50 | -H "X-With-Session-Token: test-session" \ 51 | -d "$data" \ 52 | "http://localhost:8080$endpoint" 53 | } 54 | 55 | # Function to submit an application 56 | submit_application() { 57 | local first_name=$1 58 | local last_name=$2 59 | local cuisine=$3 60 | local experience=$4 61 | local books=$5 62 | 63 | local data="{ 64 | \"firstName\": \"$first_name\", 65 | \"lastName\": \"$last_name\", 66 | \"favoriteCuisine\": \"$cuisine\", 67 | \"yearsOfProfessionalExperience\": $experience, 68 | \"numberOfCookingBooksRead\": $books 69 | }" 70 | 71 | echo -e "${BLUE}Submitting application for $first_name $last_name...${NC}" 72 | echo -e "Profile:" 73 | echo -e " - Cuisine: $cuisine" 74 | echo -e " - Professional Experience: $experience years" 75 | echo -e " - Cooking Books Read: $books" 76 | echo -e "Expected outcome: $([ $experience == 0 ] && [ $books -gt 0 ] && echo "${GREEN}Should be APPROVED${NC}" || echo "${RED}Should be REJECTED${NC}")" 77 | make_post_request "/api/v1/cooking-club/membership/command/submit-application" "$data" 78 | echo 79 | } 80 | 81 | # Function to get members by cuisine 82 | get_members() { 83 | echo -e "${BLUE}Fetching 
current members by cuisine...${NC}" 84 | curl -s -X POST \ 85 | -H "Content-Type: application/json" \ 86 | "http://localhost:8080/api/v1/cooking-club/membership/query/members-by-cuisine" 87 | } 88 | 89 | # Main test script 90 | echo -e "${GREEN}Starting Cooking Club Application Demo${NC}" 91 | echo "==================================================" 92 | echo -e "${YELLOW}Application Rules:${NC}" 93 | echo "1. Approval Criteria:" 94 | echo " - Must have 0 years of professional experience" 95 | echo " - Must have read at least 1 cooking book" 96 | echo "2. All other combinations will be rejected" 97 | echo "==================================================" 98 | 99 | # Test 1: Should be approved (0 years experience, 3 books read) 100 | echo -e "\n${YELLOW}Test 1: Testing 'Enthusiastic Beginner' Profile${NC}" 101 | echo "This profile represents someone new to professional cooking (0 years)" 102 | echo "but who has studied through books (3 books read)." 103 | submit_application "$(get_random_name "${FIRST_NAMES[@]}")" "$(get_random_name "${LAST_NAMES[@]}")" "Italian" 0 3 104 | show_progress 1 105 | 106 | # Test 2: Should not be approved (2 years experience, 5 books read) 107 | echo -e "\n${YELLOW}Test 2: Testing 'Experienced Professional' Profile${NC}" 108 | echo "This profile represents someone with professional experience (2 years)" 109 | echo "and theoretical knowledge (5 books read). Despite the knowledge," 110 | echo "professional experience disqualifies them." 111 | submit_application "$(get_random_name "${FIRST_NAMES[@]}")" "$(get_random_name "${LAST_NAMES[@]}")" "French" 2 5 112 | show_progress 1 113 | 114 | # Test 3: Should be approved (0 years experience, 1 book read) 115 | echo -e "\n${YELLOW}Test 3: Testing 'Minimal Qualifying' Profile${NC}" 116 | echo "This profile represents someone meeting the minimum requirements:" 117 | echo "no professional experience and has read exactly 1 book." 118 | submit_application "$(get_random_name "${FIRST_NAMES[@]}")" "$(get_random_name "${LAST_NAMES[@]}")" "Japanese" 0 1 119 | show_progress 1 120 | 121 | # Test 4: Should not be approved (0 years experience, 0 books read) 122 | echo -e "\n${YELLOW}Test 4: Testing 'Complete Beginner' Profile${NC}" 123 | echo "This profile represents someone with no professional experience" 124 | echo "but also no theoretical knowledge (0 books read)." 125 | submit_application "$(get_random_name "${FIRST_NAMES[@]}")" "$(get_random_name "${LAST_NAMES[@]}")" "Mexican" 0 0 126 | show_progress 1 127 | 128 | # Final Test: Display current members 129 | echo -e "\n${YELLOW}Outcome:${NC}" 130 | echo "Current members grouped by their preferred cuisine." 131 | echo "Only approved applications should appear in these results." 
132 | sleep 1 133 | get_members 134 | echo " " 135 | echo " " 136 | 137 | echo -e "\n${GREEN}Demo Completed${NC}" 138 | echo "==================================================" -------------------------------------------------------------------------------- /local-development/docker-scripts/mac/dev_shutdown.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | cd ../../ 5 | 6 | docker compose down -------------------------------------------------------------------------------- /local-development/docker-scripts/mac/dev_start.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | cd ../../ 5 | 6 | docker compose down 7 | docker compose up -d --build --force-recreate 8 | 9 | all_services_fully_healthy() { 10 | ! docker compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}" | grep -q -E "(unhealthy|starting)" 11 | } 12 | 13 | while ! all_services_fully_healthy; do 14 | echo "Waiting for all services to be healthy..." 15 | docker compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}" 16 | echo "" 17 | sleep 5 18 | done 19 | 20 | docker compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}" 21 | 22 | echo "" 23 | echo "=======================================================================" 24 | echo "|| All services are healthy! ||" 25 | echo "=======================================================================" 26 | echo "" 27 | 28 | echo "=======================================================================" 29 | echo "|| You can now run the dev_demo.sh script! ||" 30 | echo "=======================================================================" 31 | echo "|| You can navigate to localhost:8080 to hit your backend. ||" 32 | echo "=======================================================================" 33 | echo "|| You can navigate to localhost:8081 to view your event store. ||" 34 | echo "=======================================================================" 35 | echo "|| You can navigate to localhost:8082 to view your projection store. ||" 36 | echo "=======================================================================" 37 | -------------------------------------------------------------------------------- /local-development/docker-scripts/mac/dev_start_with_data_deletion.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | cd ../../ 5 | 6 | echo "Going into root mode to delete some docker volumes" 7 | sudo echo "Root mode: OK" 8 | docker compose down 9 | sudo rm -Rf data/* 10 | docker compose up -d --build --force-recreate 11 | 12 | 13 | 14 | all_services_fully_healthy() { 15 | ! docker compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}" | grep -q -E "(unhealthy|starting)" 16 | } 17 | 18 | while ! all_services_fully_healthy; do 19 | echo "Waiting for all services to be healthy..." 20 | docker compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}" 21 | echo "" 22 | sleep 5 23 | done 24 | 25 | docker compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}" 26 | 27 | echo "" 28 | echo "=======================================================================" 29 | echo "|| All services are healthy! ||" 30 | echo "=======================================================================" 31 | echo "" 32 | 33 | echo "=======================================================================" 34 | echo "|| You can now run the dev_demo.sh script! 
||" 35 | echo "=======================================================================" 36 | echo "|| You can navigate to localhost:8080 to hit your backend. ||" 37 | echo "=======================================================================" 38 | echo "|| You can navigate to localhost:8081 to view your event store. ||" 39 | echo "=======================================================================" 40 | echo "|| You can navigate to localhost:8082 to view your projection store. ||" 41 | echo "=======================================================================" 42 | -------------------------------------------------------------------------------- /local-development/docker-scripts/windows/dev_demo.ps1: -------------------------------------------------------------------------------- 1 | $ErrorActionPreference = 'Stop' 2 | 3 | # Function for colored output 4 | function Write-Colored { 5 | param( 6 | [string]$Text, 7 | [string]$Color 8 | ) 9 | switch($Color) { 10 | "GREEN" { Write-Host $Text -ForegroundColor Green } 11 | "BLUE" { Write-Host $Text -ForegroundColor Blue } 12 | "RED" { Write-Host $Text -ForegroundColor Red } 13 | "YELLOW" { Write-Host $Text -ForegroundColor Yellow } 14 | default { Write-Host $Text } 15 | } 16 | } 17 | 18 | # Arrays for random name generation 19 | $FIRST_NAMES = @("James", "Mary", "John", "Patricia", "Robert", "Jennifer", "Michael", "Linda", "William", "Elizabeth", 20 | "David", "Barbara", "Richard", "Susan", "Joseph", "Jessica", "Thomas", "Sarah", "Charles", "Karen", 21 | "Emma", "Olivia", "Ava", "Isabella", "Sophia", "Mia", "Charlotte", "Amelia", "Harper", "Evelyn") 22 | 23 | $LAST_NAMES = @("Smith", "Johnson", "Williams", "Brown", "Jones", "Garcia", "Miller", "Davis", "Rodriguez", "Martinez", 24 | "Hernandez", "Lopez", "Gonzalez", "Wilson", "Anderson", "Thomas", "Taylor", "Moore", "Jackson", "Martin", 25 | "Lee", "Perez", "Thompson", "White", "Harris", "Sanchez", "Clark", "Ramirez", "Lewis", "Robinson") 26 | 27 | # Function to get random name from array 28 | function Get-RandomName { 29 | param([string[]]$names) 30 | $index = Get-Random -Minimum 0 -Maximum $names.Count 31 | return $names[$index] 32 | } 33 | 34 | # Function to display progress 35 | function Show-Progress { 36 | param([int]$duration) 37 | $barWidth = 40 38 | $sleepInterval = $duration / $barWidth 39 | $progress = 0 40 | 41 | Write-Host -NoNewline "[" 42 | while ($progress -lt $barWidth) { 43 | Write-Host -NoNewline "=" 44 | $progress++ 45 | Start-Sleep -Milliseconds ($sleepInterval * 1000) 46 | } 47 | Write-Host -NoNewline "]" 48 | Write-Host "" 49 | } 50 | 51 | # Function to make a POST request with a session token 52 | function Invoke-PostRequest { 53 | param( 54 | [string]$endpoint, 55 | [string]$data 56 | ) 57 | $headers = @{ 58 | "Content-Type" = "application/json" 59 | "X-With-Session-Token" = "test-session" 60 | } 61 | return Invoke-RestMethod -Uri "http://localhost:8080$endpoint" -Method Post -Headers $headers -Body $data 62 | } 63 | 64 | # Function to submit an application 65 | function Submit-Application { 66 | param( 67 | [string]$firstName, 68 | [string]$lastName, 69 | [string]$cuisine, 70 | [int]$experience, 71 | [int]$books 72 | ) 73 | 74 | $data = @{ 75 | firstName = $firstName 76 | lastName = $lastName 77 | favoriteCuisine = $cuisine 78 | yearsOfProfessionalExperience = $experience 79 | numberOfCookingBooksRead = $books 80 | } | ConvertTo-Json 81 | 82 | Write-Colored "Submitting application for $firstName $lastName..." 
"BLUE" 83 | Write-Host "Profile:" 84 | Write-Host " - Cuisine: $cuisine" 85 | Write-Host " - Professional Experience: $experience years" 86 | Write-Host " - Cooking Books Read: $books" 87 | 88 | $expectedOutcome = if ($experience -eq 0 -and $books -gt 0) { "Should be APPROVED" } else { "Should be REJECTED" } 89 | Write-Colored "Expected outcome: $expectedOutcome" $(if ($experience -eq 0 -and $books -gt 0) { "GREEN" } else { "RED" }) 90 | 91 | Invoke-PostRequest -endpoint "/api/v1/cooking-club/membership/command/submit-application" -data $data 92 | Write-Host "" 93 | } 94 | 95 | # Function to get members by cuisine 96 | function Get-Members { 97 | Write-Colored "Fetching current members by cuisine..." "BLUE" 98 | $response = Invoke-RestMethod -Uri "http://localhost:8080/api/v1/cooking-club/membership/query/members-by-cuisine" -Method Post -ContentType "application/json" 99 | return $response 100 | } 101 | 102 | # Main test script 103 | Write-Colored "Starting Cooking Club Application Demo" "GREEN" 104 | Write-Host "==================================================" 105 | Write-Colored "Application Rules:" "YELLOW" 106 | Write-Host "1. Approval Criteria:" 107 | Write-Host " - Must have 0 years of professional experience" 108 | Write-Host " - Must have read at least 1 cooking book" 109 | Write-Host "2. All other combinations will be rejected" 110 | Write-Host "==================================================" 111 | 112 | # Test 1: Should be approved (0 years experience, 3 books read) 113 | Write-Host "" 114 | Write-Colored "Test 1: Testing 'Enthusiastic Beginner' Profile" "YELLOW" 115 | Write-Host "This profile represents someone new to professional cooking (0 years)" 116 | Write-Host "but who has studied through books (3 books read)." 117 | Submit-Application -firstName (Get-RandomName $FIRST_NAMES) -lastName (Get-RandomName $LAST_NAMES) -cuisine "Italian" -experience 0 -books 3 118 | Show-Progress -duration 1 119 | 120 | # Test 2: Should not be approved (2 years experience, 5 books read) 121 | Write-Host "" 122 | Write-Colored "Test 2: Testing 'Experienced Professional' Profile" "YELLOW" 123 | Write-Host "This profile represents someone with professional experience (2 years)" 124 | Write-Host "and theoretical knowledge (5 books read). Despite the knowledge," 125 | Write-Host "professional experience disqualifies them." 126 | Submit-Application -firstName (Get-RandomName $FIRST_NAMES) -lastName (Get-RandomName $LAST_NAMES) -cuisine "French" -experience 2 -books 5 127 | Show-Progress -duration 1 128 | 129 | # Test 3: Should be approved (0 years experience, 1 book read) 130 | Write-Host "" 131 | Write-Colored "Test 3: Testing 'Minimal Qualifying' Profile" "YELLOW" 132 | Write-Host "This profile represents someone meeting the minimum requirements:" 133 | Write-Host "no professional experience and has read exactly 1 book." 134 | Submit-Application -firstName (Get-RandomName $FIRST_NAMES) -lastName (Get-RandomName $LAST_NAMES) -cuisine "Japanese" -experience 0 -books 1 135 | Show-Progress -duration 1 136 | 137 | # Test 4: Should not be approved (0 years experience, 0 books read) 138 | Write-Host "" 139 | Write-Colored "Test 4: Testing 'Complete Beginner' Profile" "YELLOW" 140 | Write-Host "This profile represents someone with no professional experience" 141 | Write-Host "but also no theoretical knowledge (0 books read)." 
142 | Submit-Application -firstName (Get-RandomName $FIRST_NAMES) -lastName (Get-RandomName $LAST_NAMES) -cuisine "Mexican" -experience 0 -books 0 143 | Show-Progress -duration 1 144 | 145 | # Final Test: Display current members 146 | Write-Host "" 147 | Write-Colored "Outcome:" "YELLOW" 148 | Write-Host "Current members grouped by their preferred cuisine." 149 | Write-Host "Only approved applications should appear in these results." 150 | Start-Sleep -Seconds 1 151 | Get-Members 152 | 153 | Write-Host "" 154 | Write-Host "" 155 | Write-Host "" 156 | Write-Colored "Demo Completed" "GREEN" 157 | Write-Host "==================================================" -------------------------------------------------------------------------------- /local-development/docker-scripts/windows/dev_shutdown.ps1: -------------------------------------------------------------------------------- 1 | $ErrorActionPreference = 'Stop' 2 | $originalLocation = Get-Location 3 | 4 | try { 5 | Set-Location -Path (Split-Path (Split-Path $PSScriptRoot -Parent) -Parent) 6 | docker compose down 7 | } finally { 8 | Set-Location -Path $originalLocation 9 | } -------------------------------------------------------------------------------- /local-development/docker-scripts/windows/dev_start.ps1: -------------------------------------------------------------------------------- 1 | $ErrorActionPreference = 'Stop' 2 | $originalLocation = Get-Location 3 | 4 | try { 5 | Set-Location -Path (Split-Path (Split-Path $PSScriptRoot -Parent) -Parent) 6 | 7 | docker compose down 8 | docker compose up -d --build --force-recreate 9 | 10 | function Test-AllServicesHealthy { 11 | $services = docker compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}" 12 | return !($services -match "(unhealthy|starting)") 13 | } 14 | 15 | while (!(Test-AllServicesHealthy)) { 16 | Write-Host "Waiting for all services to be healthy..." 17 | docker compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}" 18 | Write-Host "" 19 | Start-Sleep -Seconds 5 20 | } 21 | 22 | docker compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}" 23 | 24 | Write-Host "" 25 | Write-Host "=======================================================================" 26 | Write-Host "|| All services are healthy! ||" 27 | Write-Host "=======================================================================" 28 | Write-Host "" 29 | 30 | Write-Host "=======================================================================" 31 | Write-Host "|| You can now run the dev_demo.ps1 script! ||" 32 | Write-Host "=======================================================================" 33 | Write-Host "|| You can navigate to localhost:8080 to hit your backend. ||" 34 | Write-Host "=======================================================================" 35 | Write-Host "|| You can navigate to localhost:8081 to view your event store. ||" 36 | Write-Host "=======================================================================" 37 | Write-Host "|| You can navigate to localhost:8082 to view your projection store. 
||" 38 | Write-Host "=======================================================================" 39 | 40 | } finally { 41 | Set-Location -Path $originalLocation 42 | } -------------------------------------------------------------------------------- /local-development/docker-scripts/windows/dev_start_with_data_deletion.ps1: -------------------------------------------------------------------------------- 1 | $ErrorActionPreference = 'Stop' 2 | $originalLocation = Get-Location 3 | 4 | try { 5 | Set-Location -Path (Split-Path (Split-Path $PSScriptRoot -Parent) -Parent) 6 | 7 | Write-Host "Stopping existing services..." 8 | docker compose down 9 | 10 | Write-Host "Cleaning up data directory..." 11 | if (Test-Path "data") { 12 | Get-ChildItem -Path "data" -Recurse | Remove-Item -Force -Recurse 13 | } 14 | 15 | Write-Host "Starting services..." 16 | docker compose up -d --build --force-recreate 17 | 18 | function Test-AllServicesHealthy { 19 | $services = docker compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}" 20 | return !($services -match "(unhealthy|starting)") 21 | } 22 | 23 | while (!(Test-AllServicesHealthy)) { 24 | Write-Host "Waiting for all services to be healthy..." 25 | docker compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}" 26 | Write-Host "" 27 | Start-Sleep -Seconds 5 28 | } 29 | 30 | docker compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}" 31 | 32 | Write-Host "" 33 | Write-Host "=======================================================================" 34 | Write-Host "|| All services are healthy! ||" 35 | Write-Host "=======================================================================" 36 | Write-Host "" 37 | 38 | Write-Host "=======================================================================" 39 | Write-Host "|| You can now run the dev_demo.ps1 script! ||" 40 | Write-Host "=======================================================================" 41 | Write-Host "|| You can navigate to localhost:8080 to hit your backend. ||" 42 | Write-Host "=======================================================================" 43 | Write-Host "|| You can navigate to localhost:8081 to view your event store. ||" 44 | Write-Host "=======================================================================" 45 | Write-Host "|| You can navigate to localhost:8082 to view your projection store. 
||" 46 | Write-Host "=======================================================================" 47 | } finally { 48 | Set-Location -Path $originalLocation 49 | } -------------------------------------------------------------------------------- /local-development/podman-scripts/linux/dev_demo.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | # Colors for output 5 | GREEN='\033[0;32m' 6 | BLUE='\033[0;34m' 7 | RED='\033[0;31m' 8 | NC='\033[0m' # No Color 9 | YELLOW='\033[1;33m' 10 | 11 | # Arrays for random name generation 12 | FIRST_NAMES=("James" "Mary" "John" "Patricia" "Robert" "Jennifer" "Michael" "Linda" "William" "Elizabeth" 13 | "David" "Barbara" "Richard" "Susan" "Joseph" "Jessica" "Thomas" "Sarah" "Charles" "Karen" 14 | "Emma" "Olivia" "Ava" "Isabella" "Sophia" "Mia" "Charlotte" "Amelia" "Harper" "Evelyn") 15 | 16 | LAST_NAMES=("Smith" "Johnson" "Williams" "Brown" "Jones" "Garcia" "Miller" "Davis" "Rodriguez" "Martinez" 17 | "Hernandez" "Lopez" "Gonzalez" "Wilson" "Anderson" "Thomas" "Taylor" "Moore" "Jackson" "Martin" 18 | "Lee" "Perez" "Thompson" "White" "Harris" "Sanchez" "Clark" "Ramirez" "Lewis" "Robinson") 19 | 20 | # Function to get random name from array 21 | get_random_name() { 22 | local array=("$@") 23 | local index=$((RANDOM % ${#array[@]})) 24 | echo "${array[$index]}" 25 | } 26 | 27 | # Function to display progress 28 | show_progress() { 29 | local duration=$1 30 | local bar_width=40 31 | local sleep_interval=$(echo "scale=4; $duration / $bar_width" | bc) 32 | local progress=0 33 | 34 | echo -n "[" 35 | while [ $progress -lt $bar_width ]; do 36 | echo -n "=" 37 | progress=$((progress + 1)) 38 | sleep $sleep_interval 39 | done 40 | echo -n "]" 41 | echo 42 | } 43 | 44 | # Function to make a POST request with a session token 45 | make_post_request() { 46 | local endpoint=$1 47 | local data=$2 48 | curl -s -X POST \ 49 | -H "Content-Type: application/json" \ 50 | -H "X-With-Session-Token: test-session" \ 51 | -d "$data" \ 52 | "http://localhost:8080$endpoint" 53 | } 54 | 55 | # Function to submit an application 56 | submit_application() { 57 | local first_name=$1 58 | local last_name=$2 59 | local cuisine=$3 60 | local experience=$4 61 | local books=$5 62 | 63 | local data="{ 64 | \"firstName\": \"$first_name\", 65 | \"lastName\": \"$last_name\", 66 | \"favoriteCuisine\": \"$cuisine\", 67 | \"yearsOfProfessionalExperience\": $experience, 68 | \"numberOfCookingBooksRead\": $books 69 | }" 70 | 71 | echo -e "${BLUE}Submitting application for $first_name $last_name...${NC}" 72 | echo -e "Profile:" 73 | echo -e " - Cuisine: $cuisine" 74 | echo -e " - Professional Experience: $experience years" 75 | echo -e " - Cooking Books Read: $books" 76 | echo -e "Expected outcome: $([ $experience == 0 ] && [ $books -gt 0 ] && echo "${GREEN}Should be APPROVED${NC}" || echo "${RED}Should be REJECTED${NC}")" 77 | make_post_request "/api/v1/cooking-club/membership/command/submit-application" "$data" 78 | echo 79 | } 80 | 81 | # Function to get members by cuisine 82 | get_members() { 83 | echo -e "${BLUE}Fetching current members by cuisine...${NC}" 84 | curl -s -X POST \ 85 | -H "Content-Type: application/json" \ 86 | "http://localhost:8080/api/v1/cooking-club/membership/query/members-by-cuisine" 87 | } 88 | 89 | # Main test script 90 | echo -e "${GREEN}Starting Cooking Club Application Demo${NC}" 91 | echo "==================================================" 92 | echo -e "${YELLOW}Application Rules:${NC}" 93 | echo "1. 
Approval Criteria:" 94 | echo " - Must have 0 years of professional experience" 95 | echo " - Must have read at least 1 cooking book" 96 | echo "2. All other combinations will be rejected" 97 | echo "==================================================" 98 | 99 | # Test 1: Should be approved (0 years experience, 3 books read) 100 | echo -e "\n${YELLOW}Test 1: Testing 'Enthusiastic Beginner' Profile${NC}" 101 | echo "This profile represents someone new to professional cooking (0 years)" 102 | echo "but who has studied through books (3 books read)." 103 | submit_application "$(get_random_name "${FIRST_NAMES[@]}")" "$(get_random_name "${LAST_NAMES[@]}")" "Italian" 0 3 104 | show_progress 1 105 | 106 | # Test 2: Should not be approved (2 years experience, 5 books read) 107 | echo -e "\n${YELLOW}Test 2: Testing 'Experienced Professional' Profile${NC}" 108 | echo "This profile represents someone with professional experience (2 years)" 109 | echo "and theoretical knowledge (5 books read). Despite the knowledge," 110 | echo "professional experience disqualifies them." 111 | submit_application "$(get_random_name "${FIRST_NAMES[@]}")" "$(get_random_name "${LAST_NAMES[@]}")" "French" 2 5 112 | show_progress 1 113 | 114 | # Test 3: Should be approved (0 years experience, 1 book read) 115 | echo -e "\n${YELLOW}Test 3: Testing 'Minimal Qualifying' Profile${NC}" 116 | echo "This profile represents someone meeting the minimum requirements:" 117 | echo "no professional experience and has read exactly 1 book." 118 | submit_application "$(get_random_name "${FIRST_NAMES[@]}")" "$(get_random_name "${LAST_NAMES[@]}")" "Japanese" 0 1 119 | show_progress 1 120 | 121 | # Test 4: Should not be approved (0 years experience, 0 books read) 122 | echo -e "\n${YELLOW}Test 4: Testing 'Complete Beginner' Profile${NC}" 123 | echo "This profile represents someone with no professional experience" 124 | echo "but also no theoretical knowledge (0 books read)." 125 | submit_application "$(get_random_name "${FIRST_NAMES[@]}")" "$(get_random_name "${LAST_NAMES[@]}")" "Mexican" 0 0 126 | show_progress 1 127 | 128 | # Final Test: Display current members 129 | echo -e "\n${YELLOW}Outcome:${NC}" 130 | echo "Current members grouped by their preferred cuisine." 131 | echo "Only approved applications should appear in these results." 132 | sleep 1 133 | get_members 134 | echo " " 135 | echo " " 136 | 137 | echo -e "\n${GREEN}Demo Completed${NC}" 138 | echo "==================================================" -------------------------------------------------------------------------------- /local-development/podman-scripts/linux/dev_shutdown.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | cd ../../ 5 | 6 | podman compose down -------------------------------------------------------------------------------- /local-development/podman-scripts/linux/dev_start.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | cd ../../ 5 | mkdir -p data/event-sourcing-event-bus data/event-sourcing-event-store/pg-data data/event-sourcing-projection-store/db-data data/event-sourcing-projection-store/db-config 6 | podman compose down 7 | podman compose up -d --build --force-recreate 8 | 9 | all_services_fully_healthy() { 10 | ! podman compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}" | grep -q -E "(unhealthy|starting)" 11 | } 12 | 13 | while ! 
all_services_fully_healthy; do 14 | echo "Waiting for all services to be healthy..." 15 | podman compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}" 16 | echo "" 17 | sleep 5 18 | done 19 | 20 | podman compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}" 21 | 22 | echo "" 23 | echo "=======================================================================" 24 | echo "|| All services are healthy! ||" 25 | echo "=======================================================================" 26 | echo "" 27 | 28 | echo "=======================================================================" 29 | echo "|| You can now run the dev_demo.sh script! ||" 30 | echo "=======================================================================" 31 | echo "|| You can navigate to localhost:8080 to hit your backend. ||" 32 | echo "=======================================================================" 33 | echo "|| You can navigate to localhost:8081 to view your event store. ||" 34 | echo "=======================================================================" 35 | echo "|| You can navigate to localhost:8082 to view your projection store. ||" 36 | echo "=======================================================================" 37 | -------------------------------------------------------------------------------- /local-development/podman-scripts/linux/dev_start_with_data_deletion.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | cd ../../ 5 | 6 | echo "Going into root mode to delete some podman volumes" 7 | sudo echo "Root mode: OK" 8 | podman compose down 9 | sudo rm -Rf data/* 10 | mkdir -p data/event-sourcing-event-bus data/event-sourcing-event-store/pg-data data/event-sourcing-projection-store/db-data data/event-sourcing-projection-store/db-config 11 | podman compose up -d --build --force-recreate 12 | 13 | 14 | 15 | all_services_fully_healthy() { 16 | ! podman compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}" | grep -q -E "(unhealthy|starting)" 17 | } 18 | 19 | while ! all_services_fully_healthy; do 20 | echo "Waiting for all services to be healthy..." 21 | podman compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}" 22 | echo "" 23 | sleep 5 24 | done 25 | 26 | podman compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}" 27 | 28 | echo "" 29 | echo "=======================================================================" 30 | echo "|| All services are healthy! ||" 31 | echo "=======================================================================" 32 | echo "" 33 | 34 | echo "=======================================================================" 35 | echo "|| You can now run the dev_demo.sh script! ||" 36 | echo "=======================================================================" 37 | echo "|| You can navigate to localhost:8080 to hit your backend. ||" 38 | echo "=======================================================================" 39 | echo "|| You can navigate to localhost:8081 to view your event store. ||" 40 | echo "=======================================================================" 41 | echo "|| You can navigate to localhost:8082 to view your projection store. 
||" 42 | echo "=======================================================================" 43 | -------------------------------------------------------------------------------- /local-development/podman-scripts/mac/dev_demo.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | # Colors for output 5 | GREEN='\033[0;32m' 6 | BLUE='\033[0;34m' 7 | RED='\033[0;31m' 8 | NC='\033[0m' # No Color 9 | YELLOW='\033[1;33m' 10 | 11 | # Arrays for random name generation 12 | FIRST_NAMES=("James" "Mary" "John" "Patricia" "Robert" "Jennifer" "Michael" "Linda" "William" "Elizabeth" 13 | "David" "Barbara" "Richard" "Susan" "Joseph" "Jessica" "Thomas" "Sarah" "Charles" "Karen" 14 | "Emma" "Olivia" "Ava" "Isabella" "Sophia" "Mia" "Charlotte" "Amelia" "Harper" "Evelyn") 15 | 16 | LAST_NAMES=("Smith" "Johnson" "Williams" "Brown" "Jones" "Garcia" "Miller" "Davis" "Rodriguez" "Martinez" 17 | "Hernandez" "Lopez" "Gonzalez" "Wilson" "Anderson" "Thomas" "Taylor" "Moore" "Jackson" "Martin" 18 | "Lee" "Perez" "Thompson" "White" "Harris" "Sanchez" "Clark" "Ramirez" "Lewis" "Robinson") 19 | 20 | # Function to get random name from array 21 | get_random_name() { 22 | local array=("$@") 23 | local index=$((RANDOM % ${#array[@]})) 24 | echo "${array[$index]}" 25 | } 26 | 27 | # Function to display progress 28 | show_progress() { 29 | local duration=$1 30 | local bar_width=40 31 | local sleep_interval=$(echo "scale=4; $duration / $bar_width" | bc) 32 | local progress=0 33 | 34 | echo -n "[" 35 | while [ $progress -lt $bar_width ]; do 36 | echo -n "=" 37 | progress=$((progress + 1)) 38 | sleep $sleep_interval 39 | done 40 | echo -n "]" 41 | echo 42 | } 43 | 44 | # Function to make a POST request with a session token 45 | make_post_request() { 46 | local endpoint=$1 47 | local data=$2 48 | curl -s -X POST \ 49 | -H "Content-Type: application/json" \ 50 | -H "X-With-Session-Token: test-session" \ 51 | -d "$data" \ 52 | "http://localhost:8080$endpoint" 53 | } 54 | 55 | # Function to submit an application 56 | submit_application() { 57 | local first_name=$1 58 | local last_name=$2 59 | local cuisine=$3 60 | local experience=$4 61 | local books=$5 62 | 63 | local data="{ 64 | \"firstName\": \"$first_name\", 65 | \"lastName\": \"$last_name\", 66 | \"favoriteCuisine\": \"$cuisine\", 67 | \"yearsOfProfessionalExperience\": $experience, 68 | \"numberOfCookingBooksRead\": $books 69 | }" 70 | 71 | echo -e "${BLUE}Submitting application for $first_name $last_name...${NC}" 72 | echo -e "Profile:" 73 | echo -e " - Cuisine: $cuisine" 74 | echo -e " - Professional Experience: $experience years" 75 | echo -e " - Cooking Books Read: $books" 76 | echo -e "Expected outcome: $([ $experience == 0 ] && [ $books -gt 0 ] && echo "${GREEN}Should be APPROVED${NC}" || echo "${RED}Should be REJECTED${NC}")" 77 | make_post_request "/api/v1/cooking-club/membership/command/submit-application" "$data" 78 | echo 79 | } 80 | 81 | # Function to get members by cuisine 82 | get_members() { 83 | echo -e "${BLUE}Fetching current members by cuisine...${NC}" 84 | curl -s -X POST \ 85 | -H "Content-Type: application/json" \ 86 | "http://localhost:8080/api/v1/cooking-club/membership/query/members-by-cuisine" 87 | } 88 | 89 | # Main test script 90 | echo -e "${GREEN}Starting Cooking Club Application Demo${NC}" 91 | echo "==================================================" 92 | echo -e "${YELLOW}Application Rules:${NC}" 93 | echo "1. 
Approval Criteria:" 94 | echo " - Must have 0 years of professional experience" 95 | echo " - Must have read at least 1 cooking book" 96 | echo "2. All other combinations will be rejected" 97 | echo "==================================================" 98 | 99 | # Test 1: Should be approved (0 years experience, 3 books read) 100 | echo -e "\n${YELLOW}Test 1: Testing 'Enthusiastic Beginner' Profile${NC}" 101 | echo "This profile represents someone new to professional cooking (0 years)" 102 | echo "but who has studied through books (3 books read)." 103 | submit_application "$(get_random_name "${FIRST_NAMES[@]}")" "$(get_random_name "${LAST_NAMES[@]}")" "Italian" 0 3 104 | show_progress 1 105 | 106 | # Test 2: Should not be approved (2 years experience, 5 books read) 107 | echo -e "\n${YELLOW}Test 2: Testing 'Experienced Professional' Profile${NC}" 108 | echo "This profile represents someone with professional experience (2 years)" 109 | echo "and theoretical knowledge (5 books read). Despite the knowledge," 110 | echo "professional experience disqualifies them." 111 | submit_application "$(get_random_name "${FIRST_NAMES[@]}")" "$(get_random_name "${LAST_NAMES[@]}")" "French" 2 5 112 | show_progress 1 113 | 114 | # Test 3: Should be approved (0 years experience, 1 book read) 115 | echo -e "\n${YELLOW}Test 3: Testing 'Minimal Qualifying' Profile${NC}" 116 | echo "This profile represents someone meeting the minimum requirements:" 117 | echo "no professional experience and has read exactly 1 book." 118 | submit_application "$(get_random_name "${FIRST_NAMES[@]}")" "$(get_random_name "${LAST_NAMES[@]}")" "Japanese" 0 1 119 | show_progress 1 120 | 121 | # Test 4: Should not be approved (0 years experience, 0 books read) 122 | echo -e "\n${YELLOW}Test 4: Testing 'Complete Beginner' Profile${NC}" 123 | echo "This profile represents someone with no professional experience" 124 | echo "but also no theoretical knowledge (0 books read)." 125 | submit_application "$(get_random_name "${FIRST_NAMES[@]}")" "$(get_random_name "${LAST_NAMES[@]}")" "Mexican" 0 0 126 | show_progress 1 127 | 128 | # Final Test: Display current members 129 | echo -e "\n${YELLOW}Outcome:${NC}" 130 | echo "Current members grouped by their preferred cuisine." 131 | echo "Only approved applications should appear in these results." 132 | sleep 1 133 | get_members 134 | echo " " 135 | echo " " 136 | 137 | echo -e "\n${GREEN}Demo Completed${NC}" 138 | echo "==================================================" -------------------------------------------------------------------------------- /local-development/podman-scripts/mac/dev_shutdown.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | cd ../../ 5 | 6 | podman compose down -------------------------------------------------------------------------------- /local-development/podman-scripts/mac/dev_start.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | cd ../../ 5 | 6 | podman compose down 7 | mkdir -p data/event-sourcing-event-bus data/event-sourcing-event-store/pg-data data/event-sourcing-projection-store/db-data data/event-sourcing-projection-store/db-config 8 | podman compose up -d --build --force-recreate 9 | 10 | all_services_fully_healthy() { 11 | ! podman compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}" | grep -q -E "(unhealthy|starting)" 12 | } 13 | 14 | while ! 
all_services_fully_healthy; do
15 |   echo "Waiting for all services to be healthy..."
16 |   podman compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}"
17 |   echo ""
18 |   sleep 5
19 | done
20 |
21 | podman compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}"
22 |
23 | echo ""
24 | echo "======================================================================="
25 | echo "|| All services are healthy! ||"
26 | echo "======================================================================="
27 | echo ""
28 |
29 | echo "======================================================================="
30 | echo "|| You can now run the dev_demo.sh script! ||"
31 | echo "======================================================================="
32 | echo "|| You can navigate to localhost:8080 to hit your backend. ||"
33 | echo "======================================================================="
34 | echo "|| You can navigate to localhost:8081 to view your event store. ||"
35 | echo "======================================================================="
36 | echo "|| You can navigate to localhost:8082 to view your projection store. ||"
37 | echo "======================================================================="
38 |
--------------------------------------------------------------------------------
/local-development/podman-scripts/mac/dev_start_with_data_deletion.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -e
3 |
4 | cd ../../
5 |
6 | echo "Going into root mode to delete some podman volumes"
7 | sudo echo "Root mode: OK"
8 | podman compose down
9 | sudo rm -Rf data/*
10 | mkdir -p data/event-sourcing-event-bus data/event-sourcing-event-store/pg-data data/event-sourcing-projection-store/db-data data/event-sourcing-projection-store/db-config
11 | podman compose up -d --build --force-recreate
12 |
13 |
14 |
15 | all_services_fully_healthy() {
16 |   ! podman compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}" | grep -q -E "(unhealthy|starting)"
17 | }
18 |
19 | while ! all_services_fully_healthy; do
20 |   echo "Waiting for all services to be healthy..."
21 |   podman compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}"
22 |   echo ""
23 |   sleep 5
24 | done
25 |
26 | podman compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}"
27 |
28 | echo ""
29 | echo "======================================================================="
30 | echo "|| All services are healthy! ||"
31 | echo "======================================================================="
32 | echo ""
33 |
34 | echo "======================================================================="
35 | echo "|| You can now run the dev_demo.sh script! ||"
36 | echo "======================================================================="
37 | echo "|| You can navigate to localhost:8080 to hit your backend. ||"
38 | echo "======================================================================="
39 | echo "|| You can navigate to localhost:8081 to view your event store. ||"
40 | echo "======================================================================="
41 | echo "|| You can navigate to localhost:8082 to view your projection store. ||"
42 | echo "======================================================================="
43 |
--------------------------------------------------------------------------------
/local-development/podman-scripts/windows/dev_demo.ps1:
--------------------------------------------------------------------------------
1 | $ErrorActionPreference = 'Stop'
2 |
3 | # Function for colored output
4 | function Write-Colored {
5 |     param(
6 |         [string]$Text,
7 |         [string]$Color
8 |     )
9 |     switch($Color) {
10 |         "GREEN" { Write-Host $Text -ForegroundColor Green }
11 |         "BLUE" { Write-Host $Text -ForegroundColor Blue }
12 |         "RED" { Write-Host $Text -ForegroundColor Red }
13 |         "YELLOW" { Write-Host $Text -ForegroundColor Yellow }
14 |         default { Write-Host $Text }
15 |     }
16 | }
17 |
18 | # Arrays for random name generation
19 | $FIRST_NAMES = @("James", "Mary", "John", "Patricia", "Robert", "Jennifer", "Michael", "Linda", "William", "Elizabeth",
20 |     "David", "Barbara", "Richard", "Susan", "Joseph", "Jessica", "Thomas", "Sarah", "Charles", "Karen",
21 |     "Emma", "Olivia", "Ava", "Isabella", "Sophia", "Mia", "Charlotte", "Amelia", "Harper", "Evelyn")
22 |
23 | $LAST_NAMES = @("Smith", "Johnson", "Williams", "Brown", "Jones", "Garcia", "Miller", "Davis", "Rodriguez", "Martinez",
24 |     "Hernandez", "Lopez", "Gonzalez", "Wilson", "Anderson", "Thomas", "Taylor", "Moore", "Jackson", "Martin",
25 |     "Lee", "Perez", "Thompson", "White", "Harris", "Sanchez", "Clark", "Ramirez", "Lewis", "Robinson")
26 |
27 | # Function to get random name from array
28 | function Get-RandomName {
29 |     param([string[]]$names)
30 |     $index = Get-Random -Minimum 0 -Maximum $names.Count
31 |     return $names[$index]
32 | }
33 |
34 | # Function to display progress
35 | function Show-Progress {
36 |     param([int]$duration)
37 |     $barWidth = 40
38 |     $sleepInterval = $duration / $barWidth
39 |     $progress = 0
40 |
41 |     Write-Host -NoNewline "["
42 |     while ($progress -lt $barWidth) {
43 |         Write-Host -NoNewline "="
44 |         $progress++
45 |         Start-Sleep -Milliseconds ($sleepInterval * 1000)
46 |     }
47 |     Write-Host -NoNewline "]"
48 |     Write-Host ""
49 | }
50 |
51 | # Function to make a POST request with a session token
52 | function Invoke-PostRequest {
53 |     param(
54 |         [string]$endpoint,
55 |         [string]$data
56 |     )
57 |     $headers = @{
58 |         "Content-Type" = "application/json"
59 |         "X-With-Session-Token" = "test-session"
60 |     }
61 |     return Invoke-RestMethod -Uri "http://localhost:8080$endpoint" -Method Post -Headers $headers -Body $data
62 | }
63 |
64 | # Function to submit an application
65 | function Submit-Application {
66 |     param(
67 |         [string]$firstName,
68 |         [string]$lastName,
69 |         [string]$cuisine,
70 |         [int]$experience,
71 |         [int]$books
72 |     )
73 |
74 |     $data = @{
75 |         firstName = $firstName
76 |         lastName = $lastName
77 |         favoriteCuisine = $cuisine
78 |         yearsOfProfessionalExperience = $experience
79 |         numberOfCookingBooksRead = $books
80 |     } | ConvertTo-Json
81 |
82 |     Write-Colored "Submitting application for $firstName $lastName..." "BLUE"
83 |     Write-Host "Profile:"
84 |     Write-Host " - Cuisine: $cuisine"
85 |     Write-Host " - Professional Experience: $experience years"
86 |     Write-Host " - Cooking Books Read: $books"
87 |
88 |     $expectedOutcome = if ($experience -eq 0 -and $books -gt 0) { "Should be APPROVED" } else { "Should be REJECTED" }
89 |     Write-Colored "Expected outcome: $expectedOutcome" $(if ($experience -eq 0 -and $books -gt 0) { "GREEN" } else { "RED" })
90 |
91 |     Invoke-PostRequest -endpoint "/api/v1/cooking-club/membership/command/submit-application" -data $data
92 |     Write-Host ""
93 | }
94 |
95 | # Function to get members by cuisine
96 | function Get-Members {
97 |     Write-Colored "Fetching current members by cuisine..." "BLUE"
98 |     $response = Invoke-RestMethod -Uri "http://localhost:8080/api/v1/cooking-club/membership/query/members-by-cuisine" -Method Post -ContentType "application/json"
99 |     return $response
100 | }
101 |
102 | # Main test script
103 | Write-Colored "Starting Cooking Club Application Demo" "GREEN"
104 | Write-Host "=================================================="
105 | Write-Colored "Application Rules:" "YELLOW"
106 | Write-Host "1. Approval Criteria:"
107 | Write-Host " - Must have 0 years of professional experience"
108 | Write-Host " - Must have read at least 1 cooking book"
109 | Write-Host "2. All other combinations will be rejected"
110 | Write-Host "=================================================="
111 |
112 | # Test 1: Should be approved (0 years experience, 3 books read)
113 | Write-Host ""
114 | Write-Colored "Test 1: Testing 'Enthusiastic Beginner' Profile" "YELLOW"
115 | Write-Host "This profile represents someone new to professional cooking (0 years)"
116 | Write-Host "but who has studied through books (3 books read)."
117 | Submit-Application -firstName (Get-RandomName $FIRST_NAMES) -lastName (Get-RandomName $LAST_NAMES) -cuisine "Italian" -experience 0 -books 3
118 | Show-Progress -duration 1
119 |
120 | # Test 2: Should not be approved (2 years experience, 5 books read)
121 | Write-Host ""
122 | Write-Colored "Test 2: Testing 'Experienced Professional' Profile" "YELLOW"
123 | Write-Host "This profile represents someone with professional experience (2 years)"
124 | Write-Host "and theoretical knowledge (5 books read). Despite the knowledge,"
125 | Write-Host "professional experience disqualifies them."
126 | Submit-Application -firstName (Get-RandomName $FIRST_NAMES) -lastName (Get-RandomName $LAST_NAMES) -cuisine "French" -experience 2 -books 5
127 | Show-Progress -duration 1
128 |
129 | # Test 3: Should be approved (0 years experience, 1 book read)
130 | Write-Host ""
131 | Write-Colored "Test 3: Testing 'Minimal Qualifying' Profile" "YELLOW"
132 | Write-Host "This profile represents someone meeting the minimum requirements:"
133 | Write-Host "no professional experience and has read exactly 1 book."
134 | Submit-Application -firstName (Get-RandomName $FIRST_NAMES) -lastName (Get-RandomName $LAST_NAMES) -cuisine "Japanese" -experience 0 -books 1
135 | Show-Progress -duration 1
136 |
137 | # Test 4: Should not be approved (0 years experience, 0 books read)
138 | Write-Host ""
139 | Write-Colored "Test 4: Testing 'Complete Beginner' Profile" "YELLOW"
140 | Write-Host "This profile represents someone with no professional experience"
141 | Write-Host "but also no theoretical knowledge (0 books read)."
142 | Submit-Application -firstName (Get-RandomName $FIRST_NAMES) -lastName (Get-RandomName $LAST_NAMES) -cuisine "Mexican" -experience 0 -books 0
143 | Show-Progress -duration 1
144 |
145 | # Final Test: Display current members
146 | Write-Host ""
147 | Write-Colored "Outcome:" "YELLOW"
148 | Write-Host "Current members grouped by their preferred cuisine."
149 | Write-Host "Only approved applications should appear in these results."
150 | Start-Sleep -Seconds 1
151 | Get-Members
152 |
153 | Write-Host ""
154 | Write-Host ""
155 | Write-Host ""
156 | Write-Colored "Demo Completed" "GREEN"
157 | Write-Host "=================================================="
--------------------------------------------------------------------------------
/local-development/podman-scripts/windows/dev_shutdown.ps1:
--------------------------------------------------------------------------------
1 | $ErrorActionPreference = 'Stop'
2 | $originalLocation = Get-Location
3 |
4 | try {
5 |     Set-Location -Path (Split-Path (Split-Path $PSScriptRoot -Parent) -Parent)
6 |     podman compose down
7 | } finally {
8 |     Set-Location -Path $originalLocation
9 | }
--------------------------------------------------------------------------------
/local-development/podman-scripts/windows/dev_start.ps1:
--------------------------------------------------------------------------------
1 | $ErrorActionPreference = 'Stop'
2 | $originalLocation = Get-Location
3 |
4 | try {
5 |     Set-Location -Path (Split-Path (Split-Path $PSScriptRoot -Parent) -Parent)
6 |
7 |     podman compose down
8 |
9 |     $dataDirs = @(
10 |         'data/event-sourcing-event-bus',
11 |         'data/event-sourcing-event-store/pg-data',
12 |         'data/event-sourcing-projection-store/db-data',
13 |         'data/event-sourcing-projection-store/db-config'
14 |     )
15 |
16 |     foreach ($dir in $dataDirs) {
17 |         New-Item -ItemType Directory -Force -Path $dir
18 |         $acl = Get-Acl $dir
19 |         $acl.SetAccessRuleProtection($true, $false)
20 |         $ownerRule = New-Object System.Security.AccessControl.FileSystemAccessRule(
21 |             [System.Security.Principal.WindowsIdentity]::GetCurrent().Name,
22 |             "FullControl",
23 |             "ContainerInherit,ObjectInherit",
24 |             "None",
25 |             "Allow"
26 |         )
27 |         $acl.AddAccessRule($ownerRule)
28 |         $readExecuteRule = New-Object System.Security.AccessControl.FileSystemAccessRule(
29 |             "Everyone",
30 |             "ReadAndExecute",
31 |             "ContainerInherit,ObjectInherit",
32 |             "None",
33 |             "Allow"
34 |         )
35 |         $acl.AddAccessRule($readExecuteRule)
36 |         Set-Acl $dir $acl
37 |     }
38 |
39 |     podman compose up -d --build --force-recreate
40 |
41 |     function Test-AllServicesHealthy {
42 |         $services = podman compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}"
43 |         return !($services -match "(unhealthy|starting)")
44 |     }
45 |
46 |     while (!(Test-AllServicesHealthy)) {
47 |         Write-Host "Waiting for all services to be healthy..."
48 |         podman compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}"
49 |         Write-Host ""
50 |         Start-Sleep -Seconds 5
51 |     }
52 |
53 |     podman compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}"
54 |
55 |     Write-Host ""
56 |     Write-Host "======================================================================="
57 |     Write-Host "|| All services are healthy! ||"
58 |     Write-Host "======================================================================="
59 |     Write-Host ""
60 |
61 |     Write-Host "======================================================================="
62 |     Write-Host "|| You can now run the dev_demo.ps1 script! ||"
63 |     Write-Host "======================================================================="
64 |     Write-Host "|| You can navigate to localhost:8080 to hit your backend. ||"
65 |     Write-Host "======================================================================="
66 |     Write-Host "|| You can navigate to localhost:8081 to view your event store. ||"
67 |     Write-Host "======================================================================="
68 |     Write-Host "|| You can navigate to localhost:8082 to view your projection store. ||"
69 |     Write-Host "======================================================================="
70 |
71 | } finally {
72 |     Set-Location -Path $originalLocation
73 | }
--------------------------------------------------------------------------------
/local-development/podman-scripts/windows/dev_start_with_data_deletion.ps1:
--------------------------------------------------------------------------------
1 | $ErrorActionPreference = 'Stop'
2 | $originalLocation = Get-Location
3 |
4 | try {
5 |     Set-Location -Path (Split-Path (Split-Path $PSScriptRoot -Parent) -Parent)
6 |
7 |     Write-Host "Stopping existing services..."
8 |     podman compose down
9 |
10 |     Write-Host "Cleaning up data directory..."
11 |     if (Test-Path "data") {
12 |         Get-ChildItem -Path "data" -Recurse | Remove-Item -Force -Recurse
13 |     }
14 |
15 |     $dataDirs = @(
16 |         'data/event-sourcing-event-bus',
17 |         'data/event-sourcing-event-store/pg-data',
18 |         'data/event-sourcing-projection-store/db-data',
19 |         'data/event-sourcing-projection-store/db-config'
20 |     )
21 |
22 |     foreach ($dir in $dataDirs) {
23 |         New-Item -ItemType Directory -Force -Path $dir
24 |         $acl = Get-Acl $dir
25 |         $acl.SetAccessRuleProtection($true, $false)
26 |         $ownerRule = New-Object System.Security.AccessControl.FileSystemAccessRule(
27 |             [System.Security.Principal.WindowsIdentity]::GetCurrent().Name,
28 |             "FullControl",
29 |             "ContainerInherit,ObjectInherit",
30 |             "None",
31 |             "Allow"
32 |         )
33 |         $acl.AddAccessRule($ownerRule)
34 |         $readExecuteRule = New-Object System.Security.AccessControl.FileSystemAccessRule(
35 |             "Everyone",
36 |             "ReadAndExecute",
37 |             "ContainerInherit,ObjectInherit",
38 |             "None",
39 |             "Allow"
40 |         )
41 |         $acl.AddAccessRule($readExecuteRule)
42 |         Set-Acl $dir $acl
43 |     }
44 |
45 |     Write-Host "Starting services..."
46 |     podman compose up -d --build --force-recreate
47 |
48 |     function Test-AllServicesHealthy {
49 |         $services = podman compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}"
50 |         return !($services -match "(unhealthy|starting)")
51 |     }
52 |
53 |     while (!(Test-AllServicesHealthy)) {
54 |         Write-Host "Waiting for all services to be healthy..."
55 |         podman compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}"
56 |         Write-Host ""
57 |         Start-Sleep -Seconds 5
58 |     }
59 |
60 |     podman compose ps --format "table {{.ID}}\t{{.Name}}\t{{.Status}}"
61 |
62 |     Write-Host ""
63 |     Write-Host "======================================================================="
64 |     Write-Host "|| All services are healthy! ||"
65 |     Write-Host "======================================================================="
66 |     Write-Host ""
67 |
68 |     Write-Host "======================================================================="
69 |     Write-Host "|| You can now run the dev_demo.ps1 script! ||"
70 |     Write-Host "======================================================================="
71 |     Write-Host "|| You can navigate to localhost:8080 to hit your backend. ||"
72 |     Write-Host "======================================================================="
73 |     Write-Host "|| You can navigate to localhost:8081 to view your event store. ||"
74 |     Write-Host "======================================================================="
75 |     Write-Host "|| You can navigate to localhost:8082 to view your projection store. ||"
76 |     Write-Host "======================================================================="
77 | } finally {
78 |     Set-Location -Path $originalLocation
79 | }
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "event-sourcing-python"
3 | version = "1.0.0"
4 | description = "Event Sourcing in Python"
5 |
6 | [tool.poetry.dependencies]
7 | python = "^3.11"
8 | flask = "^3.0.0"
9 | flask-cors = "^4.0.0"
10 | pydantic = "^2.5.2"
11 | psycopg2-binary = "^2.9.9"
12 | pymongo = "^4.6.1"
13 | python-dotenv = "^1.0.0"
14 | dependency-injector = "^4.41.0"
15 | structlog = "^24.1.0"
16 | cryptography = "^41.0.7"
17 | gunicorn = "^21.2.0"
18 | uvicorn = "^0.25.0"
19 | pytz = "2024.1"
20 |
21 |
22 | [build-system]
23 | requires = ["poetry-core>=1.0.0"]
24 | build-backend = "poetry.core.masonry.api"
25 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | flask==3.0.0
2 | pydantic==2.5.2
3 | psycopg2-binary==2.9.9
4 | pymongo==4.6.1
5 | python-dotenv==1.0.0
6 | structlog==24.1.0
7 | cryptography==41.0.7
8 | flask-cors==4.0.0
9 | gunicorn==21.2.0
10 | uvicorn==0.25.0
11 | asgiref==3.7.2
12 | pytz==2024.1
--------------------------------------------------------------------------------