├── .gitignore
├── README.md
├── compose
├── cqrs.yml
├── event-sourcing.yml
├── saga-choreography.yml
└── saga-orchestration.yml
├── cqrs-example
├── Dockerfile
├── README.md
├── alembic.ini
├── app
│ ├── __init__.py
│ ├── cli.py
│ ├── db.py
│ ├── event_consumer.py
│ ├── main.py
│ ├── models.py
│ ├── producers.py
│ ├── services.py
│ └── settings.py
├── migrations
│ ├── README
│ ├── env.py
│ ├── script.py.mako
│ └── versions
│ │ └── 1191d11f60ca_initial_migration.py
├── poetry.lock
├── pyproject.toml
├── resources
│ └── cqrs-pattern.png
├── start_dev.sh
└── wait-for-it.sh
├── event-sourcing-example
├── README.md
├── booking
│ ├── Dockerfile
│ ├── alembic.ini
│ ├── app
│ │ ├── __init__.py
│ │ ├── amqp_client.py
│ │ ├── applications.py
│ │ ├── cli.py
│ │ ├── db.py
│ │ ├── domainmodels.py
│ │ ├── main.py
│ │ ├── models.py
│ │ ├── services.py
│ │ └── settings.py
│ ├── migrations
│ │ ├── README
│ │ ├── env.py
│ │ ├── script.py.mako
│ │ └── versions
│ │ │ ├── 5ffab98ff42c_add_domain_uuid_to_booking_sqlalchemy_.py
│ │ │ └── fa1e57d87087_initial_migrations.py
│ ├── poetry.lock
│ ├── pyproject.toml
│ ├── start_dev.sh
│ └── wait-for-it.sh
├── db_init_scripts
│ └── docker_postgres_init.sql
├── parking
│ ├── Dockerfile
│ ├── alembic.ini
│ ├── app
│ │ ├── __init__.py
│ │ ├── amqp_client.py
│ │ ├── cli.py
│ │ ├── db.py
│ │ ├── main.py
│ │ ├── models.py
│ │ ├── services.py
│ │ └── settings.py
│ ├── migrations
│ │ ├── README
│ │ ├── env.py
│ │ ├── script.py.mako
│ │ └── versions
│ │ │ └── 9201fb72ddb9_initial_migration.py
│ ├── poetry.lock
│ ├── pyproject.toml
│ ├── start_dev.sh
│ └── wait-for-it.sh
└── resources
│ └── event-sourcing-pattern.png
├── saga-choreography-example
├── README.md
├── billing
│ ├── Dockerfile
│ ├── alembic.ini
│ ├── app
│ │ ├── __init__.py
│ │ ├── amqp_client.py
│ │ ├── cli.py
│ │ ├── db.py
│ │ ├── dependencies.py
│ │ ├── events.py
│ │ ├── main.py
│ │ ├── models.py
│ │ ├── pydantic_models.py
│ │ ├── services.py
│ │ └── settings.py
│ ├── migrations
│ │ ├── README
│ │ ├── env.py
│ │ ├── script.py.mako
│ │ └── versions
│ │ │ └── 9cbf6f752227_initial_migration.py
│ ├── poetry.lock
│ ├── pyproject.toml
│ ├── start_dev.sh
│ └── wait-for-it.sh
├── booking
│ ├── Dockerfile
│ ├── alembic.ini
│ ├── app
│ │ ├── __init__.py
│ │ ├── amqp_client.py
│ │ ├── cli.py
│ │ ├── db.py
│ │ ├── dependencies.py
│ │ ├── main.py
│ │ ├── models.py
│ │ ├── pydantic_models.py
│ │ ├── services.py
│ │ └── settings.py
│ ├── migrations
│ │ ├── README
│ │ ├── env.py
│ │ ├── script.py.mako
│ │ └── versions
│ │ │ ├── 29af7026cb10_rename_booking_parking_slot_uuid_to_.py
│ │ │ ├── a2f83ffe751f_initial_migration.py
│ │ │ ├── caeb28082687_removal_of_description_column.py
│ │ │ └── d6bb61f1e755_alter_parking_space_no_to_parking_slot_.py
│ ├── poetry.lock
│ ├── pyproject.toml
│ ├── start_dev.sh
│ └── wait-for-it.sh
├── parking
│ ├── Dockerfile
│ ├── alembic.ini
│ ├── app
│ │ ├── __init__.py
│ │ ├── amqp_client.py
│ │ ├── cli.py
│ │ ├── db.py
│ │ ├── dependencies.py
│ │ ├── main.py
│ │ ├── models.py
│ │ ├── pydantic_models.py
│ │ ├── services.py
│ │ ├── settings.py
│ │ └── signals.py
│ ├── migrations
│ │ ├── README
│ │ ├── env.py
│ │ ├── script.py.mako
│ │ └── versions
│ │ │ ├── 2efeb1766d65_add_status_field.py
│ │ │ └── 3a414c21545c_inital_migrations.py
│ ├── poetry.lock
│ ├── pyproject.toml
│ ├── start_dev.sh
│ └── wait-for-it.sh
└── resources
│ ├── saga-choreography-pattern-rb-transaction.png
│ └── saga-choreography-pattern.png
└── saga-orchestration-example
├── README.md
├── billing
├── Dockerfile
├── alembic.ini
├── app
│ ├── __init__.py
│ ├── amqp_client.py
│ ├── cli.py
│ ├── db.py
│ ├── main.py
│ ├── models.py
│ ├── services.py
│ └── settings.py
├── migrations
│ ├── README
│ ├── env.py
│ ├── script.py.mako
│ └── versions
│ │ └── 74f91803e544_initial_migration.py
├── poetry.lock
├── pyproject.toml
├── start_dev.sh
└── wait-for-it.sh
├── booking
├── Dockerfile
├── alembic.ini
├── app
│ ├── __init__.py
│ ├── cli.py
│ ├── db.py
│ ├── main.py
│ ├── models.py
│ ├── sagas.py
│ ├── services.py
│ └── settings.py
├── migrations
│ ├── README
│ ├── env.py
│ ├── script.py.mako
│ └── versions
│ │ └── fec7a5b19428_initial_migration.py
├── poetry.lock
├── pyproject.toml
├── start_dev.sh
└── wait-for-it.sh
├── parking
├── Dockerfile
├── alembic.ini
├── app
│ ├── __init__.py
│ ├── amqp_client.py
│ ├── cli.py
│ ├── db.py
│ ├── main.py
│ ├── models.py
│ ├── services.py
│ └── settings.py
├── migrations
│ ├── README
│ ├── env.py
│ ├── script.py.mako
│ └── versions
│ │ ├── 5e95058bcf50_update_parkingslot_status_default_value_.py
│ │ └── 6935a5c10469_initial_migration.py
├── poetry.lock
├── pyproject.toml
├── start_dev.sh
└── wait-for-it.sh
└── resources
├── saga-orchestration-pattern-rb-transaction.png
└── saga-orchestration-pattern.png
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | *.py,cover
50 | .hypothesis/
51 | .pytest_cache/
52 | cover/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | .pybuilder/
76 | target/
77 |
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 |
81 | # IPython
82 | profile_default/
83 | ipython_config.py
84 |
85 | # pyenv
86 | # For a library or package, you might want to ignore these files since the code is
87 | # intended to run in multiple environments; otherwise, check them in:
88 | # .python-version
89 |
90 | # pipenv
91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
94 | # install all needed dependencies.
95 | #Pipfile.lock
96 |
97 | # poetry
98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
99 | # This is especially recommended for binary packages to ensure reproducibility, and is more
100 | # commonly ignored for libraries.
101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
102 | #poetry.lock
103 |
104 | # pdm
105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
106 | #pdm.lock
107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
108 | # in version control.
109 | # https://pdm.fming.dev/#use-with-ide
110 | .pdm.toml
111 |
112 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
113 | __pypackages__/
114 |
115 | # Celery stuff
116 | celerybeat-schedule
117 | celerybeat.pid
118 |
119 | # SageMath parsed files
120 | *.sage.py
121 |
122 | # Environments
123 | .env
124 | .venv
125 | env/
126 | venv/
127 | ENV/
128 | env.bak/
129 | venv.bak/
130 |
131 | # Spyder project settings
132 | .spyderproject
133 | .spyproject
134 |
135 | # Rope project settings
136 | .ropeproject
137 |
138 | # mkdocs documentation
139 | /site
140 |
141 | # mypy
142 | .mypy_cache/
143 | .dmypy.json
144 | dmypy.json
145 |
146 | # Pyre type checker
147 | .pyre/
148 |
149 | # pytype static type analyzer
150 | .pytype/
151 |
152 | # Cython debug symbols
153 | cython_debug/
154 |
155 | # PyCharm
156 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
157 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
158 | # and can be added to the global gitignore or merged into this file. For a more nuclear
159 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
160 | .idea/
161 |
162 | # VS Code
163 | .vscode/
164 |
165 | # PostgreSQL data
166 | postgres/
167 |
168 | # Redis
169 | redis/
170 |
171 | .DS_Store
172 |
173 | # SQLite
174 | *.sqlite3
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Python Microservices Patterns
2 |
3 | ## Table of Contents
4 | 1. [Saga](#saga)
5 | - [Choreography](#saga-choreography)
6 | - [Orchestration](#saga-orchestration)
7 | 1. [Command Query Responsibility Segregation (CQRS)](#cqrs)
8 | 1. [Event Sourcing](#event-sourcing)
9 |
10 |
11 |
12 | ## Saga
13 | When you use the **Database per Service** design, you will need to consider the **Saga** pattern for distributed transactions to maintain data consistency across multiple services.
14 |
15 | A Saga is a sequence of local transactions, where each local transaction publishes messages or events that trigger the next one.
16 |
17 | If something goes wrong in a participating microservice, a compensating transaction should undo the changes made by the preceding local transactions.
18 |
19 | There are two popular Saga patterns, contrasted in the short sketch after this list:
20 |
21 |
22 |
23 | - [__Choreography__](https://github.com/roelzkie15/python-microservices-patterns/tree/master/saga-choreography-example) - Each microservice publishes a message/event from its local transaction, which triggers the subscribing (participating) microservices to run the next local transaction.
24 |
25 |
26 |
27 | - [__Orchestration__](https://github.com/roelzkie15/python-microservices-patterns/tree/master/saga-orchestration-example) - An orchestrator tells each participating microservice which local transaction to run next and receives a reply once that local transaction finishes.
28 |
29 |
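The sketch below contrasts the two styles in plain Python. It is only an illustration, not code taken from the example services: `publish`, `send_command`, and `reserve_slot` are hypothetical callables standing in for the AMQP plumbing the services actually use.

```python
from typing import Callable


def on_booking_created(
    event: dict,
    publish: Callable[[str, dict], None],
    reserve_slot: Callable[[str], None],
) -> None:
    """Choreography: the parking service reacts to a 'booking.created' event."""
    try:
        reserve_slot(event["parking_slot_uuid"])      # local transaction
        publish("parking.reserved", event)            # triggers the next service
    except Exception:
        publish("parking.reservation_failed", event)  # compensating event


def run_booking_saga(booking: dict, send_command: Callable[[str, dict], dict]) -> None:
    """Orchestration: a central saga commands each step and checks the replies."""
    if send_command("parking.reserve", booking)["status"] != "ok":
        send_command("booking.reject", booking)       # nothing else to undo
        return
    if send_command("billing.charge", booking)["status"] != "ok":
        send_command("parking.release", booking)      # compensate the first step
        send_command("booking.reject", booking)
        return
    send_command("booking.approve", booking)          # saga completed
```

Either way, every step needs a compensating action so that a failure in the middle of the saga can roll the whole business transaction back.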
30 |
31 | ## Command Query Responsibility Segregation (CQRS)
32 |
33 | CQRS is a popular architectural pattern that separates the write (Command) side from the read (Query) side to boost overall system performance and improve response delivery.
34 |
35 | When a system has far more read activity than write activity, this pattern helps reduce database workload and avoid system bottlenecks.
36 |
37 | It also improves scalability, since read databases or replica data sources can be placed in a geolocation where data consumers can access information with reduced response latency.
38 |
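In the CQRS example in this repository, the read/write split is handled by a routing SQLAlchemy session: flushes (writes) go to the primary engine, while ordinary queries go to the replica. A condensed sketch of that idea (the connection URLs here are placeholders; the service reads them from `DATABASE_URL` and `REPLICA_DATABASE_URL`):

```python
from sqlalchemy import create_engine
from sqlalchemy.orm import Session, sessionmaker

# Placeholder URLs; the example takes these from its environment settings.
engines = {
    "primary": create_engine("postgresql://user:pass@primary-db/postgres"),
    "replica": create_engine("postgresql://user:pass@replica-db/postgres"),
}


class RoutingSession(Session):
    """Route writes (flushes) to the primary engine and plain reads to the replica."""

    def get_bind(self, mapper=None, clause=None):
        return engines["primary"] if self._flushing else engines["replica"]


SessionLocal = sessionmaker(class_=RoutingSession, autocommit=False, autoflush=False)

# Usage: queries hit the replica, while commits flush the changes to the primary.
# with SessionLocal() as session:
#     slots = session.query(ParkingSlot).all()   # read  -> replica
#     session.add(new_slot); session.commit()    # write -> primary
```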
39 | See the Python CQRS example [here](https://github.com/roelzkie15/python-microservices-patterns/tree/master/cqrs-example).
40 |
41 |
42 |
43 | ## Event Sourcing
44 |
45 | Event Sourcing is a powerful tool for building distributed systems.
46 |
47 | In the traditional create, read, update, and delete (CRUD) approach, operations usually update the data directly in the data source, and the new value represents the current state of the application without keeping previous states or any record of how it arrived at that state.
48 |
49 | The Event Sourcing pattern stores changes to an application as a sequence of immutable events in a persistent, append-only data store called the **Event Store**. Because the full history of events is kept, we can reconstruct past states and run compensating transactions if necessary.
50 |
51 | It works well with the [CQRS](#cqrs) architecture, since a write model and a read model can work together: the **Event Store** handles the append-only (write) operations, while the **Projector** serves the query (read) data suitable for UI presentation. Moreover, consumers can subscribe to the events and initiate tasks that complete an operation, although CQRS is not required for the Event Sourcing design pattern.
52 |
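The booking service in the example builds on the `eventsourcing` library. Condensed (and slightly simplified) from its `domainmodels.py` and `applications.py`, the core of the pattern looks roughly like this: every decorated method call is recorded as an immutable event, and replaying those events rebuilds the aggregate's current state.

```python
from eventsourcing.application import Application
from eventsourcing.domain import Aggregate, event


class Booking(Aggregate):
    @event("BookingCreated")                      # stored as an immutable event
    def __init__(self, parking_slot_ref_no: str, status: str = "created") -> None:
        self.parking_slot_ref_no = parking_slot_ref_no
        self.status = status

    @event("BookingReserved")                     # each state change is another event
    def reserve(self, status: str = "reserved"):
        self.status = status


class Bookings(Application):
    def create_booking(self, parking_slot_ref_no: str):
        booking = Booking(parking_slot_ref_no=parking_slot_ref_no)
        self.save(booking)                        # append the new events to the event store
        return booking.id


# app = Bookings()                                # in-memory event store by default
# booking_id = app.create_booking("PS-0001")
# app.repository.get(booking_id)                  # current state, rebuilt from events
```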
53 | See the Python Event Sourcing example [here](https://github.com/roelzkie15/python-microservices-patterns/tree/master/event-sourcing-example).
54 |
--------------------------------------------------------------------------------
/compose/cqrs.yml:
--------------------------------------------------------------------------------
1 | version: '3'
2 |
3 | services:
4 |
5 | cqrs-primary-db:
6 | image: postgres:13
7 | restart: unless-stopped
8 | environment:
9 | - POSTGRES_PASSWORD=postgres
10 | volumes:
11 | - $PWD/cqrs-example/postgres/primary/data:/var/lib/postgresql/data
12 | ports:
13 | - '5434:5432'
14 |
15 | cqrs-replica-db:
16 | image: postgres:13
17 | restart: unless-stopped
18 | environment:
19 | - POSTGRES_PASSWORD=postgres
20 | volumes:
21 | - $PWD/cqrs-example/postgres/replica/data:/var/lib/postgresql/data
22 | ports:
23 | - '5435:5432'
24 |
25 | cqrs-rabbit:
26 | image: rabbitmq:3.9.15-management-alpine
27 | restart: unless-stopped
28 | environment:
29 | - RABBITMQ_DEFAULT_USER=guest
30 | - RABBITMQ_DEFAULT_PASS=guest
31 | - RABBITMQ_NODE_PORT=5671
32 | ports:
33 | - '15671:15671'
34 | expose:
35 | - '5671'
36 |
37 | cqrs:
38 | build: $PWD/cqrs-example
39 | restart: unless-stopped
40 | entrypoint: ['/bin/sh','-c']
41 | command:
42 | - |
43 | /code/wait-for-it.sh -t 0 cqrs-primary-db:5432
44 | /code/wait-for-it.sh -t 0 cqrs-replica-db:5432
45 | /code/wait-for-it.sh -t 0 cqrs-rabbit:5671
46 | /code/start_dev.sh
47 | volumes:
48 | - $PWD/cqrs-example:/code
49 | ports:
50 | - '8000:8000'
51 | environment:
52 | - ENVIRONMENT=dev
53 | - TESTING=0
54 | - DATABASE_URL=postgresql://postgres:postgres@cqrs-primary-db:5432/postgres
55 | - REPLICA_DATABASE_URL=postgresql://postgres:postgres@cqrs-replica-db:5432/postgres
56 | - RABBITMQ_BROKER_URL=amqp://guest:guest@cqrs-rabbit:5671
57 | depends_on:
58 | - cqrs-primary-db
59 | - cqrs-replica-db
60 | - cqrs-rabbit
61 |
62 | event_consumer:
63 | image: python_microservice_patterns_cqrs
64 | restart: unless-stopped
65 | entrypoint: ['/bin/sh','-c']
66 | command:
67 | - |
68 | /code/wait-for-it.sh -t 0 cqrs-primary-db:5432
69 | /code/wait-for-it.sh -t 0 cqrs-replica-db:5432
70 | /code/wait-for-it.sh -t 0 cqrs-rabbit:5671
71 | /code/wait-for-it.sh -t 0 cqrs:8000
72 | poetry run python -m app.event_consumer
73 | volumes:
74 | - $PWD/cqrs-example:/code
75 | ports:
76 | - '8001:8000'
77 | environment:
78 | - ENVIRONMENT=dev
79 | - TESTING=0
80 | - DATABASE_URL=postgresql://postgres:postgres@cqrs-primary-db:5432/postgres
81 | - REPLICA_DATABASE_URL=postgresql://postgres:postgres@cqrs-replica-db:5432/postgres
82 | - RABBITMQ_BROKER_URL=amqp://guest:guest@cqrs-rabbit:5671
83 | depends_on:
84 | - cqrs-primary-db
85 | - cqrs-replica-db
86 | - cqrs-rabbit
87 | - cqrs
88 |
--------------------------------------------------------------------------------
/compose/event-sourcing.yml:
--------------------------------------------------------------------------------
1 | version: '3'
2 |
3 | services:
4 |
5 | es-event-store-db:
6 | image: postgres:13
7 | restart: unless-stopped
8 | environment:
9 | - POSTGRES_PASSWORD=postgres
10 | volumes:
11 | - $PWD/event-sourcing-example/postgres/event-store-db/data:/var/lib/postgresql/data
12 | ports:
13 | - '5434:5432'
14 |
15 | es-projector-db:
16 | image: postgres:13
17 | restart: unless-stopped
18 | environment:
19 | - POSTGRES_PASSWORD=postgres
20 | volumes:
21 | - $PWD/event-sourcing-example/postgres/read-db/data:/var/lib/postgresql/data
22 | - $PWD/event-sourcing-example/db_init_scripts/docker_postgres_init.sql:/docker-entrypoint-initdb.d/docker_postgres_init.sql
23 | ports:
24 | - '5435:5432'
25 |
26 | es-rabbit:
27 | image: rabbitmq:3.9.15-management-alpine
28 | restart: unless-stopped
29 | environment:
30 | - RABBITMQ_DEFAULT_USER=guest
31 | - RABBITMQ_DEFAULT_PASS=guest
32 | - RABBITMQ_NODE_PORT=5671
33 | ports:
34 | - '15671:15671'
35 | expose:
36 | - '5671'
37 |
38 | es-booking:
39 | build: $PWD/event-sourcing-example/booking
40 | restart: unless-stopped
41 | entrypoint: ['/bin/sh','-c']
42 | command:
43 | - |
44 | /code/wait-for-it.sh -t 0 es-event-store-db:5432
45 | /code/wait-for-it.sh -t 0 es-projector-db:5432
46 | /code/wait-for-it.sh -t 0 es-rabbit:5671
47 | /code/start_dev.sh
48 | volumes:
49 | - $PWD/event-sourcing-example/booking:/code
50 | ports:
51 | - '8000:8000'
52 | environment:
53 | - ENVIRONMENT=dev
54 | - TESTING=0
55 | - EVENT_STORE_DATABASE_URL=postgresql://postgres:postgres@es-event-store-db:5432/postgres
56 | - PROJECTOR_DATABASE_URL=postgresql://booking_projector_user:postgres@es-projector-db:5432/postgres
57 | - SQLALCHEMY_URL=postgresql://postgres:postgres@es-event-store-db:5432/postgres
58 | - RABBITMQ_BROKER_URL=amqp://guest:guest@es-rabbit:5671
59 | - PERSISTENCE_MODULE=eventsourcing_sqlalchemy
60 | depends_on:
61 | - es-event-store-db
62 | - es-projector-db
63 | - es-rabbit
64 |
65 | es-parking:
66 | build: $PWD/event-sourcing-example/parking
67 | restart: unless-stopped
68 | entrypoint: ['/bin/sh','-c']
69 | command:
70 | - |
71 | /code/wait-for-it.sh -t 0 es-rabbit:5671
72 | /code/start_dev.sh
73 | volumes:
74 | - $PWD/event-sourcing-example/parking:/code
75 | ports:
76 | - '8001:8000'
77 | environment:
78 | - ENVIRONMENT=dev
79 | - TESTING=0
80 | - DATABASE_URL=sqlite:////code/parking_db.sqlite3
81 | - RABBITMQ_BROKER_URL=amqp://guest:guest@es-rabbit:5671
82 | depends_on:
83 | - es-rabbit
84 |
--------------------------------------------------------------------------------
/compose/saga-choreography.yml:
--------------------------------------------------------------------------------
1 | version: '3'
2 |
3 | services:
4 |
5 | sc-db:
6 | image: postgres:13
7 | restart: unless-stopped
8 | environment:
9 | - POSTGRES_PASSWORD=postgres
10 | volumes:
11 |       - $PWD/saga-choreography-example/postgres/data:/var/lib/postgresql/data
12 | ports:
13 | - '5434:5432'
14 |
15 | sc-rabbit:
16 | image: rabbitmq:3.9.15-management-alpine
17 | restart: unless-stopped
18 | environment:
19 | - RABBITMQ_DEFAULT_USER=guest
20 | - RABBITMQ_DEFAULT_PASS=guest
21 | - RABBITMQ_NODE_PORT=5671
22 | ports:
23 | - '15671:15671'
24 | expose:
25 | - '5671'
26 |
27 | sc-booking:
28 |     build: $PWD/saga-choreography-example/booking
29 | restart: unless-stopped
30 | entrypoint: ['/bin/sh','-c']
31 | command:
32 | - |
33 | /code/wait-for-it.sh -t 0 sc-db:5432
34 | /code/wait-for-it.sh -t 0 sc-rabbit:5671
35 | /code/start_dev.sh
36 | volumes:
37 |       - $PWD/saga-choreography-example/booking:/code
38 | ports:
39 | - '8000:8000'
40 | environment:
41 | - ENVIRONMENT=dev
42 | - TESTING=0
43 | - DATABASE_URL=postgresql://postgres:postgres@sc-db:5432/postgres
44 | - RABBITMQ_BROKER_URL=amqp://guest:guest@sc-rabbit:5671
45 | depends_on:
46 | - sc-db
47 | - sc-rabbit
48 | - sc-parking
49 |
50 | sc-billing:
51 |     build: $PWD/saga-choreography-example/billing
52 | restart: unless-stopped
53 | entrypoint: ['/bin/sh','-c']
54 | command:
55 | - |
56 | /code/wait-for-it.sh -t 0 sc-db:5432
57 | /code/wait-for-it.sh -t 0 sc-rabbit:5671
58 | /code/wait-for-it.sh -t 0 sc-booking:8000
59 | /code/start_dev.sh
60 | volumes:
61 |       - $PWD/saga-choreography-example/billing:/code
62 | ports:
63 | - '8001:8000'
64 | environment:
65 | - ENVIRONMENT=dev
66 | - TESTING=0
67 | - DATABASE_URL=postgresql://postgres:postgres@sc-db:5432/postgres
68 | - RABBITMQ_BROKER_URL=amqp://guest:guest@sc-rabbit:5671
69 | depends_on:
70 | - sc-db
71 | - sc-rabbit
72 | - sc-booking
73 |
74 | sc-parking:
75 |     build: $PWD/saga-choreography-example/parking
76 | restart: unless-stopped
77 | entrypoint: ['/bin/sh','-c']
78 | command:
79 | - |
80 | /code/wait-for-it.sh -t 0 sc-db:5432
81 | /code/wait-for-it.sh -t 0 sc-rabbit:5671
82 | /code/wait-for-it.sh -t 0 sc-billing:8000
83 | /code/start_dev.sh
84 | volumes:
85 |       - $PWD/saga-choreography-example/parking:/code
86 | ports:
87 | - '8002:8000'
88 | environment:
89 | - ENVIRONMENT=dev
90 | - TESTING=0
91 | - DATABASE_URL=postgresql://postgres:postgres@sc-db:5432/postgres
92 | - RABBITMQ_BROKER_URL=amqp://guest:guest@sc-rabbit:5671
93 | depends_on:
94 | - sc-db
95 | - sc-rabbit
96 |
--------------------------------------------------------------------------------
/compose/saga-orchestration.yml:
--------------------------------------------------------------------------------
1 | version: '3'
2 |
3 | services:
4 |
5 | so-rabbit:
6 | image: rabbitmq:3.9.15-management-alpine
7 | restart: unless-stopped
8 | environment:
9 | - RABBITMQ_DEFAULT_USER=guest
10 | - RABBITMQ_DEFAULT_PASS=guest
11 | - RABBITMQ_NODE_PORT=5671
12 | ports:
13 | - '15671:15671'
14 | expose:
15 | - '5671'
16 |
17 | so-parking:
18 | build: $PWD/saga-orchestration-example/parking
19 | restart: unless-stopped
20 | entrypoint: ['/bin/sh','-c']
21 | command:
22 | - |
23 | /code/wait-for-it.sh -t 0 so-rabbit:5671
24 | /code/start_dev.sh
25 | volumes:
26 | - $PWD/saga-orchestration-example/parking:/code
27 | ports:
28 | - '8000:8000'
29 | environment:
30 | - ENVIRONMENT=dev
31 | - TESTING=0
32 | - DATABASE_URL=sqlite:////code/parking_db.sqlite3
33 | - RABBITMQ_BROKER_URL=amqp://guest:guest@so-rabbit:5671
34 | depends_on:
35 | - so-rabbit
36 |
37 | so-booking:
38 | build: $PWD/saga-orchestration-example/booking
39 | restart: unless-stopped
40 | entrypoint: ['/bin/sh','-c']
41 | command:
42 | - |
43 | /code/wait-for-it.sh -t 0 so-rabbit:5671
44 | /code/wait-for-it.sh -t 0 so-parking:8000
45 | /code/start_dev.sh
46 | volumes:
47 | - $PWD/saga-orchestration-example/booking:/code
48 | ports:
49 | - '8001:8000'
50 | environment:
51 | - ENVIRONMENT=dev
52 | - TESTING=0
53 | - DATABASE_URL=sqlite:////code/booking_db.sqlite3
54 | - RABBITMQ_BROKER_URL=amqp://guest:guest@so-rabbit:5671
55 | depends_on:
56 | - so-rabbit
57 | - so-parking
58 |
59 | so-billing:
60 | build: $PWD/saga-orchestration-example/billing
61 | restart: unless-stopped
62 | entrypoint: ['/bin/sh','-c']
63 | command:
64 | - |
65 | /code/wait-for-it.sh -t 0 so-rabbit:5671
66 | /code/wait-for-it.sh -t 0 so-booking:8000
67 | /code/start_dev.sh
68 | volumes:
69 | - $PWD/saga-orchestration-example/billing:/code
70 | ports:
71 | - '8002:8000'
72 | environment:
73 | - ENVIRONMENT=dev
74 | - TESTING=0
75 | - DATABASE_URL=sqlite:////code/billing_db.sqlite3
76 | - RABBITMQ_BROKER_URL=amqp://guest:guest@so-rabbit:5671
77 | depends_on:
78 | - so-rabbit
79 | - so-booking
80 |
--------------------------------------------------------------------------------
/cqrs-example/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.10.3-slim-buster
2 |
3 | ENV PYTHONUNBUFFERED 1
4 |
5 | RUN apt-get update
6 |
7 | RUN pip install -U \
8 | pip \
9 | setuptools \
10 | wheel
11 |
12 | RUN pip --no-cache-dir install poetry
13 |
14 | WORKDIR /code
15 |
16 | RUN poetry config virtualenvs.in-project false
17 |
18 | COPY pyproject.toml .
19 | RUN poetry lock && poetry install
20 |
21 | COPY . .
22 |
23 | RUN chmod +x /code/start_dev.sh
24 |
25 |
--------------------------------------------------------------------------------
/cqrs-example/alembic.ini:
--------------------------------------------------------------------------------
1 | # a multi-database configuration.
2 |
3 | [alembic]
4 | # path to migration scripts
5 | script_location = migrations
6 |
7 | # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
8 | # Uncomment the line below if you want the files to be prepended with date and time
9 | # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
10 | # for all available tokens
11 | # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
12 |
13 | # sys.path path, will be prepended to sys.path if present.
14 | # defaults to the current working directory.
15 | prepend_sys_path = .
16 |
17 | # timezone to use when rendering the date within the migration file
18 | # as well as the filename.
19 | # If specified, requires the python-dateutil library that can be
20 | # installed by adding `alembic[tz]` to the pip requirements
21 | # string value is passed to dateutil.tz.gettz()
22 | # leave blank for localtime
23 | # timezone =
24 |
25 | # max length of characters to apply to the
26 | # "slug" field
27 | # truncate_slug_length = 40
28 |
29 | # set to 'true' to run the environment during
30 | # the 'revision' command, regardless of autogenerate
31 | # revision_environment = false
32 |
33 | # set to 'true' to allow .pyc and .pyo files without
34 | # a source .py file to be detected as revisions in the
35 | # versions/ directory
36 | # sourceless = false
37 |
38 | # version location specification; This defaults
39 | # to migrations/versions. When using multiple version
40 | # directories, initial revisions must be specified with --version-path.
41 | # The path separator used here should be the separator specified by "version_path_separator" below.
42 | # version_locations = %(here)s/bar:%(here)s/bat:migrations/versions
43 |
44 | # version path separator; As mentioned above, this is the character used to split
45 | # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
46 | # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
47 | # Valid values for version_path_separator are:
48 | #
49 | # version_path_separator = :
50 | # version_path_separator = ;
51 | # version_path_separator = space
52 | version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
53 |
54 | # the output encoding used when revision files
55 | # are written from script.py.mako
56 | # output_encoding = utf-8
57 |
58 | databases = primary, replica
59 |
60 | [primary]
61 | sqlalchemy.url = driver://user:pass@localhost/dbname
62 |
63 | [replica]
64 | sqlalchemy.url = driver://user:pass@localhost/dbname2
65 |
66 | [post_write_hooks]
67 | # post_write_hooks defines scripts or Python functions that are run
68 | # on newly generated revision scripts. See the documentation for further
69 | # detail and examples
70 |
71 | # format using "black" - use the console_scripts runner, against the "black" entrypoint
72 | # hooks = black
73 | # black.type = console_scripts
74 | # black.entrypoint = black
75 | # black.options = -l 79 REVISION_SCRIPT_FILENAME
76 |
77 | # Logging configuration
78 | [loggers]
79 | keys = root,sqlalchemy,alembic
80 |
81 | [handlers]
82 | keys = console
83 |
84 | [formatters]
85 | keys = generic
86 |
87 | [logger_root]
88 | level = WARN
89 | handlers = console
90 | qualname =
91 |
92 | [logger_sqlalchemy]
93 | level = WARN
94 | handlers =
95 | qualname = sqlalchemy.engine
96 |
97 | [logger_alembic]
98 | level = INFO
99 | handlers =
100 | qualname = alembic
101 |
102 | [handler_console]
103 | class = StreamHandler
104 | args = (sys.stderr,)
105 | level = NOTSET
106 | formatter = generic
107 |
108 | [formatter_generic]
109 | format = %(levelname)-5.5s [%(name)s] %(message)s
110 | datefmt = %H:%M:%S
111 |
--------------------------------------------------------------------------------
/cqrs-example/app/__init__.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | logging.basicConfig(level=logging.INFO)
4 |
--------------------------------------------------------------------------------
/cqrs-example/app/cli.py:
--------------------------------------------------------------------------------
1 | import fire
2 | from app.db import Session
3 | from app.services import (create_parking_slot, parking_slot_details,
4 | parking_slot_list)
5 |
6 |
7 | class AppCLI(object):
8 | async def create_parking_slot(self, name: str, status: str | None = "available"):
9 | with Session() as session:
10 | ps = await create_parking_slot(session, name=name, status=status)
11 | return ps.to_dict()
12 |
13 | async def parking_slot_list(self):
14 | with Session() as session:
15 | ps_list = await parking_slot_list(session)
16 | return [ps.to_dict() for ps in ps_list]
17 |
18 | async def parking_slot_details(self, uuid: str):
19 | with Session() as session:
20 | ps = await parking_slot_details(session, uuid)
21 | return ps.to_dict()
22 |
23 |
24 | if __name__ == "__main__":
25 | fire.Fire(AppCLI)
26 |
--------------------------------------------------------------------------------
/cqrs-example/app/db.py:
--------------------------------------------------------------------------------
1 | import contextlib
2 |
3 | from app import settings
4 | from sqlalchemy import create_engine
5 | from sqlalchemy.ext.declarative import declarative_base
6 | from sqlalchemy.orm import Session, scoped_session, sessionmaker
7 |
8 | engines = {
9 | "primary": create_engine(settings.DATABASE_URL, logging_name="primary"),
10 | "replica": create_engine(settings.REPLICA_DATABASE_URL, logging_name="replica"),
11 | }
12 |
13 |
14 | class RoutingSession(Session):
15 | def get_bind(self, mapper=None, clause=None):
16 | if mapper and mapper.class_.__name__ == "ParkingSlotReplica":
17 | return engines["replica"]
18 | elif self._flushing:
19 | return engines["primary"]
20 | else:
21 | return engines["replica"]
22 |
23 |
24 | SessionLocal = scoped_session(sessionmaker(autocommit=False, autoflush=False, class_=RoutingSession))
25 |
26 | Base = declarative_base()
27 | ReplicaBase = declarative_base()
28 |
29 |
30 | @contextlib.contextmanager
31 | def Session():
32 | db = SessionLocal()
33 | try:
34 | yield db
35 | finally:
36 | db.close()
37 |
--------------------------------------------------------------------------------
/cqrs-example/app/event_consumer.py:
--------------------------------------------------------------------------------
1 | import ast
2 | import asyncio
3 | import json
4 |
5 | from aio_pika import connect
6 | from aio_pika.abc import AbstractIncomingMessage
7 | from app import logging, settings
8 | from app.db import Session
9 | from app.services import create_parking_slot_replica
10 |
11 |
12 | async def on_message(message: AbstractIncomingMessage) -> None:
13 | async with message.process():
14 |         # message.process() acknowledges the message automatically on exit.
15 | logging.info(f"message received: {message.body!r}")
16 |
17 | raw_ps = ast.literal_eval(message.body.decode())
18 |
19 | with Session() as session:
20 | ps = await create_parking_slot_replica(session, **raw_ps)
21 | logging.info(f"Replicated parking slot data: {ps.to_dict()!r}")
22 |
23 |
24 | async def main() -> None:
25 | # Perform connection
26 | connection = await connect(settings.RABBITMQ_BROKER_URL)
27 |
28 | async with connection:
29 | # Creating a channel
30 | channel = await connection.channel()
31 | await channel.set_qos(prefetch_count=10)
32 |
33 | exchange = await channel.declare_exchange(
34 | "CQRS_EVENT_STORE", type="topic", durable=True
35 | )
36 |
37 | # Declaring queue
38 | queue = await channel.declare_queue(exclusive=True)
39 |
40 | # Binding the queue to the exchange
41 | await queue.bind(exchange, "parking.create")
42 |
43 | # Start listening the queue
44 | await queue.consume(on_message)
45 |
46 | logging.info("Event consumer for parking slot replication is running...")
47 | await asyncio.Future()
48 |
49 |
50 | if __name__ == "__main__":
51 | asyncio.run(main())
52 |
--------------------------------------------------------------------------------
/cqrs-example/app/main.py:
--------------------------------------------------------------------------------
1 | from starlette.applications import Starlette
2 | from starlette.responses import JSONResponse
3 | from starlette.routing import Route
4 |
5 |
6 | async def health(request):
7 | return JSONResponse({"message": "CQRS server is running..."})
8 |
9 |
10 | routes = [
11 | Route("/health", health),
12 | ]
13 |
14 | app = Starlette(routes=routes)
15 |
--------------------------------------------------------------------------------
/cqrs-example/app/models.py:
--------------------------------------------------------------------------------
1 | from typing import Dict
2 |
3 | from app.db import Base, ReplicaBase
4 | from sqlalchemy import Column, String
5 |
6 |
7 | class DictMixin:
8 | def to_dict(self) -> Dict:
9 | return dict((col, getattr(self, col)) for col in self.__table__.columns.keys())
10 |
11 |
12 | class ParkingSlot(DictMixin, Base):
13 | __tablename__ = "parking_slots"
14 |
15 | uuid = Column(String, primary_key=True, unique=True, index=True)
16 | name = Column(String, nullable=False)
17 |
18 | # available/blocked/reserved
19 | status = Column(String, nullable=False, server_default="available")
20 |
21 |
22 | class ParkingSlotReplica(DictMixin, ReplicaBase):
23 | __tablename__ = "parking_slots"
24 |
25 | uuid = Column(String, primary_key=True, unique=True, index=True)
26 | name = Column(String, nullable=False)
27 |
28 | # available/blocked/reserved
29 | status = Column(String, nullable=False, server_default="available")
30 |
--------------------------------------------------------------------------------
/cqrs-example/app/producers.py:
--------------------------------------------------------------------------------
1 | from aio_pika import DeliveryMode, Message, connect
2 | from app import logging, settings
3 |
4 |
5 | async def replicate_parking_slot(ps) -> None:
6 | # Perform connection
7 | connection = await connect(settings.RABBITMQ_BROKER_URL)
8 |
9 | async with connection:
10 | # Creating a channel
11 | channel = await connection.channel()
12 |
13 | exchange = await channel.declare_exchange(
14 | "CQRS_EVENT_STORE", type="topic", durable=True
15 | )
16 |
17 | message_body = str(ps).encode()
18 |
19 | message = Message(
20 | message_body,
21 | delivery_mode=DeliveryMode.PERSISTENT,
22 | )
23 |
24 | # Sending the message
25 | await exchange.publish(message, routing_key="parking.create")
26 |
27 | logging.info(f"Sent message: {message!r}")
28 |
--------------------------------------------------------------------------------
/cqrs-example/app/services.py:
--------------------------------------------------------------------------------
1 | from typing import List
2 | from uuid import uuid4
3 |
4 | from app.db import Session
5 | from app.models import ParkingSlot, ParkingSlotReplica
6 | from app.producers import replicate_parking_slot
7 |
8 |
9 | async def create_parking_slot(session: Session, **kwargs) -> ParkingSlot:
10 | ps = ParkingSlot(uuid=str(uuid4()))
11 |
12 |     for k, v in kwargs.items(): setattr(ps, k, v)
13 |
14 | await replicate_parking_slot(ps.to_dict())
15 |
16 | session.add(ps)
17 | session.commit()
18 | session.refresh(ps)
19 | return ps
20 |
21 |
22 | async def parking_slot_list(session: Session) -> List[ParkingSlot]:
23 | return session.query(ParkingSlot).all()
24 |
25 |
26 | async def parking_slot_details(session: Session, uuid: str) -> ParkingSlot:
27 | return session.query(ParkingSlot).filter(ParkingSlot.uuid == uuid).one()
28 |
29 |
30 | async def create_parking_slot_replica(session: Session, **kwargs) -> ParkingSlot:
31 | ps = ParkingSlotReplica(uuid=str(uuid4()))
32 |
33 |     for k, v in kwargs.items(): setattr(ps, k, v)
34 |
35 | session.add(ps)
36 | session.commit()
37 | session.refresh(ps)
38 | return ps
39 |
--------------------------------------------------------------------------------
/cqrs-example/app/settings.py:
--------------------------------------------------------------------------------
1 | from starlette.config import Config
2 |
3 | config = Config()
4 |
5 | DATABASE_URL = config("DATABASE_URL")
6 | REPLICA_DATABASE_URL = config("REPLICA_DATABASE_URL")
7 | RABBITMQ_BROKER_URL = config("RABBITMQ_BROKER_URL")
8 |
--------------------------------------------------------------------------------
/cqrs-example/migrations/README:
--------------------------------------------------------------------------------
1 | Rudimentary multi-database configuration.
2 |
3 | Multi-DB isn't vastly different from generic. The primary difference is that it
4 | will run the migrations N times (depending on how many databases you have
5 | configured), providing one engine name and associated context for each run.
6 |
7 | That engine name will then allow the migration to restrict what runs within it to
8 | just the appropriate migrations for that engine. You can see this behavior within
9 | the mako template.
10 |
11 | In the provided configuration, you'll need to have `databases` provided in
12 | alembic's config, and an `sqlalchemy.url` provided for each engine name.
13 |
--------------------------------------------------------------------------------
/cqrs-example/migrations/script.py.mako:
--------------------------------------------------------------------------------
1 | <%!
2 | import re
3 |
4 | %>"""${message}
5 |
6 | Revision ID: ${up_revision}
7 | Revises: ${down_revision | comma,n}
8 | Create Date: ${create_date}
9 |
10 | """
11 | from alembic import op
12 | import sqlalchemy as sa
13 | ${imports if imports else ""}
14 |
15 | # revision identifiers, used by Alembic.
16 | revision = ${repr(up_revision)}
17 | down_revision = ${repr(down_revision)}
18 | branch_labels = ${repr(branch_labels)}
19 | depends_on = ${repr(depends_on)}
20 |
21 |
22 | def upgrade(engine_name: str) -> None:
23 | globals()["upgrade_%s" % engine_name]()
24 |
25 |
26 | def downgrade(engine_name: str) -> None:
27 | globals()["downgrade_%s" % engine_name]()
28 |
29 | <%
30 | db_names = config.get_main_option("databases")
31 | %>
32 |
33 | ## generate an "upgrade_() / downgrade_()" function
34 | ## for each database name in the ini file.
35 |
36 | % for db_name in re.split(r',\s*', db_names):
37 |
38 | def upgrade_${db_name}() -> None:
39 | ${context.get("%s_upgrades" % db_name, "pass")}
40 |
41 |
42 | def downgrade_${db_name}() -> None:
43 | ${context.get("%s_downgrades" % db_name, "pass")}
44 |
45 | % endfor
46 |
--------------------------------------------------------------------------------
/cqrs-example/migrations/versions/1191d11f60ca_initial_migration.py:
--------------------------------------------------------------------------------
1 | """Initial migration
2 |
3 | Revision ID: 1191d11f60ca
4 | Revises:
5 | Create Date: 2022-07-02 17:36:09.660036
6 |
7 | """
8 | import sqlalchemy as sa
9 | from alembic import op
10 |
11 | # revision identifiers, used by Alembic.
12 | revision = "1191d11f60ca"
13 | down_revision = None
14 | branch_labels = None
15 | depends_on = None
16 |
17 |
18 | def upgrade(engine_name: str) -> None:
19 | globals()["upgrade_%s" % engine_name]()
20 |
21 |
22 | def downgrade(engine_name: str) -> None:
23 | globals()["downgrade_%s" % engine_name]()
24 |
25 |
26 | def upgrade_primary() -> None:
27 | # ### commands auto generated by Alembic - please adjust! ###
28 | op.create_table(
29 | "parking_slots",
30 | sa.Column("uuid", sa.String(), nullable=False),
31 | sa.Column("name", sa.String(), nullable=False),
32 | sa.Column("status", sa.String(), server_default="available", nullable=False),
33 | sa.PrimaryKeyConstraint("uuid"),
34 | )
35 | op.create_index(
36 | op.f("ix_parking_slots_uuid"), "parking_slots", ["uuid"], unique=True
37 | )
38 | # ### end Alembic commands ###
39 |
40 |
41 | def downgrade_primary() -> None:
42 | # ### commands auto generated by Alembic - please adjust! ###
43 | op.drop_index(op.f("ix_parking_slots_uuid"), table_name="parking_slots")
44 | op.drop_table("parking_slots")
45 | # ### end Alembic commands ###
46 |
47 |
48 | def upgrade_replica() -> None:
49 | # ### commands auto generated by Alembic - please adjust! ###
50 | op.create_table(
51 | "parking_slots",
52 | sa.Column("uuid", sa.String(), nullable=False),
53 | sa.Column("name", sa.String(), nullable=False),
54 | sa.Column("status", sa.String(), server_default="available", nullable=False),
55 | sa.PrimaryKeyConstraint("uuid"),
56 | )
57 | op.create_index(
58 | op.f("ix_parking_slots_uuid"), "parking_slots", ["uuid"], unique=True
59 | )
60 | # ### end Alembic commands ###
61 |
62 |
63 | def downgrade_replica() -> None:
64 | # ### commands auto generated by Alembic - please adjust! ###
65 | op.drop_index(op.f("ix_parking_slots_uuid"), table_name="parking_slots")
66 | op.drop_table("parking_slots")
67 | # ### end Alembic commands ###
68 |
--------------------------------------------------------------------------------
/cqrs-example/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "cqrs-example"
3 | version = "0.1.0"
4 | description = "Python example of CQRS pattern"
5 | authors = ["roelzkie "]
6 |
7 | [tool.poetry.dependencies]
8 | python = "3.10.3"
9 | uvicorn = "^0.18.2"
10 | starlette = "^0.20.4"
11 | SQLAlchemy = "^1.4.39"
12 | alembic = "^1.8.0"
13 | psycopg2-binary = "^2.9.3"
14 | fire = "^0.4.0"
15 | attrs = "^21.4.0"
16 | aio-pika = "^8.0.3"
17 |
18 | [tool.poetry.dev-dependencies]
19 | black = "^22.6.0"
20 |
21 | [build-system]
22 | requires = ["poetry-core>=1.0.0"]
23 | build-backend = "poetry.core.masonry.api"
24 |
--------------------------------------------------------------------------------
/cqrs-example/resources/cqrs-pattern.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/roelzkie15/python-microservices-patterns/a977e0a7b34ead5ff3f39f112450a0491673b7aa/cqrs-example/resources/cqrs-pattern.png
--------------------------------------------------------------------------------
/cqrs-example/start_dev.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -o errexit
4 | set -o pipefail
5 | set -o nounset
6 |
7 | poetry run uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload
8 |
--------------------------------------------------------------------------------
/event-sourcing-example/booking/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.10.3-slim-buster
2 |
3 | ENV PYTHONUNBUFFERED 1
4 |
5 | RUN apt-get update
6 |
7 | RUN pip install -U \
8 | pip \
9 | setuptools \
10 | wheel
11 |
12 | RUN pip --no-cache-dir install poetry
13 |
14 | WORKDIR /code
15 |
16 | RUN poetry config virtualenvs.in-project false
17 |
18 | COPY pyproject.toml .
19 | RUN poetry lock && poetry install
20 |
21 | COPY . .
22 |
23 | RUN chmod +x /code/start_dev.sh
24 |
25 |
--------------------------------------------------------------------------------
/event-sourcing-example/booking/alembic.ini:
--------------------------------------------------------------------------------
1 | # a multi-database configuration.
2 |
3 | [alembic]
4 | # path to migration scripts
5 | script_location = ./migrations
6 |
7 | # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
8 | # Uncomment the line below if you want the files to be prepended with date and time
9 | # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
10 | # for all available tokens
11 | # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
12 |
13 | # sys.path path, will be prepended to sys.path if present.
14 | # defaults to the current working directory.
15 | prepend_sys_path = .
16 |
17 | # timezone to use when rendering the date within the migration file
18 | # as well as the filename.
19 | # If specified, requires the python-dateutil library that can be
20 | # installed by adding `alembic[tz]` to the pip requirements
21 | # string value is passed to dateutil.tz.gettz()
22 | # leave blank for localtime
23 | # timezone =
24 |
25 | # max length of characters to apply to the
26 | # "slug" field
27 | # truncate_slug_length = 40
28 |
29 | # set to 'true' to run the environment during
30 | # the 'revision' command, regardless of autogenerate
31 | # revision_environment = false
32 |
33 | # set to 'true' to allow .pyc and .pyo files without
34 | # a source .py file to be detected as revisions in the
35 | # versions/ directory
36 | # sourceless = false
37 |
38 | # version location specification; This defaults
39 | # to ./migrations/versions. When using multiple version
40 | # directories, initial revisions must be specified with --version-path.
41 | # The path separator used here should be the separator specified by "version_path_separator" below.
42 | # version_locations = %(here)s/bar:%(here)s/bat:./migrations/versions
43 |
44 | # version path separator; As mentioned above, this is the character used to split
45 | # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
46 | # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
47 | # Valid values for version_path_separator are:
48 | #
49 | # version_path_separator = :
50 | # version_path_separator = ;
51 | # version_path_separator = space
52 | version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
53 |
54 | # the output encoding used when revision files
55 | # are written from script.py.mako
56 | # output_encoding = utf-8
57 |
58 | databases = event_store, read_db
59 |
60 | [event_store]
61 | sqlalchemy.url = driver://user:pass@localhost/dbname
62 |
63 | [read_db]
64 | sqlalchemy.url = driver://user:pass@localhost/dbname2
65 |
66 | [post_write_hooks]
67 | # post_write_hooks defines scripts or Python functions that are run
68 | # on newly generated revision scripts. See the documentation for further
69 | # detail and examples
70 |
71 | # format using "black" - use the console_scripts runner, against the "black" entrypoint
72 | # hooks = black
73 | # black.type = console_scripts
74 | # black.entrypoint = black
75 | # black.options = -l 79 REVISION_SCRIPT_FILENAME
76 |
77 | # Logging configuration
78 | [loggers]
79 | keys = root,sqlalchemy,alembic
80 |
81 | [handlers]
82 | keys = console
83 |
84 | [formatters]
85 | keys = generic
86 |
87 | [logger_root]
88 | level = WARN
89 | handlers = console
90 | qualname =
91 |
92 | [logger_sqlalchemy]
93 | level = WARN
94 | handlers =
95 | qualname = sqlalchemy.engine
96 |
97 | [logger_alembic]
98 | level = INFO
99 | handlers =
100 | qualname = alembic
101 |
102 | [handler_console]
103 | class = StreamHandler
104 | args = (sys.stderr,)
105 | level = NOTSET
106 | formatter = generic
107 |
108 | [formatter_generic]
109 | format = %(levelname)-5.5s [%(name)s] %(message)s
110 | datefmt = %H:%M:%S
111 |
--------------------------------------------------------------------------------
/event-sourcing-example/booking/app/__init__.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | logging.basicConfig(level=logging.INFO)
4 |
--------------------------------------------------------------------------------
/event-sourcing-example/booking/app/amqp_client.py:
--------------------------------------------------------------------------------
1 | import contextlib
2 | import json
3 |
4 | import attrs
5 | import pika
6 |
7 | from app import settings
8 | from app.models import AMQPMessage
9 |
10 |
11 | class AMQPClient:
12 | def init(self):
13 | """
14 |         Initialize the AMQP client.
15 | """
16 |
17 | parameters = pika.URLParameters(settings.RABBITMQ_BROKER_URL)
18 | self.connection = pika.BlockingConnection(parameters)
19 | self.channel = self.connection.channel()
20 | return self
21 |
22 | def event_producer(
23 | self, event_store: str, binding_key: str, message: AMQPMessage
24 | ) -> None:
25 | """
26 | Send event/message to a specific exchange with binding-key.
27 |
28 |         If an existing queue is bound to the given binding-key, the message will be stored
29 |         in that queue; otherwise the message will be lost.
30 |
31 | NOTE: The binding_key is mandatory so we can explicitly route the message/event
32 | to the right queue.
33 | """
34 |
35 | # Declare exchange
36 | self.channel.exchange_declare(
37 | exchange=event_store, exchange_type="topic", durable=True
38 | )
39 |
40 | payload = json.dumps(attrs.asdict(message))
41 | self.channel.basic_publish(
42 | exchange=event_store, routing_key=binding_key, body=payload
43 | )
44 |
45 |
46 | @contextlib.contextmanager
47 | def AMQP() -> AMQPClient:
48 | client = AMQPClient().init()
49 | try:
50 | yield client
51 | finally:
52 | client.connection.close()
53 |
--------------------------------------------------------------------------------
/event-sourcing-example/booking/app/applications.py:
--------------------------------------------------------------------------------
1 | import contextlib
2 | from typing import List
3 | from uuid import UUID
4 |
5 | from app.amqp_client import AMQP, AMQPClient
6 | from app.domainmodels import Booking
7 | from app.models import AMQPMessage
8 | from app.services import create_booking, update_booking_status_by
9 | from attrs import asdict
10 | from eventsourcing.application import Application
11 | from eventsourcing.dispatch import singledispatchmethod
12 | from eventsourcing.system import ProcessApplication, SingleThreadedRunner, System
13 |
14 | BOOKING_EVENT_ATTRIBUTES = ['originator_id', 'status', 'parking_slot_ref_no', 'originator_version']
15 | BOOKING_EVENT_ATTRIBUTES_MAPPER = {'originator_id': 'id', 'originator_version': 'version'}
16 |
17 |
18 | class Bookings(Application):
19 | def create_booking(self, parking_slot_ref_no: str) -> UUID:
20 | booking = Booking(parking_slot_ref_no=parking_slot_ref_no)
21 | self.save(booking)
22 | return booking.id
23 |
24 | def reserve_booking(self, booking_id: str) -> Booking:
25 | booking: Booking = self.get_booking(booking_id)
26 | self._status_checker(booking, "reserved")
27 | booking.reserve()
28 | return self._update_booking(booking)
29 |
30 | def complete_booking(self, booking_id: str) -> Booking:
31 | booking: Booking = self.get_booking(booking_id)
32 | self._status_checker(booking, "completed")
33 | booking.complete()
34 | return self._update_booking(booking)
35 |
36 | def get_booking(self, booking_id: str) -> Booking:
37 | booking = self.repository.get(booking_id)
38 | return booking
39 |
40 | def get_booking_history(self, booking_id: str) -> List[dict] | List:
41 | booking_events = list(self.events.get(booking_id))
42 | booking_event_dicts = []
43 |
44 | for booking_event in booking_events:
45 | booking_event_dict = booking_event.__dict__
46 |
47 | event_dict = {
48 | BOOKING_EVENT_ATTRIBUTES_MAPPER.get(event_key, event_key): booking_event_dict.get(event_key)
49 | for event_key in sorted(booking_event_dict.keys())
50 | if event_key in BOOKING_EVENT_ATTRIBUTES
51 | }
52 | booking_event_dicts.append(event_dict)
53 |
54 | return booking_event_dicts
55 |
56 | def _status_checker(self, booking: Booking, status: str):
57 | if booking.status == status:
58 | raise ValueError(f"Booking ID: {booking.id} is already {booking.status}.")
59 | return booking
60 |
61 | def _update_booking(self, booking: Booking) -> Booking:
62 | self.save(booking)
63 | return booking
64 |
65 |
66 | class BookingProjector(ProcessApplication):
67 | @singledispatchmethod
68 | def policy(self, domain_event, processing_event):
69 | if type(domain_event) is Booking.BookingCreated:
70 | booking = create_booking(
71 | domain_uuid=str(domain_event.originator_id),
72 | parking_slot_ref_no=domain_event.parking_slot_ref_no,
73 | status=domain_event.status,
74 | )
75 |
76 | with AMQP() as amqp:
77 | amqp_client: AMQPClient = amqp
78 | amqp_client.event_producer(
79 | "BOOKING_TX_EVENT",
80 | "booking.create",
81 | AMQPMessage(
82 | id=booking.parking_slot_ref_no, content=booking.to_dict()
83 | ),
84 | )
85 |
86 | else:
87 |             # Aside from the booking-created event, the only other
88 |             # events are booking status-change events.
89 | booking = update_booking_status_by(
90 | domain_uuid=str(domain_event.originator_id),
91 | status=domain_event.status,
92 | )
93 | with AMQP() as amqp:
94 | amqp_client: AMQPClient = amqp
95 | amqp_client.event_producer(
96 | "BOOKING_TX_EVENT",
97 | "booking.status_changed",
98 | AMQPMessage(
99 | id=booking.parking_slot_ref_no, content=booking.to_dict()
100 | ),
101 | )
102 |
103 |
104 | system = System(pipes=[[Bookings, BookingProjector]])
105 |
106 |
107 | @contextlib.contextmanager
108 | def process_runner():
109 | runner = SingleThreadedRunner(system)
110 | try:
111 | runner.start()
112 | yield runner
113 | finally:
114 | runner.stop()
115 |
--------------------------------------------------------------------------------
/event-sourcing-example/booking/app/cli.py:
--------------------------------------------------------------------------------
1 | from uuid import UUID
2 |
3 | import fire
4 |
5 | from app.applications import Bookings, process_runner
6 | from app.services import get_booking_by_domain_uuid, get_booking_list
7 |
8 |
9 | class AppCLI(object):
10 | def create_booking_request(self, parking_slot_ref_no: str) -> UUID:
11 | with process_runner() as runner:
12 | booking_app = runner.get(Bookings)
13 | result_id = booking_app.create_booking(
14 | parking_slot_ref_no=parking_slot_ref_no
15 | )
16 | return result_id
17 |
18 | def reserve_booking(self, booking_id: str) -> UUID:
19 | with process_runner() as runner:
20 | booking_app = runner.get(Bookings)
21 | booking = booking_app.reserve_booking(booking_id=booking_id)
22 | return booking.id
23 |
24 | def complete_booking(self, booking_id: str) -> UUID:
25 | with process_runner() as runner:
26 | booking_app = runner.get(Bookings)
27 | booking = booking_app.complete_booking(booking_id=booking_id)
28 | return booking.id
29 |
30 | def get_booking_details(self, booking_id: str):
31 | booking = get_booking_by_domain_uuid(booking_id)
32 | return booking.to_dict()
33 |
34 | def get_booking_list(self):
35 | booking_list = get_booking_list()
36 | return [booking.to_dict() for booking in booking_list]
37 |
38 | def get_booking_events(self, booking_id: str):
39 | booking_app = Bookings()
40 | return booking_app.get_booking_history(booking_id)
41 |
42 |
43 | if __name__ == "__main__":
44 | fire.Fire(AppCLI)
45 |
--------------------------------------------------------------------------------
/event-sourcing-example/booking/app/db.py:
--------------------------------------------------------------------------------
1 | import contextlib
2 |
3 | from sqlalchemy import create_engine
4 | from sqlalchemy.ext.declarative import declarative_base
5 | from sqlalchemy.orm import sessionmaker
6 |
7 | from app import settings
8 |
9 | engine = create_engine(settings.PROJECTOR_DATABASE_URL)
10 | SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
11 |
12 | ProjectorBase = declarative_base()
13 |
14 |
15 | @contextlib.contextmanager
16 | def Session():
17 | db = SessionLocal()
18 | try:
19 | yield db
20 | finally:
21 | db.close()
22 |
--------------------------------------------------------------------------------
/event-sourcing-example/booking/app/domainmodels.py:
--------------------------------------------------------------------------------
1 | from eventsourcing.domain import Aggregate, event
2 |
3 |
4 | class Booking(Aggregate):
5 | @event("BookingCreated")
6 | def __init__(self, parking_slot_ref_no: str, status: str = "created") -> None:
7 | self.parking_slot_ref_no = parking_slot_ref_no
8 | self.status = status
9 |
10 | @event("BookingReserved")
11 | def reserve(self, status: str = "reserved"):
12 | self.status = status
13 |
14 | @event("BookingCompleted")
15 | def complete(self, status: str = "completed"):
16 | self.status = status
17 |
--------------------------------------------------------------------------------
/event-sourcing-example/booking/app/main.py:
--------------------------------------------------------------------------------
1 | from starlette.applications import Starlette
2 | from starlette.responses import JSONResponse
3 | from starlette.routing import Route
4 |
5 |
6 | async def health(request):
7 | return JSONResponse({"message": "Booking server is running"})
8 |
9 |
10 | routes = [
11 | Route("/health", health),
12 | ]
13 |
14 | app = Starlette(routes=routes)
15 |
--------------------------------------------------------------------------------
/event-sourcing-example/booking/app/models.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Dict
2 |
3 | from attrs import define
4 | from sqlalchemy import Column
5 | from sqlalchemy.types import Integer, String
6 |
7 | from app.db import ProjectorBase
8 |
9 |
10 | class DictMixin:
11 | def to_dict(self) -> Dict:
12 | return dict((col, getattr(self, col)) for col in self.__table__.columns.keys())
13 |
14 |
15 | class Booking(DictMixin, ProjectorBase):
16 | """This is a projector class. We persist data through the event store."""
17 |
18 | __tablename__ = "bookings"
19 |
20 | id = Column(Integer, primary_key=True, unique=True, index=True)
21 | domain_uuid = Column(String, nullable=False, unique=True)
22 |
23 | status = Column(String, nullable=False, server_default="created")
24 | parking_slot_ref_no = Column(String, nullable=True)
25 |
26 |
27 | @define
28 | class AMQPMessage:
29 | id: str
30 | content: Any
31 |
--------------------------------------------------------------------------------
/event-sourcing-example/booking/app/services.py:
--------------------------------------------------------------------------------
1 | from typing import List
2 |
3 | from app.db import Session
4 | from app.models import Booking
5 |
6 |
7 | def create_booking(domain_uuid: str, parking_slot_ref_no: str, status: str) -> Booking:
8 | with Session() as session:
9 | booking = Booking(
10 | domain_uuid=domain_uuid,
11 | parking_slot_ref_no=parking_slot_ref_no,
12 | status=status,
13 | )
14 | session.add(booking)
15 | session.commit()
16 | session.refresh(booking)
17 | return booking
18 |
19 |
20 | def get_booking_by_domain_uuid(domain_uuid: str) -> Booking:
21 | with Session() as session:
22 | return session.query(Booking).filter(Booking.domain_uuid == domain_uuid).one()
23 |
24 |
25 | def get_booking_list() -> List[Booking]:
26 | with Session() as session:
27 | return session.query(Booking).all()
28 |
29 |
30 | def update_booking_status_by(domain_uuid: str, status: str) -> Booking:
31 | with Session() as session:
32 | booking = get_booking_by_domain_uuid(domain_uuid)
33 | booking.status = status
34 | session.add(booking)
35 | session.commit()
36 | session.refresh(booking)
37 | return booking
38 |
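These helpers maintain the read-model table defined in app/models.py. As a rough, hypothetical sketch (the actual wiring lives in the booking event consumer/application modules, not in this file), a consumer of domain events could keep the projection in sync like this:

    from app.services import create_booking, update_booking_status_by


    def on_booking_created(domain_uuid: str, parking_slot_ref_no: str) -> None:
        # Insert a projection row when a BookingCreated event is observed.
        create_booking(domain_uuid=domain_uuid, parking_slot_ref_no=parking_slot_ref_no, status="created")


    def on_booking_reserved(domain_uuid: str) -> None:
        # Mirror the BookingReserved event into the read model.
        update_booking_status_by(domain_uuid=domain_uuid, status="reserved")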
--------------------------------------------------------------------------------
/event-sourcing-example/booking/app/settings.py:
--------------------------------------------------------------------------------
1 | from starlette.config import Config
2 |
3 | config = Config()
4 |
5 | EVENT_STORE_DATABASE_URL = config("EVENT_STORE_DATABASE_URL")
6 | PROJECTOR_DATABASE_URL = config("PROJECTOR_DATABASE_URL")
7 | RABBITMQ_BROKER_URL = config("RABBITMQ_BROKER_URL")
8 | SQLALCHEMY_URL = config("SQLALCHEMY_URL")
9 |
--------------------------------------------------------------------------------
/event-sourcing-example/booking/migrations/README:
--------------------------------------------------------------------------------
1 | Generic single-database configuration.
--------------------------------------------------------------------------------
/event-sourcing-example/booking/migrations/env.py:
--------------------------------------------------------------------------------
1 | from logging.config import fileConfig
2 |
3 | from alembic import context
4 | from sqlalchemy import engine_from_config, pool
5 |
6 | from app import logging, settings
7 |
8 | # this is the Alembic Config object, which provides
9 | # access to the values within the .ini file in use.
10 | config = context.config
11 |
12 | # Interpret the config file for Python logging.
13 | # This line sets up loggers basically.
14 | if config.config_file_name is not None:
15 | fileConfig(config.config_file_name)
16 |
17 | # add your model's MetaData object here
18 | # for 'autogenerate' support
19 | # from myapp import mymodel
20 | # target_metadata = mymodel.Base.metadata
21 | from app.db import ProjectorBase
22 | from app.models import Booking
23 |
24 | config.set_main_option("sqlalchemy.url", settings.PROJECTOR_DATABASE_URL)
25 | target_metadata = [ProjectorBase.metadata]
26 |
27 |
28 | # other values from the config, defined by the needs of env.py,
29 | # can be acquired:
30 | # my_important_option = config.get_main_option("my_important_option")
31 | # ... etc.
32 |
33 |
34 | def run_migrations_offline() -> None:
35 | """Run migrations in 'offline' mode.
36 |
37 | This configures the context with just a URL
38 | and not an Engine, though an Engine is acceptable
39 | here as well. By skipping the Engine creation
40 | we don't even need a DBAPI to be available.
41 |
42 | Calls to context.execute() here emit the given string to the
43 | script output.
44 |
45 | """
46 | url = config.get_main_option("sqlalchemy.url")
47 | context.configure(
48 | url=url,
49 | target_metadata=target_metadata,
50 | literal_binds=True,
51 | compare_type=True,
52 | compare_server_default=True,
53 | dialect_opts={"paramstyle": "named"},
54 | )
55 |
56 | with context.begin_transaction():
57 | context.run_migrations()
58 |
59 |
60 | def run_migrations_online() -> None:
61 | """Run migrations in 'online' mode.
62 |
63 | In this scenario we need to create an Engine
64 | and associate a connection with the context.
65 |
66 | """
67 |
68 | # this callback is used to prevent an auto-migration from being generated
69 | # when there are no changes to the schema
70 | # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
71 | def process_revision_directives(context, revision, directives):
72 | if getattr(config.cmd_opts, "autogenerate", False):
73 | script = directives[0]
74 | if script.upgrade_ops.is_empty():
75 | directives[:] = []
76 | logging.info("No changes in schema detected.")
77 |
78 | connectable = engine_from_config(
79 | config.get_section(config.config_ini_section),
80 | prefix="sqlalchemy.",
81 | poolclass=pool.NullPool,
82 | )
83 |
84 | with connectable.connect() as connection:
85 | context.configure(
86 | connection=connection,
87 | target_metadata=target_metadata,
88 | process_revision_directives=process_revision_directives,
89 | compare_type=True,
90 | compare_server_default=True,
91 | )
92 |
93 | with context.begin_transaction():
94 | context.run_migrations()
95 |
96 |
97 | if context.is_offline_mode():
98 | run_migrations_offline()
99 | else:
100 | run_migrations_online()
101 |
--------------------------------------------------------------------------------
/event-sourcing-example/booking/migrations/script.py.mako:
--------------------------------------------------------------------------------
1 | """${message}
2 |
3 | Revision ID: ${up_revision}
4 | Revises: ${down_revision | comma,n}
5 | Create Date: ${create_date}
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 | ${imports if imports else ""}
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = ${repr(up_revision)}
14 | down_revision = ${repr(down_revision)}
15 | branch_labels = ${repr(branch_labels)}
16 | depends_on = ${repr(depends_on)}
17 |
18 |
19 | def upgrade() -> None:
20 | ${upgrades if upgrades else "pass"}
21 |
22 |
23 | def downgrade() -> None:
24 | ${downgrades if downgrades else "pass"}
25 |
--------------------------------------------------------------------------------
/event-sourcing-example/booking/migrations/versions/5ffab98ff42c_add_domain_uuid_to_booking_sqlalchemy_.py:
--------------------------------------------------------------------------------
1 | """Add domain_uuid to Booking sqlalchemy model
2 |
3 | Revision ID: 5ffab98ff42c
4 | Revises: fa1e57d87087
5 | Create Date: 2022-08-06 15:54:41.554612
6 |
7 | """
8 | import sqlalchemy as sa
9 | from alembic import op
10 |
11 | # revision identifiers, used by Alembic.
12 | revision = "5ffab98ff42c"
13 | down_revision = "fa1e57d87087"
14 | branch_labels = None
15 | depends_on = None
16 |
17 |
18 | def upgrade() -> None:
19 | # ### commands auto generated by Alembic - please adjust! ###
20 | op.add_column("bookings", sa.Column("domain_uuid", sa.String(), nullable=False))
21 | op.create_unique_constraint(None, "bookings", ["domain_uuid"])
22 | # ### end Alembic commands ###
23 |
24 |
25 | def downgrade() -> None:
26 | # ### commands auto generated by Alembic - please adjust! ###
27 | op.drop_constraint(None, "bookings", type_="unique")
28 | op.drop_column("bookings", "domain_uuid")
29 | # ### end Alembic commands ###
30 |
--------------------------------------------------------------------------------
/event-sourcing-example/booking/migrations/versions/fa1e57d87087_initial_migrations.py:
--------------------------------------------------------------------------------
1 | """Initial migrations
2 |
3 | Revision ID: fa1e57d87087
4 | Revises:
5 | Create Date: 2022-07-23 18:17:54.439068
6 |
7 | """
8 | import sqlalchemy as sa
9 | from alembic import op
10 |
11 | # revision identifiers, used by Alembic.
12 | revision = "fa1e57d87087"
13 | down_revision = None
14 | branch_labels = None
15 | depends_on = None
16 |
17 |
18 | def upgrade() -> None:
19 | # ### commands auto generated by Alembic - please adjust! ###
20 | op.create_table(
21 | "bookings",
22 | sa.Column("id", sa.Integer(), nullable=False),
23 | sa.Column("status", sa.String(), server_default="created", nullable=False),
24 | sa.Column("parking_slot_ref_no", sa.String(), nullable=True),
25 | sa.PrimaryKeyConstraint("id"),
26 | )
27 | op.create_index(op.f("ix_bookings_id"), "bookings", ["id"], unique=True)
28 | # ### end Alembic commands ###
29 |
30 |
31 | def downgrade() -> None:
32 | # ### commands auto generated by Alembic - please adjust! ###
33 | op.drop_index(op.f("ix_bookings_id"), table_name="bookings")
34 | op.drop_table("bookings")
35 | # ### end Alembic commands ###
36 |
--------------------------------------------------------------------------------
/event-sourcing-example/booking/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "booking"
3 | version = "0.1.0"
4 | description = "Event Sourcing pattern example - Booking Service"
5 | authors = ["roelzkie "]
6 |
7 | [tool.poetry.dependencies]
8 | python = "^3.8"
9 | starlette = "^0.20.4"
10 | SQLAlchemy = "^1.4.39"
11 | fire = "^0.4.0"
12 | alembic = "^1.8.1"
13 | uvicorn = {extras = ["standard"], version = "^0.18.2"}
14 | psycopg2-binary = "^2.9.3"
15 | eventsourcing_sqlalchemy = "^0.2.0"
16 | attrs = "^22.1.0"
17 | pika = "^1.3.0"
18 |
19 | [tool.poetry.dev-dependencies]
20 | isort = "^5.10.1"
21 | black = "^22.6.0"
22 |
23 | [build-system]
24 | requires = ["poetry-core>=1.0.0"]
25 | build-backend = "poetry.core.masonry.api"
26 |
--------------------------------------------------------------------------------
/event-sourcing-example/booking/start_dev.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -o errexit
4 | set -o pipefail
5 | set -o nounset
6 |
7 | poetry run uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload
8 |
--------------------------------------------------------------------------------
/event-sourcing-example/db_init_scripts/docker_postgres_init.sql:
--------------------------------------------------------------------------------
1 | CREATE USER booking_projector_user WITH PASSWORD 'postgres';
2 | GRANT CONNECT ON DATABASE postgres TO booking_projector_user;
3 | GRANT USAGE ON SCHEMA public TO booking_projector_user;
4 | GRANT SELECT ON ALL TABLES IN SCHEMA public TO booking_projector_user;
5 | ALTER DEFAULT PRIVILEGES IN SCHEMA public
6 | GRANT SELECT ON TABLES TO booking_projector_user;
7 |
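The grants above give booking_projector_user read-only access to the projector tables. A quick sketch of what that means from Python; the connection URL below is illustrative only and should be replaced with the credentials and host from the compose setup.

    from sqlalchemy import create_engine, text

    # Illustrative URL -- not taken from the repository configuration.
    ro_engine = create_engine("postgresql://booking_projector_user:postgres@db:5432/postgres")
    with ro_engine.connect() as conn:
        conn.execute(text("SELECT * FROM bookings")).all()   # allowed: GRANT SELECT
        # INSERT/UPDATE/DELETE on these tables would fail with "permission denied",
        # since only SELECT is granted to booking_projector_user.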
--------------------------------------------------------------------------------
/event-sourcing-example/parking/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.10.3-slim-buster
2 |
3 | ENV PYTHONUNBUFFERED 1
4 |
5 | RUN apt-get update
6 |
7 | RUN pip install -U \
8 | pip \
9 | setuptools \
10 | wheel
11 |
12 | RUN pip --no-cache-dir install poetry
13 |
14 | WORKDIR /code
15 |
16 | RUN poetry config virtualenvs.in-project false
17 |
18 | COPY pyproject.toml .
19 | RUN poetry lock && poetry install
20 |
21 | COPY . .
22 |
23 | CMD ["chmod", "+x", "/code/start_dev.sh", '/code/wait-for-it.sh']
24 |
25 |
--------------------------------------------------------------------------------
/event-sourcing-example/parking/alembic.ini:
--------------------------------------------------------------------------------
1 | # A generic, single database configuration.
2 |
3 | [alembic]
4 | # path to migration scripts
5 | script_location = migrations
6 |
7 | # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
8 | # Uncomment the line below if you want the files to be prepended with date and time
9 | # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
10 | # for all available tokens
11 | # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
12 |
13 | # sys.path path, will be prepended to sys.path if present.
14 | # defaults to the current working directory.
15 | prepend_sys_path = .
16 |
17 | # timezone to use when rendering the date within the migration file
18 | # as well as the filename.
19 | # If specified, requires the python-dateutil library that can be
20 | # installed by adding `alembic[tz]` to the pip requirements
21 | # string value is passed to dateutil.tz.gettz()
22 | # leave blank for localtime
23 | # timezone =
24 |
25 | # max length of characters to apply to the
26 | # "slug" field
27 | # truncate_slug_length = 40
28 |
29 | # set to 'true' to run the environment during
30 | # the 'revision' command, regardless of autogenerate
31 | # revision_environment = false
32 |
33 | # set to 'true' to allow .pyc and .pyo files without
34 | # a source .py file to be detected as revisions in the
35 | # versions/ directory
36 | # sourceless = false
37 |
38 | # version location specification; This defaults
39 | # to migrations/versions. When using multiple version
40 | # directories, initial revisions must be specified with --version-path.
41 | # The path separator used here should be the separator specified by "version_path_separator" below.
42 | # version_locations = %(here)s/bar:%(here)s/bat:migrations/versions
43 |
44 | # version path separator; As mentioned above, this is the character used to split
45 | # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
46 | # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
47 | # Valid values for version_path_separator are:
48 | #
49 | # version_path_separator = :
50 | # version_path_separator = ;
51 | # version_path_separator = space
52 | version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
53 |
54 | # the output encoding used when revision files
55 | # are written from script.py.mako
56 | # output_encoding = utf-8
57 |
58 | sqlalchemy.url = driver://user:pass@localhost/dbname
59 |
60 |
61 | [post_write_hooks]
62 | # post_write_hooks defines scripts or Python functions that are run
63 | # on newly generated revision scripts. See the documentation for further
64 | # detail and examples
65 |
66 | # format using "black" - use the console_scripts runner, against the "black" entrypoint
67 | # hooks = black
68 | # black.type = console_scripts
69 | # black.entrypoint = black
70 | # black.options = -l 79 REVISION_SCRIPT_FILENAME
71 |
72 | # Logging configuration
73 | [loggers]
74 | keys = root,sqlalchemy,alembic
75 |
76 | [handlers]
77 | keys = console
78 |
79 | [formatters]
80 | keys = generic
81 |
82 | [logger_root]
83 | level = WARN
84 | handlers = console
85 | qualname =
86 |
87 | [logger_sqlalchemy]
88 | level = WARN
89 | handlers =
90 | qualname = sqlalchemy.engine
91 |
92 | [logger_alembic]
93 | level = INFO
94 | handlers =
95 | qualname = alembic
96 |
97 | [handler_console]
98 | class = StreamHandler
99 | args = (sys.stderr,)
100 | level = NOTSET
101 | formatter = generic
102 |
103 | [formatter_generic]
104 | format = %(levelname)-5.5s [%(name)s] %(message)s
105 | datefmt = %H:%M:%S
106 |
--------------------------------------------------------------------------------
/event-sourcing-example/parking/app/__init__.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | logging.basicConfig(level=logging.INFO)
4 |
--------------------------------------------------------------------------------
/event-sourcing-example/parking/app/amqp_client.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | from functools import partial
3 | from typing import Callable
4 |
5 | import aio_pika
6 | import simplejson as json
7 | from aio_pika import IncomingMessage, Message
8 |
9 | from app import settings
10 | from app.models import AMQPMessage
11 |
12 |
13 | class AMQPClient:
14 |     async def init(self) -> "AMQPClient":
15 |         """
16 |         Initialize the AMQP client and open a channel.
17 |         """
18 |
19 | self.connection = await aio_pika.connect_robust(
20 | settings.RABBITMQ_BROKER_URL, loop=asyncio.get_event_loop()
21 | )
22 |
23 | # Creating channel
24 | self.channel = await self.connection.channel()
25 | return self
26 |
27 | async def event_consumer(
28 | self,
29 | callback: Callable,
30 | event_store: str,
31 | event: str = "#",
32 | queue_name: str | None = None,
33 | ) -> None:
34 | """
35 | Create an event consumer.
36 |
37 | callback - A function that will process the incoming message.
38 |         event_store - Declare an exchange as an event store. We send messages/events
39 |                       to this exchange.
40 | event - Serves as a binding key or a type of event that occurred.
41 |         queue_name - Name of the queue bound to the exchange (optional). If not specified,
42 |                      a queue with a random name is still created.
43 | """
44 | exchange = await self.channel.declare_exchange(
45 | event_store, type="topic", durable=True
46 | )
47 | queue = await self.channel.declare_queue(queue_name, auto_delete=True)
48 |
49 | await queue.bind(exchange, event)
50 | await queue.consume(partial(self._process_message, callback=callback))
51 |
52 | async def event_producer(
53 | self,
54 | event_store: str,
55 | binding_key: str,
56 | correlation_id: str,
57 | message: AMQPMessage,
58 | ) -> None:
59 | """
60 | Send event/message to a specific exchange with binding-key.
61 |
62 |         If an existing queue is bound to the given binding-key, the message will be stored
63 |         in that queue; otherwise the message will be lost.
64 |
65 | NOTE: The binding_key is mandatory so we can explicitly route the message/event
66 | to the right queue.
67 | """
68 |
69 | # Declare exchange
70 | exchange = await self.channel.declare_exchange(
71 | event_store, type="topic", durable=True
72 | )
73 |
74 | payload = json.dumps(message.dict())
75 | await exchange.publish(
76 | Message(
77 | body=str(payload).encode(),
78 | content_type="application/json",
79 | correlation_id=correlation_id,
80 | ),
81 | routing_key=binding_key,
82 | )
83 |
84 | async def _process_message(
85 | self, message: IncomingMessage, callback: Callable
86 | ) -> None:
87 | """
88 | Process incoming message from a Queue. It will require a callback function to handle
89 | message content.
90 | """
91 | async with message.process(ignore_processed=True):
92 | await callback(message)
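A minimal end-to-end sketch of the client above (not part of the file). It assumes RabbitMQ is reachable at RABBITMQ_BROKER_URL; the queue name parking_booking_events is made up for illustration, while the event-store and binding names mirror the ones used in app/main.py and app/services.py.

    import asyncio

    from app.amqp_client import AMQPClient
    from app.models import AMQPMessage


    async def on_booking_event(message):
        # Callback invoked by _process_message for every matching event.
        print(message.body.decode())


    async def main():
        client = await AMQPClient().init()
        await client.event_consumer(on_booking_event, "BOOKING_TX_EVENT", "booking.*", "parking_booking_events")
        await client.event_producer(
            "BOOKING_TX_EVENT",
            "booking.created",
            correlation_id="demo-1",
            message=AMQPMessage(id="some-parking-slot-uuid", content={"status": "created"}),
        )
        await asyncio.sleep(1)          # give the consumer a moment to receive the event
        await client.connection.close()


    asyncio.run(main())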
93 |
--------------------------------------------------------------------------------
/event-sourcing-example/parking/app/cli.py:
--------------------------------------------------------------------------------
1 | import fire
2 |
3 | from app.db import Session
4 | from app.services import (create_parking_slot, parking_slot_details,
5 | parking_slot_list)
6 |
7 |
8 | class AppCLI(object):
9 | async def create_parking_slot(self, name: str, status: str | None = "available"):
10 | with Session() as session:
11 | ps = await create_parking_slot(session, name=name, status=status)
12 | return ps.to_dict()
13 |
14 | async def parking_slot_list(self):
15 | with Session() as session:
16 | ps_list = await parking_slot_list(session)
17 | return [ps.to_dict() for ps in ps_list]
18 |
19 | async def parking_slot_details(self, uuid: str):
20 | with Session() as session:
21 | ps = await parking_slot_details(session, uuid)
22 | return ps.to_dict()
23 |
24 |
25 | if __name__ == "__main__":
26 | fire.Fire(AppCLI)
27 |
--------------------------------------------------------------------------------
/event-sourcing-example/parking/app/db.py:
--------------------------------------------------------------------------------
1 | import contextlib
2 |
3 | from sqlalchemy import create_engine
4 | from sqlalchemy.ext.declarative import declarative_base
5 | from sqlalchemy.orm import sessionmaker
6 |
7 | from app import settings
8 |
9 | engine = create_engine(settings.DATABASE_URL)
10 | SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
11 |
12 | Base = declarative_base()
13 |
14 |
15 | @contextlib.contextmanager
16 | def Session():
17 | db = SessionLocal()
18 | try:
19 | yield db
20 | finally:
21 | db.close()
22 |
--------------------------------------------------------------------------------
/event-sourcing-example/parking/app/main.py:
--------------------------------------------------------------------------------
1 | import ast
2 | import contextlib
3 |
4 | from starlette.applications import Starlette
5 | from starlette.responses import JSONResponse
6 | from starlette.routing import Route
7 |
8 | from app.amqp_client import AMQPClient
9 | from app.services import booking_event_processor
10 |
11 |
12 | @contextlib.asynccontextmanager
13 | async def lifespan(app):
14 | amqp_client: AMQPClient = await AMQPClient().init()
15 | try:
16 | await amqp_client.event_consumer(
17 | booking_event_processor, "BOOKING_TX_EVENT", "booking.*"
18 | )
19 | yield
20 | finally:
21 | await amqp_client.connection.close()
22 |
23 |
24 | async def health(request):
25 | return JSONResponse({"message": "Parking server is running"})
26 |
27 |
28 | routes = [
29 | Route("/health", health),
30 | ]
31 |
32 | app = Starlette(routes=routes, lifespan=lifespan)
33 |
--------------------------------------------------------------------------------
/event-sourcing-example/parking/app/models.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Dict
2 |
3 | from pydantic import BaseModel
4 | from sqlalchemy import Column, String
5 |
6 | from app.db import Base
7 |
8 |
9 | class DictMixin:
10 | def to_dict(self) -> Dict:
11 | return dict((col, getattr(self, col)) for col in self.__table__.columns.keys())
12 |
13 |
14 | class ParkingSlot(Base, DictMixin):
15 | __tablename__ = "parking_slots"
16 |
17 | uuid = Column(String, primary_key=True, unique=True, index=True)
18 | name = Column(String, nullable=False)
19 |
20 | # available/blocked/reserved
21 | status = Column(String, nullable=False, server_default="available")
22 |
23 |
24 | class AMQPMessage(BaseModel):
25 | id: str
26 | content: Any | None = None
27 | reply_state: str | None = None
28 |
--------------------------------------------------------------------------------
/event-sourcing-example/parking/app/services.py:
--------------------------------------------------------------------------------
1 | import ast
2 | import json
3 | from typing import List
4 | from uuid import uuid4
5 |
6 | from aio_pika import IncomingMessage
7 |
8 | from app.amqp_client import AMQPClient
9 | from app.db import Session
10 | from app.models import AMQPMessage, ParkingSlot
11 |
12 |
13 | async def update_parking_slot(session: Session, ps: ParkingSlot) -> ParkingSlot:
14 | session.commit()
15 | session.refresh(ps)
16 | return ps
17 |
18 |
19 | async def create_parking_slot(session: Session, **kwargs) -> ParkingSlot:
20 | ps = ParkingSlot(uuid=str(uuid4()))
21 |
22 |     for key, value in kwargs.items():
23 |         setattr(ps, key, value)
24 | session.add(ps)
25 | session.commit()
26 | session.refresh(ps)
27 | return ps
28 |
29 |
30 | async def parking_slot_list(session: Session) -> List[ParkingSlot]:
31 | return session.query(ParkingSlot).all()
32 |
33 |
34 | async def parking_slot_details(session: Session, uuid: str) -> ParkingSlot:
35 | return session.query(ParkingSlot).filter(ParkingSlot.uuid == uuid).one()
36 |
37 |
38 | async def block_parking_slot(session: Session, uuid: str) -> bool:
39 | ps = await parking_slot_details(session, uuid)
40 |
41 | if ps.status != "available":
42 | return False
43 |
44 | ps.status = "blocked"
45 | ps = await update_parking_slot(session, ps)
46 |
47 | return ps.status == "blocked"
48 |
49 |
50 | async def reserve_parking_slot(session: Session, uuid: str) -> bool:
51 | ps = await parking_slot_details(session, uuid)
52 | ps.status = "reserved"
53 | ps = await update_parking_slot(session, ps)
54 | return ps.status == "reserved"
55 |
56 |
57 | async def booking_event_processor(message: IncomingMessage):
58 | async with message.process(ignore_processed=True):
59 | await message.ack()
60 |
61 | data = json.loads(str(message.body.decode("utf-8")))
62 | parking_slot_uuid = data.get("id")
63 | booking = data.get("content")
64 |
65 | if booking.get("status").lower() == "created":
66 | with Session() as session:
67 | await block_parking_slot(session, parking_slot_uuid)
68 | elif booking.get("status").lower() == "reserved":
69 | with Session() as session:
70 | await reserve_parking_slot(session, parking_slot_uuid)
71 |
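For reference, booking_event_processor above only reads two fields of the incoming payload; an event it can act on is shaped roughly like this (values are illustrative):

    example_event = {
        "id": "parking-slot-uuid",            # UUID of the parking slot to act on
        "content": {"status": "created"},     # "created" -> block the slot, "reserved" -> reserve it
    }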
--------------------------------------------------------------------------------
/event-sourcing-example/parking/app/settings.py:
--------------------------------------------------------------------------------
1 | from starlette.config import Config
2 |
3 | config = Config()
4 |
5 | DATABASE_URL = config("DATABASE_URL")
6 | RABBITMQ_BROKER_URL = config("RABBITMQ_BROKER_URL")
7 |
--------------------------------------------------------------------------------
/event-sourcing-example/parking/migrations/README:
--------------------------------------------------------------------------------
1 | Generic single-database configuration.
--------------------------------------------------------------------------------
/event-sourcing-example/parking/migrations/env.py:
--------------------------------------------------------------------------------
1 | from logging.config import fileConfig
2 |
3 | from alembic import context
4 | from sqlalchemy import engine_from_config, pool
5 |
6 | from app import logging, settings
7 |
8 | # this is the Alembic Config object, which provides
9 | # access to the values within the .ini file in use.
10 | config = context.config
11 |
12 | # Interpret the config file for Python logging.
13 | # This line sets up loggers basically.
14 | if config.config_file_name is not None:
15 | fileConfig(config.config_file_name)
16 |
17 | # add your model's MetaData object here
18 | # for 'autogenerate' support
19 | # from myapp import mymodel
20 | # target_metadata = mymodel.Base.metadata
21 | from app.db import Base
22 | from app.models import ParkingSlot
23 |
24 | config.set_main_option("sqlalchemy.url", settings.DATABASE_URL)
25 | target_metadata = [Base.metadata]
26 |
27 |
28 | # other values from the config, defined by the needs of env.py,
29 | # can be acquired:
30 | # my_important_option = config.get_main_option("my_important_option")
31 | # ... etc.
32 |
33 |
34 | def run_migrations_offline() -> None:
35 | """Run migrations in 'offline' mode.
36 |
37 | This configures the context with just a URL
38 | and not an Engine, though an Engine is acceptable
39 | here as well. By skipping the Engine creation
40 | we don't even need a DBAPI to be available.
41 |
42 | Calls to context.execute() here emit the given string to the
43 | script output.
44 |
45 | """
46 | url = config.get_main_option("sqlalchemy.url")
47 | context.configure(
48 | url=url,
49 | target_metadata=target_metadata,
50 | literal_binds=True,
51 | compare_type=True,
52 | compare_server_default=True,
53 | render_as_batch=True,
54 | dialect_opts={"paramstyle": "named"},
55 | )
56 |
57 | with context.begin_transaction():
58 | context.run_migrations()
59 |
60 |
61 | def run_migrations_online() -> None:
62 | """Run migrations in 'online' mode.
63 |
64 | In this scenario we need to create an Engine
65 | and associate a connection with the context.
66 |
67 | """
68 |
69 | # this callback is used to prevent an auto-migration from being generated
70 | # when there are no changes to the schema
71 | # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
72 | def process_revision_directives(context, revision, directives):
73 | if getattr(config.cmd_opts, "autogenerate", False):
74 | script = directives[0]
75 | if script.upgrade_ops.is_empty():
76 | directives[:] = []
77 | logging.info("No changes in schema detected.")
78 |
79 | connectable = engine_from_config(
80 | config.get_section(config.config_ini_section),
81 | prefix="sqlalchemy.",
82 | poolclass=pool.NullPool,
83 | )
84 |
85 | with connectable.connect() as connection:
86 | context.configure(
87 | connection=connection,
88 | target_metadata=target_metadata,
89 | process_revision_directives=process_revision_directives,
90 | compare_type=True,
91 | compare_server_default=True,
92 | render_as_batch=True,
93 | )
94 |
95 | with context.begin_transaction():
96 | context.run_migrations()
97 |
98 |
99 | if context.is_offline_mode():
100 | run_migrations_offline()
101 | else:
102 | run_migrations_online()
103 |
--------------------------------------------------------------------------------
/event-sourcing-example/parking/migrations/script.py.mako:
--------------------------------------------------------------------------------
1 | """${message}
2 |
3 | Revision ID: ${up_revision}
4 | Revises: ${down_revision | comma,n}
5 | Create Date: ${create_date}
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 | ${imports if imports else ""}
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = ${repr(up_revision)}
14 | down_revision = ${repr(down_revision)}
15 | branch_labels = ${repr(branch_labels)}
16 | depends_on = ${repr(depends_on)}
17 |
18 |
19 | def upgrade() -> None:
20 | ${upgrades if upgrades else "pass"}
21 |
22 |
23 | def downgrade() -> None:
24 | ${downgrades if downgrades else "pass"}
25 |
--------------------------------------------------------------------------------
/event-sourcing-example/parking/migrations/versions/9201fb72ddb9_initial_migration.py:
--------------------------------------------------------------------------------
1 | """Initial migration
2 |
3 | Revision ID: 9201fb72ddb9
4 | Revises:
5 | Create Date: 2022-08-07 16:03:30.313387
6 |
7 | """
8 | import sqlalchemy as sa
9 | from alembic import op
10 |
11 | # revision identifiers, used by Alembic.
12 | revision = "9201fb72ddb9"
13 | down_revision = None
14 | branch_labels = None
15 | depends_on = None
16 |
17 |
18 | def upgrade() -> None:
19 | # ### commands auto generated by Alembic - please adjust! ###
20 | op.create_table(
21 | "parking_slots",
22 | sa.Column("uuid", sa.String(), nullable=False),
23 | sa.Column("name", sa.String(), nullable=False),
24 | sa.Column("status", sa.String(), server_default="available", nullable=False),
25 | sa.PrimaryKeyConstraint("uuid"),
26 | )
27 | with op.batch_alter_table("parking_slots", schema=None) as batch_op:
28 | batch_op.create_index(
29 | batch_op.f("ix_parking_slots_uuid"), ["uuid"], unique=True
30 | )
31 |
32 | # ### end Alembic commands ###
33 |
34 |
35 | def downgrade() -> None:
36 | # ### commands auto generated by Alembic - please adjust! ###
37 | with op.batch_alter_table("parking_slots", schema=None) as batch_op:
38 | batch_op.drop_index(batch_op.f("ix_parking_slots_uuid"))
39 |
40 | op.drop_table("parking_slots")
41 | # ### end Alembic commands ###
42 |
--------------------------------------------------------------------------------
/event-sourcing-example/parking/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "parking"
3 | version = "0.1.0"
4 | description = "Saga's orchestration pattern example - Parking service"
5 | authors = ["roelzkie "]
6 |
7 | [tool.poetry.dependencies]
8 | python = "^3.10"
9 | starlette = "^0.20.1"
10 | aio-pika = "^7.2.0"
11 | fire = "^0.4.0"
12 | alembic = "^1.7.7"
13 | psycopg2-binary = "2.9.3"
14 | uvicorn = {extras = ["standard"], version = "^0.17.6"}
15 | pydantic = "^1.9.0"
16 | simplejson = "^3.17.6"
17 |
18 | [tool.poetry.dev-dependencies]
19 | black = "^22.6.0"
20 | isort = "^5.10.1"
21 |
22 | [build-system]
23 | requires = ["poetry-core>=1.0.0"]
24 | build-backend = "poetry.core.masonry.api"
25 |
--------------------------------------------------------------------------------
/event-sourcing-example/parking/start_dev.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -o errexit
4 | set -o pipefail
5 | set -o nounset
6 |
7 | poetry run alembic upgrade head
8 | poetry run uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload
9 |
--------------------------------------------------------------------------------
/event-sourcing-example/resources/event-sourcing-pattern.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/roelzkie15/python-microservices-patterns/a977e0a7b34ead5ff3f39f112450a0491673b7aa/event-sourcing-example/resources/event-sourcing-pattern.png
--------------------------------------------------------------------------------
/saga-choreography-example/billing/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.10.3-slim-buster
2 |
3 | ENV PYTHONUNBUFFERED 1
4 |
5 | RUN apt-get update
6 |
7 | RUN pip install -U \
8 | pip \
9 | setuptools \
10 | wheel
11 |
12 | RUN pip --no-cache-dir install poetry
13 |
14 | WORKDIR /code
15 |
16 | COPY pyproject.toml .
17 | COPY poetry.lock .
18 |
19 | RUN poetry config virtualenvs.in-project false
20 | RUN poetry install
21 |
22 | COPY . .
23 |
24 | CMD ["chmod", "+x", "/code/start_dev.sh", '/code/wait-for-it.sh']
25 |
26 |
--------------------------------------------------------------------------------
/saga-choreography-example/billing/alembic.ini:
--------------------------------------------------------------------------------
1 | # A generic, single database configuration.
2 |
3 | [alembic]
4 | # path to migration scripts
5 | script_location = migrations
6 |
7 | # template used to generate migration files
8 | # file_template = %%(rev)s_%%(slug)s
9 |
10 | # sys.path path, will be prepended to sys.path if present.
11 | # defaults to the current working directory.
12 | prepend_sys_path = .
13 |
14 | # timezone to use when rendering the date within the migration file
15 | # as well as the filename.
16 | # If specified, requires the python-dateutil library that can be
17 | # installed by adding `alembic[tz]` to the pip requirements
18 | # string value is passed to dateutil.tz.gettz()
19 | # leave blank for localtime
20 | # timezone =
21 |
22 | # max length of characters to apply to the
23 | # "slug" field
24 | # truncate_slug_length = 40
25 |
26 | # set to 'true' to run the environment during
27 | # the 'revision' command, regardless of autogenerate
28 | # revision_environment = false
29 |
30 | # set to 'true' to allow .pyc and .pyo files without
31 | # a source .py file to be detected as revisions in the
32 | # versions/ directory
33 | # sourceless = false
34 |
35 | # version location specification; This defaults
36 | # to migrations/versions. When using multiple version
37 | # directories, initial revisions must be specified with --version-path.
38 | # The path separator used here should be the separator specified by "version_path_separator" below.
39 | # version_locations = %(here)s/bar:%(here)s/bat:migrations/versions
40 |
41 | # version path separator; As mentioned above, this is the character used to split
42 | # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
43 | # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
44 | # Valid values for version_path_separator are:
45 | #
46 | # version_path_separator = :
47 | # version_path_separator = ;
48 | # version_path_separator = space
49 | version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
50 |
51 | # the output encoding used when revision files
52 | # are written from script.py.mako
53 | # output_encoding = utf-8
54 |
55 | # sqlalchemy.url = %(DATABASE_URL)s
56 |
57 |
58 | [post_write_hooks]
59 | # post_write_hooks defines scripts or Python functions that are run
60 | # on newly generated revision scripts. See the documentation for further
61 | # detail and examples
62 |
63 | # format using "black" - use the console_scripts runner, against the "black" entrypoint
64 | # hooks = black
65 | # black.type = console_scripts
66 | # black.entrypoint = black
67 | # black.options = -l 79 REVISION_SCRIPT_FILENAME
68 |
69 | # Logging configuration
70 | [loggers]
71 | keys = root,sqlalchemy,alembic
72 |
73 | [handlers]
74 | keys = console
75 |
76 | [formatters]
77 | keys = generic
78 |
79 | [logger_root]
80 | level = WARN
81 | handlers = console
82 | qualname =
83 |
84 | [logger_sqlalchemy]
85 | level = WARN
86 | handlers =
87 | qualname = sqlalchemy.engine
88 |
89 | [logger_alembic]
90 | level = INFO
91 | handlers =
92 | qualname = alembic
93 |
94 | [handler_console]
95 | class = StreamHandler
96 | args = (sys.stderr,)
97 | level = NOTSET
98 | formatter = generic
99 |
100 | [formatter_generic]
101 | format = %(levelname)-5.5s [%(name)s] %(message)s
102 | datefmt = %H:%M:%S
103 |
--------------------------------------------------------------------------------
/saga-choreography-example/billing/app/__init__.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | logging.basicConfig(level=logging.INFO)
4 |
--------------------------------------------------------------------------------
/saga-choreography-example/billing/app/amqp_client.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | from functools import partial
3 | from typing import Callable
4 |
5 | import aio_pika
6 | import simplejson as json
7 | from aio_pika import IncomingMessage, Message
8 |
9 | from app.dependencies import get_settings
10 | from app.models import AMQPMessage
11 |
12 |
13 | class AMQPClient:
14 |
15 |     async def init(self) -> 'AMQPClient':
16 |         '''
17 |         Initialize the AMQP client and open a channel.
18 |         '''
19 |
20 | settings = get_settings()
21 | self.connection = await aio_pika.connect_robust(
22 | settings.RABBITMQ_BROKER_URL, loop=asyncio.get_event_loop()
23 | )
24 |
25 | # Creating channel
26 | self.channel = await self.connection.channel()
27 | await self.channel.set_qos(prefetch_count=1)
28 |
29 | return self
30 |
31 | async def event_consumer(
32 | self, callback: Callable, event_store: str, event: str = '#', queue_name: str | None = None,
33 | ) -> None:
34 | '''
35 | Create an event consumer.
36 |
37 | callback - A function that will process the incoming message.
38 |         event_store - Declare an exchange as an event store. We send messages/events
39 |                       to this exchange.
40 | event - Serves as a binding key or a type of event that occurred.
41 |         queue_name - Name of the queue bound to the exchange (optional). If not specified,
42 |                      a queue with a random name is still created.
43 | '''
44 | exchange = await self.channel.declare_exchange(
45 | event_store,
46 | type='topic',
47 | durable=True
48 | )
49 | queue = await self.channel.declare_queue(queue_name, auto_delete=True)
50 |
51 | await queue.bind(exchange, event)
52 | await queue.consume(partial(self._process_message, callback=callback))
53 |
54 | async def event_producer(
55 | self, exchange: str, binding_key: str, message: AMQPMessage
56 | ) -> None:
57 | '''
58 | Send event/message to a specific exchange with binding-key.
59 |
60 |         If an existing queue is bound to the given binding-key, the message will be stored
61 |         in that queue; otherwise the message will be lost.
62 |
63 | NOTE: The binding_key is mandatory so we can explicitly route the message/event
64 | to the right queue.
65 | '''
66 |
67 | # Declare exchange
68 | exchange = await self.channel.declare_exchange(
69 | exchange,
70 | type='topic',
71 | durable=True
72 | )
73 |
74 | payload = json.dumps(message.dict())
75 | await exchange.publish(
76 | Message(
77 | body=str(payload).encode(),
78 | content_type='application/json',
79 | ),
80 | routing_key=binding_key,
81 | )
82 |
83 | async def _process_message(self, message: IncomingMessage, callback: Callable) -> None:
84 | '''
85 | Process incoming message from a Queue. It will require a callback function to handle
86 | message content.
87 | '''
88 | async with message.process(ignore_processed=True):
89 | await message.ack()
90 | await callback(message)
91 |
--------------------------------------------------------------------------------
/saga-choreography-example/billing/app/cli.py:
--------------------------------------------------------------------------------
1 |
2 | from decimal import Decimal
3 |
4 | import fire
5 |
6 | from app.amqp_client import AMQPClient
7 | from app.db import Session
8 | from app.models import AMQPMessage
9 | from app.services import (billing_request_details_by_reference_no,
10 | billing_request_list, create_payment_reconciliation,
11 | update_billing_request)
12 |
13 |
14 | class AppCLI(object):
15 |
16 | async def billing_request_list(self):
17 | with Session() as session:
18 | br_list = await billing_request_list(session)
19 | return [br.to_dict() for br in br_list]
20 |
21 | async def billing_request_details_by_reference_no(self, ref_no: str):
22 | with Session() as session:
23 | billing_request = await billing_request_details_by_reference_no(session, ref_no=ref_no)
24 | billing_request_obj = {
25 | 'billing_request': billing_request.to_dict(),
26 | 'reconciliations': [
27 | payment.to_dict() for payment in billing_request.payment_reconciliations
28 | ]
29 | }
30 | return billing_request_obj
31 |
32 | async def pay_bill(self, ref_no: str, amount: Decimal):
33 | with Session() as session:
34 | billing_request = await billing_request_details_by_reference_no(session, ref_no=ref_no)
35 | billing_request_obj = billing_request.to_dict()
36 |
37 | if billing_request.status == 'paid':
38 | return 'Billing request already paid.'
39 |
40 | # To avoid complex logic we will only accept sufficient payment.
41 | total_payment = billing_request.calculate_total_payment(amount)
42 | if total_payment > billing_request.total:
43 | return f'Payment is over {total_payment - billing_request.total}.\nUnable to process payment transaction.'
44 |
45 | reconciliation = await create_payment_reconciliation(
46 | session, billing_request.id, amount
47 | )
48 | reconciliation_obj = dict(
49 | (col, getattr(reconciliation, col))
50 | for col in reconciliation.__table__.columns.keys()
51 | )
52 |
53 | if billing_request.balance == Decimal('0.0'):
54 | billing_request.status = 'paid'
55 | billing_request = await update_billing_request(session, billing_request)
56 |
57 | amqp_client: AMQPClient = await AMQPClient().init()
58 | await amqp_client.event_producer(
59 | 'BOOKING_TX_EVENT_STORE', 'bill.paid', message=AMQPMessage(id=str(billing_request.reference_no), content=billing_request_obj)
60 | )
61 |
62 | return reconciliation_obj
63 |
64 |
65 | if __name__ == '__main__':
66 | fire.Fire(AppCLI)
67 |
--------------------------------------------------------------------------------
/saga-choreography-example/billing/app/db.py:
--------------------------------------------------------------------------------
1 | import contextlib
2 |
3 | from sqlalchemy import create_engine
4 | from sqlalchemy.ext.declarative import declarative_base
5 | from sqlalchemy.orm import sessionmaker
6 |
7 | from app.dependencies import get_settings
8 |
9 | settings = get_settings()
10 |
11 | engine = create_engine(settings.DATABASE_URL)
12 | SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
13 |
14 | Base = declarative_base()
15 | Base.metadata.schema = 'billing_schema'
16 |
17 |
18 | @contextlib.contextmanager
19 | def Session():
20 | db = SessionLocal()
21 | try:
22 | yield db
23 | finally:
24 | db.close()
25 |
--------------------------------------------------------------------------------
/saga-choreography-example/billing/app/dependencies.py:
--------------------------------------------------------------------------------
1 | from functools import lru_cache
2 |
3 | from app.settings import Settings
4 |
5 |
6 | @lru_cache()
7 | def get_settings():
8 | return Settings()
9 |
--------------------------------------------------------------------------------
/saga-choreography-example/billing/app/events.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/roelzkie15/python-microservices-patterns/a977e0a7b34ead5ff3f39f112450a0491673b7aa/saga-choreography-example/billing/app/events.py
--------------------------------------------------------------------------------
/saga-choreography-example/billing/app/main.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | from fastapi import FastAPI
4 |
5 | from app.amqp_client import AMQPClient
6 | from app.services import (create_billing_request_from_event,
7 | set_billing_request_status_from_event)
8 |
9 | app = FastAPI()
10 |
11 |
12 | @app.on_event('startup')
13 | async def startup():
14 | amqp_client: AMQPClient = await AMQPClient().init()
15 |
16 | await amqp_client.event_consumer(create_billing_request_from_event, 'BOOKING_TX_EVENT_STORE', 'booking.created', 'booking_events')
17 | await amqp_client.event_consumer(set_billing_request_status_from_event, 'BOOKING_TX_EVENT_STORE', 'parking.unavailable')
18 |
19 | app.state.amqp_client = amqp_client
20 |
21 |
22 | @app.on_event('shutdown')
23 | async def shutdown():
24 | await app.state.amqp_client.connection.close()
25 |
26 |
27 | @app.get('/health')
28 | async def root():
29 | return {'message': 'Billing server is running'}
30 |
--------------------------------------------------------------------------------
/saga-choreography-example/billing/app/models.py:
--------------------------------------------------------------------------------
1 | from decimal import Decimal
2 | from typing import Any
3 |
4 | from pydantic import BaseModel
5 | from sqlalchemy import Column, ForeignKey, Integer, Numeric, String
6 | from sqlalchemy.ext.hybrid import hybrid_property
7 | from sqlalchemy.orm import relationship
8 |
9 | from app.db import Base
10 |
11 |
12 | class DictMixin:
13 |
14 | def to_dict(self):
15 | return dict((col, getattr(self, col)) for col in self.__table__.columns.keys())
16 |
17 |
18 | class BillingRequest(Base, DictMixin):
19 | __tablename__ = 'billing_requests'
20 |
21 | id = Column(Integer, primary_key=True, autoincrement=True)
22 | total = Column(Numeric(precision=12, scale=2), nullable=True)
23 | status = Column(String, default='pending')
24 |
25 | reference_no = Column(String, unique=True, nullable=False)
26 |
27 | payment_reconciliations = relationship(
28 | 'PaymentReconciliation',
29 | backref='billing_request',
30 | )
31 |
32 | @hybrid_property
33 | def total_paid(self):
34 | return Decimal(sum([payment.amount for payment in self.payment_reconciliations]))
35 |
36 | @hybrid_property
37 | def balance(self):
38 | return Decimal(self.total - self.total_paid)
39 |
40 | def calculate_total_payment(self, amount: Decimal) -> Decimal:
41 | '''
42 |         Total paid so far plus the incoming payment amount.
43 | '''
44 | return Decimal(amount + self.total_paid)
45 |
46 |
47 | class PaymentReconciliation(Base, DictMixin):
48 | __tablename__ = 'payment_reconciliations'
49 |
50 | id = Column(Integer, primary_key=True, autoincrement=True)
51 | amount = Column(Numeric(precision=12, scale=2), nullable=False)
52 |
53 | billing_request_id = Column(Integer, ForeignKey('billing_requests.id'))
54 |
55 |
56 | class AMQPMessage(BaseModel):
57 | id: str
58 | content: Any
59 |
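The hybrid properties on BillingRequest are plain arithmetic over the in-memory relationship, so they can be checked without touching the database. A small sketch (importing app.models still pulls in app.db, so the service's DATABASE_URL and RABBITMQ_BROKER_URL environment variables must be set):

    from decimal import Decimal

    from app.models import BillingRequest, PaymentReconciliation

    br = BillingRequest(total=Decimal("100.00"), reference_no="demo-ref")
    br.payment_reconciliations = [PaymentReconciliation(amount=Decimal("40.00"))]

    assert br.total_paid == Decimal("40.00")
    assert br.balance == Decimal("60.00")
    # pay_bill rejects overpayment: 40.00 already paid + 70.00 incoming = 110.00 > 100.00 total.
    assert br.calculate_total_payment(Decimal("70.00")) == Decimal("110.00")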
--------------------------------------------------------------------------------
/saga-choreography-example/billing/app/pydantic_models.py:
--------------------------------------------------------------------------------
1 | from typing import List
2 |
3 | from pydantic_sqlalchemy import sqlalchemy_to_pydantic
4 |
5 | from app.models import BillingRequest, PaymentReconciliation
6 |
7 | PydanticBillingRequest = sqlalchemy_to_pydantic(BillingRequest)
8 | PydanticPaymentReconciliation = sqlalchemy_to_pydantic(PaymentReconciliation)
9 |
10 |
11 | class PydanticBillingRequestWithPaymentReconciliation(PydanticBillingRequest):
12 | payment_reconciliations: List[PydanticPaymentReconciliation]
13 |
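A hedged usage sketch for the generated models above, e.g. when serializing a billing request together with its reconciliations. sqlalchemy_to_pydantic generates models with orm_mode enabled, which is what from_orm relies on; the query assumes at least one row exists.

    from app.db import Session
    from app.models import BillingRequest
    from app.pydantic_models import PydanticBillingRequestWithPaymentReconciliation

    with Session() as session:
        br = session.query(BillingRequest).first()
        if br is not None:
            payload = PydanticBillingRequestWithPaymentReconciliation.from_orm(br)
            print(payload.json())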
--------------------------------------------------------------------------------
/saga-choreography-example/billing/app/services.py:
--------------------------------------------------------------------------------
1 | import ast
2 | from decimal import Decimal
3 | from typing import List
4 |
5 | from aio_pika import IncomingMessage
6 | # NOTE: sessions come from app.db.Session (imported below); sqlalchemy.orm.Session is deliberately not imported here, since it would be shadowed by it.
7 |
8 | from app import logging
9 | from app.models import BillingRequest, PaymentReconciliation
10 | from app.db import Session
11 |
12 |
13 | async def create_billing_request_from_event(message: IncomingMessage):
14 | decoded_message = ast.literal_eval(str(message.body.decode()))
15 |
16 | with Session() as session:
17 | br = await create_billing_request(session, decoded_message['id'])
18 |
19 | logging.info(f'Billing request with ID {br.id} was created!')
20 |
21 | return br
22 |
23 |
24 | async def set_billing_request_status_from_event(message: IncomingMessage):
25 | decoded_message = ast.literal_eval(str(message.body.decode()))
26 |
27 | with Session() as session:
28 | br = await billing_request_details_by_reference_no(session, decoded_message['id'])
29 |
30 | if decoded_message['content']['status'] == 'unavailable':
31 | br.status = 'refunded'
32 | await update_billing_request(session, br)
33 |
34 | logging.info(f'Billing request with ID {br.id} was {br.status}!')
35 |
36 | return br
37 |
38 |
39 | async def create_billing_request(session: Session, uuid: str, total: Decimal = Decimal('100.00')) -> BillingRequest:
40 | '''
41 |     Assume every billing request totals $100.00.
42 | '''
43 | br = BillingRequest(reference_no=uuid, total=total)
44 | session.add(br)
45 | session.commit()
46 | session.refresh(br)
47 |
48 | return br
49 |
50 |
51 | async def update_billing_request(session: Session, br: BillingRequest) -> BillingRequest:
52 | session.commit()
53 | session.refresh(br)
54 | return br
55 |
56 |
57 | async def billing_request_list(session: Session) -> List[BillingRequest]:
58 | return session.query(BillingRequest).all()
59 |
60 |
61 | async def billing_request_details(session: Session, id: int) -> BillingRequest:
62 | return session.query(BillingRequest).filter(BillingRequest.id == id).one()
63 |
64 |
65 | async def billing_request_details_by_reference_no(session: Session, ref_no: str) -> BillingRequest:
66 | return session.query(BillingRequest).filter(BillingRequest.reference_no == ref_no).one()
67 |
68 |
69 | async def create_payment_reconciliation(session: Session, billing_request_id: int, amount: Decimal) -> PaymentReconciliation:
70 | pr = PaymentReconciliation(
71 | billing_request_id=billing_request_id,
72 | amount=amount
73 | )
74 | session.add(pr)
75 | session.commit()
76 | session.refresh(pr)
77 | return pr
78 |
79 |
80 | async def payment_reconciliation_list(session: Session) -> List[PaymentReconciliation]:
81 | return session.query(PaymentReconciliation).all()
82 |
83 |
84 | async def payment_reconciliation_details(session: Session, id: int) -> PaymentReconciliation:
85 | return session.query(PaymentReconciliation).filter(PaymentReconciliation.id == id).one()
86 |
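The two event handlers at the top of this file decode the payload with ast.literal_eval and read only a couple of keys; roughly, the messages they expect look like this (values are illustrative):

    # Consumed from 'booking.created': only "id" (the booking reference) is used.
    booking_created = {"id": "booking-reference-uuid", "content": {"status": "created"}}

    # Consumed from 'parking.unavailable': the bill is marked "refunded" when content.status is "unavailable".
    parking_unavailable = {"id": "booking-reference-uuid", "content": {"status": "unavailable"}}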
--------------------------------------------------------------------------------
/saga-choreography-example/billing/app/settings.py:
--------------------------------------------------------------------------------
1 | from pydantic import BaseSettings
2 |
3 |
4 | class Settings(BaseSettings):
5 | DATABASE_URL: str
6 | RABBITMQ_BROKER_URL: str
7 |
--------------------------------------------------------------------------------
/saga-choreography-example/billing/migrations/README:
--------------------------------------------------------------------------------
1 | Generic single-database configuration.
--------------------------------------------------------------------------------
/saga-choreography-example/billing/migrations/env.py:
--------------------------------------------------------------------------------
1 | from app.models import *
2 | from app.db import Base
3 | from logging.config import fileConfig
4 |
5 | from sqlalchemy import engine_from_config
6 | from sqlalchemy import pool
7 |
8 | from alembic import context
9 |
10 | from app import logging
11 | from app.dependencies import get_settings
12 |
13 | app_settings = get_settings()
14 |
15 | # this is the Alembic Config object, which provides
16 | # access to the values within the .ini file in use.
17 | config = context.config
18 |
19 | # Interpret the config file for Python logging.
20 | # This line sets up loggers basically.
21 | if config.config_file_name is not None:
22 | fileConfig(config.config_file_name)
23 |
24 | # add your model's MetaData object here
25 | # for 'autogenerate' support
26 | # from myapp import mymodel
27 | # target_metadata = mymodel.Base.metadata
28 |
29 | config.set_main_option('sqlalchemy.url', app_settings.DATABASE_URL)
30 | target_metadata = [Base.metadata]
31 |
32 | # other values from the config, defined by the needs of env.py,
33 | # can be acquired:
34 | # my_important_option = config.get_main_option("my_important_option")
35 | # ... etc.
36 |
37 |
38 | def run_migrations_offline():
39 | """Run migrations in 'offline' mode.
40 |
41 | This configures the context with just a URL
42 | and not an Engine, though an Engine is acceptable
43 | here as well. By skipping the Engine creation
44 | we don't even need a DBAPI to be available.
45 |
46 | Calls to context.execute() here emit the given string to the
47 | script output.
48 |
49 | """
50 | url = config.get_main_option("sqlalchemy.url")
51 | context.configure(
52 | url=url,
53 | target_metadata=target_metadata,
54 | literal_binds=True,
55 | compare_type=True,
56 | compare_server_default=True,
57 | include_schemas=True,
58 | version_table_schema='billing_schema',
59 | include_object=include_object,
60 | dialect_opts={"paramstyle": "named"},
61 | )
62 |
63 | with context.begin_transaction():
64 | context.run_migrations()
65 |
66 |
67 | def run_migrations_online():
68 | """Run migrations in 'online' mode.
69 |
70 | In this scenario we need to create an Engine
71 | and associate a connection with the context.
72 |
73 | """
74 |
75 | # this callback is used to prevent an auto-migration from being generated
76 | # when there are no changes to the schema
77 | # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
78 | def process_revision_directives(context, revision, directives):
79 | if getattr(config.cmd_opts, 'autogenerate', False):
80 | script = directives[0]
81 | if script.upgrade_ops.is_empty():
82 | directives[:] = []
83 | logging.info('No changes in schema detected.')
84 |
85 | connectable = engine_from_config(
86 | config.get_section(config.config_ini_section),
87 | prefix="sqlalchemy.",
88 | poolclass=pool.NullPool,
89 | )
90 |
91 | with connectable.connect() as connection:
92 | context.configure(
93 | connection=connection, target_metadata=target_metadata,
94 | process_revision_directives=process_revision_directives,
95 | compare_type=True,
96 | compare_server_default=True,
97 | include_schemas=True,
98 | version_table_schema='billing_schema',
99 | include_object=include_object
100 | )
101 |
102 | with context.begin_transaction():
103 | context.run_migrations()
104 |
105 |
106 | def include_object(object, name, type_, reflected, compare_to):
107 | if (type_ == 'table'):
108 | return object.schema == 'billing_schema'
109 | return True
110 |
111 |
112 | if context.is_offline_mode():
113 | run_migrations_offline()
114 | else:
115 | run_migrations_online()
116 |
--------------------------------------------------------------------------------
/saga-choreography-example/billing/migrations/script.py.mako:
--------------------------------------------------------------------------------
1 | """${message}
2 |
3 | Revision ID: ${up_revision}
4 | Revises: ${down_revision | comma,n}
5 | Create Date: ${create_date}
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 | ${imports if imports else ""}
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = ${repr(up_revision)}
14 | down_revision = ${repr(down_revision)}
15 | branch_labels = ${repr(branch_labels)}
16 | depends_on = ${repr(depends_on)}
17 |
18 |
19 | def upgrade():
20 | ${upgrades if upgrades else "pass"}
21 |
22 |
23 | def downgrade():
24 | ${downgrades if downgrades else "pass"}
25 |
--------------------------------------------------------------------------------
/saga-choreography-example/billing/migrations/versions/9cbf6f752227_initial_migration.py:
--------------------------------------------------------------------------------
1 | """Initial migration
2 |
3 | Revision ID: 9cbf6f752227
4 | Revises:
5 | Create Date: 2022-05-22 17:22:49.274560
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '9cbf6f752227'
14 | down_revision = None
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | # ### commands auto generated by Alembic - please adjust! ###
21 | op.create_table('billing_requests',
22 | sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
23 | sa.Column('total', sa.Numeric(precision=12, scale=2), nullable=True),
24 | sa.Column('status', sa.String(), nullable=True),
25 | sa.Column('reference_no', sa.String(), nullable=False),
26 | sa.PrimaryKeyConstraint('id'),
27 | sa.UniqueConstraint('reference_no'),
28 | schema='billing_schema'
29 | )
30 | op.create_table('payment_reconciliations',
31 | sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
32 | sa.Column('amount', sa.Numeric(precision=12, scale=2), nullable=False),
33 | sa.Column('billing_request_id', sa.Integer(), nullable=True),
34 | sa.ForeignKeyConstraint(['billing_request_id'], ['billing_schema.billing_requests.id'], ),
35 | sa.PrimaryKeyConstraint('id'),
36 | schema='billing_schema'
37 | )
38 | # ### end Alembic commands ###
39 |
40 |
41 | def downgrade():
42 | # ### commands auto generated by Alembic - please adjust! ###
43 | op.drop_table('payment_reconciliations', schema='billing_schema')
44 | op.drop_table('billing_requests', schema='billing_schema')
45 | # ### end Alembic commands ###
46 |
--------------------------------------------------------------------------------
/saga-choreography-example/billing/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "saga-choreograhpy-example-billing-service"
3 | version = "0.1.0"
4 | description = "Saga's choreography pattern example - Billing service"
5 | authors = ["Roel Delos Reyes "]
6 |
7 | [tool.poetry.dependencies]
8 | python = "3.10.3"
9 | fastapi = "0.75.2"
10 | uvicorn = {extras = ["standard"], version = "^0.17.6"}
11 | pydantic = "^1.9.0"
12 | aio-pika = "^7.2.0"
13 | fire = "^0.4.0"
14 | alembic = "^1.7.7"
15 | psycopg2-binary = "2.9.3"
16 | pydantic-sqlalchemy = "^0.0.9"
17 | simplejson = "^3.17.6"
18 |
19 | [tool.poetry.dev-dependencies]
20 |
21 | [build-system]
22 | requires = ["poetry-core>=1.0.0"]
23 | build-backend = "poetry.core.masonry.api"
24 |
--------------------------------------------------------------------------------
/saga-choreography-example/billing/start_dev.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -o errexit
4 | set -o pipefail
5 | set -o nounset
6 |
7 | poetry run uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload
8 |
--------------------------------------------------------------------------------
/saga-choreography-example/booking/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.10.3-slim-buster
2 |
3 | ENV PYTHONUNBUFFERED 1
4 |
5 | RUN apt-get update
6 |
7 | RUN pip install -U \
8 | pip \
9 | setuptools \
10 | wheel
11 |
12 | RUN pip --no-cache-dir install poetry
13 |
14 | WORKDIR /code
15 |
16 | RUN poetry config virtualenvs.in-project false
17 |
18 | COPY pyproject.toml .
19 | COPY poetry.lock .
20 |
21 | # Lock and install dependencies (the manifests must be copied in first).
22 | RUN poetry lock && poetry install
23 |
24 | COPY . .
25 |
26 | CMD ["chmod", "+x", "/code/start_dev.sh", "/code/wait-for-it.sh"]
27 |
28 |
--------------------------------------------------------------------------------
/saga-choreography-example/booking/alembic.ini:
--------------------------------------------------------------------------------
1 | # A generic, single database configuration.
2 |
3 | [alembic]
4 | # path to migration scripts
5 | script_location = migrations
6 |
7 | # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
8 | # Uncomment the line below if you want the files to be prepended with date and time
9 | # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
10 |
11 | # sys.path path, will be prepended to sys.path if present.
12 | # defaults to the current working directory.
13 | # (new in 1.5.5)
14 | prepend_sys_path = .
15 |
16 | # timezone to use when rendering the date within the migration file
17 | # as well as the filename.
18 | # If specified, requires the python-dateutil library that can be
19 | # installed by adding `alembic[tz]` to the pip requirements
20 | # string value is passed to dateutil.tz.gettz()
21 | # leave blank for localtime
22 | # timezone =
23 |
24 | # max length of characters to apply to the
25 | # "slug" field
26 | # truncate_slug_length = 40
27 |
28 | # set to 'true' to run the environment during
29 | # the 'revision' command, regardless of autogenerate
30 | # revision_environment = false
31 |
32 | # set to 'true' to allow .pyc and .pyo files without
33 | # a source .py file to be detected as revisions in the
34 | # versions/ directory
35 | # sourceless = false
36 |
37 | # version location specification; This defaults
38 | # to ${script_location}/versions. When using multiple version
39 | # directories, initial revisions must be specified with --version-path.
40 | # The path separator used here should be the separator specified by "version_path_separator" below.
41 | # version_locations = %(here)s/bar:%(here)s/bat:${script_location}/versions
42 |
43 | # version path separator; As mentioned above, this is the character used to split
44 | # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
45 | # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
46 | # Valid values for version_path_separator are:
47 | #
48 | # version_path_separator = :
49 | # version_path_separator = ;
50 | # version_path_separator = space
51 | version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
52 |
53 | # the output encoding used when revision files
54 | # are written from script.py.mako
55 | # output_encoding = utf-8
56 |
57 | # sqlalchemy.url = %(DATABASE_URL)s
58 |
59 | # [post_write_hooks]
60 | # This section defines scripts or Python functions that are run
61 | # on newly generated revision scripts. See the documentation for further
62 | # detail and examples
63 |
64 | # format using "black" - use the console_scripts runner,
65 | # against the "black" entrypoint
66 | # hooks = black
67 | # black.type = console_scripts
68 | # black.entrypoint = black
69 | # black.options = -l 79 REVISION_SCRIPT_FILENAME
70 |
71 | # Logging configuration
72 | [loggers]
73 | keys = root,sqlalchemy,alembic
74 |
75 | [handlers]
76 | keys = console
77 |
78 | [formatters]
79 | keys = generic
80 |
81 | [logger_root]
82 | level = WARN
83 | handlers = console
84 | qualname =
85 |
86 | [logger_sqlalchemy]
87 | level = WARN
88 | handlers =
89 | qualname = sqlalchemy.engine
90 |
91 | [logger_alembic]
92 | level = INFO
93 | handlers =
94 | qualname = alembic
95 |
96 | [handler_console]
97 | class = StreamHandler
98 | args = (sys.stderr,)
99 | level = NOTSET
100 | formatter = generic
101 |
102 | [formatter_generic]
103 | format = %(levelname)-5.5s [%(name)s] %(message)s
104 | datefmt = %H:%M:%S
--------------------------------------------------------------------------------
/saga-choreography-example/booking/app/__init__.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | logging.basicConfig(level=logging.INFO)
4 |
--------------------------------------------------------------------------------
/saga-choreography-example/booking/app/amqp_client.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | from functools import partial
3 | from typing import Callable
4 |
5 | import aio_pika
6 | from aio_pika import Message
7 |
8 | from app.dependencies import get_settings
9 | from app.models import AMQPMessage
10 |
11 |
12 | class AMQPClient:
13 |
14 |     async def init(self) -> 'AMQPClient':
15 |         '''
16 |         Initialize AMQP client.
17 |         '''
18 |
19 | settings = get_settings()
20 | self.connection = await aio_pika.connect_robust(
21 | settings.RABBITMQ_BROKER_URL, loop=asyncio.get_event_loop()
22 | )
23 |
24 | # Creating channel
25 | self.channel = await self.connection.channel()
26 | await self.channel.set_qos(prefetch_count=1)
27 |
28 | return self
29 |
30 | async def event_consumer(
31 | self, callback: Callable, event_store: str, event: str = '#', queue_name: str | None = None,
32 | ) -> None:
33 | '''
34 | Create an event consumer.
35 |
36 | callback - A function that will process the incoming message.
37 |         event_store - Declare an exchange as an event store. We send messages/events
38 |                 to this exchange.
39 |         event - Serves as a binding key or a type of event that occurred.
40 |         queue_name - Name of the queue that receives events from the exchange (optional).
41 |                 If not specified, a queue with a random name will still be created.
42 | '''
43 | exchange = await self.channel.declare_exchange(
44 | event_store,
45 | type='topic',
46 | durable=True
47 | )
48 | queue = await self.channel.declare_queue(queue_name, auto_delete=True)
49 |
50 | await queue.bind(exchange, event)
51 | await queue.consume(partial(self._process_message, callback=callback))
52 |
53 | async def event_producer(
54 | self, exchange: str, binding_key: str, message: AMQPMessage
55 | ) -> None:
56 | '''
57 | Send event/message to a specific exchange with binding-key.
58 |
59 |         If an existing queue is bound to the given binding-key, the message will be stored
60 |         in that queue; otherwise the message will be lost.
61 |
62 | NOTE: The binding_key is mandatory so we can explicitly route the message/event
63 | to the right queue.
64 | '''
65 |
66 | # Declare exchange
67 | exchange = await self.channel.declare_exchange(
68 | exchange,
69 | type='topic',
70 | durable=True
71 | )
72 |
73 | await exchange.publish(
74 | Message(
75 | body=str(message.dict()).encode(),
76 | content_type='application/json',
77 | ),
78 | routing_key=binding_key,
79 | )
80 |
81 | async def _process_message(self, message: aio_pika.IncomingMessage, callback: Callable) -> None:
82 | '''
83 | Process incoming message from a Queue. It will require a callback function to handle
84 | message content.
85 | '''
86 | async with message.process(ignore_processed=True):
87 | await message.ack()
88 | await callback(message)
89 |
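Every service in this example wires the client above the same way: it registers a consumer for the events it reacts to and publishes its own events through event_producer. A minimal usage sketch, assuming a reachable RabbitMQ broker configured via Settings.RABBITMQ_BROKER_URL (the handler and queue names below are hypothetical):

    import asyncio

    from aio_pika import IncomingMessage

    from app.amqp_client import AMQPClient
    from app.models import AMQPMessage


    async def handle_booking_created(message: IncomingMessage) -> None:
        # Hypothetical handler: just print the raw event payload.
        print(message.body.decode())


    async def main() -> None:
        client = await AMQPClient().init()

        # Listen for 'booking.created' events on the shared event-store exchange.
        await client.event_consumer(
            handle_booking_created, 'BOOKING_TX_EVENT_STORE', 'booking.created', 'demo_queue'
        )

        # Publish an event; any queue bound to 'booking.created' will receive it.
        await client.event_producer(
            'BOOKING_TX_EVENT_STORE', 'booking.created',
            message=AMQPMessage(id='demo', content={'status': 'pending'}),
        )

        await asyncio.sleep(1)  # give the consumer a moment to process the message
        await client.connection.close()


    asyncio.run(main())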
--------------------------------------------------------------------------------
/saga-choreography-example/booking/app/cli.py:
--------------------------------------------------------------------------------
1 |
2 | import fire
3 |
4 | from app.amqp_client import AMQPClient
5 | from app.db import Session
6 | from app.models import AMQPMessage
7 | from app.services import (booking_details_by_parking_ref_no, booking_list,
8 | create_booking)
9 |
10 |
11 | class AppCLI(object):
12 |
13 | async def create_booking(self, parking_slot_uuid):
14 | with Session() as session:
15 | booking = await create_booking(session, parking_slot_uuid)
16 |
17 | obj = booking.to_dict()
18 |
19 | amqp_client: AMQPClient = await AMQPClient().init()
20 | await amqp_client.event_producer(
21 | 'BOOKING_TX_EVENT_STORE', 'booking.created', message=AMQPMessage(id=booking.parking_slot_ref_no, content=obj)
22 | )
23 | await amqp_client.connection.close()
24 |
25 | return obj
26 |
27 | async def booking_list(self):
28 | with Session() as session:
29 | b_list = await booking_list(session)
30 | return [
31 | booking.to_dict() for booking in b_list
32 | ]
33 |
34 | async def booking_details_by_parking_ref_no(self, uuid: str):
35 | with Session() as session:
36 | booking = await booking_details_by_parking_ref_no(session, uuid)
37 | return booking.to_dict()
38 |
39 |
40 | if __name__ == '__main__':
41 | fire.Fire(AppCLI)
42 |
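The same CLI methods can also be driven directly from Python (handy for quick experiments or tests), bypassing fire. A small sketch, assuming the database and broker from the service settings are reachable; the slot UUID below is a placeholder:

    import asyncio

    from app.cli import AppCLI


    async def main() -> None:
        cli = AppCLI()

        # Creates a 'pending' booking and publishes a 'booking.created' event.
        booking = await cli.create_booking('00000000-0000-0000-0000-000000000000')
        print(booking)

        # List every booking recorded so far.
        print(await cli.booking_list())


    asyncio.run(main())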
--------------------------------------------------------------------------------
/saga-choreography-example/booking/app/db.py:
--------------------------------------------------------------------------------
1 | import contextlib
2 |
3 | from sqlalchemy import create_engine
4 | from sqlalchemy.ext.declarative import declarative_base
5 | from sqlalchemy.orm import sessionmaker
6 |
7 | from app.dependencies import get_settings
8 |
9 | settings = get_settings()
10 |
11 | engine = create_engine(settings.DATABASE_URL)
12 | SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
13 |
14 | Base = declarative_base()
15 | Base.metadata.schema = 'booking_schema'
16 |
17 |
18 | @contextlib.contextmanager
19 | def Session():
20 | db = SessionLocal()
21 | try:
22 | yield db
23 | finally:
24 | db.close()
25 |
--------------------------------------------------------------------------------
/saga-choreography-example/booking/app/dependencies.py:
--------------------------------------------------------------------------------
1 | from functools import lru_cache
2 |
3 | from app.settings import Settings
4 |
5 |
6 | @lru_cache()
7 | def get_settings():
8 | return Settings()
9 |
--------------------------------------------------------------------------------
/saga-choreography-example/booking/app/main.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | from fastapi import FastAPI
4 |
5 | from app.amqp_client import AMQPClient
6 | from app.services import set_booking_to_status_from_event
7 |
8 | app = FastAPI()
9 |
10 |
11 | @app.on_event('startup')
12 | async def startup():
13 | amqp_client: AMQPClient = await AMQPClient().init()
14 |
15 | await amqp_client.event_consumer(set_booking_to_status_from_event, 'BOOKING_TX_EVENT_STORE', 'parking.reserved', 'parking_events')
16 | await amqp_client.event_consumer(set_booking_to_status_from_event, 'BOOKING_TX_EVENT_STORE', 'parking.unavailable')
17 |
18 | app.state.amqp_client = amqp_client
19 |
20 |
21 | @app.on_event('shutdown')
22 | async def shutdown():
23 | await app.state.amqp_client.connection.close()
24 |
25 |
26 | @app.get('/health')
27 | async def root():
28 | return {'message': 'Booking server is running'}
29 |
--------------------------------------------------------------------------------
/saga-choreography-example/booking/app/models.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Dict
2 |
3 | from pydantic import BaseModel
4 | from sqlalchemy import Column, String, Integer
5 |
6 | from app.db import Base
7 |
8 |
9 | class DictMixin:
10 | def to_dict(self) -> Dict:
11 | return dict((col, getattr(self, col)) for col in self.__table__.columns.keys())
12 |
13 |
14 | class Booking(Base, DictMixin):
15 | __tablename__ = 'bookings'
16 |
17 | id = Column(Integer, primary_key=True, unique=True, index=True)
18 | status = Column(String, nullable=False, server_default='created')
19 |
20 | parking_slot_ref_no = Column(String, nullable=True)
21 |
22 |
23 | class AMQPMessage(BaseModel):
24 | id: str
25 | content: Any
26 |
--------------------------------------------------------------------------------
/saga-choreography-example/booking/app/pydantic_models.py:
--------------------------------------------------------------------------------
1 | from pydantic_sqlalchemy import sqlalchemy_to_pydantic
2 |
3 | from app.models import Booking
4 |
5 | PydanticBooking = sqlalchemy_to_pydantic(Booking)
6 |
--------------------------------------------------------------------------------
/saga-choreography-example/booking/app/services.py:
--------------------------------------------------------------------------------
1 | import ast
2 | from typing import List
3 | from uuid import uuid4
4 |
5 | from aio_pika import IncomingMessage
6 |
7 |
8 | from app import logging
9 | from app.db import Session
10 | from app.models import Booking
11 |
12 |
13 | async def set_booking_to_status_from_event(message: IncomingMessage):
14 | decoded_message = ast.literal_eval(str(message.body.decode()))
15 |
16 | with Session() as session:
17 | booking = await booking_details_by_parking_ref_no(session, decoded_message['id'])
18 |
19 | if decoded_message['content']['status'] == 'unavailable':
20 | booking.status = 'failed'
21 | else:
22 | # reserved.
23 | booking.status = 'done'
24 |
25 | await update_booking(session, booking)
26 |
27 | logging.info(f'Booking with ID {booking.id} was marked as {booking.status}!')
28 |
29 | return booking
30 |
31 |
32 | async def booking_details(session: Session, id: str) -> Booking:
33 | return session.query(Booking).filter(Booking.id == id).one()
34 |
35 |
36 | async def booking_details_by_parking_ref_no(session: Session, parking_slot_ref_no: str) -> Booking:
37 | return session.query(Booking).filter(Booking.parking_slot_ref_no == parking_slot_ref_no).one()
38 |
39 |
40 | async def booking_list(session: Session) -> List[Booking]:
41 | return session.query(Booking).all()
42 |
43 |
44 | async def create_booking(session: Session, parking_slot_uuid: str) -> Booking:
45 |     # Since customers may try to book the same parking slot,
46 |     # we append a unique booking identifier (uuid4) to parking_slot_ref_no.
47 |     # The booking identifier is used throughout the services to identify
48 |     # the transaction.
49 | booking = Booking(parking_slot_ref_no=f'{parking_slot_uuid}:{uuid4()}', status='pending')
50 | session.add(booking)
51 | session.commit()
52 | session.refresh(booking)
53 |
54 | return booking
55 |
56 |
57 | async def update_booking(session: Session, booking: Booking) -> Booking:
58 | session.commit()
59 | session.refresh(booking)
60 | return booking
61 |
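The composite reference number built in create_booking is what ties the saga together: the part before the colon identifies the parking slot, the part after it identifies the individual booking attempt. A small sketch of how a downstream consumer recovers the slot identifier (this mirrors what parking/app/services.py does); the slot UUID is a placeholder:

    from uuid import uuid4

    parking_slot_uuid = '00000000-0000-0000-0000-000000000000'  # placeholder slot UUID
    reference_no = f'{parking_slot_uuid}:{uuid4()}'              # as built in create_booking()

    # A consumer that only needs the slot takes the part before the colon.
    assert reference_no.split(':')[0] == parking_slot_uuid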
--------------------------------------------------------------------------------
/saga-choreography-example/booking/app/settings.py:
--------------------------------------------------------------------------------
1 | from pydantic import BaseSettings
2 |
3 |
4 | class Settings(BaseSettings):
5 | DATABASE_URL: str
6 | RABBITMQ_BROKER_URL: str
7 |
--------------------------------------------------------------------------------
/saga-choreography-example/booking/migrations/README:
--------------------------------------------------------------------------------
1 | Generic single-database configuration.
--------------------------------------------------------------------------------
/saga-choreography-example/booking/migrations/env.py:
--------------------------------------------------------------------------------
1 | from logging.config import fileConfig
2 |
3 | from sqlalchemy import engine_from_config
4 | from sqlalchemy import pool
5 |
6 | from alembic import context
7 |
8 | from app import logging
9 | from app.dependencies import get_settings
10 |
11 | app_settings = get_settings()
12 |
13 | # this is the Alembic Config object, which provides
14 | # access to the values within the .ini file in use.
15 | config = context.config
16 |
17 | # Interpret the config file for Python logging.
18 | # This line sets up loggers basically.
19 | if config.config_file_name is not None:
20 | fileConfig(config.config_file_name)
21 |
22 | # add your model's MetaData object here
23 | # for 'autogenerate' support
24 | # from myapp import mymodel
25 | # target_metadata = mymodel.Base.metadata
26 | from app.db import Base
27 |
28 | from app.models import Booking
29 |
30 | config.set_main_option('sqlalchemy.url', app_settings.DATABASE_URL)
31 | target_metadata = [Base.metadata]
32 |
33 | # other values from the config, defined by the needs of env.py,
34 | # can be acquired:
35 | # my_important_option = config.get_main_option("my_important_option")
36 | # ... etc.
37 |
38 |
39 | def run_migrations_offline():
40 | """Run migrations in 'offline' mode.
41 |
42 | This configures the context with just a URL
43 | and not an Engine, though an Engine is acceptable
44 | here as well. By skipping the Engine creation
45 | we don't even need a DBAPI to be available.
46 |
47 | Calls to context.execute() here emit the given string to the
48 | script output.
49 |
50 | """
51 | url = config.get_main_option("sqlalchemy.url")
52 | context.configure(
53 | url=url,
54 | target_metadata=target_metadata,
55 | literal_binds=True,
56 | compare_type=True,
57 | compare_server_default=True,
58 | include_schemas=True,
59 | version_table_schema='booking_schema',
60 | include_object=include_object,
61 | dialect_opts={"paramstyle": "named"},
62 | )
63 |
64 | with context.begin_transaction():
65 | context.run_migrations()
66 |
67 |
68 | def run_migrations_online():
69 | """Run migrations in 'online' mode.
70 |
71 | In this scenario we need to create an Engine
72 | and associate a connection with the context.
73 |
74 | """
75 |
76 | # this callback is used to prevent an auto-migration from being generated
77 | # when there are no changes to the schema
78 | # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
79 | def process_revision_directives(context, revision, directives):
80 | if getattr(config.cmd_opts, 'autogenerate', False):
81 | script = directives[0]
82 | if script.upgrade_ops.is_empty():
83 | directives[:] = []
84 | logging.info('No changes in schema detected.')
85 |
86 |
87 | connectable = engine_from_config(
88 | config.get_section(config.config_ini_section),
89 | prefix="sqlalchemy.",
90 | poolclass=pool.NullPool,
91 | )
92 |
93 | with connectable.connect() as connection:
94 | context.configure(
95 | connection=connection, target_metadata=target_metadata,
96 | process_revision_directives=process_revision_directives,
97 | compare_type=True,
98 | compare_server_default=True,
99 | include_schemas=True,
100 | version_table_schema='booking_schema',
101 | include_object=include_object
102 | )
103 |
104 | with context.begin_transaction():
105 | context.run_migrations()
106 |
107 |
108 | def include_object(object, name, type_, reflected, compare_to):
109 | if (type_ == 'table'):
110 | return object.schema == 'booking_schema'
111 | return True
112 |
113 |
114 | if context.is_offline_mode():
115 | run_migrations_offline()
116 | else:
117 | run_migrations_online()
118 |
--------------------------------------------------------------------------------
/saga-choreography-example/booking/migrations/script.py.mako:
--------------------------------------------------------------------------------
1 | """${message}
2 |
3 | Revision ID: ${up_revision}
4 | Revises: ${down_revision | comma,n}
5 | Create Date: ${create_date}
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 | ${imports if imports else ""}
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = ${repr(up_revision)}
14 | down_revision = ${repr(down_revision)}
15 | branch_labels = ${repr(branch_labels)}
16 | depends_on = ${repr(depends_on)}
17 |
18 |
19 | def upgrade():
20 | ${upgrades if upgrades else "pass"}
21 |
22 |
23 | def downgrade():
24 | ${downgrades if downgrades else "pass"}
--------------------------------------------------------------------------------
/saga-choreography-example/booking/migrations/versions/29af7026cb10_rename_booking_parking_slot_uuid_to_.py:
--------------------------------------------------------------------------------
1 | """Rename Booking.parking_slot_uuid to Booking.parking_slot_ref_no
2 |
3 | Revision ID: 29af7026cb10
4 | Revises: caeb28082687
5 | Create Date: 2022-05-28 19:42:20.938039
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '29af7026cb10'
14 | down_revision = 'caeb28082687'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | # ### commands auto generated by Alembic - please adjust! ###
21 | op.alter_column('bookings', 'parking_slot_uuid', new_column_name='parking_slot_ref_no', schema='booking_schema')
22 | # ### end Alembic commands ###
23 |
24 |
25 | def downgrade():
26 | # ### commands auto generated by Alembic - please adjust! ###
27 | op.alter_column('bookings', 'parking_slot_ref_no', new_column_name='parking_slot_uuid', schema='booking_schema')
28 | # ### end Alembic commands ###
--------------------------------------------------------------------------------
/saga-choreography-example/booking/migrations/versions/a2f83ffe751f_initial_migration.py:
--------------------------------------------------------------------------------
1 | """Initial migration
2 |
3 | Revision ID: a2f83ffe751f
4 | Revises:
5 | Create Date: 2022-05-23 19:49:37.182505
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = 'a2f83ffe751f'
14 | down_revision = None
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | # ### commands auto generated by Alembic - please adjust! ###
21 | op.create_table('bookings',
22 | sa.Column('id', sa.Integer(), nullable=False),
23 | sa.Column('description', sa.String(), nullable=False),
24 | sa.Column('status', sa.String(), server_default='created', nullable=False),
25 | sa.Column('parking_space_uuid', sa.String(), nullable=True),
26 | sa.PrimaryKeyConstraint('id'),
27 | schema='booking_schema'
28 | )
29 | op.create_index(op.f('ix_booking_schema_bookings_id'), 'bookings', ['id'], unique=True, schema='booking_schema')
30 | # ### end Alembic commands ###
31 |
32 |
33 | def downgrade():
34 | # ### commands auto generated by Alembic - please adjust! ###
35 | op.drop_index(op.f('ix_booking_schema_bookings_id'), table_name='bookings', schema='booking_schema')
36 | op.drop_table('bookings', schema='booking_schema')
37 | # ### end Alembic commands ###
--------------------------------------------------------------------------------
/saga-choreography-example/booking/migrations/versions/caeb28082687_removal_of_description_column.py:
--------------------------------------------------------------------------------
1 | """Removal of description column
2 |
3 | Revision ID: caeb28082687
4 | Revises: d6bb61f1e755
5 | Create Date: 2022-05-23 19:59:44.995294
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = 'caeb28082687'
14 | down_revision = 'd6bb61f1e755'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | # ### commands auto generated by Alembic - please adjust! ###
21 | op.drop_column('bookings', 'description', schema='booking_schema')
22 | # ### end Alembic commands ###
23 |
24 |
25 | def downgrade():
26 | # ### commands auto generated by Alembic - please adjust! ###
27 | op.add_column('bookings', sa.Column('description', sa.VARCHAR(), autoincrement=False, nullable=False), schema='booking_schema')
28 | # ### end Alembic commands ###
--------------------------------------------------------------------------------
/saga-choreography-example/booking/migrations/versions/d6bb61f1e755_alter_parking_space_no_to_parking_slot_.py:
--------------------------------------------------------------------------------
1 | """Alter parking_space_no to parking_slot_uuid
2 |
3 | Revision ID: d6bb61f1e755
4 | Revises: a2f83ffe751f
5 | Create Date: 2022-05-23 19:58:24.303388
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = 'd6bb61f1e755'
14 | down_revision = 'a2f83ffe751f'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | # ### commands auto generated by Alembic - please adjust! ###
21 | op.add_column('bookings', sa.Column('parking_slot_uuid', sa.String(), nullable=True), schema='booking_schema')
22 | op.drop_column('bookings', 'parking_space_uuid', schema='booking_schema')
23 | # ### end Alembic commands ###
24 |
25 |
26 | def downgrade():
27 | # ### commands auto generated by Alembic - please adjust! ###
28 | op.add_column('bookings', sa.Column('parking_space_uuid', sa.VARCHAR(), autoincrement=False, nullable=True), schema='booking_schema')
29 | op.drop_column('bookings', 'parking_slot_uuid', schema='booking_schema')
30 | # ### end Alembic commands ###
--------------------------------------------------------------------------------
/saga-choreography-example/booking/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "saga-choreograhpy-example-booking-service"
3 | version = "0.1.0"
4 | description = "Saga's choreography pattern example - Booking service"
5 | authors = ["Roel Delos Reyes "]
6 |
7 | [tool.poetry.dependencies]
8 | python = "3.10.3"
9 | fastapi = "0.75.2"
10 | psycopg2-binary = "2.9.3"
11 | uvicorn = {extras = ["standard"], version = "^0.17.6"}
12 | pydantic = "^1.9.0"
13 | aio-pika = "^7.2.0"
14 | fire = "^0.4.0"
15 | alembic = "^1.7.7"
16 | SQLAlchemy = "^1.4.36"
17 | pydantic-sqlalchemy = "^0.0.9"
18 |
19 | [tool.poetry.dev-dependencies]
20 |
21 | [build-system]
22 | requires = ["poetry-core>=1.0.0"]
23 | build-backend = "poetry.core.masonry.api"
24 |
--------------------------------------------------------------------------------
/saga-choreography-example/booking/start_dev.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -o errexit
4 | set -o pipefail
5 | set -o nounset
6 |
7 | poetry run uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload
8 |
--------------------------------------------------------------------------------
/saga-choreography-example/parking/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.10.3-slim-buster
2 |
3 | ENV PYTHONUNBUFFERED 1
4 |
5 | RUN apt-get update
6 |
7 | RUN pip install -U \
8 | pip \
9 | setuptools \
10 | wheel
11 |
12 | RUN pip --no-cache-dir install poetry
13 |
14 | WORKDIR /code
15 |
16 | COPY pyproject.toml .
17 | COPY poetry.lock .
18 |
19 | RUN poetry config virtualenvs.in-project false
20 | RUN poetry install
21 |
22 | COPY . .
23 |
24 | CMD ["chmod", "+x", "/code/start_dev.sh", "/code/wait-for-it.sh"]
25 |
26 |
--------------------------------------------------------------------------------
/saga-choreography-example/parking/alembic.ini:
--------------------------------------------------------------------------------
1 | # A generic, single database configuration.
2 |
3 | [alembic]
4 | # path to migration scripts
5 | script_location = migrations
6 |
7 | # template used to generate migration files
8 | # file_template = %%(rev)s_%%(slug)s
9 |
10 | # sys.path path, will be prepended to sys.path if present.
11 | # defaults to the current working directory.
12 | prepend_sys_path = .
13 |
14 | # timezone to use when rendering the date within the migration file
15 | # as well as the filename.
16 | # If specified, requires the python-dateutil library that can be
17 | # installed by adding `alembic[tz]` to the pip requirements
18 | # string value is passed to dateutil.tz.gettz()
19 | # leave blank for localtime
20 | # timezone =
21 |
22 | # max length of characters to apply to the
23 | # "slug" field
24 | # truncate_slug_length = 40
25 |
26 | # set to 'true' to run the environment during
27 | # the 'revision' command, regardless of autogenerate
28 | # revision_environment = false
29 |
30 | # set to 'true' to allow .pyc and .pyo files without
31 | # a source .py file to be detected as revisions in the
32 | # versions/ directory
33 | # sourceless = false
34 |
35 | # version location specification; This defaults
36 | # to migrations/versions. When using multiple version
37 | # directories, initial revisions must be specified with --version-path.
38 | # The path separator used here should be the separator specified by "version_path_separator" below.
39 | # version_locations = %(here)s/bar:%(here)s/bat:migrations/versions
40 |
41 | # version path separator; As mentioned above, this is the character used to split
42 | # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
43 | # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
44 | # Valid values for version_path_separator are:
45 | #
46 | # version_path_separator = :
47 | # version_path_separator = ;
48 | # version_path_separator = space
49 | version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
50 |
51 | # the output encoding used when revision files
52 | # are written from script.py.mako
53 | # output_encoding = utf-8
54 |
55 | # sqlalchemy.url = %(DATABASE_URL)s
56 |
57 |
58 | [post_write_hooks]
59 | # post_write_hooks defines scripts or Python functions that are run
60 | # on newly generated revision scripts. See the documentation for further
61 | # detail and examples
62 |
63 | # format using "black" - use the console_scripts runner, against the "black" entrypoint
64 | # hooks = black
65 | # black.type = console_scripts
66 | # black.entrypoint = black
67 | # black.options = -l 79 REVISION_SCRIPT_FILENAME
68 |
69 | # Logging configuration
70 | [loggers]
71 | keys = root,sqlalchemy,alembic
72 |
73 | [handlers]
74 | keys = console
75 |
76 | [formatters]
77 | keys = generic
78 |
79 | [logger_root]
80 | level = WARN
81 | handlers = console
82 | qualname =
83 |
84 | [logger_sqlalchemy]
85 | level = WARN
86 | handlers =
87 | qualname = sqlalchemy.engine
88 |
89 | [logger_alembic]
90 | level = INFO
91 | handlers =
92 | qualname = alembic
93 |
94 | [handler_console]
95 | class = StreamHandler
96 | args = (sys.stderr,)
97 | level = NOTSET
98 | formatter = generic
99 |
100 | [formatter_generic]
101 | format = %(levelname)-5.5s [%(name)s] %(message)s
102 | datefmt = %H:%M:%S
103 |
--------------------------------------------------------------------------------
/saga-choreography-example/parking/app/__init__.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | logging.basicConfig(level=logging.INFO)
4 |
--------------------------------------------------------------------------------
/saga-choreography-example/parking/app/amqp_client.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | from functools import partial
3 | from typing import Callable
4 |
5 | import aio_pika
6 | from aio_pika import IncomingMessage, Message
7 |
8 | from app.dependencies import get_settings
9 | from app.models import AMQPMessage
10 |
11 |
12 | class AMQPClient:
13 |
14 |     async def init(self) -> 'AMQPClient':
15 |         '''
16 |         Initialize AMQP client.
17 |         '''
18 |
19 | settings = get_settings()
20 | self.connection = await aio_pika.connect_robust(
21 | settings.RABBITMQ_BROKER_URL, loop=asyncio.get_event_loop()
22 | )
23 |
24 | # Creating channel
25 | self.channel = await self.connection.channel()
26 | await self.channel.set_qos(prefetch_count=1)
27 |
28 | return self
29 |
30 | async def event_consumer(
31 | self, callback: Callable, event_store: str, event: str = '#', queue_name: str | None = None,
32 | ) -> None:
33 | '''
34 | Create an event consumer.
35 |
36 | callback - A function that will process the incoming message.
37 |         event_store - Declare an exchange as an event store. We send messages/events
38 |                 to this exchange.
39 |         event - Serves as a binding key or a type of event that occurred.
40 |         queue_name - Name of the queue that receives events from the exchange (optional).
41 |                 If not specified, a queue with a random name will still be created.
42 | '''
43 | exchange = await self.channel.declare_exchange(
44 | event_store,
45 | type='topic',
46 | durable=True
47 | )
48 | queue = await self.channel.declare_queue(queue_name, auto_delete=True)
49 |
50 | await queue.bind(exchange, event)
51 | await queue.consume(partial(self._process_message, callback=callback))
52 |
53 | async def event_producer(
54 | self, exchange: str, binding_key: str, message: AMQPMessage
55 | ) -> None:
56 | '''
57 | Send event/message to a specific exchange with binding-key.
58 |
59 |         If an existing queue is bound to the given binding-key, the message will be stored
60 |         in that queue; otherwise the message will be lost.
61 |
62 | NOTE: The binding_key is mandatory so we can explicitly route the message/event
63 | to the right queue.
64 | '''
65 |
66 | # Declare exchange
67 | exchange = await self.channel.declare_exchange(
68 | exchange,
69 | type='topic',
70 | durable=True
71 | )
72 |
73 | payload = message.dict()
74 | await exchange.publish(
75 | Message(
76 | body=str(payload).encode(),
77 | content_type='application/json',
78 | ),
79 | routing_key=binding_key,
80 | )
81 |
82 | async def _process_message(self, message: IncomingMessage, callback: Callable) -> None:
83 | '''
84 | Process incoming message from a Queue. It will require a callback function to handle
85 | message content.
86 | '''
87 | async with message.process(ignore_processed=True):
88 | await message.ack()
89 | await callback(message)
90 |
--------------------------------------------------------------------------------
/saga-choreography-example/parking/app/cli.py:
--------------------------------------------------------------------------------
1 | import fire
2 |
3 | from app.db import Session
4 | from app.services import (create_parking_slot, parking_slot_details,
5 | parking_slot_list)
6 |
7 |
8 | class AppCLI(object):
9 |
10 | async def create_parking_slot(self, name: str, status: str | None = 'available'):
11 | with Session() as session:
12 | ps = await create_parking_slot(session, name=name, status=status)
13 | return ps.to_dict()
14 |
15 | async def parking_slot_list(self):
16 | with Session() as session:
17 | ps_list = await parking_slot_list(session)
18 | return [
19 | ps.to_dict() for ps in ps_list
20 | ]
21 |
22 | async def parking_slot_details(self, uuid: str):
23 | with Session() as session:
24 | ps = await parking_slot_details(session, uuid)
25 | return ps.to_dict()
26 |
27 |
28 | if __name__ == '__main__':
29 | fire.Fire(AppCLI)
30 |
--------------------------------------------------------------------------------
/saga-choreography-example/parking/app/db.py:
--------------------------------------------------------------------------------
1 | import contextlib
2 |
3 | from sqlalchemy import create_engine
4 | from sqlalchemy.ext.declarative import declarative_base
5 | from sqlalchemy.orm import sessionmaker
6 |
7 | from app.dependencies import get_settings
8 |
9 | settings = get_settings()
10 |
11 | engine = create_engine(settings.DATABASE_URL)
12 | SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
13 |
14 | Base = declarative_base()
15 | Base.metadata.schema = 'parking_schema'
16 |
17 |
18 | @contextlib.contextmanager
19 | def Session():
20 | db = SessionLocal()
21 | try:
22 | yield db
23 | finally:
24 | db.close()
25 |
--------------------------------------------------------------------------------
/saga-choreography-example/parking/app/dependencies.py:
--------------------------------------------------------------------------------
1 | from functools import lru_cache
2 |
3 | from app.settings import Settings
4 |
5 |
6 | @lru_cache()
7 | def get_settings():
8 | return Settings()
9 |
--------------------------------------------------------------------------------
/saga-choreography-example/parking/app/main.py:
--------------------------------------------------------------------------------
1 | from fastapi import FastAPI
2 |
3 | from app.amqp_client import AMQPClient
4 | from app.services import update_parking_slot_to_reserved_by_ref_no
5 |
6 | app = FastAPI()
7 |
8 |
9 | @app.on_event('startup')
10 | async def startup():
11 | amqp_client: AMQPClient = await AMQPClient().init()
12 |
13 | await amqp_client.event_consumer(update_parking_slot_to_reserved_by_ref_no, 'BOOKING_TX_EVENT_STORE', 'bill.paid', 'billing_events')
14 |
15 | app.state.amqp_client = amqp_client
16 |
17 |
18 | @app.on_event('shutdown')
19 | async def shutdown():
20 | await app.state.amqp_client.connection.close()
21 |
22 |
23 | @app.get('/health')
24 | async def root():
25 |     return {'message': 'Parking server is running'}
26 |
--------------------------------------------------------------------------------
/saga-choreography-example/parking/app/models.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Dict
2 |
3 | from pydantic import BaseModel
4 | from sqlalchemy import Column, String
5 |
6 | from app.db import Base
7 |
8 |
9 | class DictMixin:
10 | def to_dict(self) -> Dict:
11 | return dict((col, getattr(self, col)) for col in self.__table__.columns.keys())
12 |
13 |
14 | class ParkingSlot(Base, DictMixin):
15 | __tablename__ = 'parking_slots'
16 |
17 | uuid = Column(String, primary_key=True, unique=True, index=True)
18 | name = Column(String, nullable=False)
19 |
20 | status = Column(String, nullable=False, server_default='pending')
21 |
22 |
23 | class AMQPMessage(BaseModel):
24 | id: str
25 | content: Any
26 |
--------------------------------------------------------------------------------
/saga-choreography-example/parking/app/pydantic_models.py:
--------------------------------------------------------------------------------
1 | from pydantic_sqlalchemy import sqlalchemy_to_pydantic
2 |
3 | from app.models import ParkingSlot
4 |
5 | PydanticParkingSlot = sqlalchemy_to_pydantic(ParkingSlot)
6 |
--------------------------------------------------------------------------------
/saga-choreography-example/parking/app/services.py:
--------------------------------------------------------------------------------
1 | import ast
2 | from typing import List
3 | from uuid import uuid4
4 |
5 | from aio_pika import IncomingMessage
6 |
7 | from app import logging
8 | from app.amqp_client import AMQPClient
9 | from app.db import Session
10 | from app.models import AMQPMessage, ParkingSlot
11 |
12 |
13 | async def update_parking_slot_to_reserved_by_ref_no(message: IncomingMessage) -> ParkingSlot:
14 | decoded_message = ast.literal_eval(str(message.body.decode()))
15 |
16 | with Session() as session:
17 | reference_no = decoded_message['id']
18 |
19 | # Get the parking-slot identifier only.
20 | parking_slot_uuid = reference_no.split(':')[0]
21 | ps = await parking_slot_details(session, parking_slot_uuid)
22 |
23 | amqp_client: AMQPClient = await AMQPClient().init()
24 |
25 | # If status is already reserved.
26 | if ps.status == 'reserved':
27 | await amqp_client.event_producer(
28 | 'BOOKING_TX_EVENT_STORE', 'parking.unavailable',
29 | message=AMQPMessage(
30 | id=reference_no,
31 | content={'status': 'unavailable'}
32 | )
33 | )
34 | await amqp_client.connection.close()
35 | logging.info(f'Parking slot with UUID {ps.uuid} is unavailable!')
36 | return None
37 |
38 | ps.status = 'reserved'
39 | ps = await update_parking_slot(session, ps)
40 |
41 | amqp_client: AMQPClient = await AMQPClient().init()
42 | await amqp_client.event_producer(
43 | 'BOOKING_TX_EVENT_STORE', 'parking.reserved',
44 | message=AMQPMessage(
45 | id=reference_no, content={'status': 'reserved'}
46 | )
47 | )
48 | await amqp_client.connection.close()
49 |
50 | logging.info(f'Parking slot with UUID {ps.uuid} has been reserved!')
51 |
52 | return ps
53 |
54 |
55 | async def update_parking_slot(session: Session, ps: ParkingSlot) -> ParkingSlot:
56 | session.commit()
57 | session.refresh(ps)
58 | return ps
59 |
60 |
61 | async def create_parking_slot(session: Session, **kwargs) -> ParkingSlot:
62 | ps = ParkingSlot(uuid=str(uuid4()))
63 |
64 |     for k, v in kwargs.items():
65 |         setattr(ps, k, v)
66 | session.add(ps)
67 | session.commit()
68 | session.refresh(ps)
69 | return ps
70 |
71 |
72 | async def parking_slot_list(session: Session) -> List[ParkingSlot]:
73 | return session.query(ParkingSlot).all()
74 |
75 |
76 | async def parking_slot_details(session: Session, uuid: str) -> ParkingSlot:
77 | return session.query(ParkingSlot).filter(ParkingSlot.uuid == uuid).one()
78 |
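The two branches above are the heart of the choreography: a successful reservation publishes 'parking.reserved', while an already-reserved slot publishes the compensating 'parking.unavailable' event, which the booking service turns into a failed booking. A sketch of the two payload shapes exactly as they are built here (the reference number is a placeholder):

    from app.models import AMQPMessage

    reference_no = '<parking-slot-uuid>:<booking-uuid>'  # placeholder composite reference

    # Happy path: the booking service marks the matching booking as 'done'.
    reserved = AMQPMessage(id=reference_no, content={'status': 'reserved'})

    # Compensation: the booking service marks the matching booking as 'failed'.
    unavailable = AMQPMessage(id=reference_no, content={'status': 'unavailable'})

    print(reserved.dict())    # {'id': '<parking-slot-uuid>:<booking-uuid>', 'content': {'status': 'reserved'}}
    print(unavailable.dict())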
--------------------------------------------------------------------------------
/saga-choreography-example/parking/app/settings.py:
--------------------------------------------------------------------------------
1 | from pydantic import BaseSettings
2 |
3 |
4 | class Settings(BaseSettings):
5 | DATABASE_URL: str
6 | RABBITMQ_BROKER_URL: str
7 |
--------------------------------------------------------------------------------
/saga-choreography-example/parking/app/signals.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | from sqlalchemy import event
3 |
4 | from app.amqp_client import AMQPClient
5 | from app.models import AMQPMessage, ParkingSlot
6 |
7 |
8 | @event.listens_for(ParkingSlot, 'after_update')
9 | def parking_slot_receive_after_update(mapper, connection, target: ParkingSlot):
10 | asyncio.ensure_future(async_parking_slot_receive_after_update(mapper, connection, target))
11 |
12 | async def async_parking_slot_receive_after_update(mapper, connection, target):
13 | if target.status == 'reserved':
14 | amqp_client: AMQPClient = await AMQPClient().init()
15 | await amqp_client.event_producer(
16 | 'BOOKING_TX_EVENT_STORE', 'parking.reserved',
17 | message=AMQPMessage(
18 | id=str(target.uuid), content={'status': 'reserved'}
19 | )
20 | )
21 | await amqp_client.connection.close()
22 |
--------------------------------------------------------------------------------
/saga-choreography-example/parking/migrations/README:
--------------------------------------------------------------------------------
1 | Generic single-database configuration.
--------------------------------------------------------------------------------
/saga-choreography-example/parking/migrations/env.py:
--------------------------------------------------------------------------------
1 | from logging.config import fileConfig
2 |
3 | from sqlalchemy import engine_from_config
4 | from sqlalchemy import pool
5 |
6 | from alembic import context
7 |
8 | from app import logging
9 | from app.dependencies import get_settings
10 |
11 | app_settings = get_settings()
12 |
13 | # this is the Alembic Config object, which provides
14 | # access to the values within the .ini file in use.
15 | config = context.config
16 |
17 | # Interpret the config file for Python logging.
18 | # This line sets up loggers basically.
19 | if config.config_file_name is not None:
20 | fileConfig(config.config_file_name)
21 |
22 | # add your model's MetaData object here
23 | # for 'autogenerate' support
24 | # from myapp import mymodel
25 | # target_metadata = mymodel.Base.metadata
26 | from app.db import Base
27 |
28 | from app.models import ParkingSlot
29 | config.set_main_option('sqlalchemy.url', app_settings.DATABASE_URL)
30 | target_metadata = [Base.metadata]
31 |
32 | # other values from the config, defined by the needs of env.py,
33 | # can be acquired:
34 | # my_important_option = config.get_main_option("my_important_option")
35 | # ... etc.
36 |
37 |
38 | def run_migrations_offline():
39 | """Run migrations in 'offline' mode.
40 |
41 | This configures the context with just a URL
42 | and not an Engine, though an Engine is acceptable
43 | here as well. By skipping the Engine creation
44 | we don't even need a DBAPI to be available.
45 |
46 | Calls to context.execute() here emit the given string to the
47 | script output.
48 |
49 | """
50 | url = config.get_main_option("sqlalchemy.url")
51 | context.configure(
52 | url=url,
53 | target_metadata=target_metadata,
54 | literal_binds=True,
55 | compare_type=True,
56 | compare_server_default=True,
57 | include_schemas=True,
58 | version_table_schema='parking_schema',
59 | include_object=include_object,
60 | dialect_opts={"paramstyle": "named"},
61 | )
62 |
63 | with context.begin_transaction():
64 | context.run_migrations()
65 |
66 |
67 | def run_migrations_online():
68 | """Run migrations in 'online' mode.
69 |
70 | In this scenario we need to create an Engine
71 | and associate a connection with the context.
72 |
73 | """
74 |
75 | # this callback is used to prevent an auto-migration from being generated
76 | # when there are no changes to the schema
77 | # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
78 | def process_revision_directives(context, revision, directives):
79 | if getattr(config.cmd_opts, 'autogenerate', False):
80 | script = directives[0]
81 | if script.upgrade_ops.is_empty():
82 | directives[:] = []
83 | logging.info('No changes in schema detected.')
84 |
85 | connectable = engine_from_config(
86 | config.get_section(config.config_ini_section),
87 | prefix="sqlalchemy.",
88 | poolclass=pool.NullPool,
89 | )
90 |
91 | with connectable.connect() as connection:
92 | context.configure(
93 | connection=connection, target_metadata=target_metadata,
94 | process_revision_directives=process_revision_directives,
95 | compare_type=True,
96 | compare_server_default=True,
97 | include_schemas=True,
98 | version_table_schema='parking_schema',
99 | include_object=include_object
100 | )
101 |
102 | with context.begin_transaction():
103 | context.run_migrations()
104 |
105 |
106 | def include_object(object, name, type_, reflected, compare_to):
107 | if (type_ == 'table'):
108 | return object.schema == 'parking_schema'
109 | return True
110 |
111 |
112 | if context.is_offline_mode():
113 | run_migrations_offline()
114 | else:
115 | run_migrations_online()
116 |
--------------------------------------------------------------------------------
/saga-choreography-example/parking/migrations/script.py.mako:
--------------------------------------------------------------------------------
1 | """${message}
2 |
3 | Revision ID: ${up_revision}
4 | Revises: ${down_revision | comma,n}
5 | Create Date: ${create_date}
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 | ${imports if imports else ""}
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = ${repr(up_revision)}
14 | down_revision = ${repr(down_revision)}
15 | branch_labels = ${repr(branch_labels)}
16 | depends_on = ${repr(depends_on)}
17 |
18 |
19 | def upgrade():
20 | ${upgrades if upgrades else "pass"}
21 |
22 |
23 | def downgrade():
24 | ${downgrades if downgrades else "pass"}
25 |
--------------------------------------------------------------------------------
/saga-choreography-example/parking/migrations/versions/2efeb1766d65_add_status_field.py:
--------------------------------------------------------------------------------
1 | """Add status field
2 |
3 | Revision ID: 2efeb1766d65
4 | Revises: 3a414c21545c
5 | Create Date: 2022-05-23 19:30:31.238388
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '2efeb1766d65'
14 | down_revision = '3a414c21545c'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | # ### commands auto generated by Alembic - please adjust! ###
21 | op.add_column('parking_slots', sa.Column('status', sa.String(), server_default='pending', nullable=False), schema='parking_schema')
22 | op.create_index(op.f('ix_parking_schema_parking_slots_uuid'), 'parking_slots', ['uuid'], unique=True, schema='parking_schema')
23 | # ### end Alembic commands ###
24 |
25 |
26 | def downgrade():
27 | # ### commands auto generated by Alembic - please adjust! ###
28 | op.drop_index(op.f('ix_parking_schema_parking_slots_uuid'), table_name='parking_slots', schema='parking_schema')
29 | op.drop_column('parking_slots', 'status', schema='parking_schema')
30 | # ### end Alembic commands ###
31 |
--------------------------------------------------------------------------------
/saga-choreography-example/parking/migrations/versions/3a414c21545c_inital_migrations.py:
--------------------------------------------------------------------------------
1 | """Inital migrations
2 |
3 | Revision ID: 3a414c21545c
4 | Revises:
5 | Create Date: 2022-05-23 18:52:57.406382
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '3a414c21545c'
14 | down_revision = None
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | # ### commands auto generated by Alembic - please adjust! ###
21 | op.create_table('parking_slots',
22 | sa.Column('uuid', sa.String(), nullable=False),
23 | sa.Column('name', sa.String(), nullable=False),
24 | sa.PrimaryKeyConstraint('uuid'),
25 | schema='parking_schema'
26 | )
27 | # ### end Alembic commands ###
28 |
29 |
30 | def downgrade():
31 | # ### commands auto generated by Alembic - please adjust! ###
32 | op.drop_table('parking_slots', schema='parking_schema')
33 | # ### end Alembic commands ###
34 |
--------------------------------------------------------------------------------
/saga-choreography-example/parking/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "saga-choreograhpy-example-parking-service"
3 | version = "0.1.0"
4 | description = "Saga's choreography pattern example - Parking service"
5 | authors = ["Roel Delos Reyes "]
6 |
7 | [tool.poetry.dependencies]
8 | python = "3.10.3"
9 | fastapi = "0.75.2"
10 | uvicorn = {extras = ["standard"], version = "^0.17.6"}
11 | pydantic = "^1.9.0"
12 | aio-pika = "^7.2.0"
13 | fire = "^0.4.0"
14 | alembic = "^1.7.7"
15 | psycopg2-binary = "2.9.3"
16 | pydantic-sqlalchemy = "^0.0.9"
17 |
18 | [tool.poetry.dev-dependencies]
19 |
20 | [build-system]
21 | requires = ["poetry-core>=1.0.0"]
22 | build-backend = "poetry.core.masonry.api"
23 |
--------------------------------------------------------------------------------
/saga-choreography-example/parking/start_dev.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -o errexit
4 | set -o pipefail
5 | set -o nounset
6 |
7 | poetry run uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload
8 |
--------------------------------------------------------------------------------
/saga-choreography-example/resources/saga-choreography-pattern-rb-transaction.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/roelzkie15/python-microservices-patterns/a977e0a7b34ead5ff3f39f112450a0491673b7aa/saga-choreography-example/resources/saga-choreography-pattern-rb-transaction.png
--------------------------------------------------------------------------------
/saga-choreography-example/resources/saga-choreography-pattern.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/roelzkie15/python-microservices-patterns/a977e0a7b34ead5ff3f39f112450a0491673b7aa/saga-choreography-example/resources/saga-choreography-pattern.png
--------------------------------------------------------------------------------
/saga-orchestration-example/billing/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.10.3-slim-buster
2 |
3 | ENV PYTHONUNBUFFERED 1
4 |
5 | RUN apt-get update
6 |
7 | RUN pip install -U \
8 | pip \
9 | setuptools \
10 | wheel
11 |
12 | RUN pip --no-cache-dir install poetry
13 |
14 | WORKDIR /code
15 |
16 | RUN poetry config virtualenvs.in-project false
17 |
18 | COPY pyproject.toml .
19 | RUN poetry lock && poetry install
20 |
21 | COPY . .
22 |
23 | CMD ["chmod", "+x", "/code/start_dev.sh", "/code/wait-for-it.sh"]
24 |
25 |
--------------------------------------------------------------------------------
/saga-orchestration-example/billing/alembic.ini:
--------------------------------------------------------------------------------
1 | # A generic, single database configuration.
2 |
3 | [alembic]
4 | # path to migration scripts
5 | script_location = migrations
6 |
7 | # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
8 | # Uncomment the line below if you want the files to be prepended with date and time
9 | # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
10 | # for all available tokens
11 | # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
12 |
13 | # sys.path path, will be prepended to sys.path if present.
14 | # defaults to the current working directory.
15 | prepend_sys_path = .
16 |
17 | # timezone to use when rendering the date within the migration file
18 | # as well as the filename.
19 | # If specified, requires the python-dateutil library that can be
20 | # installed by adding `alembic[tz]` to the pip requirements
21 | # string value is passed to dateutil.tz.gettz()
22 | # leave blank for localtime
23 | # timezone =
24 |
25 | # max length of characters to apply to the
26 | # "slug" field
27 | # truncate_slug_length = 40
28 |
29 | # set to 'true' to run the environment during
30 | # the 'revision' command, regardless of autogenerate
31 | # revision_environment = false
32 |
33 | # set to 'true' to allow .pyc and .pyo files without
34 | # a source .py file to be detected as revisions in the
35 | # versions/ directory
36 | # sourceless = false
37 |
38 | # version location specification; This defaults
39 | # to migrations/versions. When using multiple version
40 | # directories, initial revisions must be specified with --version-path.
41 | # The path separator used here should be the separator specified by "version_path_separator" below.
42 | # version_locations = %(here)s/bar:%(here)s/bat:migrations/versions
43 |
44 | # version path separator; As mentioned above, this is the character used to split
45 | # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
46 | # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
47 | # Valid values for version_path_separator are:
48 | #
49 | # version_path_separator = :
50 | # version_path_separator = ;
51 | # version_path_separator = space
52 | version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
53 |
54 | # the output encoding used when revision files
55 | # are written from script.py.mako
56 | # output_encoding = utf-8
57 |
58 | sqlalchemy.url = driver://user:pass@localhost/dbname
59 |
60 |
61 | [post_write_hooks]
62 | # post_write_hooks defines scripts or Python functions that are run
63 | # on newly generated revision scripts. See the documentation for further
64 | # detail and examples
65 |
66 | # format using "black" - use the console_scripts runner, against the "black" entrypoint
67 | # hooks = black
68 | # black.type = console_scripts
69 | # black.entrypoint = black
70 | # black.options = -l 79 REVISION_SCRIPT_FILENAME
71 |
72 | # Logging configuration
73 | [loggers]
74 | keys = root,sqlalchemy,alembic
75 |
76 | [handlers]
77 | keys = console
78 |
79 | [formatters]
80 | keys = generic
81 |
82 | [logger_root]
83 | level = WARN
84 | handlers = console
85 | qualname =
86 |
87 | [logger_sqlalchemy]
88 | level = WARN
89 | handlers =
90 | qualname = sqlalchemy.engine
91 |
92 | [logger_alembic]
93 | level = INFO
94 | handlers =
95 | qualname = alembic
96 |
97 | [handler_console]
98 | class = StreamHandler
99 | args = (sys.stderr,)
100 | level = NOTSET
101 | formatter = generic
102 |
103 | [formatter_generic]
104 | format = %(levelname)-5.5s [%(name)s] %(message)s
105 | datefmt = %H:%M:%S
106 |
--------------------------------------------------------------------------------
/saga-orchestration-example/billing/app/__init__.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | logging.basicConfig(level=logging.INFO)
4 |
--------------------------------------------------------------------------------
/saga-orchestration-example/billing/app/amqp_client.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | from functools import partial
3 | from typing import Callable
4 |
5 | import aio_pika
6 | import simplejson as json
7 | from aio_pika import IncomingMessage, Message
8 |
9 | from app import settings
10 | from app.models import AMQPMessage
11 |
12 |
13 | class AMQPClient:
14 |
15 | async def init(self) -> 'AMQPClient':
16 | '''
17 | Initialize AMQP client; returns self with an open connection and channel.
18 | '''
19 |
20 | self.connection = await aio_pika.connect_robust(
21 | settings.RABBITMQ_BROKER_URL, loop=asyncio.get_event_loop()
22 | )
23 |
24 | # Creating channel
25 | self.channel = await self.connection.channel()
26 | return self
27 |
28 | async def event_consumer(
29 | self, callback: Callable, event_store: str, event: str = '#', queue_name: str | None = None,
30 | ) -> None:
31 | '''
32 | Create an event consumer.
33 |
34 | callback - A function that will process the incoming message.
35 | event_store - Declare an exchange as an event store. We send messages/events
36 | to this exchange.
37 | event - Serves as a binding key or a type of event that occurred.
38 | queue_name - Name of the queue that receives events from the Exchange (optional).
39 | If not specified, a queue with a random name is still created.
40 | '''
41 | exchange = await self.channel.declare_exchange(
42 | event_store,
43 | type='topic',
44 | durable=True
45 | )
46 | queue = await self.channel.declare_queue(queue_name, auto_delete=True)
47 |
48 | await queue.bind(exchange, event)
49 | await queue.consume(partial(self._process_message, callback=callback))
50 |
51 | async def event_producer(
52 | self, event_store: str, binding_key: str, correlation_id: str, message: AMQPMessage
53 | ) -> None:
54 | '''
55 | Send event/message to a specific exchange with binding-key.
56 |
57 | If an existing queue is bound to the given binding-key, the message will be stored
58 | in that queue; otherwise the message will be lost.
59 |
60 | NOTE: The binding_key is mandatory so we can explicitly route the message/event
61 | to the right queue.
62 | '''
63 |
64 | # Declare exchange
65 | exchange = await self.channel.declare_exchange(
66 | event_store,
67 | type='topic',
68 | durable=True
69 | )
70 |
71 | payload = json.dumps(message.dict())
72 | await exchange.publish(
73 | Message(
74 | body=str(payload).encode(),
75 | content_type='application/json',
76 | correlation_id=correlation_id
77 | ),
78 | routing_key=binding_key,
79 | )
80 |
81 | async def _process_message(self, message: IncomingMessage, callback: Callable) -> None:
82 | '''
83 | Process an incoming message from a queue. It requires a callback function to handle
84 | the message content.
85 | '''
86 | async with message.process(ignore_processed=True):
87 | await callback(message)
88 |
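A minimal usage sketch of this client, assuming a reachable RabbitMQ broker (RABBITMQ_BROKER_URL set as in the compose environment); the exchange name and the 'billing.*' binding mirror app.main, while the handler, routing key, correlation id, and payload below are illustrative:

import asyncio

from app.amqp_client import AMQPClient
from app.models import AMQPMessage


async def handle(message):
    # The callback receives an aio_pika.IncomingMessage; _process_message has
    # already entered message.process(), so it only needs to read the payload.
    print(message.body.decode())


async def main():
    client = await AMQPClient().init()

    # Consume every 'billing.*' event from the shared event-store exchange,
    # just as app.main does on startup.
    await client.event_consumer(handle, 'BOOKING_TX_EVENT_STORE', 'billing.*')

    # Publish a message routed to that binding pattern (key and id are made up).
    await client.event_producer(
        'BOOKING_TX_EVENT_STORE',
        'billing.example_command',
        'example-correlation-id',
        AMQPMessage(id='example-correlation-id', content={'example': True}),
    )

    await asyncio.sleep(1)  # give the consumer a moment to receive the message
    await client.connection.close()


asyncio.run(main())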
--------------------------------------------------------------------------------
/saga-orchestration-example/billing/app/cli.py:
--------------------------------------------------------------------------------
1 | import fire
2 |
3 | from app.db import Session
4 | from app.services import (billing_request_details_by_reference_no,
5 | billing_request_list, create_billing_request)
6 |
7 |
8 | class AppCLI(object):
9 |
10 | async def billing_request_list(self):
11 | with Session() as session:
12 | br_list = await billing_request_list(session)
13 | return [br.to_dict() for br in br_list]
14 |
15 | async def billing_request_details_by_reference_no(self, reference_no: str):
16 | with Session() as session:
17 | br = await billing_request_details_by_reference_no(session, reference_no)
18 | return br.to_dict()
19 |
20 | async def create_billing_request(self, reference_no: str):
21 | with Session() as session:
22 | br = await create_billing_request(session, reference_no)
23 | return br.to_dict()
24 |
25 |
26 | if __name__ == '__main__':
27 | fire.Fire(AppCLI)
28 |
--------------------------------------------------------------------------------
/saga-orchestration-example/billing/app/db.py:
--------------------------------------------------------------------------------
1 | import contextlib
2 |
3 | from sqlalchemy import create_engine
4 | from sqlalchemy.ext.declarative import declarative_base
5 | from sqlalchemy.orm import sessionmaker
6 | from app import settings
7 |
8 | engine = create_engine(settings.DATABASE_URL)
9 | SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
10 |
11 | Base = declarative_base()
12 |
13 |
14 | @contextlib.contextmanager
15 | def Session():
16 | db = SessionLocal()
17 | try:
18 | yield db
19 | finally:
20 | db.close()
21 |
--------------------------------------------------------------------------------
/saga-orchestration-example/billing/app/main.py:
--------------------------------------------------------------------------------
1 | import ast
2 | import contextlib
3 |
4 | from starlette.applications import Starlette
5 | from starlette.responses import JSONResponse
6 | from starlette.routing import Route
7 |
8 | from app.amqp_client import AMQPClient
9 | from app.services import billing_command_event_processor
10 |
11 |
12 | @contextlib.asynccontextmanager
13 | async def lifespan(app):
14 | amqp_client: AMQPClient = await AMQPClient().init()
15 | try:
16 |
17 | await amqp_client.event_consumer(billing_command_event_processor, 'BOOKING_TX_EVENT_STORE', 'billing.*')
18 |
19 | yield
20 | finally:
21 | await amqp_client.connection.close()
22 |
23 |
24 | async def health(request):
25 | return JSONResponse({'message': 'Billing server is running'})
26 |
27 |
28 | routes = [
29 | Route('/health', health),
30 | ]
31 |
32 | app = Starlette(routes=routes, lifespan=lifespan)
33 |
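A quick smoke test once start_dev.sh has uvicorn listening on port 8000; reaching it via localhost assumes a corresponding port mapping in the compose file:

import json
import urllib.request

# Expected response: {'message': 'Billing server is running'}
with urllib.request.urlopen('http://localhost:8000/health') as resp:
    print(json.load(resp))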
--------------------------------------------------------------------------------
/saga-orchestration-example/billing/app/models.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Dict
2 |
3 | from pydantic import BaseModel
4 | from sqlalchemy import Column, Integer, Numeric, String
5 |
6 | from app.db import Base
7 |
8 |
9 | class DictMixin:
10 | def to_dict(self) -> Dict:
11 | return dict((col, getattr(self, col)) for col in self.__table__.columns.keys())
12 |
13 |
14 | class BillingRequest(Base, DictMixin):
15 | __tablename__ = 'billing_requests'
16 |
17 | id = Column(Integer, primary_key=True, autoincrement=True)
18 | total = Column(Numeric(precision=12, scale=2), nullable=True)
19 | status = Column(String, default='pending')
20 |
21 | # Must be '<parking_slot_uuid>:<booking_uuid>', i.e. the booking service's parking_slot_ref_no.
22 | reference_no = Column(String, unique=True, nullable=False)
23 |
24 |
25 | class AMQPMessage(BaseModel):
26 | id: str
27 | content: Any | None = None
28 | reply_state: str | None = None
29 |
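A small sketch of how these models serialize (values illustrative; importing app.models pulls in app.db, so DATABASE_URL must be set): to_dict() is what the CLI commands return, and .dict() is what AMQPClient.event_producer JSON-encodes.

from app.models import AMQPMessage, BillingRequest

br = BillingRequest(total=100.0, status='paid',
                    reference_no='some-slot-uuid:some-booking-uuid')
# DictMixin walks the mapped columns; 'id' stays None until the row is flushed.
print(br.to_dict())
# {'id': None, 'total': 100.0, 'status': 'paid',
#  'reference_no': 'some-slot-uuid:some-booking-uuid'}

msg = AMQPMessage(id='example-correlation-id', reply_state='PAYMENT_SUCCESSFUL')
print(msg.dict())
# {'id': 'example-correlation-id', 'content': None,
#  'reply_state': 'PAYMENT_SUCCESSFUL'}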
--------------------------------------------------------------------------------
/saga-orchestration-example/billing/app/services.py:
--------------------------------------------------------------------------------
1 |
2 | import ast
3 | import json
4 | from typing import List
5 |
6 | from aio_pika import IncomingMessage
7 |
8 | from app.amqp_client import AMQPClient
9 | from app.db import Session
10 | from app.models import AMQPMessage, BillingRequest
11 |
12 |
13 | async def create_billing_request(
14 | session: Session, reference_no: str,
15 | status: str = 'pending'
16 | ) -> BillingRequest:
17 | br = BillingRequest(
18 | # Default total to 100.00
19 | total=100.0,
20 | reference_no=reference_no,
21 | status=status
22 | )
23 | session.add(br)
24 | session.commit()
25 | session.refresh(br)
26 |
27 | return br
28 |
29 |
30 | async def refund_billing_request(session: Session, reference_no: str) -> BillingRequest:
31 | br = await billing_request_details_by_reference_no(session, reference_no)
32 | br.status = 'refunded'
33 | return await billing_request_update(session, br)
34 |
35 |
36 | async def billing_request_update(session: Session, br: BillingRequest) -> BillingRequest:
37 | session.commit()
38 | session.refresh(br)
39 | return br
40 |
41 |
42 | async def billing_request_details_by_reference_no(session: Session, reference_no: str) -> BillingRequest:
43 | return session.query(BillingRequest).filter(BillingRequest.reference_no == reference_no).one()
44 |
45 |
46 | async def billing_request_list(session: Session) -> List[BillingRequest]:
47 | return session.query(BillingRequest).all()
48 |
49 |
50 | async def billing_command_event_processor(message: IncomingMessage):
51 | async with message.process(ignore_processed=True):
52 | command = message.headers.get('COMMAND')
53 | client = message.headers.get('CLIENT')
54 |
55 | booking = json.loads(str(message.body.decode('utf-8')))
56 | response_obj: AMQPMessage = None
57 | if client == 'BOOKING_REQUEST_ORCHESTRATOR' and command == 'BILLING_AUTHORIZE_PAYMENT':
58 | with Session() as session:
59 | # NOTE: For the purposes of this example we assume that payment
60 | # is processed automatically during a booking request.
61 | await create_billing_request(session, booking.get('parking_slot_ref_no'), status='paid')
62 |
63 | await message.ack()
64 | response_obj = AMQPMessage(
65 | id=message.correlation_id,
66 | content=None,
67 | reply_state='PAYMENT_SUCCESSFUL'
68 | )
69 |
70 | if client == 'BOOKING_REQUEST_ORCHESTRATOR' and command == 'BILLING_REFUND':
71 | with Session() as session:
72 | await refund_billing_request(session, booking.get('parking_slot_ref_no'))
73 |
74 | await message.ack()
75 | response_obj = AMQPMessage(
76 | id=message.correlation_id,
77 | content=None,
78 | reply_state='BILL_REFUNDED'
79 | )
80 |
81 | # There must be a response object to signal the orchestrator of
82 | # the outcome of the request.
83 | assert response_obj is not None
84 |
85 | amqp_client: AMQPClient = await AMQPClient().init()
86 | await amqp_client.event_producer(
87 | 'BOOKING_TX_EVENT_STORE',
88 | message.reply_to,
89 | message.correlation_id,
90 | response_obj
91 | )
92 | await amqp_client.connection.close()
93 |
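For reference, the command messages this processor reacts to have roughly the following shape; the header names, client/command values, and reply states come from the code above, while the concrete reference value is illustrative:

# Shape of an incoming command message (published by the orchestrator).
headers = {
    'CLIENT': 'BOOKING_REQUEST_ORCHESTRATOR',
    'COMMAND': 'BILLING_AUTHORIZE_PAYMENT',   # or 'BILLING_REFUND'
}
body = {
    # Composite reference built by the booking service:
    # '<parking_slot_uuid>:<booking_uuid>'
    'parking_slot_ref_no': 'some-slot-uuid:some-booking-uuid',
}
# The AMQP properties carry correlation_id (echoed back as AMQPMessage.id) and
# reply_to (used as the binding key for the reply on BOOKING_TX_EVENT_STORE);
# the reply's reply_state is 'PAYMENT_SUCCESSFUL' or 'BILL_REFUNDED'.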
--------------------------------------------------------------------------------
/saga-orchestration-example/billing/app/settings.py:
--------------------------------------------------------------------------------
1 | from starlette.config import Config
2 |
3 | config = Config()
4 |
5 | DATABASE_URL = config('DATABASE_URL')
6 | RABBITMQ_BROKER_URL = config('RABBITMQ_BROKER_URL')
7 |
--------------------------------------------------------------------------------
/saga-orchestration-example/billing/migrations/README:
--------------------------------------------------------------------------------
1 | Generic single-database configuration.
--------------------------------------------------------------------------------
/saga-orchestration-example/billing/migrations/env.py:
--------------------------------------------------------------------------------
1 | from logging.config import fileConfig
2 |
3 | from sqlalchemy import engine_from_config
4 | from sqlalchemy import pool
5 |
6 | from alembic import context
7 |
8 | from app import logging, settings
9 |
10 | # this is the Alembic Config object, which provides
11 | # access to the values within the .ini file in use.
12 | config = context.config
13 |
14 | # Interpret the config file for Python logging.
15 | # This line sets up loggers basically.
16 | if config.config_file_name is not None:
17 | fileConfig(config.config_file_name)
18 |
19 | # add your model's MetaData object here
20 | # for 'autogenerate' support
21 | # from myapp import mymodel
22 | # target_metadata = mymodel.Base.metadata
23 | from app.db import Base
24 |
25 | from app.models import BillingRequest
26 | config.set_main_option('sqlalchemy.url', settings.DATABASE_URL)
27 | target_metadata = [Base.metadata]
28 |
29 |
30 | # other values from the config, defined by the needs of env.py,
31 | # can be acquired:
32 | # my_important_option = config.get_main_option("my_important_option")
33 | # ... etc.
34 |
35 |
36 | def run_migrations_offline() -> None:
37 | """Run migrations in 'offline' mode.
38 |
39 | This configures the context with just a URL
40 | and not an Engine, though an Engine is acceptable
41 | here as well. By skipping the Engine creation
42 | we don't even need a DBAPI to be available.
43 |
44 | Calls to context.execute() here emit the given string to the
45 | script output.
46 |
47 | """
48 | url = config.get_main_option("sqlalchemy.url")
49 | context.configure(
50 | url=url,
51 | target_metadata=target_metadata,
52 | literal_binds=True,
53 | compare_type=True,
54 | compare_server_default=True,
55 | render_as_batch=True,
56 | dialect_opts={"paramstyle": "named"},
57 | )
58 |
59 | with context.begin_transaction():
60 | context.run_migrations()
61 |
62 |
63 | def run_migrations_online() -> None:
64 | """Run migrations in 'online' mode.
65 |
66 | In this scenario we need to create an Engine
67 | and associate a connection with the context.
68 |
69 | """
70 |
71 | # this callback is used to prevent an auto-migration from being generated
72 | # when there are no changes to the schema
73 | # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
74 | def process_revision_directives(context, revision, directives):
75 | if getattr(config.cmd_opts, 'autogenerate', False):
76 | script = directives[0]
77 | if script.upgrade_ops.is_empty():
78 | directives[:] = []
79 | logging.info('No changes in schema detected.')
80 |
81 |
82 | connectable = engine_from_config(
83 | config.get_section(config.config_ini_section),
84 | prefix="sqlalchemy.",
85 | poolclass=pool.NullPool,
86 | )
87 |
88 | with connectable.connect() as connection:
89 | context.configure(
90 | connection=connection, target_metadata=target_metadata,
91 | process_revision_directives=process_revision_directives,
92 | compare_type=True,
93 | compare_server_default=True,
94 | render_as_batch=True
95 | )
96 |
97 | with context.begin_transaction():
98 | context.run_migrations()
99 |
100 |
101 | if context.is_offline_mode():
102 | run_migrations_offline()
103 | else:
104 | run_migrations_online()
105 |
--------------------------------------------------------------------------------
/saga-orchestration-example/billing/migrations/script.py.mako:
--------------------------------------------------------------------------------
1 | """${message}
2 |
3 | Revision ID: ${up_revision}
4 | Revises: ${down_revision | comma,n}
5 | Create Date: ${create_date}
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 | ${imports if imports else ""}
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = ${repr(up_revision)}
14 | down_revision = ${repr(down_revision)}
15 | branch_labels = ${repr(branch_labels)}
16 | depends_on = ${repr(depends_on)}
17 |
18 |
19 | def upgrade() -> None:
20 | ${upgrades if upgrades else "pass"}
21 |
22 |
23 | def downgrade() -> None:
24 | ${downgrades if downgrades else "pass"}
25 |
--------------------------------------------------------------------------------
/saga-orchestration-example/billing/migrations/versions/74f91803e544_initial_migration.py:
--------------------------------------------------------------------------------
1 | """Initial migration
2 |
3 | Revision ID: 74f91803e544
4 | Revises:
5 | Create Date: 2022-06-11 19:49:05.600874
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '74f91803e544'
14 | down_revision = None
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade() -> None:
20 | # ### commands auto generated by Alembic - please adjust! ###
21 | op.create_table('billing_requests',
22 | sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
23 | sa.Column('total', sa.Numeric(precision=12, scale=2), nullable=True),
24 | sa.Column('status', sa.String(), nullable=True),
25 | sa.Column('reference_no', sa.String(), nullable=False),
26 | sa.PrimaryKeyConstraint('id'),
27 | sa.UniqueConstraint('reference_no')
28 | )
29 | # ### end Alembic commands ###
30 |
31 |
32 | def downgrade() -> None:
33 | # ### commands auto generated by Alembic - please adjust! ###
34 | op.drop_table('billing_requests')
35 | # ### end Alembic commands ###
36 |
--------------------------------------------------------------------------------
/saga-orchestration-example/billing/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "billing"
3 | version = "0.1.0"
4 | description = "Saga's orchestration pattern example - Billing service"
5 | authors = ["roelzkie "]
6 |
7 | [tool.poetry.dependencies]
8 | python = "^3.10"
9 | starlette = "^0.20.1"
10 | aio-pika = "^7.2.0"
11 | fire = "^0.4.0"
12 | alembic = "^1.7.7"
13 | psycopg2-binary = "2.9.3"
14 | uvicorn = {extras = ["standard"], version = "^0.17.6"}
15 | pydantic = "^1.9.0"
16 | simplejson = "^3.17.6"
17 |
18 | [tool.poetry.dev-dependencies]
19 |
20 | [build-system]
21 | requires = ["poetry-core>=1.0.0"]
22 | build-backend = "poetry.core.masonry.api"
23 |
--------------------------------------------------------------------------------
/saga-orchestration-example/billing/start_dev.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -o errexit
4 | set -o pipefail
5 | set -o nounset
6 |
7 | poetry run alembic upgrade head
8 | poetry run uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload
9 |
--------------------------------------------------------------------------------
/saga-orchestration-example/booking/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.10.3-slim-buster
2 |
3 | ENV PYTHONUNBUFFERED 1
4 |
5 | RUN apt-get update
6 |
7 | RUN pip install -U \
8 | pip \
9 | setuptools \
10 | wheel
11 |
12 | RUN pip --no-cache-dir install poetry
13 |
14 | WORKDIR /code
15 |
16 | RUN poetry config virtualenvs.in-project false
17 |
18 | COPY pyproject.toml .
19 | RUN poetry lock && poetry install
20 |
21 | COPY . .
22 |
23 | CMD ["chmod", "+x", "/code/start_dev.sh", '/code/wait-for-it.sh']
24 |
25 |
--------------------------------------------------------------------------------
/saga-orchestration-example/booking/alembic.ini:
--------------------------------------------------------------------------------
1 | # A generic, single database configuration.
2 |
3 | [alembic]
4 | # path to migration scripts
5 | script_location = migrations
6 |
7 | # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
8 | # Uncomment the line below if you want the files to be prepended with date and time
9 | # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
10 | # for all available tokens
11 | # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
12 |
13 | # sys.path path, will be prepended to sys.path if present.
14 | # defaults to the current working directory.
15 | prepend_sys_path = .
16 |
17 | # timezone to use when rendering the date within the migration file
18 | # as well as the filename.
19 | # If specified, requires the python-dateutil library that can be
20 | # installed by adding `alembic[tz]` to the pip requirements
21 | # string value is passed to dateutil.tz.gettz()
22 | # leave blank for localtime
23 | # timezone =
24 |
25 | # max length of characters to apply to the
26 | # "slug" field
27 | # truncate_slug_length = 40
28 |
29 | # set to 'true' to run the environment during
30 | # the 'revision' command, regardless of autogenerate
31 | # revision_environment = false
32 |
33 | # set to 'true' to allow .pyc and .pyo files without
34 | # a source .py file to be detected as revisions in the
35 | # versions/ directory
36 | # sourceless = false
37 |
38 | # version location specification; This defaults
39 | # to migrations/versions. When using multiple version
40 | # directories, initial revisions must be specified with --version-path.
41 | # The path separator used here should be the separator specified by "version_path_separator" below.
42 | # version_locations = %(here)s/bar:%(here)s/bat:migrations/versions
43 |
44 | # version path separator; As mentioned above, this is the character used to split
45 | # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
46 | # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
47 | # Valid values for version_path_separator are:
48 | #
49 | # version_path_separator = :
50 | # version_path_separator = ;
51 | # version_path_separator = space
52 | version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
53 |
54 | # the output encoding used when revision files
55 | # are written from script.py.mako
56 | # output_encoding = utf-8
57 |
58 | sqlalchemy.url = driver://user:pass@localhost/dbname
59 |
60 |
61 | [post_write_hooks]
62 | # post_write_hooks defines scripts or Python functions that are run
63 | # on newly generated revision scripts. See the documentation for further
64 | # detail and examples
65 |
66 | # format using "black" - use the console_scripts runner, against the "black" entrypoint
67 | # hooks = black
68 | # black.type = console_scripts
69 | # black.entrypoint = black
70 | # black.options = -l 79 REVISION_SCRIPT_FILENAME
71 |
72 | # Logging configuration
73 | [loggers]
74 | keys = root,sqlalchemy,alembic
75 |
76 | [handlers]
77 | keys = console
78 |
79 | [formatters]
80 | keys = generic
81 |
82 | [logger_root]
83 | level = WARN
84 | handlers = console
85 | qualname =
86 |
87 | [logger_sqlalchemy]
88 | level = WARN
89 | handlers =
90 | qualname = sqlalchemy.engine
91 |
92 | [logger_alembic]
93 | level = INFO
94 | handlers =
95 | qualname = alembic
96 |
97 | [handler_console]
98 | class = StreamHandler
99 | args = (sys.stderr,)
100 | level = NOTSET
101 | formatter = generic
102 |
103 | [formatter_generic]
104 | format = %(levelname)-5.5s [%(name)s] %(message)s
105 | datefmt = %H:%M:%S
106 |
--------------------------------------------------------------------------------
/saga-orchestration-example/booking/app/__init__.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | logging.basicConfig(level=logging.INFO)
4 |
--------------------------------------------------------------------------------
/saga-orchestration-example/booking/app/cli.py:
--------------------------------------------------------------------------------
1 | import fire
2 |
3 | from app import logging
4 | from app.db import Session
5 | from app.sagas import CreateBookingRequestSaga
6 | from app.services import booking_details_by_parking_ref_no, booking_list
7 |
8 |
9 | class AppCLI(object):
10 |
11 | async def create_booking_request(self, parking_slot_uuid):
12 | saga: CreateBookingRequestSaga = CreateBookingRequestSaga(
13 | parking_slot_uuid=parking_slot_uuid
14 | )
15 |
16 | async with saga.connect() as saga:
17 | await saga.start_workflow()
18 |
19 | logging.info('Booking request workflow done.')
20 |
21 | async def booking_list(self):
22 | with Session() as session:
23 | b_list = await booking_list(session)
24 | return [
25 | booking.to_dict() for booking in b_list
26 | ]
27 |
28 | async def booking_details_by_parking_ref_no(self, uuid: str):
29 | with Session() as session:
30 | booking = await booking_details_by_parking_ref_no(session, uuid)
31 | return booking.to_dict()
32 |
33 |
34 | if __name__ == '__main__':
35 | fire.Fire(AppCLI)
36 |
--------------------------------------------------------------------------------
/saga-orchestration-example/booking/app/db.py:
--------------------------------------------------------------------------------
1 | import contextlib
2 |
3 | from sqlalchemy import create_engine
4 | from sqlalchemy.ext.declarative import declarative_base
5 | from sqlalchemy.orm import sessionmaker
6 | from app import settings
7 |
8 | engine = create_engine(settings.DATABASE_URL)
9 | SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
10 |
11 | Base = declarative_base()
12 |
13 |
14 | @contextlib.contextmanager
15 | def Session():
16 | db = SessionLocal()
17 | try:
18 | yield db
19 | finally:
20 | db.close()
21 |
--------------------------------------------------------------------------------
/saga-orchestration-example/booking/app/main.py:
--------------------------------------------------------------------------------
1 | from starlette.applications import Starlette
2 | from starlette.responses import JSONResponse
3 | from starlette.routing import Route
4 |
5 |
6 | async def health(request):
7 | return JSONResponse({'message': 'Booking server is running'})
8 |
9 | routes = [
10 | Route('/health', health),
11 | ]
12 |
13 | app = Starlette(
14 | routes=routes
15 | )
16 |
--------------------------------------------------------------------------------
/saga-orchestration-example/booking/app/models.py:
--------------------------------------------------------------------------------
1 | from typing import Dict
2 |
3 | from sqlalchemy import Column, Integer, String
4 |
5 | from app.db import Base
6 |
7 |
8 | class DictMixin:
9 | def to_dict(self) -> Dict:
10 | return dict((col, getattr(self, col)) for col in self.__table__.columns.keys())
11 |
12 |
13 | class Booking(Base, DictMixin):
14 | __tablename__ = 'bookings'
15 |
16 | id = Column(Integer, primary_key=True, unique=True, index=True)
17 | status = Column(String, nullable=False, server_default='created')
18 |
19 | parking_slot_ref_no = Column(String, nullable=True)
20 |
--------------------------------------------------------------------------------
/saga-orchestration-example/booking/app/services.py:
--------------------------------------------------------------------------------
1 | from typing import List
2 | from uuid import uuid4
3 |
4 | from sqlalchemy.orm import Session
5 |
6 | # NOTE: Sessions are opened by callers (see app.cli) via the app.db.Session context manager.
7 | from app.models import Booking
8 |
9 |
10 | async def booking_details(session: Session, id: str) -> Booking:
11 | return session.query(Booking).filter(Booking.id == id).one()
12 |
13 |
14 | async def booking_details_by_parking_ref_no(session: Session, parking_slot_ref_no: str) -> Booking:
15 | return session.query(Booking).filter(Booking.parking_slot_ref_no == parking_slot_ref_no).one()
16 |
17 |
18 | async def booking_list(session: Session) -> List[Booking]:
19 | return session.query(Booking).all()
20 |
21 |
22 | async def create_booking(session: Session, parking_slot_uuid: str) -> Booking:
23 | # Since customers may happen to book the same parking slot,
24 | # we need to append a unique booking identifier (uuid4) to parking_slot_ref_no.
25 | # The booking identifier will be used throughout the services to identify
26 | # the transaction.
27 | booking = Booking(
28 | parking_slot_ref_no=f'{parking_slot_uuid}:{uuid4()}',
29 | status='pending'
30 | )
31 | session.add(booking)
32 | session.commit()
33 | session.refresh(booking)
34 |
35 | return booking
36 |
37 |
38 | async def update_booking(session: Session, booking: Booking) -> Booking:
39 | session.commit()
40 | session.refresh(booking)
41 | return booking
42 |
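A quick sketch of the composite reference create_booking produces (UUIDs generated on the fly, so the printed value is illustrative); the billing service later receives the same string as parking_slot_ref_no:

from uuid import uuid4

parking_slot_uuid = str(uuid4())
parking_slot_ref_no = f'{parking_slot_uuid}:{uuid4()}'
# '<parking_slot_uuid>:<booking_uuid>' -- unique per booking request, even when
# the same slot is booked more than once.
print(parking_slot_ref_no)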
--------------------------------------------------------------------------------
/saga-orchestration-example/booking/app/settings.py:
--------------------------------------------------------------------------------
1 | from starlette.config import Config
2 |
3 | config = Config()
4 |
5 | DATABASE_URL = config('DATABASE_URL')
6 | RABBITMQ_BROKER_URL = config('RABBITMQ_BROKER_URL')
7 |
--------------------------------------------------------------------------------
/saga-orchestration-example/booking/migrations/README:
--------------------------------------------------------------------------------
1 | Generic single-database configuration.
--------------------------------------------------------------------------------
/saga-orchestration-example/booking/migrations/env.py:
--------------------------------------------------------------------------------
1 | from logging.config import fileConfig
2 |
3 | from sqlalchemy import engine_from_config
4 | from sqlalchemy import pool
5 |
6 | from alembic import context
7 |
8 | from app import logging, settings
9 |
10 | # this is the Alembic Config object, which provides
11 | # access to the values within the .ini file in use.
12 | config = context.config
13 |
14 | # Interpret the config file for Python logging.
15 | # This line sets up loggers basically.
16 | if config.config_file_name is not None:
17 | fileConfig(config.config_file_name)
18 |
19 | # add your model's MetaData object here
20 | # for 'autogenerate' support
21 | # from myapp import mymodel
22 | # target_metadata = mymodel.Base.metadata
23 | from app.db import Base
24 |
25 | from app.models import Booking
26 | config.set_main_option('sqlalchemy.url', settings.DATABASE_URL)
27 | target_metadata = [Base.metadata]
28 |
29 |
30 | # other values from the config, defined by the needs of env.py,
31 | # can be acquired:
32 | # my_important_option = config.get_main_option("my_important_option")
33 | # ... etc.
34 |
35 |
36 | def run_migrations_offline() -> None:
37 | """Run migrations in 'offline' mode.
38 |
39 | This configures the context with just a URL
40 | and not an Engine, though an Engine is acceptable
41 | here as well. By skipping the Engine creation
42 | we don't even need a DBAPI to be available.
43 |
44 | Calls to context.execute() here emit the given string to the
45 | script output.
46 |
47 | """
48 | url = config.get_main_option("sqlalchemy.url")
49 | context.configure(
50 | url=url,
51 | target_metadata=target_metadata,
52 | literal_binds=True,
53 | compare_type=True,
54 | compare_server_default=True,
55 | dialect_opts={"paramstyle": "named"},
56 | )
57 |
58 | with context.begin_transaction():
59 | context.run_migrations()
60 |
61 |
62 | def run_migrations_online() -> None:
63 | """Run migrations in 'online' mode.
64 |
65 | In this scenario we need to create an Engine
66 | and associate a connection with the context.
67 |
68 | """
69 |
70 | # this callback is used to prevent an auto-migration from being generated
71 | # when there are no changes to the schema
72 | # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
73 | def process_revision_directives(context, revision, directives):
74 | if getattr(config.cmd_opts, 'autogenerate', False):
75 | script = directives[0]
76 | if script.upgrade_ops.is_empty():
77 | directives[:] = []
78 | logging.info('No changes in schema detected.')
79 |
80 |
81 | connectable = engine_from_config(
82 | config.get_section(config.config_ini_section),
83 | prefix="sqlalchemy.",
84 | poolclass=pool.NullPool,
85 | )
86 |
87 | with connectable.connect() as connection:
88 | context.configure(
89 | connection=connection, target_metadata=target_metadata,
90 | process_revision_directives=process_revision_directives,
91 | compare_type=True,
92 | compare_server_default=True
93 | )
94 |
95 | with context.begin_transaction():
96 | context.run_migrations()
97 |
98 |
99 | if context.is_offline_mode():
100 | run_migrations_offline()
101 | else:
102 | run_migrations_online()
103 |
--------------------------------------------------------------------------------
/saga-orchestration-example/booking/migrations/script.py.mako:
--------------------------------------------------------------------------------
1 | """${message}
2 |
3 | Revision ID: ${up_revision}
4 | Revises: ${down_revision | comma,n}
5 | Create Date: ${create_date}
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 | ${imports if imports else ""}
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = ${repr(up_revision)}
14 | down_revision = ${repr(down_revision)}
15 | branch_labels = ${repr(branch_labels)}
16 | depends_on = ${repr(depends_on)}
17 |
18 |
19 | def upgrade() -> None:
20 | ${upgrades if upgrades else "pass"}
21 |
22 |
23 | def downgrade() -> None:
24 | ${downgrades if downgrades else "pass"}
25 |
--------------------------------------------------------------------------------
/saga-orchestration-example/booking/migrations/versions/fec7a5b19428_initial_migration.py:
--------------------------------------------------------------------------------
1 | """Initial Migration
2 |
3 | Revision ID: fec7a5b19428
4 | Revises:
5 | Create Date: 2022-06-04 15:52:04.207246
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = 'fec7a5b19428'
14 | down_revision = None
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade() -> None:
20 | # ### commands auto generated by Alembic - please adjust! ###
21 | op.create_table('bookings',
22 | sa.Column('id', sa.Integer(), nullable=False),
23 | sa.Column('status', sa.String(), server_default='created', nullable=False),
24 | sa.Column('parking_slot_ref_no', sa.String(), nullable=True),
25 | sa.PrimaryKeyConstraint('id')
26 | )
27 | op.create_index(op.f('ix_bookings_id'), 'bookings', ['id'], unique=True)
28 | # ### end Alembic commands ###
29 |
30 |
31 | def downgrade() -> None:
32 | # ### commands auto generated by Alembic - please adjust! ###
33 | op.drop_index(op.f('ix_bookings_id'), table_name='bookings')
34 | op.drop_table('bookings')
35 | # ### end Alembic commands ###
36 |
--------------------------------------------------------------------------------
/saga-orchestration-example/booking/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "parking"
3 | version = "0.1.0"
4 | description = "Saga's orchestration pattern example - Booking service"
5 | authors = ["roelzkie "]
6 |
7 | [tool.poetry.dependencies]
8 | python = "^3.10"
9 | starlette = "^0.20.1"
10 | aio-pika = "^7.2.0"
11 | fire = "^0.4.0"
12 | alembic = "^1.7.7"
13 | psycopg2-binary = "2.9.3"
14 | uvicorn = {extras = ["standard"], version = "^0.17.6"}
15 |
16 | [tool.poetry.dev-dependencies]
17 |
18 | [build-system]
19 | requires = ["poetry-core>=1.0.0"]
20 | build-backend = "poetry.core.masonry.api"
21 |
--------------------------------------------------------------------------------
/saga-orchestration-example/booking/start_dev.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -o errexit
4 | set -o pipefail
5 | set -o nounset
6 |
7 | poetry run alembic upgrade head
8 | poetry run uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload
9 |
--------------------------------------------------------------------------------
/saga-orchestration-example/parking/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.10.3-slim-buster
2 |
3 | ENV PYTHONUNBUFFERED 1
4 |
5 | RUN apt-get update
6 |
7 | RUN pip install -U \
8 | pip \
9 | setuptools \
10 | wheel
11 |
12 | RUN pip --no-cache-dir install poetry
13 |
14 | WORKDIR /code
15 |
16 | RUN poetry config virtualenvs.in-project false
17 |
18 | COPY pyproject.toml .
19 | RUN poetry lock && poetry install
20 |
21 | COPY . .
22 |
23 | CMD ["chmod", "+x", "/code/start_dev.sh", '/code/wait-for-it.sh']
24 |
25 |
--------------------------------------------------------------------------------
/saga-orchestration-example/parking/alembic.ini:
--------------------------------------------------------------------------------
1 | # A generic, single database configuration.
2 |
3 | [alembic]
4 | # path to migration scripts
5 | script_location = migrations
6 |
7 | # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
8 | # Uncomment the line below if you want the files to be prepended with date and time
9 | # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
10 | # for all available tokens
11 | # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
12 |
13 | # sys.path path, will be prepended to sys.path if present.
14 | # defaults to the current working directory.
15 | prepend_sys_path = .
16 |
17 | # timezone to use when rendering the date within the migration file
18 | # as well as the filename.
19 | # If specified, requires the python-dateutil library that can be
20 | # installed by adding `alembic[tz]` to the pip requirements
21 | # string value is passed to dateutil.tz.gettz()
22 | # leave blank for localtime
23 | # timezone =
24 |
25 | # max length of characters to apply to the
26 | # "slug" field
27 | # truncate_slug_length = 40
28 |
29 | # set to 'true' to run the environment during
30 | # the 'revision' command, regardless of autogenerate
31 | # revision_environment = false
32 |
33 | # set to 'true' to allow .pyc and .pyo files without
34 | # a source .py file to be detected as revisions in the
35 | # versions/ directory
36 | # sourceless = false
37 |
38 | # version location specification; This defaults
39 | # to migrations/versions. When using multiple version
40 | # directories, initial revisions must be specified with --version-path.
41 | # The path separator used here should be the separator specified by "version_path_separator" below.
42 | # version_locations = %(here)s/bar:%(here)s/bat:migrations/versions
43 |
44 | # version path separator; As mentioned above, this is the character used to split
45 | # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
46 | # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
47 | # Valid values for version_path_separator are:
48 | #
49 | # version_path_separator = :
50 | # version_path_separator = ;
51 | # version_path_separator = space
52 | version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
53 |
54 | # the output encoding used when revision files
55 | # are written from script.py.mako
56 | # output_encoding = utf-8
57 |
58 | sqlalchemy.url = driver://user:pass@localhost/dbname
59 |
60 |
61 | [post_write_hooks]
62 | # post_write_hooks defines scripts or Python functions that are run
63 | # on newly generated revision scripts. See the documentation for further
64 | # detail and examples
65 |
66 | # format using "black" - use the console_scripts runner, against the "black" entrypoint
67 | # hooks = black
68 | # black.type = console_scripts
69 | # black.entrypoint = black
70 | # black.options = -l 79 REVISION_SCRIPT_FILENAME
71 |
72 | # Logging configuration
73 | [loggers]
74 | keys = root,sqlalchemy,alembic
75 |
76 | [handlers]
77 | keys = console
78 |
79 | [formatters]
80 | keys = generic
81 |
82 | [logger_root]
83 | level = WARN
84 | handlers = console
85 | qualname =
86 |
87 | [logger_sqlalchemy]
88 | level = WARN
89 | handlers =
90 | qualname = sqlalchemy.engine
91 |
92 | [logger_alembic]
93 | level = INFO
94 | handlers =
95 | qualname = alembic
96 |
97 | [handler_console]
98 | class = StreamHandler
99 | args = (sys.stderr,)
100 | level = NOTSET
101 | formatter = generic
102 |
103 | [formatter_generic]
104 | format = %(levelname)-5.5s [%(name)s] %(message)s
105 | datefmt = %H:%M:%S
106 |
--------------------------------------------------------------------------------
/saga-orchestration-example/parking/app/__init__.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | logging.basicConfig(level=logging.INFO)
4 |
--------------------------------------------------------------------------------
/saga-orchestration-example/parking/app/amqp_client.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | from functools import partial
3 | from typing import Callable
4 |
5 | import aio_pika
6 | import simplejson as json
7 | from aio_pika import IncomingMessage, Message
8 |
9 | from app import settings
10 | from app.models import AMQPMessage
11 |
12 |
13 | class AMQPClient:
14 |
15 | async def init(self) -> 'AMQPClient':
16 | '''
17 | Initialize AMQP client; returns self with an open connection and channel.
18 | '''
19 |
20 | self.connection = await aio_pika.connect_robust(
21 | settings.RABBITMQ_BROKER_URL, loop=asyncio.get_event_loop()
22 | )
23 |
24 | # Creating channel
25 | self.channel = await self.connection.channel()
26 | return self
27 |
28 | async def event_consumer(
29 | self, callback: Callable, event_store: str, event: str = '#', queue_name: str | None = None,
30 | ) -> None:
31 | '''
32 | Create an event consumer.
33 |
34 | callback - A function that will process the incoming message.
35 | event_store - Declare an exchange as an event store. We send messages/events
36 | to this exchange.
37 | event - Serves as a binding key or a type of event that occurred.
38 | queue_name - Name of the queue that receives events from the Exchange (optional).
39 | If not specified, a queue with a random name is still created.
40 | '''
41 | exchange = await self.channel.declare_exchange(
42 | event_store,
43 | type='topic',
44 | durable=True
45 | )
46 | queue = await self.channel.declare_queue(queue_name, auto_delete=True)
47 |
48 | await queue.bind(exchange, event)
49 | await queue.consume(partial(self._process_message, callback=callback))
50 |
51 | async def event_producer(
52 | self, event_store: str, binding_key: str, correlation_id: str, message: AMQPMessage
53 | ) -> None:
54 | '''
55 | Send event/message to a specific exchange with binding-key.
56 |
57 | If an existing queue is bound to the given binding-key, the message will be stored
58 | in that queue; otherwise the message will be lost.
59 |
60 | NOTE: The binding_key is mandatory so we can explicitly route the message/event
61 | to the right queue.
62 | '''
63 |
64 | # Declare exchange
65 | exchange = await self.channel.declare_exchange(
66 | event_store,
67 | type='topic',
68 | durable=True
69 | )
70 |
71 | payload = json.dumps(message.dict())
72 | await exchange.publish(
73 | Message(
74 | body=str(payload).encode(),
75 | content_type='application/json',
76 | correlation_id=correlation_id
77 | ),
78 | routing_key=binding_key,
79 | )
80 |
81 | async def _process_message(self, message: IncomingMessage, callback: Callable) -> None:
82 | '''
83 | Process an incoming message from a queue. It requires a callback function to handle
84 | the message content.
85 | '''
86 | async with message.process(ignore_processed=True):
87 | await callback(message)
88 |
--------------------------------------------------------------------------------
/saga-orchestration-example/parking/app/cli.py:
--------------------------------------------------------------------------------
1 | import fire
2 |
3 | from app.db import Session
4 | from app.services import (create_parking_slot, parking_slot_details,
5 | parking_slot_list)
6 |
7 |
8 | class AppCLI(object):
9 |
10 | async def create_parking_slot(self, name: str, status: str | None = 'available'):
11 | with Session() as session:
12 | ps = await create_parking_slot(session, name=name, status=status)
13 | return ps.to_dict()
14 |
15 | async def parking_slot_list(self):
16 | with Session() as session:
17 | ps_list = await parking_slot_list(session)
18 | return [ps.to_dict() for ps in ps_list]
19 |
20 | async def parking_slot_details(self, uuid: str):
21 | with Session() as session:
22 | ps = await parking_slot_details(session, uuid)
23 | return ps.to_dict()
24 |
25 |
26 | if __name__ == '__main__':
27 | fire.Fire(AppCLI)
28 |
--------------------------------------------------------------------------------
/saga-orchestration-example/parking/app/db.py:
--------------------------------------------------------------------------------
1 | import contextlib
2 |
3 | from sqlalchemy import create_engine
4 | from sqlalchemy.ext.declarative import declarative_base
5 | from sqlalchemy.orm import sessionmaker
6 | from app import settings
7 |
8 | engine = create_engine(settings.DATABASE_URL)
9 | SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
10 |
11 | Base = declarative_base()
12 |
13 |
14 | @contextlib.contextmanager
15 | def Session():
16 | db = SessionLocal()
17 | try:
18 | yield db
19 | finally:
20 | db.close()
21 |
--------------------------------------------------------------------------------
/saga-orchestration-example/parking/app/main.py:
--------------------------------------------------------------------------------
1 | import ast
2 | import asyncio
3 | import contextlib
4 |
5 | import aio_pika
6 | from aio_pika import IncomingMessage, Message
7 | from starlette.applications import Starlette
8 | from starlette.responses import JSONResponse
9 | from starlette.routing import Route
10 |
11 | from app.services import parking_command_event_processor
12 | from app.amqp_client import AMQPClient
13 |
14 |
15 | @contextlib.asynccontextmanager
16 | async def lifespan(app):
17 | amqp_client: AMQPClient = await AMQPClient().init()
18 |
19 | try:
20 | await amqp_client.event_consumer(parking_command_event_processor, 'BOOKING_TX_EVENT_STORE', 'parking.*')
21 | yield
22 | finally:
23 | await amqp_client.connection.close()
24 |
25 |
26 | async def health(request):
27 | return JSONResponse({'message': 'Parking server is running'})
28 |
29 | routes = [
30 | Route('/health', health),
31 | ]
32 |
33 | app = Starlette(routes=routes, lifespan=lifespan)
34 |
--------------------------------------------------------------------------------
/saga-orchestration-example/parking/app/models.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Dict
2 |
3 | from sqlalchemy import Column, String
4 |
5 | from app.db import Base
6 | from pydantic import BaseModel
7 |
8 |
9 | class DictMixin:
10 | def to_dict(self) -> Dict:
11 | return dict((col, getattr(self, col)) for col in self.__table__.columns.keys())
12 |
13 |
14 | class ParkingSlot(Base, DictMixin):
15 | __tablename__ = 'parking_slots'
16 |
17 | uuid = Column(String, primary_key=True, unique=True, index=True)
18 | name = Column(String, nullable=False)
19 |
20 | # available/blocked/reserved
21 | status = Column(String, nullable=False, server_default='available')
22 |
23 |
24 | class AMQPMessage(BaseModel):
25 | id: str
26 | content: Any | None = None
27 | reply_state: str | None = None
28 |
--------------------------------------------------------------------------------
/saga-orchestration-example/parking/app/settings.py:
--------------------------------------------------------------------------------
1 | from starlette.config import Config
2 |
3 | config = Config()
4 |
5 | DATABASE_URL = config('DATABASE_URL')
6 | RABBITMQ_BROKER_URL = config('RABBITMQ_BROKER_URL')
7 |
--------------------------------------------------------------------------------
/saga-orchestration-example/parking/migrations/README:
--------------------------------------------------------------------------------
1 | Generic single-database configuration.
--------------------------------------------------------------------------------
/saga-orchestration-example/parking/migrations/env.py:
--------------------------------------------------------------------------------
1 | from logging.config import fileConfig
2 |
3 | from sqlalchemy import engine_from_config
4 | from sqlalchemy import pool
5 |
6 | from alembic import context
7 |
8 | from app import logging, settings
9 |
10 | # this is the Alembic Config object, which provides
11 | # access to the values within the .ini file in use.
12 | config = context.config
13 |
14 | # Interpret the config file for Python logging.
15 | # This line sets up loggers basically.
16 | if config.config_file_name is not None:
17 | fileConfig(config.config_file_name)
18 |
19 | # add your model's MetaData object here
20 | # for 'autogenerate' support
21 | # from myapp import mymodel
22 | # target_metadata = mymodel.Base.metadata
23 | from app.db import Base
24 |
25 | from app.models import ParkingSlot
26 | config.set_main_option('sqlalchemy.url', settings.DATABASE_URL)
27 | target_metadata = [Base.metadata]
28 |
29 |
30 | # other values from the config, defined by the needs of env.py,
31 | # can be acquired:
32 | # my_important_option = config.get_main_option("my_important_option")
33 | # ... etc.
34 |
35 |
36 | def run_migrations_offline() -> None:
37 | """Run migrations in 'offline' mode.
38 |
39 | This configures the context with just a URL
40 | and not an Engine, though an Engine is acceptable
41 | here as well. By skipping the Engine creation
42 | we don't even need a DBAPI to be available.
43 |
44 | Calls to context.execute() here emit the given string to the
45 | script output.
46 |
47 | """
48 | url = config.get_main_option("sqlalchemy.url")
49 | context.configure(
50 | url=url,
51 | target_metadata=target_metadata,
52 | literal_binds=True,
53 | compare_type=True,
54 | compare_server_default=True,
55 | render_as_batch=True,
56 | dialect_opts={"paramstyle": "named"},
57 | )
58 |
59 | with context.begin_transaction():
60 | context.run_migrations()
61 |
62 |
63 | def run_migrations_online() -> None:
64 | """Run migrations in 'online' mode.
65 |
66 | In this scenario we need to create an Engine
67 | and associate a connection with the context.
68 |
69 | """
70 |
71 | # this callback is used to prevent an auto-migration from being generated
72 | # when there are no changes to the schema
73 | # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
74 | def process_revision_directives(context, revision, directives):
75 | if getattr(config.cmd_opts, 'autogenerate', False):
76 | script = directives[0]
77 | if script.upgrade_ops.is_empty():
78 | directives[:] = []
79 | logging.info('No changes in schema detected.')
80 |
81 |
82 | connectable = engine_from_config(
83 | config.get_section(config.config_ini_section),
84 | prefix="sqlalchemy.",
85 | poolclass=pool.NullPool,
86 | )
87 |
88 | with connectable.connect() as connection:
89 | context.configure(
90 | connection=connection, target_metadata=target_metadata,
91 | process_revision_directives=process_revision_directives,
92 | compare_type=True,
93 | compare_server_default=True,
94 | render_as_batch=True
95 | )
96 |
97 | with context.begin_transaction():
98 | context.run_migrations()
99 |
100 |
101 | if context.is_offline_mode():
102 | run_migrations_offline()
103 | else:
104 | run_migrations_online()
105 |
--------------------------------------------------------------------------------
/saga-orchestration-example/parking/migrations/script.py.mako:
--------------------------------------------------------------------------------
1 | """${message}
2 |
3 | Revision ID: ${up_revision}
4 | Revises: ${down_revision | comma,n}
5 | Create Date: ${create_date}
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 | ${imports if imports else ""}
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = ${repr(up_revision)}
14 | down_revision = ${repr(down_revision)}
15 | branch_labels = ${repr(branch_labels)}
16 | depends_on = ${repr(depends_on)}
17 |
18 |
19 | def upgrade() -> None:
20 | ${upgrades if upgrades else "pass"}
21 |
22 |
23 | def downgrade() -> None:
24 | ${downgrades if downgrades else "pass"}
25 |
--------------------------------------------------------------------------------
/saga-orchestration-example/parking/migrations/versions/5e95058bcf50_update_parkingslot_status_default_value_.py:
--------------------------------------------------------------------------------
1 | """Update ParkingSlot.status default value to available
2 |
3 | Revision ID: 5e95058bcf50
4 | Revises: 6935a5c10469
5 | Create Date: 2022-06-10 20:41:18.696370
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '5e95058bcf50'
14 | down_revision = '6935a5c10469'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade() -> None:
20 | # ### commands auto generated by Alembic - please adjust! ###
21 | with op.batch_alter_table('parking_slots', schema=None) as batch_op:
22 | batch_op.alter_column('status',
23 | existing_type=sa.VARCHAR(),
24 | server_default='available',
25 | existing_nullable=False)
26 |
27 | # ### end Alembic commands ###
28 |
29 |
30 | def downgrade() -> None:
31 | # ### commands auto generated by Alembic - please adjust! ###
32 | with op.batch_alter_table('parking_slots', schema=None) as batch_op:
33 | batch_op.alter_column('status',
34 | existing_type=sa.VARCHAR(),
35 | server_default=sa.text("'pending'"),
36 | existing_nullable=False)
37 |
38 | # ### end Alembic commands ###
39 |
--------------------------------------------------------------------------------
/saga-orchestration-example/parking/migrations/versions/6935a5c10469_initial_migration.py:
--------------------------------------------------------------------------------
1 | """Initial migration
2 |
3 | Revision ID: 6935a5c10469
4 | Revises:
5 | Create Date: 2022-06-03 18:45:48.121616
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '6935a5c10469'
14 | down_revision = None
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade() -> None:
20 | # ### commands auto generated by Alembic - please adjust! ###
21 | op.create_table('parking_slots',
22 | sa.Column('uuid', sa.String(), nullable=False),
23 | sa.Column('name', sa.String(), nullable=False),
24 | sa.Column('status', sa.String(), server_default='pending', nullable=False),
25 | sa.PrimaryKeyConstraint('uuid')
26 | )
27 | op.create_index(op.f('ix_parking_slots_uuid'), 'parking_slots', ['uuid'], unique=True)
28 | # ### end Alembic commands ###
29 |
30 |
31 | def downgrade() -> None:
32 | # ### commands auto generated by Alembic - please adjust! ###
33 | op.drop_index(op.f('ix_parking_slots_uuid'), table_name='parking_slots')
34 | op.drop_table('parking_slots')
35 | # ### end Alembic commands ###
36 |
--------------------------------------------------------------------------------
/saga-orchestration-example/parking/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "parking"
3 | version = "0.1.0"
4 | description = "Saga's orchestration pattern example - Parking service"
5 | authors = ["roelzkie "]
6 |
7 | [tool.poetry.dependencies]
8 | python = "^3.10"
9 | starlette = "^0.20.1"
10 | aio-pika = "^7.2.0"
11 | fire = "^0.4.0"
12 | alembic = "^1.7.7"
13 | psycopg2-binary = "2.9.3"
14 | uvicorn = {extras = ["standard"], version = "^0.17.6"}
15 | pydantic = "^1.9.0"
16 | simplejson = "^3.17.6"
17 |
18 | [tool.poetry.dev-dependencies]
19 |
20 | [build-system]
21 | requires = ["poetry-core>=1.0.0"]
22 | build-backend = "poetry.core.masonry.api"
23 |
--------------------------------------------------------------------------------
/saga-orchestration-example/parking/start_dev.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -o errexit
4 | set -o pipefail
5 | set -o nounset
6 |
7 | poetry run alembic upgrade head
8 | poetry run uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload
9 |
--------------------------------------------------------------------------------
/saga-orchestration-example/resources/saga-orchestration-pattern-rb-transaction.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/roelzkie15/python-microservices-patterns/a977e0a7b34ead5ff3f39f112450a0491673b7aa/saga-orchestration-example/resources/saga-orchestration-pattern-rb-transaction.png
--------------------------------------------------------------------------------
/saga-orchestration-example/resources/saga-orchestration-pattern.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/roelzkie15/python-microservices-patterns/a977e0a7b34ead5ff3f39f112450a0491673b7aa/saga-orchestration-example/resources/saga-orchestration-pattern.png
--------------------------------------------------------------------------------