├── src
│   ├── ees
│   │   ├── __init__.py
│   │   ├── handlers
│   │   │   ├── __init__.py
│   │   │   ├── version.py
│   │   │   ├── invalid.py
│   │   │   ├── stats.py
│   │   │   ├── global_changesets.py
│   │   │   ├── changesets.py
│   │   │   ├── publisher.py
│   │   │   ├── events.py
│   │   │   ├── analysis_projector.py
│   │   │   ├── global_indexer.py
│   │   │   └── commit.py
│   │   ├── infrastructure
│   │   │   ├── sns.py
│   │   │   ├── aws_lambda.py
│   │   │   └── dynamodb.py
│   │   ├── commands.py
│   │   ├── app.py
│   │   └── model.py
│   ├── tests
│   │   ├── __init__.py
│   │   ├── unit
│   │   │   ├── __init__.py
│   │   │   ├── context.py
│   │   │   ├── test_cmd_router.py
│   │   │   ├── test_model.py
│   │   │   ├── test_publishing.py
│   │   │   ├── test_checkmark_calc.py
│   │   │   ├── test_projecting_analysis.py
│   │   │   ├── test_parsing_lambda_events.py
│   │   │   └── events.json
│   │   └── integration
│   │       ├── __init__.py
│   │       ├── api_test_client.py
│   │       ├── test_fetch_changesets.py
│   │       ├── test_fetch_events.py
│   │       └── test_commit_stream.py
│   ├── requirements.txt
│   └── lambda_entrypoint.py
├── pytest.ini
├── run-all-tests.sh
├── .vscode
│   └── settings.json
├── run-unit-tests.sh
├── docs
│   └── diagrams
│       ├── aws-components.png
│       └── EES.drawio
├── deploy-test-env.sh
├── .aws-sam
│   └── build.toml
├── .github
│   └── workflows
│       ├── run-unit-tests.yml
│       └── run-integration-tests.yml
├── design-decisions.txt
├── LICENSE
├── events
│   └── event.json
├── .gitignore
├── template.yaml
└── README.md
/src/ees/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/src/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/src/ees/handlers/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/src/tests/unit/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/src/tests/integration/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/src/requirements.txt:
--------------------------------------------------------------------------------
1 | pytest
2 | pytest-mock
3 | boto3
4 | requests
5 | mock
--------------------------------------------------------------------------------
/pytest.ini:
--------------------------------------------------------------------------------
1 | [pytest]
2 | markers =
3 | slow: marks tests as slow (deselect with '-m "not slow"')
--------------------------------------------------------------------------------
/run-all-tests.sh:
--------------------------------------------------------------------------------
1 | export AWS_SAM_STACK_NAME=ees-tests
2 | export AWS_DEFAULT_REGION=us-east-1
3 | pytest
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "python.linting.pylintEnabled": true,
3 | "python.linting.enabled": true
4 | }
--------------------------------------------------------------------------------
/run-unit-tests.sh:
--------------------------------------------------------------------------------
1 | export AWS_SAM_STACK_NAME=ees-tests
2 | export AWS_DEFAULT_REGION=us-east-1
3 | pytest -v -m "not slow"
--------------------------------------------------------------------------------
/docs/diagrams/aws-components.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/vladikk/elastic-event-store/HEAD/docs/diagrams/aws-components.png
--------------------------------------------------------------------------------
/src/tests/unit/context.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../..')))
4 | import ees
--------------------------------------------------------------------------------
/deploy-test-env.sh:
--------------------------------------------------------------------------------
1 | sam build
2 | sam deploy \
3 | --s3-bucket=$SAM_ARTIFACTS_BUCKET \
4 | --stack-name=ees-tests \
5 | --region=us-east-1 \
6 | --no-confirm-changeset \
7 | --capabilities="CAPABILITY_IAM" \
8 | --parameter-overrides=""
--------------------------------------------------------------------------------
/src/ees/handlers/version.py:
--------------------------------------------------------------------------------
1 | import json
2 | from ees.model import Response
3 |
4 | class VersionHandler:
5 | def execute(self, cmd):
6 | return Response(
7 | http_status=200,
8 | body={
9 | "version": "0.0.1"
10 | })
11 |
--------------------------------------------------------------------------------
/src/ees/handlers/invalid.py:
--------------------------------------------------------------------------------
1 | from ees.model import Response
2 |
3 |
4 | class InvalidEndpointHandler:
5 | def execute(self, event):
6 | return Response(
7 | http_status=404,
8 | body={
9 | "message": "Invalid endpoint"
10 | })
11 |
--------------------------------------------------------------------------------
/.aws-sam/build.toml:
--------------------------------------------------------------------------------
1 | # This file is auto generated by SAM CLI build command
2 |
3 | [function_build_definitions]
4 | [function_build_definitions.e3471256-cbdd-4eac-b00d-70b41aa35621]
5 | codeuri = "src"
6 | runtime = "python3.8"
7 | source_md5 = ""
8 | packagetype = "Zip"
9 | functions = ["CommandHandlerFunction", "GlobalIndexerFunction", "PublisherFunction", "AnalysisProjectorFunction"]
10 |
11 | [layer_build_definitions]
12 |
--------------------------------------------------------------------------------
/.github/workflows/run-unit-tests.yml:
--------------------------------------------------------------------------------
1 | name: run-unit-tests
2 | on: [push]
3 | jobs:
4 | run-unit-tests:
5 | runs-on: ubuntu-latest
6 | steps:
7 | - uses: actions/checkout@v2
8 | - name: Set up Python 3.8
9 | uses: actions/setup-python@v2
10 | with:
11 | python-version: 3.8
12 | - name: Install dependencies
13 | run: |
14 | python -m pip install --upgrade pip
15 | pip install -r src/requirements.txt
16 | - name: Test with pytest
17 | run: |
18 | ./run-unit-tests.sh
--------------------------------------------------------------------------------
/src/ees/infrastructure/sns.py:
--------------------------------------------------------------------------------
1 | import boto3
2 | import json
3 | import logging
4 |
5 | logger = logging.getLogger("ees.infrastructure.sns")
6 |
7 |
8 | class SNS:
9 | def __init__(self, topic):
10 | self.topic = topic
11 | self.sns = boto3.client('sns')
12 |
13 | def publish(self, message, group):
14 | logger.debug(f"Publishing message to {self.topic}: {message}")
15 |
16 | response = self.sns.publish(
17 | TopicArn=self.topic,
18 | Message=message,
19 | MessageGroupId=group)
20 |
21 | logger.debug(f"Publishing result: {response}")
--------------------------------------------------------------------------------
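Note that publish() always sets MessageGroupId, which SNS only accepts on FIFO topics, so the wrapped topic ARN is expected to be a FIFO topic. A hypothetical usage sketch (the ARN is illustrative):

    # Per-group ordering is preserved by the FIFO topic via MessageGroupId.
    topic = SNS("arn:aws:sns:us-east-1:123456789012:ees-changesets.fifo")
    topic.publish('{"stream_id": "s1", "changeset_id": 1}', group="s1")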
/design-decisions.txt:
--------------------------------------------------------------------------------
1 | 2021/02/02 13:09
2 | Decided to ditch the global events endpoint, as it's too cumbersome and inefficient. A brain-friendly design requires a dedicated global secondary index; first I need to make sure it's really worth the effort (and the costs).
3 |
4 | 2021/01/30 18:06
5 | Fetching a subset of a stream's events is done by issuing two queries: one to get the first matching changesets and, if needed, another for the forthcoming changesets on a different local secondary index.
6 |
7 | 2021/01/29 22:29
8 | The events belonging to a changeset are represented as a map, where the key is the event's ordinal number in the stream and the value is the event itself. The reason is that DynamoDB's "set" type doesn't preserve order, hence a map is used instead.
--------------------------------------------------------------------------------
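To make the 2021/01/29 decision concrete, here is a minimal sketch (table and attribute names are illustrative assumptions, not the actual schema) of a changeset item whose events are stored as a DynamoDB map keyed by ordinal number:

    import boto3

    # Hypothetical item layout; the map keys ("1", "2") are the events'
    # ordinal numbers, which a DynamoDB "set" would not preserve.
    dynamodb = boto3.client('dynamodb')
    dynamodb.put_item(
        TableName='ees-events',  # illustrative table name
        Item={
            'stream_id': {'S': 'aaa'},
            'changeset_id': {'N': '1'},
            'events': {'M': {
                '1': {'M': {'type': {'S': 'init'}}},
                '2': {'M': {'type': {'S': 'update'}}}
            }}
        })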
/src/ees/handlers/stats.py:
--------------------------------------------------------------------------------
1 | import json
2 | from ees.model import Response
3 |
4 | class StatsHandler:
5 | def __init__(self, db):
6 | self.db = db
7 |
8 | def execute(self, cmd):
9 | v = self.db.get_analysis_state()
10 | if not v:
11 | return Response(
12 | http_status=404,
13 | body={
14 | "error": "Statistics are not yet generated"
15 | })
16 |
17 | return Response(
18 | http_status=200,
19 | body={
20 | 'total_streams': v.total_streams,
21 | 'total_changesets': v.total_changesets,
22 | 'total_events': v.total_events,
23 | 'max_stream_length': v.max_stream_length,
24 | 'statistics_version': v.version
25 | })
26 |
--------------------------------------------------------------------------------
/src/ees/commands.py:
--------------------------------------------------------------------------------
1 | from collections import namedtuple
2 | # Commands are plain immutable namedtuples, routed to handlers by ees.app.route_request.
3 |
4 |
5 | Version = namedtuple('Version', [])
6 |
7 | Stats = namedtuple('Stats', [])
8 |
9 | Commit = namedtuple(
10 | 'Commit',
11 | ['stream_id',
12 | 'expected_last_changeset',
13 | 'expected_last_event',
14 | 'events',
15 | 'metadata'])
16 |
17 | FetchStreamChangesets = namedtuple(
18 | 'FetchStreamChangesets',
19 | ['stream_id',
20 | 'from_changeset',
21 | 'to_changeset'])
22 |
23 | FetchStreamEvents = namedtuple(
24 | 'FetchStreamEvents',
25 | ['stream_id',
26 | 'from_event',
27 | 'to_event'])
28 |
29 | FetchGlobalChangesets = namedtuple(
30 | 'FetchGlobalChangesets',
31 | ['checkpoint',
32 | 'limit'])
33 |
34 | AssignGlobalIndexes = namedtuple(
35 | 'AssignGlobalIndexes', ['changesets'])
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | MIT License
3 |
4 | Copyright (c) 2021 Vladik Khononov
5 |
6 | Permission is hereby granted, free of charge, to any person obtaining a copy
7 | of this software and associated documentation files (the "Software"), to deal
8 | in the Software without restriction, including without limitation the rights
9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 | copies of the Software, and to permit persons to whom the Software is
11 | furnished to do so, subject to the following conditions:
12 |
13 | The above copyright notice and this permission notice shall be included in all
14 | copies or substantial portions of the Software.
15 |
16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22 | SOFTWARE.
--------------------------------------------------------------------------------
/src/ees/handlers/global_changesets.py:
--------------------------------------------------------------------------------
1 | import json
2 | from ees.model import CheckpointCalc, Response
3 |
4 | class FetchGlobalChangesetsHandler:
5 | def __init__(self, db):
6 | self.db = db
7 | self.checkpoint_calc = CheckpointCalc()
8 | self.default_limit=10
9 |
10 | def execute(self, cmd):
11 | limit = cmd.limit or self.default_limit
12 |
13 | changesets = self.db.fetch_global_changesets(cmd.checkpoint, limit)
14 |
15 | changesets = [{
16 | "stream_id": c.stream_id,
17 | "changeset_id": c.changeset_id,
18 | "events": c.events,
19 | "metadata": c.metadata,
20 | "checkpoint":
21 | self.checkpoint_calc.to_checkpoint(c.page, c.page_item)
22 | } for c in changesets]
23 |
24 | next_checkpoint = cmd.checkpoint
25 | if changesets:
26 | next_checkpoint = max([c["checkpoint"] for c in changesets]) + 1
27 |
28 | return Response(
29 | http_status=200,
30 | body={
31 | "checkpoint": cmd.checkpoint,
32 | "limit": limit,
33 | "changesets": changesets,
34 | "next_checkpoint": next_checkpoint
35 | })
--------------------------------------------------------------------------------
/src/ees/handlers/changesets.py:
--------------------------------------------------------------------------------
1 | from ees.model import Response
2 |
3 | class FetchChangesetsHandler:
4 | def __init__(self, db):
5 | self.db = db
6 |
7 | def execute(self, cmd):
8 | changesets = self.db.fetch_stream_changesets(
9 | cmd.stream_id,
10 | from_changeset=cmd.from_changeset,
11 | to_changeset=cmd.to_changeset)
12 |
13 | changesets = [{ "changeset_id": c.changeset_id,
14 | "events": c.events,
15 | "metadata": c.metadata } for c in changesets]
16 |
17 | if not changesets:
18 | last_commit = self.db.fetch_last_commit(cmd.stream_id)
19 | if not last_commit:
20 | return self.stream_not_found(cmd.stream_id)
21 |
22 | return Response(
23 | http_status=200,
24 | body={
25 | "stream_id": cmd.stream_id,
26 | "changesets": changesets
27 | })
28 |
29 | def stream_not_found(self, stream_id):
30 | return Response(
31 | http_status=404,
32 | body={
33 | "stream_id": stream_id,
34 | "error": "STREAM_NOT_FOUND",
35 | "message": f'The specified stream({stream_id}) doesn\'t exist'
36 | })
37 |
--------------------------------------------------------------------------------
/src/tests/unit/test_cmd_router.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 | from ees import handlers
4 |
5 | from .context import ees
6 | from ees import app
7 | from ees.app import *
8 | from ees.commands import *
9 |
10 | def test_version(mocker):
11 | cmd = Version()
12 | handler = app.route_request(cmd)
13 | assert isinstance(handler, VersionHandler)
14 |
15 | def test_commit(mocker):
16 | cmd = Commit("1", 2, None, [], [])
17 | handler = app.route_request(cmd)
18 | assert isinstance(handler, CommitHandler)
19 |
20 | def test_fetch_changesets(mocker):
21 | cmd = FetchStreamChangesets("1", None, None)
22 | handler = app.route_request(cmd)
23 | assert isinstance(handler, FetchChangesetsHandler)
24 |
25 | def test_fetch_events(mocker):
26 | cmd = FetchStreamEvents("1", None, None)
27 | handler = app.route_request(cmd)
28 | assert isinstance(handler, FetchEventsHandler)
29 |
30 | def test_fetch_global_changesets(mocker):
31 | cmd = FetchGlobalChangesets(0, None)
32 | handler = app.route_request(cmd)
33 | assert isinstance(handler, FetchGlobalChangesetsHandler)
34 |
35 | def test_invalid_endpoint(mocker):
36 | cmd = "something-else"
37 | handler = app.route_request(cmd)
38 | assert isinstance(handler, InvalidEndpointHandler)
39 |
40 | def test_assign_global_indexes(mocker):
41 | cmd = AssignGlobalIndexes([])
42 | handler = app.route_request(cmd)
43 | assert isinstance(handler, GlobalIndexer)
44 |
--------------------------------------------------------------------------------
/src/ees/handlers/publisher.py:
--------------------------------------------------------------------------------
1 | import hashlib
2 | import json
3 | import logging
4 |
5 | logger = logging.getLogger('ees.handlers.publisher')
6 |
7 | class Publisher(object):
8 | def __init__(self, changesets_topic, events_topic):
9 | self.changesets_topic = changesets_topic
10 | self.events_topic = events_topic
11 |
12 | def publish(self, changesets):
13 | logger.info(f"Publishing {len(changesets)} changesets.")
14 |
15 | for c in changesets:
16 | group = hashlib.sha224(c.stream_id.encode('utf-8')).hexdigest()
17 |
18 | message = {
19 | "stream_id": c.stream_id,
20 | "changeset_id": c.changeset_id,
21 | "events": c.events,
22 | "metadata": c.metadata
23 | }
24 | logger.debug(f"Publishing message to the changesets topic: {json.dumps(message)}")
25 | self.changesets_topic.publish(json.dumps(message), group)
26 | for i, e in enumerate(c.events):
27 | message = {
28 | "stream_id": c.stream_id,
29 | "changeset_id": c.changeset_id,
30 | "event_id": c.first_event_id + i,
31 | "data": e
32 | }
33 | logger.debug(f"Publishing message to the events topic: {json.dumps(message)}")
34 | self.events_topic.publish(json.dumps(message), group)
35 |
36 | logger.info(f"Finished publishing {len(changesets)} changesets.")
37 |
--------------------------------------------------------------------------------
/src/ees/handlers/events.py:
--------------------------------------------------------------------------------
1 | from ees.model import Response
2 |
3 |
4 | class FetchEventsHandler:
5 | def __init__(self, db):
6 | self.db = db
7 |
8 | def execute(self, cmd):
9 | changesets = self.db.fetch_stream_by_events(
10 | cmd.stream_id,
11 | from_event=cmd.from_event,
12 | to_event=cmd.to_event)
13 |
14 | events = []
15 | for c in changesets:
16 | for i, e in enumerate(c.events):
17 | events.append({
18 | "id": c.first_event_id + i,
19 | "data": e
20 | })
21 |
22 | events = [e for e in events
23 | if (not cmd.from_event or e["id"] >= cmd.from_event) and
24 | (not cmd.to_event or e["id"] <= cmd.to_event)]
25 |
26 | if not events:
27 | last_commit = self.db.fetch_last_commit(cmd.stream_id)
28 | if not last_commit:
29 | return self.stream_not_found(cmd.stream_id)
30 |
31 | return Response(
32 | http_status=200,
33 | body={
34 | "stream_id": cmd.stream_id,
35 | "events": events
36 | })
37 |
38 | def stream_not_found(self, stream_id):
39 | return Response(
40 | http_status=404,
41 | body={
42 | "stream_id": stream_id,
43 | "error": "STREAM_NOT_FOUND",
44 | "message": f'The specified stream({stream_id}) doesn\'t exist'
45 | })
46 |
--------------------------------------------------------------------------------
/.github/workflows/run-integration-tests.yml:
--------------------------------------------------------------------------------
1 | name: run-integration-tests
2 | on:
3 | push:
4 | branches: [ main ]
5 | jobs:
6 | run-integration-tests:
7 | env:
8 | AWS_SAM_STACK_NAME: ees-ci-integration-tests
9 | runs-on: ubuntu-latest
10 | steps:
11 | - uses: actions/checkout@v2
12 | - name: Set up Python 3.8
13 | uses: actions/setup-python@v2
14 | with:
15 | python-version: 3.8
16 | - name: Install dependencies
17 | run: |
18 | python -m pip install --upgrade pip
19 | pip install -r src/requirements.txt
20 | - name: Configure AWS credentials
21 | id: creds
22 | uses: aws-actions/configure-aws-credentials@v1
23 | with:
24 | aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
25 | aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
26 | aws-region: ${{ secrets.AWS_REGION }}
27 | - name: SAM Validate
28 | run: |
29 | sam validate
30 | - name: SAM Build
31 | run: |
32 | sam build
33 | - name: SAM Deploy Test Environment
34 | run: |
35 | sam deploy \
36 | --s3-bucket=${{ secrets.AWS_SAM_ARTIFACTS_BUCKET }} \
37 | --stack-name=$AWS_SAM_STACK_NAME \
38 | --region=${{ secrets.AWS_REGION }} \
39 | --no-confirm-changeset \
40 | --capabilities="CAPABILITY_IAM"
41 | - name: Run integration tests
42 | run: |
43 | pytest
44 | - name: Delete Test Environment
45 | run: |
46 | aws cloudformation delete-stack --stack-name $AWS_SAM_STACK_NAME
--------------------------------------------------------------------------------
/src/ees/app.py:
--------------------------------------------------------------------------------
1 | import os
2 | from ees.handlers.version import VersionHandler
3 | from ees.handlers.commit import CommitHandler
4 | from ees.handlers.invalid import InvalidEndpointHandler
5 | from ees.handlers.changesets import FetchChangesetsHandler
6 | from ees.handlers.events import FetchEventsHandler
7 | from ees.handlers.global_changesets import FetchGlobalChangesetsHandler
8 | from ees.handlers.global_indexer import GlobalIndexer
9 | from ees.handlers.stats import StatsHandler
10 | from ees.infrastructure.dynamodb import DynamoDB
11 | from ees.commands import *
12 |
13 | db = DynamoDB(events_table=os.getenv('EventStoreTable'),
14 | analysis_table=os.getenv('AnalysisTable'))
15 |
16 |
17 | def route_request(cmd):
18 | commit = CommitHandler(db)
19 | version = VersionHandler()
20 | stats = StatsHandler(db)
21 | changesets = FetchChangesetsHandler(db)
22 | events = FetchEventsHandler(db)
23 | global_changesets = FetchGlobalChangesetsHandler(db)
24 | invalid = InvalidEndpointHandler()
25 | global_indexer = GlobalIndexer(db)
26 |
27 | if isinstance(cmd, Version):
28 | return version
29 |
30 | if isinstance(cmd, Stats):
31 | return stats
32 |
33 | if isinstance(cmd, Commit):
34 | return commit
35 |
36 | if isinstance(cmd, FetchStreamChangesets):
37 | return changesets
38 |
39 | if isinstance(cmd, FetchStreamEvents):
40 | return events
41 |
42 | if isinstance(cmd, FetchGlobalChangesets):
43 | return global_changesets
44 |
45 | if isinstance(cmd, AssignGlobalIndexes):
46 | return global_indexer
47 |
48 | return invalid
49 |
--------------------------------------------------------------------------------
/src/tests/unit/test_model.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 | from .context import ees
4 | from ees.model import CommitData, make_initial_commit, make_next_commit
5 |
6 | def test_initial_commit_factory(mocker):
7 | stream_id = 'aaa'
8 | metadata = "the changeset's metadata"
9 | events = [ "event1", "event2", "event3" ]
10 |
11 | commit = make_initial_commit(stream_id, events, metadata)
12 |
13 | assert commit.stream_id == stream_id
14 | assert commit.changeset_id == 1
15 | assert commit.metadata == "the changeset's metadata"
16 | assert commit.events[0] == "event1"
17 | assert commit.events[1] == "event2"
18 | assert commit.events[2] == "event3"
19 | assert commit.first_event_id == 1
20 | assert commit.last_event_id == 3
21 |
22 | def test_next_commit_factory(mocker):
23 | prev = CommitData(
24 | stream_id='aaa',
25 | changeset_id=4,
26 | metadata="the previous changeset's metadata",
27 | events=[ "old event 1", "old event 2", "old event 3"],
28 | first_event_id=5,
29 | last_event_id=8,
30 | page=None,
31 | page_item=None
32 | )
33 | metadata = "the new changeset's metadata"
34 | events = [ "new event 1", "new event 2", "new event 3" ]
35 |
36 | commit = make_next_commit(prev, events, metadata)
37 |
38 | assert commit.stream_id == 'aaa'
39 | assert commit.changeset_id == 5
40 | assert commit.metadata == "the new changeset's metadata"
41 | assert commit.events[0] == "new event 1"
42 | assert commit.events[1] == "new event 2"
43 | assert commit.events[2] == "new event 3"
44 | assert commit.first_event_id == 9
45 | assert commit.last_event_id == 11
--------------------------------------------------------------------------------
/docs/diagrams/EES.drawio:
--------------------------------------------------------------------------------
1 | 7Vpdk9o2FP01zLQP9ViSP/AjX5vNDG1JybRJXnaErbW1FRaVxYLz6yNjGWPs3SXsgknKE9aRZF/p3KN7JdFBg/n6ncCL6HceENaBZrDuoGEHQoCgqX4yJM0R1/FyIBQ00I1KYEq/Eg3qfuGSBiSpNJScM0kXVdDncUx8WcGwEHxVbXbPWfWrCxySGjD1Mauj/9BARhoFxTCyiltCw0h/ugvdvGKOi8Z6JEmEA77agdCogwaCc5k/zdcDwrLJK+Yl73fzRO3WMEFieUiHv73FOvwaRMBl8Z9fblfpzcP4NwC1cTItRkwCNQG6yIWMeMhjzEYl2hd8GQcke62pSmWbMecLBQIFPhApU80mXkquoEjOma5VFov0k+6/KXzOCoZdFIfr3cphuluaEEHnRBKhwURiIXsZ1QqIeUwK7IYyptvkg8xG9uTkaSjhS+GT52ZMOyEWIZHPtPO2FCttEK4sFqnqJwjDkj5W7cDaScNtu5JH9aCp/B5avTZorTDhM5wk1N8jA5yAjFdMsu464VR9GZp61bIAyrvoNQs6ZvUVOfm61x5VWzOOZw/a7Yqy1OHnigzfWJQkDooWM8b9fz9GNM7ho5zlRUXC83gL8va8Jffik3mLNvgRs6UewiDCcUgSIpMOdJiakv5MqKcwe/plmMZ4zof9X2s+xpeS0ZgMtpE0IynASbT1qkciJFWxcYxnhE14QiXlcUYfl5LPdxr0GA2zCpn5Xh/rkq8oyzyksmZEeJF9f74OswTCwKsEGcHGxrtgphrcK18YcMbFxkgERy7qdxWumgdUvbCo006mJ0PZQdbPe0zdEdYFgaBKqKkJXZUJgGUZlpWj0U70t82nvedVC7rzw4fpN1z14YGCR22GYFiT5V+j6UeF9CbvG2S5Qc13WJIVTi9Zm3hB78LczLo6h17PtcBJ1QmQ+bI6geHZDeo8Fdfu/yPbQgfq7rVx9lVcoJruev5GCtC8xXHAiGiKimM8nwX4knXHNhbe3S/jfDg17d3YXRtZJ9XeNhEutWfYdfW5hgfPGBtBy+lyNTha3R9gE+sdquRWQyi4qKzn5yLWanWNBm0Q28Ke9DC+3nxLahd7k5YOMLxaDJ4sZ4yqqCmusffI2GvDA/Lec0feerL17NnD9I/pJdOcxEkrWxrHq1ILrQZqHaNhR1Oib8+tVeN29KgGdOX1aF6RcwG8wm6rSdW2cOIbn3MHcuAemHnl94OtpdRuTdXvFbPra2A+/ri46xnQqQgduO3H5iJd2D0IUYpNE3q9FTgu/wKGW02rLdRw+HHmiwF4UXvkn2Y5P/xS0Wz2mjNdMtQv/yaCP5DixHMUh0q716X9+KX9ZcGfe12vR/Dh+ENTUv7hspPy/9pJyvfPsIHTbTjDLuirUIpORWn9vORK6feEZtA1vL0MzHFPxakqln/ay0/Myr8+otE3
--------------------------------------------------------------------------------
/events/event.json:
--------------------------------------------------------------------------------
1 | {
2 | "body": "{\"message\": \"hello world\"}",
3 | "resource": "/{proxy+}",
4 | "path": "/path/to/resource",
5 | "httpMethod": "POST",
6 | "isBase64Encoded": false,
7 | "queryStringParameters": {
8 | "foo": "bar"
9 | },
10 | "pathParameters": {
11 | "proxy": "/path/to/resource"
12 | },
13 | "stageVariables": {
14 | "baz": "qux"
15 | },
16 | "headers": {
17 | "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
18 | "Accept-Encoding": "gzip, deflate, sdch",
19 | "Accept-Language": "en-US,en;q=0.8",
20 | "Cache-Control": "max-age=0",
21 | "CloudFront-Forwarded-Proto": "https",
22 | "CloudFront-Is-Desktop-Viewer": "true",
23 | "CloudFront-Is-Mobile-Viewer": "false",
24 | "CloudFront-Is-SmartTV-Viewer": "false",
25 | "CloudFront-Is-Tablet-Viewer": "false",
26 | "CloudFront-Viewer-Country": "US",
27 | "Host": "1234567890.execute-api.us-east-1.amazonaws.com",
28 | "Upgrade-Insecure-Requests": "1",
29 | "User-Agent": "Custom User Agent String",
30 | "Via": "1.1 08f323deadbeefa7af34d5feb414ce27.cloudfront.net (CloudFront)",
31 | "X-Amz-Cf-Id": "cDehVQoZnx43VYQb9j2-nvCh-9z396Uhbp027Y2JvkCPNLmGJHqlaA==",
32 | "X-Forwarded-For": "127.0.0.1, 127.0.0.2",
33 | "X-Forwarded-Port": "443",
34 | "X-Forwarded-Proto": "https"
35 | },
36 | "requestContext": {
37 | "accountId": "123456789012",
38 | "resourceId": "123456",
39 | "stage": "prod",
40 | "requestId": "c6af9ac6-7b61-11e6-9a41-93e8deadbeef",
41 | "requestTime": "09/Apr/2015:12:34:56 +0000",
42 | "requestTimeEpoch": 1428582896000,
43 | "identity": {
44 | "cognitoIdentityPoolId": null,
45 | "accountId": null,
46 | "cognitoIdentityId": null,
47 | "caller": null,
48 | "accessKey": null,
49 | "sourceIp": "127.0.0.1",
50 | "cognitoAuthenticationType": null,
51 | "cognitoAuthenticationProvider": null,
52 | "userArn": null,
53 | "userAgent": "Custom User Agent String",
54 | "user": null
55 | },
56 | "path": "/prod/path/to/resource",
57 | "resourcePath": "/{proxy+}",
58 | "httpMethod": "POST",
59 | "apiId": "1234567890",
60 | "protocol": "HTTP/1.1"
61 | }
62 | }
63 |
--------------------------------------------------------------------------------
/src/ees/handlers/analysis_projector.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 |
4 | from ees.commands import FetchGlobalChangesets
5 | from ees.model import AnalysisState
6 |
7 | logger = logging.getLogger('ees.handlers.analysis_projector')
8 |
9 | class AnalysisProjector(object):
10 | def __init__(self, db, global_changesets_handler):
11 | self.db = db
12 | self.global_changesets_handler = global_changesets_handler
13 | self.query_limit = 1000
14 |
15 | def execute(self):
16 | logger.info(f"Analysis projection strated.")
17 | prev_state = self.db.get_analysis_state()
18 |
19 | new_total_streams = prev_state.total_streams
20 | new_total_changesets = prev_state.total_changesets
21 | new_total_events = prev_state.total_events
22 | new_max_stream_length = prev_state.max_stream_length
23 | new_version = prev_state.version
24 |
25 | new_changesets = self.global_changesets_handler.execute(
26 | FetchGlobalChangesets(new_version, self.query_limit)
27 | )
28 |         while new_changesets.body["changesets"]:
29 |             changesets = new_changesets.body["changesets"]
30 |             for c in changesets:
31 |                 if c["changeset_id"] == 1:
32 |                     new_total_streams += 1
33 |                 new_total_changesets += 1
34 |                 if c["changeset_id"] > new_max_stream_length:
35 |                     new_max_stream_length = c["changeset_id"]
36 |                 new_total_events += len(c["events"])
37 |             new_version = new_changesets.body["next_checkpoint"]
38 |
39 |             new_changesets = self.global_changesets_handler.execute(
40 |                 FetchGlobalChangesets(new_version, self.query_limit)
41 |             )
42 |
43 |         self.db.set_analysis_state(AnalysisState(
44 |             total_streams=new_total_streams,
45 |             total_changesets=new_total_changesets,
46 |             total_events=new_total_events,
47 |             max_stream_length=new_max_stream_length,
48 |             version=new_version
49 |         ), prev_state.version)
50 |
51 |         logger.info("Finished projecting new state.")
52 |
--------------------------------------------------------------------------------
/src/lambda_entrypoint.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import logging
4 | from ees.app import route_request
5 | from ees.handlers.analysis_projector import AnalysisProjector
6 | from ees.handlers.global_changesets import FetchGlobalChangesetsHandler
7 | from ees.handlers.publisher import Publisher
8 | from ees.infrastructure.aws_lambda import event_to_command, parse_dynamodb_new_records
9 | from ees.infrastructure.dynamodb import DynamoDB
10 | from ees.infrastructure.sns import SNS
11 | from ees.model import Response
12 |
13 |
14 | logger = logging.getLogger("ees.entrypoint")
15 |
16 | def request_handler(event, context):
17 | logger.info(f"Processing incoming event: {event}")
18 | pretty_print = True
19 | parsed_event = event_to_command(event, context)
20 | logger.debug(f"Event was parsed to: {parsed_event}")
21 | if isinstance(parsed_event, Response):
22 | return render(parsed_event, pretty_print)
23 |
24 | handler = route_request(parsed_event)
25 | response = handler.execute(parsed_event)
26 | return render(response, pretty_print)
27 |
28 | def render(response, pretty_print):
29 | logger.debug(f"Rendering response: {response}")
30 | return {
31 | "statusCode": response.http_status,
32 | "body": json.dumps(response.body, indent=4 if pretty_print else None),
33 | }
34 |
35 | def indexer(event, context):
36 | logger.info(f"Processing incoming event: {event}")
37 | parsed_event = event_to_command(event, context)
38 | logger.debug(f"Event was parsed to: {parsed_event}")
39 | handler = route_request(parsed_event)
40 | handler.execute(parsed_event)
41 |
42 | def publisher(event, context):
43 | logger.info(f"Processing incoming event: {event}")
44 | changesets = parse_dynamodb_new_records(event, context)
45 | logger.debug(f"Event was parsed to: {changesets}")
46 |
47 | changesets_topic_arn = os.getenv('ChangesetsTopic')
48 | events_topic_arn = os.getenv('EventsTopic')
49 | changesets_topic = SNS(changesets_topic_arn)
50 | events_topic = SNS(events_topic_arn)
51 | p = Publisher(changesets_topic, events_topic)
52 | p.publish(changesets)
53 |
54 | def analysis_projector(event, context):
55 | events_table = os.getenv('EventStoreTable')
56 | analysis_table = os.getenv('AnalysisTable')
57 | db = DynamoDB(events_table, analysis_table)
58 | global_changesets_handler = FetchGlobalChangesetsHandler(db)
59 | projector = AnalysisProjector(db, global_changesets_handler)
60 | projector.execute()
61 |
--------------------------------------------------------------------------------
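render() adapts a handler's Response to the API Gateway Lambda proxy integration shape. A quick illustration using the version endpoint's response:

    render(Response(http_status=200, body={"version": "0.0.1"}), True)
    # -> {'statusCode': 200, 'body': '{\n    "version": "0.0.1"\n}'}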
/src/tests/unit/test_publishing.py:
--------------------------------------------------------------------------------
1 | import json
2 | from unittest import TestCase
3 | from unittest.mock import Mock, call
4 |
5 | from .context import ees
6 | from ees.handlers.publisher import Publisher
7 | from ees.infrastructure.aws_lambda import parse_dynamodb_new_records
8 |
9 | class TestPublishing(TestCase):
10 | def __init__(self, x):
11 | with open('src/tests/unit/events.json') as f:
12 | self.sample_events = json.load(f)
13 | TestCase.__init__(self, x)
14 |
15 | def test_publishing(self):
16 | event = self.load_event('AssignGlobalIndex')
17 | events_topic = Mock()
18 | changesets_topic = Mock()
19 |
20 | p = Publisher(changesets_topic, events_topic)
21 | changesets = parse_dynamodb_new_records(event, None)
22 |
23 | p.publish(changesets)
24 |
25 | changesets_topic.assert_has_calls([
26 | call.publish('{"stream_id": "99038933-e620-444d-9033-4128254f0cbd", "changeset_id": 2, "events": [{"type": "init", "foo": "bar"}, {"type": "update", "foo": "baz"}], "metadata": {"timestamp": "123123", "command_id": "456346234", "issued_by": "test@test.com"}}', '8ebf57ca0228236805c448931bc9f2d8def48fff0380a57d13701091'),
27 | call.publish('{"stream_id": "206bc1ed-8e67-4a64-a596-8b32c0c20a97", "changeset_id": 1, "events": [{"type": "init", "foo": "bar"}, {"type": "update", "foo": "baz"}], "metadata": {"timestamp": "123123", "command_id": "456346234", "issued_by": "test@test.com"}}', '4520eff932295c3ca621d2a8fb018a7a82ddfde0fc86ae8538ea8524')
28 | ])
29 |
30 | events_topic.assert_has_calls([
31 | call.publish('{"stream_id": "99038933-e620-444d-9033-4128254f0cbd", "changeset_id": 2, "event_id": 3, "data": {"type": "init", "foo": "bar"}}', '8ebf57ca0228236805c448931bc9f2d8def48fff0380a57d13701091'),
32 | call.publish('{"stream_id": "99038933-e620-444d-9033-4128254f0cbd", "changeset_id": 2, "event_id": 4, "data": {"type": "update", "foo": "baz"}}', '8ebf57ca0228236805c448931bc9f2d8def48fff0380a57d13701091'),
33 | call.publish('{"stream_id": "206bc1ed-8e67-4a64-a596-8b32c0c20a97", "changeset_id": 1, "event_id": 1, "data": {"type": "init", "foo": "bar"}}', '4520eff932295c3ca621d2a8fb018a7a82ddfde0fc86ae8538ea8524'),
34 | call.publish('{"stream_id": "206bc1ed-8e67-4a64-a596-8b32c0c20a97", "changeset_id": 1, "event_id": 2, "data": {"type": "update", "foo": "baz"}}', '4520eff932295c3ca621d2a8fb018a7a82ddfde0fc86ae8538ea8524')
35 | ])
36 |
37 | def load_event(self, name):
38 | return self.sample_events[name]
--------------------------------------------------------------------------------
/src/tests/unit/test_checkmark_calc.py:
--------------------------------------------------------------------------------
1 | from .context import ees
2 | from ees.model import CheckpointCalc
3 |
4 | def test_increment_page_item(mocker):
5 | c = CheckpointCalc()
6 |
7 | (page, page_item) = c.next_page_and_item(0, 0)
8 |
9 | assert page == 0
10 | assert page_item == 1
11 |
12 | def test_increment_page_item2(mocker):
13 | c = CheckpointCalc()
14 |
15 | (page, page_item) = c.next_page_and_item(100, 10)
16 |
17 | assert page == 100
18 | assert page_item == 11
19 |
20 | def test_increment_page_item_over_page_size(mocker):
21 | c = CheckpointCalc()
22 | c.page_size = 100
23 |
24 | (page, page_item) = c.next_page_and_item(0, 99)
25 |
26 | assert page == 1
27 | assert page_item == 0
28 |
29 |
30 | def test_increment_page_item_over_page_size2(mocker):
31 | c = CheckpointCalc()
32 | c.page_size = 100
33 |
34 | (page, page_item) = c.next_page_and_item(100, 99)
35 |
36 | assert page == 101
37 | assert page_item == 0
38 |
39 |
40 | def test_page_item_to_checkpoint1(mocker):
41 | c = CheckpointCalc()
42 | c.page_size = 100
43 |
44 | checkpoint = c.to_checkpoint(0, 0)
45 |
46 | assert checkpoint == 0
47 |
48 | def test_page_item_to_checkpoint2(mocker):
49 | c = CheckpointCalc()
50 | c.page_size = 100
51 |
52 | checkpoint = c.to_checkpoint(0, 52)
53 |
54 | assert checkpoint == 52
55 |
56 | def test_page_item_to_checkpoint3(mocker):
57 | c = CheckpointCalc()
58 | c.page_size = 100
59 |
60 | checkpoint = c.to_checkpoint(12, 0)
61 |
62 | assert checkpoint == 1200
63 |
64 | def test_page_item_to_checkpoint4(mocker):
65 | c = CheckpointCalc()
66 | c.page_size = 100
67 |
68 | checkpoint = c.to_checkpoint(12, 52)
69 |
70 | assert checkpoint == 1252
71 |
72 | def test_checkpoint_to_page_item1(mocker):
73 | c = CheckpointCalc()
74 | c.page_size = 100
75 |
76 | (p, i) = c.to_page_item(0)
77 |
78 | assert p == 0
79 | assert i == 0
80 |
81 | def test_checkpoint_to_page_item2(mocker):
82 | c = CheckpointCalc()
83 | c.page_size = 100
84 |
85 | (p, i) = c.to_page_item(52)
86 |
87 | assert p == 0
88 | assert i == 52
89 |
90 | def test_checkpoint_to_page_item3(mocker):
91 | c = CheckpointCalc()
92 | c.page_size = 100
93 |
94 | (p, i) = c.to_page_item(1200)
95 |
96 | assert p == 12
97 | assert i == 0
98 |
99 | def test_checkpoint_to_page_item4(mocker):
100 | c = CheckpointCalc()
101 | c.page_size = 100
102 |
103 | (p, i) = c.to_page_item(1252)
104 |
105 | assert p == 12
106 | assert i == 52
--------------------------------------------------------------------------------
/src/ees/model.py:
--------------------------------------------------------------------------------
1 | from collections import namedtuple
2 |
3 | CommitData = namedtuple(
4 | 'CommitData',
5 | ['stream_id',
6 | 'changeset_id',
7 | 'metadata',
8 | 'events',
9 | 'first_event_id',
10 | 'last_event_id',
11 | 'page',
12 | 'page_item'])
13 |
14 | GlobalCounter = namedtuple(
15 | 'GlobalCounter',
16 | ['page',
17 | 'page_item',
18 | 'prev_stream_id',
19 | 'prev_changeset_id'])
20 |
21 | GlobalIndex = namedtuple(
22 | 'GlobalIndex',
23 | ['stream_id',
24 | 'changeset_id',
25 | 'page',
26 | 'page_item'])
27 |
28 | Response = namedtuple(
29 | 'Response',
30 | ['http_status',
31 | 'body'])
32 |
33 | AnalysisState = namedtuple(
34 | 'AnalysisState',
35 | ['total_streams',
36 | 'total_changesets',
37 | 'total_events',
38 | 'max_stream_length',
39 | 'version'])
40 |
41 | def make_initial_commit(stream_id, events, metadata={}):
42 | return CommitData(
43 | stream_id=stream_id,
44 | changeset_id=1,
45 | metadata=metadata,
46 | events=events,
47 | first_event_id=1,
48 | last_event_id=len(events),
49 | page=None,
50 | page_item=None
51 | )
52 |
53 | def make_next_commit(prev_commit, events, metadata={}):
54 | return CommitData(
55 | stream_id=prev_commit.stream_id,
56 | changeset_id=prev_commit.changeset_id + 1,
57 | metadata=metadata,
58 | events=events,
59 | first_event_id=prev_commit.last_event_id + 1,
60 | last_event_id=prev_commit.last_event_id + len(events),
61 | page=None,
62 | page_item=None
63 | )
64 |
65 |
66 | class ConcurrencyException(Exception):
67 | def __init__(self, stream_id, changeset_id):
68 | self.stream_id = stream_id
69 | self.changeset_id = changeset_id
70 |
71 |
72 | class CheckpointCalc(object):
73 |     # The value is hardcoded because it's not meant to be changed:
74 |     # changing the page size requires rebuilding the index for the
75 |     # whole table, which is currently not implemented.
76 | page_size = 1000
77 |
78 | def next_page_and_item(self, page, page_item):
79 | prev_page = page
80 | prev_page_item = page_item
81 | new_page = prev_page
82 | new_page_item = prev_page_item + 1
83 | if new_page_item >= self.page_size:
84 | new_page += 1
85 | new_page_item = 0
86 | return (new_page, new_page_item)
87 |
88 | def to_checkpoint(self, page, page_item):
89 | return page * self.page_size + page_item
90 |
91 | def to_page_item(self, checkpoint):
92 | p = checkpoint // self.page_size
93 | i = checkpoint % self.page_size
94 | return (p, i)
95 |
96 |
--------------------------------------------------------------------------------
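The checkpoint arithmetic maps a flat checkpoint number to (page, page_item) coordinates and back. A quick worked example with the hardcoded page_size of 1000:

    calc = CheckpointCalc()
    calc.to_checkpoint(12, 52)       # 12 * 1000 + 52 == 12052
    calc.to_page_item(12052)         # (12, 52)
    calc.next_page_and_item(0, 999)  # rolls over to the next page: (1, 0)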
/src/tests/integration/api_test_client.py:
--------------------------------------------------------------------------------
1 | import boto3
2 | import os
3 | import requests
4 |
5 |
6 | class ApiTestClient():
7 | api_endpoint: str
8 |
9 | some_metadata = {
10 | 'timestamp': '123123',
11 | 'command_id': '456346234',
12 | 'issued_by': 'test@test.com'
13 | }
14 |
15 | some_events = [
16 | { "type": "init", "foo": "bar" },
17 | { "type": "update", "foo": "baz" },
18 | ]
19 |
20 | def __init__(self, sam_stack_name=None):
21 | if not sam_stack_name:
22 | sam_stack_name = os.environ.get("AWS_SAM_STACK_NAME")
23 | if not sam_stack_name:
24 | raise Exception(
25 | "Cannot find env var AWS_SAM_STACK_NAME. \n"
26 | "Please setup this environment variable with the stack name where we are running integration tests."
27 | )
28 |
29 | client = boto3.client("cloudformation")
30 |
31 | try:
32 | response = client.describe_stacks(StackName=sam_stack_name)
33 | except Exception as e:
34 | raise Exception(
35 | f"Cannot find stack {sam_stack_name}. \n" f'Please make sure stack with the name "{sam_stack_name}" exists.'
36 | ) from e
37 |
38 | stacks = response["Stacks"]
39 |
40 | stack_outputs = stacks[0]["Outputs"]
41 | api_outputs = [output for output in stack_outputs if output["OutputKey"] == "ApiEndpoint"]
42 | if not api_outputs:
43 | raise Exception(f"Cannot find output ApiEndpoint in stack {sam_stack_name}")
44 |
45 | self.api_endpoint = api_outputs[0]["OutputValue"] + '/'
46 |
47 | def commit(self, stream_id, events=None, metadata=None, last_changeset_id=None, last_event_id=None):
48 | expected_last_changeset = ""
49 | expected_last_event = ""
50 |
51 | if last_changeset_id is not None:
52 | try:
53 | expected_last_changeset = int(last_changeset_id)
54 | except ValueError:
55 | expected_last_changeset = last_changeset_id
56 |
57 | if last_event_id is not None:
58 | try:
59 | expected_last_event = int(last_event_id)
60 | except ValueError:
61 | expected_last_event = last_event_id
62 |
63 | url = self.api_endpoint + f'streams/{stream_id}?expected_last_changeset={expected_last_changeset}&expected_last_event={expected_last_event}'
64 |
65 | payload = { }
66 | if events:
67 | payload["events"] = events
68 | if metadata:
69 | payload["metadata"] = metadata
70 |
71 | return requests.post(url, json=payload)
72 |
73 | def query_changesets(self, stream_id, from_changeset=None, to_changeset=None):
74 | url = self.api_endpoint + f'streams/{stream_id}/changesets?&from={from_changeset or ""}&to={to_changeset or ""}'
75 | return requests.get(url)
76 |
77 | def query_events(self, stream_id, from_event=None, to_event=None):
78 | url = self.api_endpoint + f'streams/{stream_id}/events?&from={from_event or ""}&to={to_event or ""}'
79 | return requests.get(url)
80 |
81 | def version(self):
82 | return requests.get(self.api_endpoint + "version")
--------------------------------------------------------------------------------
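A hypothetical usage sketch of the test client (the stream id is illustrative; per CommitHandler, an expected last changeset of 0 starts a new stream):

    client = ApiTestClient()  # resolves the API endpoint from the stack outputs
    r = client.commit("stream-1", events=client.some_events,
                      metadata=client.some_metadata, last_changeset_id=0)
    assert r.status_code == 200
    r = client.query_changesets("stream-1", from_changeset=1)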
/src/ees/handlers/global_indexer.py:
--------------------------------------------------------------------------------
1 | from ees.model import ConcurrencyException, GlobalCounter, GlobalIndex, CheckpointCalc
2 | import logging
3 |
4 |
5 | logger = logging.getLogger("ees.handlers.global_indexer")
6 |
7 | # The algorithm (R = read step, W = write step) for assigning a global index
8 | # to a changeset:
9 | # R  If the changeset already has a global index - return
10 | # R  If the previous changeset in the stream has no global index,
11 | #    assign a global index to the stream's previous changeset first
12 | # R  Get the last assigned global index counter value
13 | # R  If the last assigned index wasn't written to its changeset,
14 | # W  write the last assigned index value to that changeset (repair)
15 | # W  Increment the counter and assign the new value to the changeset
16 | # W  Write the new global index to the changeset
17 |
18 |
19 | class GlobalIndexer:
20 | def __init__(self, db):
21 | self.db = db
22 | self.checkpoint_calc = CheckpointCalc()
23 |
24 | def execute(self, cmd):
25 | for c in cmd.changesets:
26 | self.assign_global_index(c["stream_id"], c["changeset_id"])
27 |
28 | def assign_global_index(self, stream_id, changeset_id):
29 | logger.info(f"Assign global index to {stream_id}/{changeset_id}")
30 | g_ind = self.db.get_global_index_value(stream_id, changeset_id)
31 |         if g_ind.page is not None and g_ind.page_item is not None:
32 | logger.debug("The changeset already has an assigned global index")
33 | return
34 |
35 | self.ensure_prev_changeset_has_global_index(stream_id, changeset_id)
36 |
37 | last_assigned_index = self.db.get_global_counter()
38 | logger.debug(f"Current global counter: {last_assigned_index}")
39 | self.ensure_index_committed(last_assigned_index)
40 |
41 | if last_assigned_index.prev_stream_id != stream_id or \
42 | last_assigned_index.prev_changeset_id != changeset_id:
43 | new_counter_value = self.increment_counter(stream_id, changeset_id, last_assigned_index)
44 | new_global_index = GlobalIndex(stream_id,
45 | changeset_id,
46 | new_counter_value.page,
47 | new_counter_value.page_item)
48 | self.db.set_global_index(new_global_index)
49 | logger.debug(f"Global index value set for {stream_id}/{changeset_id}: {new_global_index}")
50 |
51 | def ensure_prev_changeset_has_global_index(self, stream_id, changeset_id):
52 | if changeset_id > 1:
53 | prev_changeset_id = changeset_id - 1
54 | logger.debug(f"First have to ensure that the prev changeset has a global index({stream_id}/{prev_changeset_id})")
55 | self.assign_global_index(stream_id, prev_changeset_id)
56 |
57 | def ensure_index_committed(self, index):
58 | if not index.prev_stream_id:
59 | return
60 |
61 | changeset_index = self.db.get_global_index_value(index.prev_stream_id, index.prev_changeset_id)
62 | if not changeset_index:
63 | return
64 |
65 | if changeset_index.page is None or changeset_index.page_item is None:
66 | logger.info("The previous assigned index was not written. Repairing.")
67 | fixed_index = GlobalIndex(changeset_index.stream_id,
68 | changeset_index.changeset_id,
69 | index.page,
70 | index.page_item)
71 |
72 | self.db.set_global_index(fixed_index)
73 |
74 | def increment_counter(self, stream_id, changeset_id, prev_counter):
75 | (p, i) = self.checkpoint_calc.next_page_and_item(prev_counter.page,
76 | prev_counter.page_item)
77 | new_counter = GlobalCounter(p, i, stream_id, changeset_id)
78 | self.db.update_global_counter(prev_counter, new_counter)
79 | logger.debug(f"Counter increased from {prev_counter} to {new_counter}")
80 | return new_counter
81 |
82 |
83 |
--------------------------------------------------------------------------------
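To illustrate the repair path, consider a hypothetical crash scenario (all values are illustrative): the global counter was already incremented for changeset s1/3, but the process died before the index row was written. On the next run, ensure_index_committed() re-derives the missing row from the counter:

    counter = GlobalCounter(page=0, page_item=5,
                            prev_stream_id="s1", prev_changeset_id=3)
    orphan = GlobalIndex("s1", 3, None, None)  # the write that never happened
    # ensure_index_committed(counter) detects the None page and repairs it:
    repaired = GlobalIndex("s1", 3, counter.page, counter.page_item)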
/src/ees/handlers/commit.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from ees.model import make_initial_commit, make_next_commit, ConcurrencyException, Response
3 |
4 | logger = logging.getLogger("ees.handlers.commit")
5 |
6 |
7 | class CommitHandler:
8 | def __init__(self, db):
9 | self.db = db
10 |
11 | def execute(self, cmd):
12 | logger.debug(f'expected last changeset id {cmd.expected_last_changeset}')
13 | logger.debug(f'expected last event id {cmd.expected_last_event}')
14 |
15 | commit = None
16 | if cmd.expected_last_changeset == 0 or cmd.expected_last_event == 0:
17 | commit = make_initial_commit(cmd.stream_id, cmd.events, cmd.metadata)
18 | else:
19 | prev_commit = self.db.fetch_last_commit(cmd.stream_id)
20 | if cmd.expected_last_changeset and \
21 | prev_commit.changeset_id > cmd.expected_last_changeset:
22 | return self.concurrency_exception(cmd.stream_id, cmd.expected_last_changeset, cmd.expected_last_event)
23 |
24 | if cmd.expected_last_changeset and \
25 | prev_commit.changeset_id < cmd.expected_last_changeset:
26 | return self.missing_expected_changeset_exception(cmd.stream_id, 'changeset', cmd.expected_last_changeset, prev_commit.changeset_id)
27 |
28 | if cmd.expected_last_event and \
29 | prev_commit.last_event_id > cmd.expected_last_event:
30 | return self.concurrency_exception(cmd.stream_id, cmd.expected_last_changeset, cmd.expected_last_event)
31 |
32 | if cmd.expected_last_event and \
33 | prev_commit.last_event_id < cmd.expected_last_event:
34 | return self.missing_expected_changeset_exception(cmd.stream_id, 'event', cmd.expected_last_event, prev_commit.last_event_id)
35 |
36 | commit = make_next_commit(prev_commit, cmd.events, cmd.metadata)
37 |
38 | try:
39 | self.db.append(commit)
40 | except ConcurrencyException:
41 | return self.concurrency_exception(cmd.stream_id, cmd.expected_last_changeset, cmd.expected_last_event)
42 |
43 | return Response(
44 | http_status=200,
45 | body={
46 | "stream_id": commit.stream_id,
47 | "changeset_id": commit.changeset_id
48 | })
49 |
50 | def concurrency_exception(self, stream_id, expected_last_changeset, expected_last_event):
51 | lock_by = None
52 | lock_value = None
53 | forthcoming_changesets = None
54 |
55 | if expected_last_changeset:
56 | lock_by = "changeset"
57 | lock_value = expected_last_changeset
58 | forthcoming_changesets = self.db.fetch_stream_changesets(
59 | stream_id,
60 | from_changeset=expected_last_changeset + 1)
61 |
62 | if expected_last_event:
63 | lock_by = "event"
64 | lock_value = expected_last_event
65 | forthcoming_changesets = self.db.fetch_stream_by_events(
66 | stream_id,
67 | from_event=expected_last_event + 1)
68 |
69 | forthcoming_changesets = [{
70 | "changeset_id": c.changeset_id,
71 | "events": c.events,
72 | "metadata": c.metadata
73 | } for c in forthcoming_changesets]
74 |
75 | return Response(
76 | http_status=409,
77 | body={
78 | "stream_id": stream_id,
79 | "error": "OPTIMISTIC_CONCURRENCY_EXCEPTION",
80 | "forthcoming_changesets": forthcoming_changesets,
81 | "message": f'The expected last {lock_by} ({lock_value}) is outdated, review the {lock_by}(s) appended after it.'
82 | })
83 |
84 | def missing_expected_changeset_exception(self, stream_id, lock_type, last_changeset, last_known):
85 | return Response(
86 | http_status=400,
87 | body={
88 | "stream_id": stream_id,
89 | "error": "INVALID_EXPECTED_CHANGESET_ID",
90 | "message": f'The specified expected {lock_type}({last_changeset}) doesn\'t exist. The "{stream_id}" stream\'s most recent {lock_type} is {last_known}.'
91 | })
--------------------------------------------------------------------------------
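A hedged sketch of the optimistic-concurrency contract using the integration test client shown earlier (stream id and payload are illustrative): a writer holding an outdated expected version gets a 409 with the changesets it missed, and can rebase and retry:

    r = client.commit("stream-1", events=[{"type": "update"}],
                      last_changeset_id=1)  # but changeset 2 already exists
    assert r.status_code == 409
    missed = r.json()["forthcoming_changesets"]  # review these, then retry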
/src/tests/unit/test_projecting_analysis.py:
--------------------------------------------------------------------------------
1 | import json
2 | from unittest import TestCase
3 | from unittest.mock import Mock, call
4 |
5 | from .context import ees
6 | from ees.commands import FetchGlobalChangesets
7 | from ees.handlers.analysis_projector import AnalysisProjector
8 | from ees.model import Response, AnalysisState
9 |
10 | class TestProjectingAnalysisModel(TestCase):
11 | def test_init_new_projection(self):
12 | def global_changesets_return_values(cmd):
13 | if cmd.checkpoint == 0:
14 | return Response(
15 | http_status=200,
16 | body={
17 | "checkpoint": 0,
18 | "limit": 10,
19 | "changesets": [
20 | {
21 | "stream_id": "stream1",
22 | "changeset_id": 1,
23 | "events": [
24 | { "type": "init" },
25 | { "type": "set" }
26 | ],
27 | "metadata": { },
28 | "checkpoint": 0
29 | }, {
30 | "stream_id": "stream2",
31 | "changeset_id": 1,
32 | "events": [
33 | { "type": "init" },
34 | { "type": "set" },
35 | { "type": "update" }
36 | ],
37 | "metadata": { },
38 | "checkpoint": 1
39 | }, {
40 | "stream_id": "stream1",
41 | "changeset_id": 2,
42 | "events": [
43 | { "type": "modify" },
44 | { "type": "delete" }
45 | ],
46 | "metadata": { },
47 | "checkpoint": 2
48 | }
49 | ],
50 | "next_checkpoint": 3
51 | })
52 | return Response(
53 | http_status=200,
54 | body={
55 | "checkpoint": 3,
56 | "limit": 10,
57 | "changesets": [],
58 | "next_checkpoint": 3
59 | })
60 |
61 | dynamo_db = Mock()
62 | global_changesets_endpoint = Mock()
63 | global_changesets_endpoint.execute.side_effect = global_changesets_return_values
64 |
65 | dynamo_db.get_analysis_state.return_value = AnalysisState(
66 | total_streams=0,
67 | total_changesets=0,
68 | total_events=0,
69 | max_stream_length=0,
70 | version=0
71 | )
72 |
73 | p = AnalysisProjector(dynamo_db, global_changesets_endpoint)
74 | p.query_limit = 10
75 | p.execute()
76 |
77 | dynamo_db.set_analysis_state.assert_called_with(AnalysisState(
78 | total_streams=2,
79 | total_changesets=3,
80 | total_events=7,
81 | max_stream_length=2,
82 | version=3
83 | ), 0)
84 |
85 | def test_update_projection(self):
86 | def global_changesets_return_values(cmd):
87 | if cmd.checkpoint == 3:
88 | return Response(
89 | http_status=200,
90 | body={
91 | "checkpoint": 0,
92 | "limit": 10,
93 | "changesets": [
94 | {
95 | "stream_id": "stream3",
96 | "changeset_id": 1,
97 | "events": [
98 | { "type": "init" },
99 | { "type": "set" }
100 | ],
101 | "metadata": { },
102 | "checkpoint": 3
103 | }, {
104 | "stream_id": "stream3",
105 | "changeset_id": 2,
106 | "events": [
107 | { "type": "init" },
108 | { "type": "set" },
109 | { "type": "update" }
110 | ],
111 | "metadata": { },
112 | "checkpoint": 4
113 | }, {
114 | "stream_id": "stream3",
115 | "changeset_id": 3,
116 | "events": [
117 | { "type": "modify" },
118 | { "type": "delete" }
119 | ],
120 | "metadata": { },
121 | "checkpoint": 5
122 | }
123 | ],
124 | "next_checkpoint": 6
125 | })
126 | return Response(
127 | http_status=200,
128 | body={
129 | "checkpoint": 6,
130 | "limit": 10,
131 | "changesets": [],
132 | "next_checkpoint": 6
133 | })
134 |
135 | dynamo_db = Mock()
136 | global_changesets_endpoint = Mock()
137 | global_changesets_endpoint.execute.side_effect = global_changesets_return_values
138 |
139 | dynamo_db.get_analysis_state.return_value = AnalysisState(
140 | total_streams=2,
141 | total_changesets=3,
142 | total_events=7,
143 | max_stream_length=2,
144 | version=3
145 | )
146 |
147 | p = AnalysisProjector(dynamo_db, global_changesets_endpoint)
148 | p.query_limit = 10
149 | p.execute()
150 |
151 | dynamo_db.set_analysis_state.assert_called_with(AnalysisState(
152 | total_streams=3,
153 | total_changesets=6,
154 | total_events=14,
155 | max_stream_length=3,
156 | version=6
157 | ), 3)
158 |
159 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 |
2 | # Created by https://www.gitignore.io/api/osx,linux,python,windows,pycharm,visualstudiocode
3 |
4 | ### Linux ###
5 | *~
6 |
7 | # temporary files which can be created if a process still has a handle open of a deleted file
8 | .fuse_hidden*
9 |
10 | # KDE directory preferences
11 | .directory
12 |
13 | # Linux trash folder which might appear on any partition or disk
14 | .Trash-*
15 |
16 | # .nfs files are created when an open file is removed but is still being accessed
17 | .nfs*
18 |
19 | ### OSX ###
20 | *.DS_Store
21 | .AppleDouble
22 | .LSOverride
23 |
24 | # Icon must end with two \r
25 | Icon
26 |
27 | # Thumbnails
28 | ._*
29 |
30 | # Files that might appear in the root of a volume
31 | .DocumentRevisions-V100
32 | .fseventsd
33 | .Spotlight-V100
34 | .TemporaryItems
35 | .Trashes
36 | .VolumeIcon.icns
37 | .com.apple.timemachine.donotpresent
38 |
39 | # Directories potentially created on remote AFP share
40 | .AppleDB
41 | .AppleDesktop
42 | Network Trash Folder
43 | Temporary Items
44 | .apdisk
45 |
46 | ### PyCharm ###
47 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm
48 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
49 |
50 | # User-specific stuff:
51 | .idea/**/workspace.xml
52 | .idea/**/tasks.xml
53 | .idea/dictionaries
54 |
55 | # Sensitive or high-churn files:
56 | .idea/**/dataSources/
57 | .idea/**/dataSources.ids
58 | .idea/**/dataSources.xml
59 | .idea/**/dataSources.local.xml
60 | .idea/**/sqlDataSources.xml
61 | .idea/**/dynamic.xml
62 | .idea/**/uiDesigner.xml
63 |
64 | # Gradle:
65 | .idea/**/gradle.xml
66 | .idea/**/libraries
67 |
68 | # CMake
69 | cmake-build-debug/
70 |
71 | # Mongo Explorer plugin:
72 | .idea/**/mongoSettings.xml
73 |
74 | ## File-based project format:
75 | *.iws
76 |
77 | ## Plugin-specific files:
78 |
79 | # IntelliJ
80 | /out/
81 |
82 | # mpeltonen/sbt-idea plugin
83 | .idea_modules/
84 |
85 | # JIRA plugin
86 | atlassian-ide-plugin.xml
87 |
88 | # Cursive Clojure plugin
89 | .idea/replstate.xml
90 |
91 | # Ruby plugin and RubyMine
92 | /.rakeTasks
93 |
94 | # Crashlytics plugin (for Android Studio and IntelliJ)
95 | com_crashlytics_export_strings.xml
96 | crashlytics.properties
97 | crashlytics-build.properties
98 | fabric.properties
99 |
100 | ### PyCharm Patch ###
101 | # Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721
102 |
103 | # *.iml
104 | # modules.xml
105 | # .idea/misc.xml
106 | # *.ipr
107 |
108 | # Sonarlint plugin
109 | .idea/sonarlint
110 |
111 | ### Python ###
112 | # Byte-compiled / optimized / DLL files
113 | __pycache__/
114 | *.py[cod]
115 | *$py.class
116 |
117 | # C extensions
118 | *.so
119 |
120 | # Distribution / packaging
121 | .Python
122 | build/
123 | develop-eggs/
124 | dist/
125 | downloads/
126 | eggs/
127 | .eggs/
128 | lib/
129 | lib64/
130 | parts/
131 | sdist/
132 | var/
133 | wheels/
134 | *.egg-info/
135 | .installed.cfg
136 | *.egg
137 |
138 | # PyInstaller
139 | # Usually these files are written by a python script from a template
140 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
141 | *.manifest
142 | *.spec
143 |
144 | # Installer logs
145 | pip-log.txt
146 | pip-delete-this-directory.txt
147 |
148 | # Unit test / coverage reports
149 | htmlcov/
150 | .tox/
151 | .coverage
152 | .coverage.*
153 | .cache
154 | .pytest_cache/
155 | nosetests.xml
156 | coverage.xml
157 | *.cover
158 | .hypothesis/
159 |
160 | # Translations
161 | *.mo
162 | *.pot
163 |
164 | # Flask stuff:
165 | instance/
166 | .webassets-cache
167 |
168 | # Scrapy stuff:
169 | .scrapy
170 |
171 | # Sphinx documentation
172 | docs/_build/
173 |
174 | # PyBuilder
175 | target/
176 |
177 | # Jupyter Notebook
178 | .ipynb_checkpoints
179 |
180 | # pyenv
181 | .python-version
182 |
183 | # celery beat schedule file
184 | celerybeat-schedule.*
185 |
186 | # SageMath parsed files
187 | *.sage.py
188 |
189 | # Environments
190 | .env
191 | .venv
192 | env/
193 | venv/
194 | ENV/
195 | env.bak/
196 | venv.bak/
197 |
198 | # Spyder project settings
199 | .spyderproject
200 | .spyproject
201 |
202 | # Rope project settings
203 | .ropeproject
204 |
205 | # mkdocs documentation
206 | /site
207 |
208 | # mypy
209 | .mypy_cache/
210 |
211 | ### VisualStudioCode ###
212 | .vscode/*
213 | !.vscode/settings.json
214 | !.vscode/tasks.json
215 | !.vscode/launch.json
216 | !.vscode/extensions.json
217 | .history
218 |
219 | ### Windows ###
220 | # Windows thumbnail cache files
221 | Thumbs.db
222 | ehthumbs.db
223 | ehthumbs_vista.db
224 |
225 | # Folder config file
226 | Desktop.ini
227 |
228 | # Recycle Bin used on file shares
229 | $RECYCLE.BIN/
230 |
231 | # Windows Installer files
232 | *.cab
233 | *.msi
234 | *.msm
235 | *.msp
236 |
237 | # Windows shortcuts
238 | *.lnk
239 |
240 | # Build folder
241 |
242 | */build/*
243 |
244 | # Byte-compiled / optimized / DLL files
245 | __pycache__/
246 | *.py[cod]
247 | *$py.class
248 |
249 | # C extensions
250 | *.so
251 |
252 | # Distribution / packaging
253 | .Python
254 | build/
255 | develop-eggs/
256 | dist/
257 | downloads/
258 | eggs/
259 | .eggs/
260 | lib/
261 | lib64/
262 | parts/
263 | sdist/
264 | var/
265 | wheels/
266 | pip-wheel-metadata/
267 | share/python-wheels/
268 | *.egg-info/
269 | .installed.cfg
270 | *.egg
271 | MANIFEST
272 |
273 | # PyInstaller
274 | # Usually these files are written by a python script from a template
275 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
276 | *.manifest
277 | *.spec
278 |
279 | # Installer logs
280 | pip-log.txt
281 | pip-delete-this-directory.txt
282 |
283 | # Unit test / coverage reports
284 | htmlcov/
285 | .tox/
286 | .nox/
287 | .coverage
288 | .coverage.*
289 | .cache
290 | nosetests.xml
291 | coverage.xml
292 | *.cover
293 | *.py,cover
294 | .hypothesis/
295 | .pytest_cache/
296 |
297 | # Translations
298 | *.mo
299 | *.pot
300 |
301 | # Django stuff:
302 | *.log
303 | local_settings.py
304 | db.sqlite3
305 | db.sqlite3-journal
306 |
307 | # Flask stuff:
308 | instance/
309 | .webassets-cache
310 |
311 | # Scrapy stuff:
312 | .scrapy
313 |
314 | # Sphinx documentation
315 | docs/_build/
316 |
317 | # PyBuilder
318 | target/
319 |
320 | # Jupyter Notebook
321 | .ipynb_checkpoints
322 |
323 | # IPython
324 | profile_default/
325 | ipython_config.py
326 |
327 | # pyenv
328 | .python-version
329 |
330 | # pipenv
331 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
332 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
333 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
334 | # install all needed dependencies.
335 | #Pipfile.lock
336 |
337 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
338 | __pypackages__/
339 |
340 | # Celery stuff
341 | celerybeat-schedule
342 | celerybeat.pid
343 |
344 | # SageMath parsed files
345 | *.sage.py
346 |
347 | # Environments
348 | .env
349 | .venv
350 | env/
351 | venv/
352 | ENV/
353 | env.bak/
354 | venv.bak/
355 |
356 | # Spyder project settings
357 | .spyderproject
358 | .spyproject
359 |
360 | # Rope project settings
361 | .ropeproject
362 |
363 | # mkdocs documentation
364 | /site
365 |
366 | # mypy
367 | .mypy_cache/
368 | .dmypy.json
369 | dmypy.json
370 |
371 | # Pyre type checker
372 | .pyre/
373 |
374 | # virtual environment
375 | venv/
--------------------------------------------------------------------------------
/src/tests/integration/test_fetch_changesets.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import uuid
3 | from unittest import TestCase
4 | from tests.integration.api_test_client import ApiTestClient
5 |
6 |
7 | @pytest.mark.slow
8 | class TestFetchingChangesets(TestCase):
9 | api = None
10 |
11 | def setUp(self) -> None:
12 | self.api = self.api or ApiTestClient()
13 | return super().setUp()
14 |
15 | def test_fetch_changesets(self):
16 | stream_id = str(uuid.uuid4())
17 |
18 | self.api.commit(
19 | stream_id=stream_id,
20 | last_changeset_id=0,
21 | metadata=self.api.some_metadata,
22 | events=self.api.some_events
23 | )
24 |
25 | response = self.api.query_changesets(stream_id)
26 |
27 | self.assertDictEqual(response.json(), {
28 | "stream_id": stream_id,
29 | "changesets": [
30 | {
31 | "changeset_id": 1,
32 | "metadata": self.api.some_metadata,
33 | "events": self.api.some_events
34 | }
35 | ]
36 | })
37 |
38 |     def test_fetch_from_specific_changeset(self):
39 | stream_id = str(uuid.uuid4())
40 |
41 | self.api.commit(
42 | stream_id=stream_id,
43 | last_changeset_id=0,
44 | metadata={ "metadata": "goes here" },
45 | events=[ { "type": "init" }, { "type": "update" } ]
46 | )
47 |
48 | self.api.commit(
49 | stream_id=stream_id,
50 | last_changeset_id=1,
51 | metadata={ "metadata": "goes here 2" },
52 | events=[ { "type": "update2" }, { "type": "delete" } ]
53 | )
54 |
55 | response = self.api.query_changesets(stream_id, from_changeset=2)
56 |
57 | self.assertDictEqual(response.json(), {
58 | "stream_id": stream_id,
59 | "changesets": [
60 | {
61 | "changeset_id": 2,
62 | "metadata": { "metadata": "goes here 2" },
63 | "events": [ { "type": "update2" }, { "type": "delete" } ]
64 | }
65 | ]
66 | })
67 |
68 |     def test_fetch_to_specific_changeset(self):
69 | stream_id = str(uuid.uuid4())
70 |
71 | self.api.commit(
72 | stream_id=stream_id,
73 | last_changeset_id=0,
74 | metadata={ "metadata": "goes here" },
75 | events=[ { "type": "init" }, { "type": "update" } ]
76 | )
77 |
78 | self.api.commit(
79 | stream_id=stream_id,
80 | last_changeset_id=0,
81 | metadata={ "metadata": "goes here 2" },
82 | events=[ { "type": "update2" }, { "type": "delete" } ]
83 | )
84 |
85 | response = self.api.query_changesets(stream_id, to_changeset=1)
86 |
87 | self.assertDictEqual(response.json(), {
88 | "stream_id": stream_id,
89 | "changesets": [
90 | {
91 | "changeset_id": 1,
92 | "metadata": { "metadata": "goes here" },
93 | "events": [ { "type": "init" }, { "type": "update" } ]
94 | }
95 | ]
96 | })
97 |
98 |     def test_fetch_from_and_to_specific_changesets(self):
99 | stream_id = str(uuid.uuid4())
100 |
101 | self.api.commit(
102 | stream_id=stream_id,
103 | last_changeset_id=0,
104 | metadata={ "metadata": "goes here" },
105 | events=[ { "type": "init" }, { "type": "update" } ]
106 | )
107 |
108 | self.api.commit(
109 | stream_id=stream_id,
110 | last_changeset_id=1,
111 | metadata={ "metadata": "goes here 2" },
112 | events=[ { "type": "update2" } ]
113 | )
114 |
115 | self.api.commit(
116 | stream_id=stream_id,
117 | last_changeset_id=2,
118 | metadata={ "metadata": "goes here 3" },
119 | events=[ { "type": "update3" } ]
120 | )
121 |
122 | self.api.commit(
123 | stream_id=stream_id,
124 | last_changeset_id=3,
125 | metadata={ "metadata": "goes here 4" },
126 | events=[ { "type": "update4" } ]
127 | )
128 |
129 | response = self.api.query_changesets(stream_id, from_changeset=2, to_changeset=3)
130 |
131 | self.assertDictEqual(response.json(), {
132 | "stream_id": stream_id,
133 | "changesets": [
134 | {
135 | "changeset_id": 2,
136 | "metadata": { "metadata": "goes here 2" },
137 | "events": [ { "type": "update2" } ]
138 | }, {
139 | "changeset_id": 3,
140 | "metadata": { "metadata": "goes here 3" },
141 | "events": [ { "type": "update3" } ]
142 | }
143 | ]
144 | })
145 |
146 | def test_invalid_querying_params1(self):
147 | stream_id = str(uuid.uuid4())
148 |
149 | response = self.api.query_changesets(stream_id, from_changeset=3, to_changeset=2)
150 |
151 | assert response.status_code == 400
152 | self.assertDictEqual(response.json(), {
153 | "stream_id": stream_id,
154 | "error": "INVALID_CHANGESET_FILTERING_PARAMS",
155 | "message": 'The higher boundary cannot be lower than the lower boundary: 3(from) > 2(to)'
156 | })
157 |
158 | def test_invalid_querying_params2(self):
159 | stream_id = str(uuid.uuid4())
160 |
161 | response = self.api.query_changesets(stream_id, from_changeset="test")
162 |
163 | assert response.status_code == 400
164 | self.assertDictEqual(response.json(), {
165 | "stream_id": stream_id,
166 | "error": "INVALID_CHANGESET_FILTERING_PARAMS",
167 | "message": 'The filtering params(from, to) have to be positive integer values'
168 | })
169 |
170 | def test_invalid_querying_params3(self):
171 | stream_id = str(uuid.uuid4())
172 |
173 | response = self.api.query_changesets(stream_id, to_changeset="test")
174 |
175 | assert response.status_code == 400
176 | self.assertDictEqual(response.json(), {
177 | "stream_id": stream_id,
178 | "error": "INVALID_CHANGESET_FILTERING_PARAMS",
179 | "message": 'The filtering params(from, to) have to be positive integer values'
180 | })
181 |
182 | def test_no_stream_id(self):
183 | response = self.api.query_changesets("")
184 |
185 | assert response.status_code == 400
186 | self.assertDictEqual(response.json(), {
187 | "error": "MISSING_STREAM_ID",
188 | "message": 'stream_id is a required value'
189 | })
190 |
191 |     def test_fetching_nonexistent_stream(self):
192 |
193 | response = self.api.query_changesets("abcd")
194 |
195 | assert response.status_code == 404
196 | self.assertDictEqual(response.json(), {
197 | "stream_id": "abcd",
198 | "error": "STREAM_NOT_FOUND",
199 |             "message": 'The specified stream(abcd) doesn\'t exist'
200 | })
201 |
202 |     def test_fetch_nonexistent_changesets_in_existing_stream(self):
203 | stream_id = str(uuid.uuid4())
204 |
205 | self.api.commit(
206 | stream_id=stream_id,
207 | last_changeset_id=0,
208 | metadata=self.api.some_metadata,
209 | events=self.api.some_events
210 | )
211 |
212 | response = self.api.query_changesets(stream_id, from_changeset=2)
213 |
214 | self.assertDictEqual(response.json(), {
215 | "stream_id": stream_id,
216 | "changesets": [ ]
217 | })
218 |
219 |
--------------------------------------------------------------------------------
/template.yaml:
--------------------------------------------------------------------------------
1 | AWSTemplateFormatVersion: '2010-09-09'
2 | Transform: AWS::Serverless-2016-10-31
3 | Description: >
4 | Elastic Event Store
5 |
6 | Resources:
7 | EventStoreTable:
8 | Type: AWS::DynamoDB::Table
9 | Properties:
10 | TableName: !Join [ '_', ['ees', 'db', !Ref AWS::StackName] ]
11 | BillingMode: PAY_PER_REQUEST
12 | AttributeDefinitions:
13 | - AttributeName: stream_id
14 | AttributeType: S
15 | - AttributeName: changeset_id
16 | AttributeType: N
17 | - AttributeName: first_event_id
18 | AttributeType: N
19 | - AttributeName: last_event_id
20 | AttributeType: N
21 | - AttributeName: page
22 | AttributeType: N
23 | - AttributeName: page_item
24 | AttributeType: N
25 | KeySchema:
26 | - AttributeName: stream_id
27 | KeyType: HASH
28 | - AttributeName: changeset_id
29 | KeyType: RANGE
30 | LocalSecondaryIndexes:
31 | - IndexName: FirstEventId
32 | KeySchema:
33 | - AttributeName: stream_id
34 | KeyType: HASH
35 | - AttributeName: first_event_id
36 | KeyType: RANGE
37 | Projection:
38 | ProjectionType: ALL
39 | - IndexName: LastEventId
40 | KeySchema:
41 | - AttributeName: stream_id
42 | KeyType: HASH
43 | - AttributeName: last_event_id
44 | KeyType: RANGE
45 | Projection:
46 | ProjectionType: ALL
47 | GlobalSecondaryIndexes:
48 | - IndexName: EmumerationIndex
49 | KeySchema:
50 | - AttributeName: page
51 | KeyType: HASH
52 | - AttributeName: page_item
53 | KeyType: RANGE
54 | Projection:
55 | ProjectionType: ALL
56 | StreamSpecification:
57 | StreamViewType: NEW_IMAGE
58 |
59 | ChangesetsTopic:
60 | Type: AWS::SNS::Topic
61 | Properties:
62 | ContentBasedDeduplication: True
63 | TopicName: !Join [ '_', ['ees', 'changesets', !Ref AWS::StackName, '.fifo'] ]
64 | FifoTopic: True
65 |
66 | EventsTopic:
67 | Type: AWS::SNS::Topic
68 | Properties:
69 | ContentBasedDeduplication: True
70 | TopicName: !Join [ '_', ['ees', 'events', !Ref AWS::StackName, '.fifo'] ]
71 | FifoTopic: True
72 |
73 | CommandHandlerFunction:
74 | Type: AWS::Serverless::Function
75 | Properties:
76 | CodeUri: src
77 | Handler: lambda_entrypoint.request_handler
78 | Runtime: python3.8
79 | Timeout: 60
80 | Environment:
81 | Variables:
82 | EventStoreTable: !Ref EventStoreTable
83 | AnalysisTable: !Ref AnalysisTable
84 | Policies:
85 | - AWSLambdaDynamoDBExecutionRole
86 | - DynamoDBCrudPolicy:
87 | TableName: !Ref EventStoreTable
88 | - DynamoDBCrudPolicy:
89 | TableName: !Ref AnalysisTable
90 | Events:
91 | Version:
92 | Type: Api
93 | Properties:
94 | Path: /version
95 | Method: get
96 | Stats:
97 | Type: Api
98 | Properties:
99 | Path: /streams
100 | Method: get
101 | Commit:
102 | Type: Api
103 | Properties:
104 | Path: /streams/{stream_id}
105 | Method: post
106 | StreamChangesets:
107 | Type: Api
108 | Properties:
109 | Path: /streams/{stream_id}/changesets
110 | Method: get
111 | StreamEvents:
112 | Type: Api
113 | Properties:
114 | Path: /streams/{stream_id}/events
115 | Method: get
116 | GlobalChangesets:
117 | Type: Api
118 | Properties:
119 | Path: /changesets
120 | Method: get
121 | GlobalEvents:
122 | Type: Api
123 | Properties:
124 | Path: /events
125 | Method: get
126 |
127 | GlobalIndexerFunction:
128 | Type: AWS::Serverless::Function
129 | Properties:
130 | CodeUri: src
131 | Handler: lambda_entrypoint.indexer
132 | Runtime: python3.8
133 | Timeout: 60
134 | Environment:
135 | Variables:
136 | EventStoreTable: !Ref EventStoreTable
137 | AnalysisTable: !Ref AnalysisTable
138 | Policies:
139 | - AWSLambdaDynamoDBExecutionRole
140 | - DynamoDBCrudPolicy:
141 | TableName: !Ref EventStoreTable
142 | - DynamoDBCrudPolicy:
143 | TableName: !Ref AnalysisTable
144 | Events:
145 | Stream:
146 | Type: DynamoDB
147 | Properties:
148 | Stream: !GetAtt EventStoreTable.StreamArn
149 | BatchSize: 10
150 | StartingPosition: TRIM_HORIZON
151 | MaximumBatchingWindowInSeconds: 1
152 | Enabled: true
153 | ParallelizationFactor: 1
154 | MaximumRetryAttempts: 1000
155 | DestinationConfig:
156 | OnFailure:
157 | Type: SQS
158 | Destination: !GetAtt GlobalIndexerDLQ.Arn
159 | GlobalIndexerDLQ:
160 | Type: AWS::SQS::Queue
161 | Properties:
162 | MessageRetentionPeriod: 1209600
163 | QueueName: !Join [ '_', ['ees', 'indexer_dead_letter_queue', !Ref AWS::StackName] ]
164 |
165 | PublisherFunction:
166 | Type: AWS::Serverless::Function
167 | Properties:
168 | CodeUri: src
169 | Handler: lambda_entrypoint.publisher
170 | Runtime: python3.8
171 | Timeout: 60
172 | Environment:
173 | Variables:
174 | EventStoreTable: !Ref EventStoreTable
175 | AnalysisTable: !Ref AnalysisTable
176 | ChangesetsTopic: !Ref ChangesetsTopic
177 | EventsTopic: !Ref EventsTopic
178 | Policies:
179 | - AmazonSNSFullAccess
180 | - SNSPublishMessagePolicy:
181 | TopicName: !Ref ChangesetsTopic
182 | - SNSPublishMessagePolicy:
183 | TopicName: !Ref EventsTopic
184 | Events:
185 | Stream:
186 | Type: DynamoDB
187 | Properties:
188 | Stream: !GetAtt EventStoreTable.StreamArn
189 | BatchSize: 10
190 | StartingPosition: TRIM_HORIZON
191 | MaximumBatchingWindowInSeconds: 1
192 | Enabled: true
193 | ParallelizationFactor: 1
194 | MaximumRetryAttempts: 1000
195 | DestinationConfig:
196 | OnFailure:
197 | Type: SQS
198 | Destination: !GetAtt PublisherIndexerDLQ.Arn
199 |
200 | PublisherIndexerDLQ:
201 | Type: AWS::SQS::Queue
202 | Properties:
203 | MessageRetentionPeriod: 1209600
204 | QueueName: !Join [ '_', ['ees', 'publisher_dead_letter_queue', !Ref AWS::StackName] ]
205 |
206 | AnalysisTable:
207 | Type: AWS::DynamoDB::Table
208 | Properties:
209 | TableName: !Join [ '_', ['ees', 'analysis', !Ref AWS::StackName] ]
210 | AttributeDefinitions:
211 | - AttributeName: projection_id
212 | AttributeType: S
213 | KeySchema:
214 | - AttributeName: projection_id
215 | KeyType: HASH
216 | BillingMode: PAY_PER_REQUEST
217 |
218 | AnalysisProjectorFunction:
219 | Type: AWS::Serverless::Function
220 | Properties:
221 | CodeUri: src
222 | Handler: lambda_entrypoint.analysis_projector
223 | Runtime: python3.8
224 | Timeout: 600
225 | Environment:
226 | Variables:
227 | EventStoreTable: !Ref EventStoreTable
228 | AnalysisTable: !Ref AnalysisTable
229 | Policies:
230 | - AWSLambdaDynamoDBExecutionRole
231 | - DynamoDBCrudPolicy:
232 | TableName: !Ref EventStoreTable
233 | - DynamoDBCrudPolicy:
234 | TableName: !Ref AnalysisTable
235 | Events:
236 |         Cron:
237 | Type: Schedule
238 | Properties:
239 | Schedule: rate(1 minute)
240 |
241 | Outputs:
242 | # ServerlessRestApi is an implicit API created out of Events key under Serverless::Function
243 | # Find out more about other implicit resources you can reference within SAM
244 | # https://github.com/awslabs/serverless-application-model/blob/master/docs/internals/generated_resources.rst#api
245 | ApiEndpoint:
246 | Description: "API Gateway endpoint URL for Prod stage"
247 | Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod"
248 | ChangesetsTopic:
249 | Description: "SNS topic for subscribing to published changesets."
250 | Value: !Ref ChangesetsTopic
251 | EventsTopic:
252 | Description: "SNS topic for subscribing to published events."
253 | Value: !Ref EventsTopic
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Elastic Event Store
2 |
3 | A fully serverless storage for event sourcing-based systems.
4 |
5 | 
6 |
7 | ## Table of Contents
8 |
9 | - [What is Event Sourcing?](#WhatIsEventSourcing)
10 | - [What is Event Store?](#WhatIsEventStore)
11 | - [Getting Started](#GettingStarted)
12 | * [Installing](#Installing)
13 | * [Using](#Using)
14 | - [Push Subscriptions](#PushSubscriptions)
15 | - [Pull Subscriptions](#PullSubscriptions)
16 | - [Architecture](#Architecture)
17 | - [Data Model](#DataModel)
18 | - [Ordering Guarantees](#OrderingGuarantees)
19 | - [Testing](#Testing)
20 | - [Limitations](#Limitations)
21 |
22 |
23 |
24 | ## What is Event Sourcing?
25 |
26 | Traditionally, software systems operate on state-based data. In other words, business entities and concepts are represented as a snapshot of their *current* state. E.g.:
27 |
28 | | Id | Name | Team |
29 | | --- | ---------- | ----------------- |
30 | | 1 | Gillian | Administration |
31 | | 2 | Krzysztof | Accounting |
32 | | 3 | Robyn | Frontend |
33 |
34 | In the above example, all we know about the data is its current state. *But how did it get to the current state?* — We don't know. The Event Sourcing pattern answers this and many other questions.
35 |
36 | Event Sourcing introduces the dimension of time into the modeling of business entities and their lifecycles. Instead of capturing an entity's current state, an event-sourced system keeps a transactional record of all events that have occurred during an entity's lifecycle. For example:
37 |
38 | ```
39 | { "id": 3, "type": "initialized", "name": "Robyn", "timestamp": "2021-01-05T13:15:30Z" }
40 | { "id": 3, "type": "assigned", "team": "Frontend", "timestamp": "2021-01-05T16:15:30Z" }
41 | { "id": 3, "type": "promoted", "position": "team-leader", "timestamp": "2021-01-22T16:15:30Z" }
42 | ```
43 |
44 | By modeling and persisting events, we capture exactly what happened during an entity's lifecycle. Events become the system's **source of truth**. Hence the name: event sourcing.
45 |
46 | Not only can we derive the current state by sequentially applying events, but the flexible event-based model also allows projecting different state models optimized for different tasks.
47 |
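To make this concrete, here is a minimal, illustrative projection that folds the example events above into the current state (the event shapes are taken from the snippet above; this is a sketch, not part of the store's API):

```python
# A minimal, illustrative projection: fold an event history into current state.
def project(events):
    state = {}
    for event in events:
        if event["type"] == "initialized":
            state = {"id": event["id"], "name": event["name"]}
        elif event["type"] == "assigned":
            state["team"] = event["team"]
        elif event["type"] == "promoted":
            state["position"] = event["position"]
    return state

events = [
    {"id": 3, "type": "initialized", "name": "Robyn"},
    {"id": 3, "type": "assigned", "team": "Frontend"},
    {"id": 3, "type": "promoted", "position": "team-leader"},
]
print(project(events))
# {'id': 3, 'name': 'Robyn', 'team': 'Frontend', 'position': 'team-leader'}
```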
48 | Finally, Event Sourcing is **not** Event-Driven Architecture (EDA):
49 |
50 | > EventSourcing is not Event driven architecture. The former is about events _inside_ the app. The latter is about events _between_ (sub)systems
51 | > ~ [@ylorph](https://twitter.com/ylorph/status/1295480789765955586)
52 |
53 |
54 |
55 | ## What is Event Store?
56 |
57 | An event store is a storage mechanism optimized for event-sourcing-based systems. It should provide the following functionality:
58 |
59 | 1. Append events to a stream (stream = events of a distinct entity).
60 | 2. Read events from a stream.
61 | 3. Concurrency management to detect collisions when multiple processes write to the same stream.
62 | 4. Enumerate events across all streams (e.g., for CQRS projections).
63 | 5. Push newly committed events to interested subscribers.
64 |
65 | All of the above functions are supported by the Elastic Event Store.
66 |
67 |
68 |
69 | ## Getting Started
70 |
71 |
72 |
73 | ### Installing
74 |
75 | 1. Install [AWS SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html) and configure your [AWS credentials](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-files.html).
76 |
77 | 2. Clone the repository:
78 |
79 | ```sh
80 | git clone https://github.com/doitintl/elastic-event-store.git
81 | cd elastic-event-store
82 | ```
83 |
84 | 3. Build and deploy a new instance:
85 |
86 | ```sh
87 | sam build
88 | # ... Build Succeeded
89 |
90 | sam deploy --guided
91 | # ...
92 | # ApiEndpoint: https://XXXXXXXXXXXX.execute-api.XXXXXXXX.amazonaws.com/Prod/
93 | ```
94 |
95 | Verify installation:
96 |
97 | ```sh
98 | curl https://XXXXXXXXXXXX.execute-api.XXXXXXXX.amazonaws.com/Prod/version
99 | # { "version": "0.0.1" }
100 | ```
101 |
102 |
103 |
104 | ### Using
105 |
106 | #### 1. Submit a few changesets
107 |
108 | ```sh
109 | EES_URL=https://XXXXXXXXXXXX.execute-api.XXXXXXXX.amazonaws.com/Prod
110 | ```
111 |
112 | ```sh
113 | curl $EES_URL/streams/stream-aaa-111 \
114 | -H 'Content-Type: application/json' \
115 | -X POST \
116 |     --data @- <<EOF
117 | {
118 |     "metadata": { "origin": "readme-example" },
119 |     "events": [
120 |         { "type": "init" },
121 |         { "type": "update" }
122 |     ]
123 | }
124 | EOF
125 | ```
170 | Note: Statistics are updated asynchronously every minute.
171 |
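The same commit can also be issued programmatically. Below is a minimal sketch using Python's `requests` library; the URL, metadata, and event payloads are placeholders, and `expected_last_changeset` is the query parameter used for optimistic concurrency control:

```python
import requests

EES_URL = "https://XXXXXXXXXXXX.execute-api.XXXXXXXX.amazonaws.com/Prod"  # your endpoint

# Append a changeset; expected_last_changeset enables optimistic concurrency control.
response = requests.post(
    f"{EES_URL}/streams/stream-aaa-111",
    params={"expected_last_changeset": 0},  # 0 = the stream is expected to be new
    json={
        "metadata": {"origin": "readme-example"},
        "events": [{"type": "init"}, {"type": "update"}],
    },
)
if response.status_code == 409:
    # Someone else appended first: inspect the missed changesets and retry.
    print(response.json()["forthcoming_changesets"])
else:
    print(response.json())  # {"stream_id": "stream-aaa-111", "changeset_id": 1}
```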
172 |
173 |
174 | ## Push Subscriptions
175 |
176 | The CloudFormation stack includes two SNS FIFO topics:
177 |
178 | 1. `ees_changesets_XXX_XXX_.fifo` — for new changesets
179 | 2. `ees_events_XXX_XXX_.fifo` — for individual events
180 |
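For example, a projection service could attach an SQS FIFO queue to the changesets topic. A minimal `boto3` sketch, assuming both the topic and the queue already exist (the ARNs below are placeholders for your stack's values):

```python
import boto3

# Subscribe an existing SQS FIFO queue to the changesets FIFO topic.
sns = boto3.client("sns")
sns.subscribe(
    TopicArn="arn:aws:sns:us-east-1:123456789012:ees_changesets_mystack_.fifo",
    Protocol="sqs",
    Endpoint="arn:aws:sqs:us-east-1:123456789012:my-projection-queue.fifo",
    Attributes={"RawMessageDelivery": "true"},  # deliver the payload without SNS envelope
)
```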
181 |
182 |
183 | ## Pull (Catchup) Subscriptions
184 |
185 | To enumerate global changesets:
186 |
187 | ```sh
188 | curl "$EES_URL/changesets?checkpoint=0"
189 | ```
190 |
191 | Use the `next_checkpoint` value to fetch the next batch. This endpoint is critical for CQRS projections and state rebuilds.
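A catch-up subscription is then a simple polling loop. A minimal sketch, assuming the response body carries a `changesets` list alongside the documented `next_checkpoint` (verify the exact shape against your deployment):

```python
import requests

EES_URL = "https://XXXXXXXXXXXX.execute-api.XXXXXXXX.amazonaws.com/Prod"  # your endpoint

# Page through all globally enumerated changesets, starting from checkpoint 0.
checkpoint = 0
while True:
    batch = requests.get(f"{EES_URL}/changesets", params={"checkpoint": checkpoint}).json()
    for changeset in batch["changesets"]:
        ...  # apply the changeset to your projection
    if batch["next_checkpoint"] == checkpoint:  # assumed: no new data yet
        break
    checkpoint = batch["next_checkpoint"]
```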
192 |
193 |
194 |
195 | ## Architecture
196 |
197 | 
198 |
199 | - REST API exposed via API Gateway
200 | - System logic in AWS Lambda
201 | - Events stored in DynamoDB
202 | - DynamoDB Streams trigger Lambdas for global indexing and publishing
203 | - SNS FIFO topics for push subscriptions
204 | - SQS DLQs for failed stream processing
205 |
206 |
207 |
208 | ## Data Model
209 |
210 | Each partition in the events table represents a stream — i.e., a business entity's event history.
211 |
212 | Main DynamoDB schema:
213 |
214 | | Column | Type | Description |
215 | | ----------------- | --------------------- | ----------- |
216 | | stream_id | Partition Key (String) | Stream ID |
217 | | changeset_id | Sort Key (Number) | Commit ID in stream |
218 | | events | JSON (String) | Committed events |
219 | | metadata | JSON (String) | Changeset metadata |
220 | | timestamp | String | Commit timestamp |
221 | | first_event_id | LSI (Number) | First event ID in stream |
222 | | last_event_id | LSI (Number) | Last event ID in stream |
223 | | page | GSI Partition (Number) | For global ordering |
224 | | page_item | GSI Sort (Number) | Index within global page |
225 |
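For illustration, a committed changeset might be stored as an item like this (all values are hypothetical; `events` and `metadata` are serialized JSON strings, per the table above):

```json
{
  "stream_id": "stream-aaa-111",
  "changeset_id": 1,
  "events": "[{\"type\": \"init\"}, {\"type\": \"update\"}]",
  "metadata": "{\"origin\": \"readme-example\"}",
  "timestamp": "2021-01-05T13:15:30Z",
  "first_event_id": 1,
  "last_event_id": 2,
  "page": 0,
  "page_item": 0
}
```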
226 |
227 |
228 | ## Ordering Guarantees
229 |
230 | 1. **Intra-stream order** is preserved and strongly consistent.
231 | 2. **Inter-stream order** is not guaranteed but is repeatable — global enumeration always yields the same result.
232 |
233 |
234 |
235 | ## Testing
236 |
237 | 1. Set the `SAM_ARTIFACTS_BUCKET` environment variable:
238 |
239 | ```sh
240 | export SAM_ARTIFACTS_BUCKET=your-bucket-name
241 | ```
242 |
243 | 2. Deploy the test environment:
244 |
245 | ```sh
246 | ./deploy-test-env.sh
247 | ```
248 |
249 | 3. Run unit tests:
250 |
251 | ```sh
252 | ./run-unit-tests.sh
253 | ```
254 |
255 | 4. Run unit and integration tests:
256 |
257 | ```sh
258 | ./run-all-tests.sh
259 | ```
260 |
261 |
262 |
263 | ## Limitations
264 |
265 | Because DynamoDB is used:
266 |
267 | 1. Maximum item (changeset) size: 400 KB
268 | 2. Maximum item collection (stream) size: 10 GB
269 |
270 | As with all serverless solutions, at high scale, a self-managed deployment may be more cost-effective.
271 |
--------------------------------------------------------------------------------
/src/tests/integration/test_fetch_events.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import uuid
3 | from unittest import TestCase
4 | from tests.integration.api_test_client import ApiTestClient
5 |
6 |
7 | @pytest.mark.slow
8 | class TestFetchingEvents(TestCase):
9 | api = None
10 |
11 | def setUp(self) -> None:
12 | self.api = self.api or ApiTestClient()
13 | return super().setUp()
14 |
15 | def test_fetch_events(self):
16 | stream_id = str(uuid.uuid4())
17 |
18 | self.api.commit(
19 | stream_id=stream_id,
20 | last_changeset_id=0,
21 | metadata=self.api.some_metadata,
22 | events=[
23 | { "type": "init", "foo": "bar" },
24 | { "type": "update", "foo": "baz" },
25 | { "type": "switch", "baz": "foo" },
26 | { "type": "modify", "baz": "bar" },
27 | ]
28 | )
29 |
30 | response = self.api.query_events(stream_id)
31 |
32 | self.assertDictEqual(response.json(), {
33 | "stream_id": stream_id,
34 | "events": [
35 | { "id": 1, "data": { "type": "init", "foo": "bar" } },
36 | { "id": 2, "data": { "type": "update", "foo": "baz" } },
37 | { "id": 3, "data": { "type": "switch", "baz": "foo" } },
38 | { "id": 4, "data": { "type": "modify", "baz": "bar" } }
39 | ]
40 | })
41 |
42 | def test_fetch_single_event(self):
43 | stream_id = str(uuid.uuid4())
44 |
45 | self.api.commit(
46 | stream_id=stream_id,
47 | last_changeset_id=0,
48 | metadata=self.api.some_metadata,
49 | events=[
50 | { "type": "init", "foo": "bar" },
51 | { "type": "update", "foo": "baz" },
52 | { "type": "switch", "baz": "foo" },
53 | { "type": "modify", "baz": "bar" },
54 | ]
55 | )
56 |
57 | response = self.api.query_events(stream_id, from_event=3, to_event=3)
58 |
59 | self.assertDictEqual(response.json(), {
60 | "stream_id": stream_id,
61 | "events": [ { "id": 3, "data": { "type": "switch", "baz": "foo" } } ]
62 | })
63 |
64 | def test_fetch_events_from_number(self):
65 | stream_id = str(uuid.uuid4())
66 |
67 | self.api.commit(
68 | stream_id=stream_id,
69 | last_changeset_id=0,
70 | metadata=self.api.some_metadata,
71 | events=[
72 | { "type": "init", "foo": "bar" },
73 | { "type": "update", "foo": "baz" },
74 | { "type": "switch", "baz": "foo" },
75 | { "type": "modify", "baz": "bar" },
76 | ]
77 | )
78 |
79 | response = self.api.query_events(stream_id, from_event=3)
80 |
81 | self.assertDictEqual(response.json(), {
82 | "stream_id": stream_id,
83 | "events": [
84 | { "id": 3, "data": { "type": "switch", "baz": "foo" } },
85 | { "id": 4, "data": { "type": "modify", "baz": "bar" } }
86 | ]
87 | })
88 |
89 | def test_fetch_events_to_number(self):
90 | stream_id = str(uuid.uuid4())
91 |
92 | self.api.commit(
93 | stream_id=stream_id,
94 | last_changeset_id=0,
95 | metadata=self.api.some_metadata,
96 | events=[
97 | { "type": "init", "foo": "bar" },
98 | { "type": "update", "foo": "baz" },
99 | { "type": "switch", "baz": "foo" },
100 | { "type": "modify", "baz": "bar" },
101 | ]
102 | )
103 |
104 | response = self.api.query_events(stream_id, to_event=3)
105 |
106 | self.assertDictEqual(response.json(), {
107 | "stream_id": stream_id,
108 | "events": [
109 | { "id": 1, "data": { "type": "init", "foo": "bar" } },
110 | { "id": 2, "data": { "type": "update", "foo": "baz" } },
111 | { "id": 3, "data": { "type": "switch", "baz": "foo" } },
112 | ]
113 | })
114 |
115 | def test_fetch_events_from_and_to_numbers(self):
116 | stream_id = str(uuid.uuid4())
117 |
118 | self.api.commit(
119 | stream_id=stream_id,
120 | last_changeset_id=0,
121 | metadata=self.api.some_metadata,
122 | events=[
123 | { "type": "init", "foo": "bar" },
124 | { "type": "update", "foo": "baz" },
125 | { "type": "switch", "baz": "foo" },
126 | { "type": "modify", "baz": "bar" },
127 | ]
128 | )
129 |
130 | response = self.api.query_events(stream_id, from_event=2, to_event=3)
131 |
132 | self.assertDictEqual(response.json(), {
133 | "stream_id": stream_id,
134 | "events": [
135 | { "id": 2, "data": { "type": "update", "foo": "baz" } },
136 | { "id": 3, "data": { "type": "switch", "baz": "foo" } },
137 | ]
138 | })
139 |
140 | def test_fetch_events_from_and_to_numbers_across_multiple_commits(self):
141 | stream_id = str(uuid.uuid4())
142 |
143 | self.api.commit(
144 | stream_id=stream_id,
145 | last_changeset_id=0,
146 | metadata=self.api.some_metadata,
147 | events=[
148 | { "type": "init", "foo": "bar" },
149 | { "type": "update", "foo": "baz" },
150 | { "type": "switch", "baz": "foo" },
151 | { "type": "modify", "baz": "bar" },
152 | ]
153 | )
154 |
155 | self.api.commit(
156 | stream_id=stream_id,
157 | last_changeset_id=1,
158 | metadata=self.api.some_metadata,
159 | events=[
160 | { "type": "update", "baz": "bar" },
161 | { "type": "switch", "foo": "baz" },
162 | { "type": "modify", "bar": "foo" },
163 | ]
164 | )
165 |
166 | response = self.api.query_events(stream_id, from_event=3, to_event=6)
167 |
168 | self.assertDictEqual(response.json(), {
169 | "stream_id": stream_id,
170 | "events": [
171 | { "id": 3, "data": { "type": "switch", "baz": "foo" } },
172 |                 { "id": 4, "data": { "type": "modify", "baz": "bar" } },
173 |                 { "id": 5, "data": { "type": "update", "baz": "bar" } },
174 |                 { "id": 6, "data": { "type": "switch", "foo": "baz" } },
175 | ]
176 | })
177 |
178 | def test_invalid_querying_params1(self):
179 | stream_id = str(uuid.uuid4())
180 |
181 | response = self.api.query_events(stream_id, from_event=4, to_event=3)
182 |
183 | assert response.status_code == 400
184 | self.assertDictEqual(response.json(), {
185 | "stream_id": stream_id,
186 | "error": "INVALID_EVENT_FILTERING_PARAMS",
187 | "message": 'The higher boundary cannot be lower than the lower boundary: 4(from) > 3(to)'
188 | })
189 |
190 | def test_invalid_querying_params2(self):
191 | stream_id = str(uuid.uuid4())
192 |
193 | response = self.api.query_events(stream_id, from_event="test")
194 |
195 | assert response.status_code == 400
196 | self.assertDictEqual(response.json(), {
197 | "stream_id": stream_id,
198 | "error": "INVALID_EVENT_FILTERING_PARAMS",
199 | "message": 'The filtering params(from, to) have to be positive integer values'
200 | })
201 |
202 | def test_invalid_querying_params3(self):
203 | stream_id = str(uuid.uuid4())
204 |
205 | response = self.api.query_events(stream_id, to_event="test")
206 |
207 | assert response.status_code == 400
208 | self.assertDictEqual(response.json(), {
209 | "stream_id": stream_id,
210 | "error": "INVALID_EVENT_FILTERING_PARAMS",
211 | "message": 'The filtering params(from, to) have to be positive integer values'
212 | })
213 |
214 | def test_no_stream_id(self):
215 | response = self.api.query_events("")
216 |
217 | assert response.status_code == 400
218 | self.assertDictEqual(response.json(), {
219 | "error": "MISSING_STREAM_ID",
220 | "message": 'stream_id is a required value'
221 | })
222 |
223 |     def test_fetching_nonexistent_stream(self):
224 | response = self.api.query_events("abcd")
225 |
226 | assert response.status_code == 404
227 | self.assertDictEqual(response.json(), {
228 | "stream_id": "abcd",
229 | "error": "STREAM_NOT_FOUND",
230 |             "message": 'The specified stream(abcd) doesn\'t exist'
231 | })
232 |
233 |     def test_fetch_nonexistent_events(self):
234 | stream_id = str(uuid.uuid4())
235 |
236 | self.api.commit(
237 | stream_id=stream_id,
238 | last_changeset_id=0,
239 | metadata=self.api.some_metadata,
240 | events=self.api.some_events
241 | )
242 |
243 | response = self.api.query_events(stream_id, from_event=200)
244 |
245 | self.assertDictEqual(response.json(), {
246 | "stream_id": stream_id,
247 | "events": [ ]
248 | })
--------------------------------------------------------------------------------
/src/ees/infrastructure/aws_lambda.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 | from ees.model import Response
4 | from ees.commands import *
5 | from ees.infrastructure.dynamodb import DynamoDB
6 |
7 | logger = logging.getLogger("ees.infrastructure.aws_lambda")
8 |
9 | def event_to_command(event, context=None):
10 | logger.info(f"Parsing incoming event: {event}")
11 | cmd = None
12 | if "requestContext" in event.keys():
13 | cmd = parse_api_gateway_event(event, context)
14 | elif "Records" in event.keys():
15 |         cmd = parse_dynamodb_event(event, context)
16 | logger.info(f"Resulting command/result: {cmd}")
17 | return cmd
18 |
19 | def parse_api_gateway_event(event, context):
20 | request_path = event["requestContext"]["resourcePath"].lower()
21 | logger.debug(f"API Gateway path: {request_path}")
22 | parser = parsers[request_path]
23 | return parser(event, context)
24 |
25 | def parse_dynamodb_event(event, context):
26 | changesets = []
27 | for e in event["Records"]:
28 | keys = e["dynamodb"]["Keys"]
29 | stream_id = keys["stream_id"]["S"]
30 | changeset_id = int(keys["changeset_id"]["N"])
31 | if stream_id != DynamoDB.global_counter_key and e['eventName'] == "INSERT":
32 | changesets.append({
33 | "stream_id": stream_id,
34 | "changeset_id": changeset_id,
35 | })
36 | return AssignGlobalIndexes(changesets)
37 |
38 | def parse_version_request(event, context):
39 | return Version()
40 |
41 | def parse_stats_request(event, context):
42 | return Stats()
43 |
44 | def parse_commit_request(event, context):
45 | query_string = event.get("queryStringParameters") or {}
46 | stream_id = event["pathParameters"].get("stream_id")
47 | if not stream_id:
48 | return missing_stream_id()
49 |
50 | expected_last_changeset = query_string.get("expected_last_changeset")
51 | if expected_last_changeset is not None and expected_last_changeset != "":
52 | try:
53 | expected_last_changeset = int(expected_last_changeset)
54 | except ValueError:
55 | return invalid_expected_changeset_id(stream_id, expected_last_changeset)
56 | if expected_last_changeset < 0:
57 | return invalid_expected_changeset_id(stream_id, expected_last_changeset)
58 | else:
59 | expected_last_changeset = None
60 |
61 | expected_last_event = query_string.get("expected_last_event")
62 | if expected_last_event is not None and expected_last_event != "":
63 | try:
64 | expected_last_event = int(expected_last_event)
65 | except ValueError:
66 | return invalid_expected_event_id(stream_id, expected_last_event)
67 | if expected_last_event < 0:
68 | return invalid_expected_event_id(stream_id, expected_last_event)
69 | else:
70 | expected_last_event = None
71 |
72 | if expected_last_changeset is None and expected_last_event is None:
73 | expected_last_changeset = 0
74 |
75 | if expected_last_changeset is not None and expected_last_event is not None:
76 | return Response(
77 | http_status=400,
78 | body={
79 | "stream_id": stream_id,
80 | "error": "BOTH_EXPECTED_CHANGESET_AND_EVENT_ARE_SET",
81 | "message": 'Cannot use both "last_changeset_id" and "last_event_id" for concurrency management. Specify only one value.'
82 | })
83 |
84 | body = json.loads(event["body"])
85 | metadata = body.get("metadata", { })
86 | events = body["events"]
87 |
88 | return Commit(
89 | stream_id=stream_id,
90 | expected_last_changeset=expected_last_changeset,
91 | expected_last_event=expected_last_event,
92 | events=events,
93 | metadata=metadata
94 | )
95 |
96 | def parse_stream_changesets_request(event, context):
97 | query_string = event.get("queryStringParameters") or { }
98 | stream_id = event["pathParameters"].get("stream_id")
99 | if not stream_id:
100 | return missing_stream_id()
101 |
102 | to_changeset = query_string.get("to")
103 | from_changeset = query_string.get("from")
104 |
105 | if to_changeset:
106 | try:
107 | to_changeset = int(to_changeset)
108 | except ValueError:
109 | return invalid_filtering_values_type(stream_id, "CHANGESET")
110 |
111 | if from_changeset:
112 | try:
113 | from_changeset = int(from_changeset)
114 | except ValueError:
115 | return invalid_filtering_values_type(stream_id, "CHANGESET")
116 |
117 | if to_changeset and from_changeset and from_changeset > to_changeset:
118 | return invalid_filtering_values(stream_id, from_changeset, to_changeset, "CHANGESET")
119 |
120 | return FetchStreamChangesets(stream_id, from_changeset, to_changeset)
121 |
122 | def parse_stream_events_request(event, context):
123 | query_string = event.get("queryStringParameters") or { }
124 | stream_id = event["pathParameters"].get("stream_id")
125 | if not stream_id:
126 | return missing_stream_id()
127 |
128 | to_event = query_string.get("to")
129 | from_event = query_string.get("from")
130 |
131 | if to_event:
132 | try:
133 | to_event = int(to_event)
134 | except ValueError:
135 | return invalid_filtering_values_type(stream_id, "EVENT")
136 |
137 | if from_event:
138 | try:
139 | from_event = int(from_event)
140 | except ValueError:
141 | return invalid_filtering_values_type(stream_id, "EVENT")
142 |
143 | if to_event and from_event and from_event > to_event:
144 | return invalid_filtering_values(stream_id, from_event, to_event, "EVENT")
145 |
146 | return FetchStreamEvents(stream_id, from_event, to_event)
147 |
148 | def missing_stream_id():
149 | return Response(
150 | http_status=400,
151 |         body={
152 | "error": "MISSING_STREAM_ID",
153 | "message": 'stream_id is a required value'
154 | })
155 |
156 | def parse_global_changesets_request(event, context):
157 | query_string = event.get("queryStringParameters") or { }
158 | checkpoint = query_string.get("checkpoint", 0)
159 | limit = query_string.get("limit")
160 |
161 | if checkpoint:
162 | try:
163 | checkpoint = int(checkpoint)
164 | except ValueError:
165 | return invalid_checkpoint_value(checkpoint)
166 |
167 | if checkpoint < 0:
168 | return invalid_checkpoint_value(checkpoint)
169 |
170 | if limit:
171 | try:
172 | limit = int(limit)
173 | except ValueError:
174 | return invalid_limit_value(limit)
175 |
176 | if limit is not None and limit < 1:
177 | return invalid_limit_value(limit)
178 |
179 | return FetchGlobalChangesets(checkpoint, limit)
180 |
181 | def invalid_expected_changeset_id(stream_id, expected_last_changeset_id):
182 | return Response(
183 | http_status=400,
184 | body={
185 | "stream_id": stream_id,
186 | "error": "INVALID_EXPECTED_CHANGESET_ID",
187 | "message": f'The specified expected changeset id("{expected_last_changeset_id}") is invalid. Expected a positive integer.'
188 | })
189 |
190 | def invalid_expected_event_id(stream_id, expected_last_event_id):
191 | return Response(
192 | http_status=400,
193 | body={
194 | "stream_id": stream_id,
195 | "error": "INVALID_EXPECTED_EVENT_ID",
196 | "message": f'The specified expected event id("{expected_last_event_id}") is invalid. Expected a positive integer.'
197 | })
198 |
199 | def invalid_filtering_values_type(stream_id, filter_type):
200 | return Response(
201 | http_status=400,
202 | body={
203 | "stream_id": stream_id,
204 | "error": f"INVALID_{filter_type}_FILTERING_PARAMS",
205 | "message": 'The filtering params(from, to) have to be positive integer values'
206 | })
207 |
208 | def invalid_filtering_values(stream_id, from_changeset, to_changeset, filter_type):
209 | return Response(
210 | http_status=400,
211 | body={
212 | "stream_id": stream_id,
213 | "error": f"INVALID_{filter_type}_FILTERING_PARAMS",
214 | "message": f'The higher boundary cannot be lower than the lower boundary: {from_changeset}(from) > {to_changeset}(to)'
215 | })
216 |
217 | def invalid_checkpoint_value(checkpoint):
218 | return Response(
219 | http_status=400,
220 | body={
221 | "error": "INVALID_CHECKPOINT",
222 | "message": f'"{checkpoint}" is an invalid checkpoint value. Expected a positive integer value.'
223 | })
224 |
225 | def invalid_events_checkpoint_value(checkpoint_string):
226 | return Response(
227 | http_status=400,
228 | body={
229 | "error": "INVALID_CHECKPOINT",
230 | "message": f'"{checkpoint_string}" is an invalid checkpoint value. Set a valid checkpoint(e.g. "42.1").'
231 | })
232 |
233 | def invalid_limit_value(limit):
234 | return Response(
235 | http_status=400,
236 | body={
237 | "error": "INVALID_LIMIT",
238 | "message": f'"{limit}" is an invalid limit value. Expected an integer value greater than 0.'
239 | })
240 |
241 | def parse_dynamodb_new_records(event, context):
242 | changesets = []
243 | for e in event["Records"]:
244 | keys = e["dynamodb"]["Keys"]
245 | stream_id = keys["stream_id"]["S"]
246 | if stream_id != DynamoDB.global_counter_key and e['eventName'] == "INSERT":
247 | c = DynamoDB.parse_commit(e["dynamodb"]["NewImage"])
248 | changesets.append(c)
249 | return changesets
250 |
251 | parsers = {
252 | "/version": parse_version_request,
253 | "/streams": parse_stats_request,
254 | "/streams/{stream_id}": parse_commit_request,
255 | "/streams/{stream_id}/changesets": parse_stream_changesets_request,
256 | "/streams/{stream_id}/events": parse_stream_events_request,
257 | "/changesets": parse_global_changesets_request
258 | }
--------------------------------------------------------------------------------
/src/tests/integration/test_commit_stream.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import uuid
3 | from unittest import TestCase
4 | from tests.integration.api_test_client import ApiTestClient
5 |
6 |
7 | @pytest.mark.slow
8 | class TestCommittingChangesets(TestCase):
9 | api = None
10 |
11 | def setUp(self) -> None:
12 | self.api = self.api or ApiTestClient()
13 | return super().setUp()
14 |
15 | def test_version(self):
16 | response = self.api.version()
17 | self.assertDictEqual(response.json(), {"version": "0.0.1"})
18 |
19 | def test_new_stream(self):
20 | stream_id = str(uuid.uuid4())
21 |
22 | response = self.api.commit(
23 | stream_id=stream_id,
24 | last_changeset_id=0,
25 | metadata=self.api.some_metadata,
26 | events=self.api.some_events
27 | )
28 |
29 | self.assertDictEqual(response.json(), {"stream_id": stream_id, "changeset_id": 1})
30 |
31 | def test_append_to_existing_stream(self):
32 | stream_id = str(uuid.uuid4())
33 |
34 | self.api.commit(
35 | stream_id=stream_id,
36 | last_changeset_id=0,
37 | metadata=self.api.some_metadata,
38 | events=self.api.some_events
39 | )
40 |
41 | response = self.api.commit(
42 | stream_id=stream_id,
43 | last_changeset_id=1,
44 | metadata=self.api.some_metadata,
45 | events=self.api.some_events
46 | )
47 |
48 | self.assertDictEqual(response.json(), {"stream_id": stream_id, "changeset_id": 2})
49 |
50 |     # Appending to an existing stream when the expected version has already been superseded
51 | def test_concurrency_exception(self):
52 | stream_id = str(uuid.uuid4())
53 |
54 | self.api.commit(
55 | stream_id=stream_id,
56 | last_changeset_id=0,
57 | metadata=self.api.some_metadata,
58 | events=self.api.some_events
59 | )
60 |
61 | self.api.commit(
62 | stream_id=stream_id,
63 | last_changeset_id=1,
64 | metadata=self.api.some_metadata,
65 | events=self.api.some_events
66 | )
67 |
68 | response = self.api.commit(
69 | stream_id=stream_id,
70 | last_changeset_id=1,
71 | metadata=self.api.some_metadata,
72 | events=self.api.some_events
73 | )
74 |
75 | assert response.status_code == 409
76 | self.maxDiff = None
77 | self.assertDictEqual(response.json(), {
78 | "stream_id": stream_id,
79 | "error": "OPTIMISTIC_CONCURRENCY_EXCEPTION",
80 | "message": "The expected last changeset (1) is outdated, review the changeset(s) appended after it.",
81 | "forthcoming_changesets": [
82 | {
83 | "changeset_id": 2,
84 | "events": self.api.some_events,
85 | "metadata": self.api.some_metadata
86 | }
87 | ]
88 | })
89 |
90 |     def test_append_with_negative_expected_changeset_id(self):
91 | stream_id = str(uuid.uuid4())
92 |
93 | response = self.api.commit(
94 | stream_id=stream_id,
95 | last_changeset_id=-1,
96 | metadata=self.api.some_metadata,
97 | events=self.api.some_events
98 | )
99 |
100 | assert response.status_code == 400
101 | self.assertDictEqual(response.json(), {
102 | "stream_id": stream_id,
103 | "error": "INVALID_EXPECTED_CHANGESET_ID",
104 | "message": 'The specified expected changeset id("-1") is invalid. Expected a positive integer.'
105 | })
106 |
107 |     def test_append_with_non_numeric_expected_changeset_id(self):
108 | stream_id = str(uuid.uuid4())
109 |
110 | response = self.api.commit(
111 | stream_id=stream_id,
112 | last_changeset_id="test",
113 | metadata=self.api.some_metadata,
114 | events=self.api.some_events
115 | )
116 |
117 | assert response.status_code == 400
118 | self.assertDictEqual(response.json(), {
119 | "stream_id": stream_id,
120 | "error": "INVALID_EXPECTED_CHANGESET_ID",
121 | "message": 'The specified expected changeset id("test") is invalid. Expected a positive integer.'
122 | })
123 |
124 | def test_append_with_both_expected_last_changeset_and_event(self):
125 | stream_id = str(uuid.uuid4())
126 |
127 | response = self.api.commit(
128 | stream_id=stream_id,
129 | last_changeset_id=0,
130 | last_event_id=0,
131 | metadata=self.api.some_metadata,
132 | events=self.api.some_events
133 | )
134 |
135 | assert response.status_code == 400
136 | self.assertDictEqual(response.json(), {
137 | "stream_id": stream_id,
138 | "error": "BOTH_EXPECTED_CHANGESET_AND_EVENT_ARE_SET",
139 | "message": 'Cannot use both "last_changeset_id" and "last_event_id" for concurrency management. Specify only one value.'
140 | })
141 |
142 | def test_new_stream_with_expected_last_event(self):
143 | stream_id = str(uuid.uuid4())
144 |
145 | response = self.api.commit(
146 | stream_id=stream_id,
147 | last_event_id=0,
148 | metadata=self.api.some_metadata,
149 | events=self.api.some_events
150 | )
151 |
152 | self.assertDictEqual(response.json(), {"stream_id": stream_id, "changeset_id": 1})
153 |
154 | def test_append_to_existing_stream_with_expected_last_event(self):
155 | stream_id = str(uuid.uuid4())
156 |
157 | self.api.commit(
158 | stream_id=stream_id,
159 | last_event_id=0,
160 | metadata=self.api.some_metadata,
161 | events=self.api.some_events
162 | )
163 |
164 | response = self.api.commit(
165 | stream_id=stream_id,
166 | last_event_id=2,
167 | metadata=self.api.some_metadata,
168 | events=self.api.some_events
169 | )
170 |
171 | self.assertDictEqual(response.json(), {"stream_id": stream_id, "changeset_id": 2})
172 |
173 |     # Appending to an existing stream when the expected version has already been superseded
174 | def test_concurrency_exception_with_expected_last_event(self):
175 | stream_id = str(uuid.uuid4())
176 |
177 | self.api.commit(
178 | stream_id=stream_id,
179 | last_event_id=0,
180 | metadata=self.api.some_metadata,
181 | events=self.api.some_events
182 | )
183 |
184 | self.api.commit(
185 | stream_id=stream_id,
186 | last_event_id=2,
187 | metadata=self.api.some_metadata,
188 | events=self.api.some_events
189 | )
190 |
191 | response = self.api.commit(
192 | stream_id=stream_id,
193 | last_event_id=3,
194 | metadata=self.api.some_metadata,
195 | events=self.api.some_events
196 | )
197 |
198 | assert response.status_code == 409
199 | self.maxDiff = None
200 | self.assertDictEqual(response.json(), {
201 | "stream_id": stream_id,
202 | "error": "OPTIMISTIC_CONCURRENCY_EXCEPTION",
203 | "message": "The expected last event (3) is outdated, review the event(s) appended after it.",
204 | "forthcoming_changesets": [
205 | {
206 | "changeset_id": 2,
207 | "events": self.api.some_events,
208 | "metadata": self.api.some_metadata
209 | }
210 | ]
211 | })
212 |
213 |     def test_append_with_negative_expected_last_event_id(self):
214 | stream_id = str(uuid.uuid4())
215 |
216 | response = self.api.commit(
217 | stream_id=stream_id,
218 | last_event_id=-1,
219 | metadata=self.api.some_metadata,
220 | events=self.api.some_events
221 | )
222 |
223 | assert response.status_code == 400
224 | self.assertDictEqual(response.json(), {
225 | "stream_id": stream_id,
226 | "error": "INVALID_EXPECTED_EVENT_ID",
227 | "message": 'The specified expected event id("-1") is invalid. Expected a positive integer.'
228 | })
229 |
230 |     def test_append_with_non_numeric_expected_last_event_id(self):
231 | stream_id = str(uuid.uuid4())
232 |
233 | response = self.api.commit(
234 | stream_id=stream_id,
235 | last_event_id="test",
236 | metadata=self.api.some_metadata,
237 | events=self.api.some_events
238 | )
239 |
240 | assert response.status_code == 400
241 | self.assertDictEqual(response.json(), {
242 | "stream_id": stream_id,
243 | "error": "INVALID_EXPECTED_EVENT_ID",
244 | "message": 'The specified expected event id("test") is invalid. Expected a positive integer.'
245 | })
246 |
247 | def test_append_without_any_metadata(self):
248 | stream_id = str(uuid.uuid4())
249 |
250 | response = self.api.commit(
251 | stream_id=stream_id,
252 | last_changeset_id=0,
253 | events=self.api.some_events
254 | )
255 |
256 | self.assertDictEqual(response.json(), {"stream_id": stream_id, "changeset_id": 1})
257 |
258 |     def test_append_skipping_a_changeset(self):
259 | stream_id = str(uuid.uuid4())
260 |
261 | self.api.commit(
262 | stream_id=stream_id,
263 | last_changeset_id=0,
264 | metadata=self.api.some_metadata,
265 | events=self.api.some_events
266 | )
267 |
268 | response = self.api.commit(
269 | stream_id=stream_id,
270 | last_changeset_id=2,
271 | metadata=self.api.some_metadata,
272 | events=self.api.some_events
273 | )
274 |
275 | assert response.status_code == 400
276 | self.assertDictEqual(response.json(), {
277 | "stream_id": stream_id,
278 | "error": "INVALID_EXPECTED_CHANGESET_ID",
279 | "message": f'The specified expected changeset(2) doesn\'t exist. The "{stream_id}" stream\'s most recent changeset is 1.'
280 | })
281 |
282 |     def test_append_skipping_an_event(self):
283 | stream_id = str(uuid.uuid4())
284 |
285 | self.api.commit(
286 | stream_id=stream_id,
287 | last_event_id=0,
288 | metadata=self.api.some_metadata,
289 | events=self.api.some_events
290 | )
291 |
292 | response = self.api.commit(
293 | stream_id=stream_id,
294 | last_event_id=3,
295 | metadata=self.api.some_metadata,
296 | events=self.api.some_events
297 | )
298 |
299 | assert response.status_code == 400
300 | self.assertDictEqual(response.json(), {
301 | "stream_id": stream_id,
302 | "error": "INVALID_EXPECTED_CHANGESET_ID",
303 | "message": f'The specified expected event(3) doesn\'t exist. The "{stream_id}" stream\'s most recent event is 2.'
304 | })
--------------------------------------------------------------------------------
/src/tests/unit/test_parsing_lambda_events.py:
--------------------------------------------------------------------------------
1 | import json
2 | from unittest import TestCase
3 |
4 | from .context import ees
5 | from ees.infrastructure.aws_lambda import event_to_command, parse_dynamodb_new_records
6 | from ees.commands import *
7 | from ees.model import Response, CommitData
8 |
9 | class TestParsingLambdaEvents(TestCase):
10 |     def __init__(self, method_name):
11 | with open('src/tests/unit/events.json') as f:
12 | self.sample_events = json.load(f)
13 |         TestCase.__init__(self, method_name)
14 |
15 | def test_version(self):
16 | event = self.load_event("Version")
17 | cmd = event_to_command(event)
18 | assert isinstance(cmd, Version)
19 |
20 | def test_commit(self):
21 | event = self.load_event("Commit")
22 | cmd = event_to_command(event)
23 | assert isinstance(cmd, Commit)
24 | assert cmd.stream_id == "7ef3c378-8c97-49fe-97ba-f5afe719ea1c"
25 | assert cmd.expected_last_changeset == 7
26 | assert cmd.events == json.loads(event["body"])["events"]
27 | assert cmd.metadata == json.loads(event["body"])["metadata"]
28 |
29 | def test_commit_with_implicit_expected_changeset(self):
30 | event = self.load_event("Commit")
31 | del event["queryStringParameters"]["expected_last_changeset"]
32 | cmd = event_to_command(event)
33 | assert isinstance(cmd, Commit)
34 | assert cmd.stream_id == "7ef3c378-8c97-49fe-97ba-f5afe719ea1c"
35 | assert cmd.expected_last_changeset == 0
36 | assert cmd.events == json.loads(event["body"])["events"]
37 | assert cmd.metadata == json.loads(event["body"])["metadata"]
38 |
39 | def test_commit_without_stream_id(self):
40 | event = self.load_event("Commit")
41 | del event["pathParameters"]["stream_id"]
42 |
43 | err = event_to_command(event)
44 |
45 | assert isinstance(err, Response)
46 | assert err.http_status == 400
47 | self.assertDictEqual(err.body, {
48 | "error": "MISSING_STREAM_ID",
49 | "message": 'stream_id is a required value'
50 | })
51 |
52 | def test_commit_with_invalid_expected_changeset(self):
53 | event = self.load_event("Commit")
54 | event["queryStringParameters"]["expected_last_changeset"] = "test"
55 |
56 | err = event_to_command(event)
57 |
58 | assert isinstance(err, Response)
59 | assert err.http_status == 400
60 | self.assertDictEqual(err.body, {
61 | "stream_id": "7ef3c378-8c97-49fe-97ba-f5afe719ea1c",
62 | "error": "INVALID_EXPECTED_CHANGESET_ID",
63 |             "message": 'The specified expected changeset id("test") is invalid. Expected a positive integer.'
64 | })
65 |
66 | def test_commit_with_negative_expected_changeset(self):
67 | event = self.load_event("Commit")
68 | event["queryStringParameters"]["expected_last_changeset"] = -1
69 |
70 | err = event_to_command(event)
71 |
72 | assert isinstance(err, Response)
73 | assert err.http_status == 400
74 | self.assertDictEqual(err.body, {
75 | "stream_id": "7ef3c378-8c97-49fe-97ba-f5afe719ea1c",
76 | "error": "INVALID_EXPECTED_CHANGESET_ID",
77 |             "message": 'The specified expected changeset id("-1") is invalid. Expected a positive integer.'
78 | })
79 |
80 | def test_commit_with_both_expected_event_and_changeset(self):
81 | event = self.load_event("Commit")
82 | event["queryStringParameters"]["expected_last_event"] = "0"
83 | event["queryStringParameters"]["expected_last_changeset"] = "0"
84 | err = event_to_command(event)
85 | assert isinstance(err, Response)
86 | assert err.http_status == 400
87 | self.assertDictEqual(err.body, {
88 | "stream_id": "7ef3c378-8c97-49fe-97ba-f5afe719ea1c",
89 | "error": "BOTH_EXPECTED_CHANGESET_AND_EVENT_ARE_SET",
90 | "message": 'Cannot use both "last_changeset_id" and "last_event_id" for concurrency management. Specify only one value.'
91 | })
92 |
93 |
94 | def test_commit_with_last_event_as_empty_string(self):
95 | event = self.load_event("Commit")
96 | event["queryStringParameters"]["expected_last_event"] = ""
97 | event["queryStringParameters"]["expected_last_changeset"] = ""
98 | cmd = event_to_command(event)
99 | assert isinstance(cmd, Commit)
100 | assert cmd.stream_id == "7ef3c378-8c97-49fe-97ba-f5afe719ea1c"
101 |         assert cmd.expected_last_event is None
102 | assert cmd.expected_last_changeset == 0
103 |
104 | def test_commit_with_last_event(self):
105 | event = self.load_event("Commit")
106 | del event["queryStringParameters"]["expected_last_changeset"]
107 | event["queryStringParameters"]["expected_last_event"] = 7
108 | cmd = event_to_command(event)
109 | assert isinstance(cmd, Commit)
110 | assert cmd.stream_id == "7ef3c378-8c97-49fe-97ba-f5afe719ea1c"
111 | assert cmd.expected_last_event == 7
112 | assert cmd.events == json.loads(event["body"])["events"]
113 | assert cmd.metadata == json.loads(event["body"])["metadata"]
114 |
115 | def test_commit_with_invalid_expected_event(self):
116 | event = self.load_event("Commit")
117 | del event["queryStringParameters"]["expected_last_changeset"]
118 | event["queryStringParameters"]["expected_last_event"] = "test"
119 |
120 | err = event_to_command(event)
121 |
122 | assert isinstance(err, Response)
123 | assert err.http_status == 400
124 | self.assertDictEqual(err.body, {
125 | "stream_id": "7ef3c378-8c97-49fe-97ba-f5afe719ea1c",
126 | "error": "INVALID_EXPECTED_EVENT_ID",
127 |             "message": 'The specified expected event id("test") is invalid. Expected a positive integer.'
128 | })
129 |
130 |     def test_commit_with_negative_expected_event(self):
131 | event = self.load_event("Commit")
132 | del event["queryStringParameters"]["expected_last_changeset"]
133 | event["queryStringParameters"]["expected_last_event"] = -1
134 |
135 | err = event_to_command(event)
136 |
137 | assert isinstance(err, Response)
138 | assert err.http_status == 400
139 | self.assertDictEqual(err.body, {
140 | "stream_id": "7ef3c378-8c97-49fe-97ba-f5afe719ea1c",
141 | "error": "INVALID_EXPECTED_EVENT_ID",
142 |             "message": 'The specified expected event id("-1") is invalid. Expected a positive integer.'
143 | })
144 |
145 | def test_commit_without_metadata(self):
146 | event = self.load_event("Commit")
147 | body = json.loads(event["body"])
148 | del body["metadata"]
149 | event["body"] = json.dumps(body)
150 | cmd = event_to_command(event)
151 | assert isinstance(cmd, Commit)
152 | assert cmd.stream_id == "7ef3c378-8c97-49fe-97ba-f5afe719ea1c"
153 | assert cmd.expected_last_changeset == 7
154 | assert cmd.events == json.loads(event["body"])["events"]
155 |         assert cmd.metadata == {}
156 |
157 | def test_fetch_stream_changesets(self):
158 | event = self.load_event("StreamChangesets")
159 | cmd = event_to_command(event)
160 | assert isinstance(cmd, FetchStreamChangesets)
161 | assert cmd.stream_id == "fe80eaef-90c3-41be-9bc0-3f85458b9a8e"
162 | assert cmd.from_changeset == 1
163 | assert cmd.to_changeset == 5
164 |
165 | def test_fetch_stream_changesets_without_stream_id(self):
166 | event = self.load_event("StreamChangesets")
167 | del event["pathParameters"]["stream_id"]
168 |
169 | err = event_to_command(event)
170 |
171 | assert isinstance(err, Response)
172 | assert err.http_status == 400
173 | self.assertDictEqual(err.body, {
174 | "error": "MISSING_STREAM_ID",
175 | "message": 'stream_id is a required value'
176 | })
177 |
178 | def test_fetch_stream_changesets_without_from(self):
179 | event = self.load_event("StreamChangesets")
180 | del event["queryStringParameters"]["from"]
181 | cmd = event_to_command(event)
182 | assert isinstance(cmd, FetchStreamChangesets)
183 | assert cmd.stream_id == "fe80eaef-90c3-41be-9bc0-3f85458b9a8e"
184 |         assert cmd.from_changeset is None
185 | assert cmd.to_changeset == 5
186 |
187 | def test_fetch_stream_changesets_without_to(self):
188 | event = self.load_event("StreamChangesets")
189 | del event["queryStringParameters"]["to"]
190 | cmd = event_to_command(event)
191 | assert isinstance(cmd, FetchStreamChangesets)
192 | assert cmd.stream_id == "fe80eaef-90c3-41be-9bc0-3f85458b9a8e"
193 | assert cmd.from_changeset == 1
194 |         assert cmd.to_changeset is None
195 |
196 | def test_fetch_stream_changesets_invalid_to(self):
197 | event = self.load_event("StreamChangesets")
198 | event["queryStringParameters"]["to"] = "test"
199 |
200 | err = event_to_command(event)
201 |
202 | assert isinstance(err, Response)
203 | assert err.http_status == 400
204 | self.assertDictEqual(err.body, {
205 | "stream_id": "fe80eaef-90c3-41be-9bc0-3f85458b9a8e",
206 | "error": "INVALID_CHANGESET_FILTERING_PARAMS",
207 | "message": 'The filtering params(from, to) have to be positive integer values'
208 | })
209 |
210 | def test_fetch_stream_changesets_invalid_from(self):
211 | event = self.load_event("StreamChangesets")
212 | event["queryStringParameters"]["from"] = "test"
213 |
214 | err = event_to_command(event)
215 |
216 | assert isinstance(err, Response)
217 | assert err.http_status == 400
218 | self.assertDictEqual(err.body, {
219 | "stream_id": "fe80eaef-90c3-41be-9bc0-3f85458b9a8e",
220 | "error": "INVALID_CHANGESET_FILTERING_PARAMS",
221 | "message": 'The filtering params(from, to) have to be positive integer values'
222 | })
223 |
224 | def test_fetch_stream_changesets_wrong_order_of_from_and_to(self):
225 | event = self.load_event("StreamChangesets")
226 | event["queryStringParameters"]["from"] = "7"
227 | event["queryStringParameters"]["to"] = "1"
228 |
229 | err = event_to_command(event)
230 |
231 | assert isinstance(err, Response)
232 | assert err.http_status == 400
233 | self.assertDictEqual(err.body, {
234 | "stream_id": "fe80eaef-90c3-41be-9bc0-3f85458b9a8e",
235 | "error": "INVALID_CHANGESET_FILTERING_PARAMS",
236 |             "message": 'The higher boundary cannot be lower than the lower boundary: 7(from) > 1(to)'
237 | })
238 |
239 | def test_fetch_stream_events(self):
240 | event = self.load_event("StreamEvents")
241 | cmd = event_to_command(event)
242 | assert isinstance(cmd, FetchStreamEvents)
243 | assert cmd.stream_id == "d2333e6b-65a7-4a10-9886-2dd2fe873bed"
244 | assert cmd.from_event == 1
245 | assert cmd.to_event == 5
246 |
247 | def test_fetch_stream_events_without_stream_id(self):
248 | event = self.load_event("StreamEvents")
249 | del event["pathParameters"]["stream_id"]
250 |
251 | err = event_to_command(event)
252 |
253 | assert isinstance(err, Response)
254 | assert err.http_status == 400
255 | self.assertDictEqual(err.body, {
256 | "error": "MISSING_STREAM_ID",
257 | "message": 'stream_id is a required value'
258 | })
259 |
260 | def test_fetch_stream_events_without_from(self):
261 | event = self.load_event("StreamEvents")
262 | del event["queryStringParameters"]["from"]
263 | cmd = event_to_command(event)
264 | assert isinstance(cmd, FetchStreamEvents)
265 | assert cmd.stream_id == "d2333e6b-65a7-4a10-9886-2dd2fe873bed"
266 |         assert cmd.from_event is None
267 | assert cmd.to_event == 5
268 |
269 | def test_fetch_stream_events_without_to(self):
270 | event = self.load_event("StreamEvents")
271 | del event["queryStringParameters"]["to"]
272 | cmd = event_to_command(event)
273 | assert isinstance(cmd, FetchStreamEvents)
274 | assert cmd.stream_id == "d2333e6b-65a7-4a10-9886-2dd2fe873bed"
275 | assert cmd.from_event == 1
276 |         assert cmd.to_event is None
277 |
278 | def test_fetch_stream_events_invalid_to(self):
279 | event = self.load_event("StreamEvents")
280 | event["queryStringParameters"]["to"] = "test"
281 |
282 | err = event_to_command(event)
283 |
284 | assert isinstance(err, Response)
285 | assert err.http_status == 400
286 | self.assertDictEqual(err.body, {
287 | "stream_id": "d2333e6b-65a7-4a10-9886-2dd2fe873bed",
288 | "error": "INVALID_EVENT_FILTERING_PARAMS",
289 | "message": 'The filtering params(from, to) have to be positive integer values'
290 | })
291 |
292 | def test_fetch_stream_events_invalid_from(self):
293 | event = self.load_event("StreamEvents")
294 | event["queryStringParameters"]["from"] = "test"
295 |
296 | err = event_to_command(event)
297 |
298 | assert isinstance(err, Response)
299 | assert err.http_status == 400
300 | self.assertDictEqual(err.body, {
301 | "stream_id": "d2333e6b-65a7-4a10-9886-2dd2fe873bed",
302 | "error": "INVALID_EVENT_FILTERING_PARAMS",
303 | "message": 'The filtering params(from, to) have to be positive integer values'
304 | })
305 |
306 | def test_fetch_stream_events_wrong_order_of_from_and_to(self):
307 | event = self.load_event("StreamEvents")
308 | event["queryStringParameters"]["from"] = "7"
309 | event["queryStringParameters"]["to"] = "1"
310 |
311 | err = event_to_command(event)
312 |
313 | assert isinstance(err, Response)
314 | assert err.http_status == 400
315 | self.assertDictEqual(err.body, {
316 | "stream_id": "d2333e6b-65a7-4a10-9886-2dd2fe873bed",
317 | "error": "INVALID_EVENT_FILTERING_PARAMS",
318 |             "message": 'The higher boundary cannot be lower than the lower boundary: 7(from) > 1(to)'
319 | })
320 |
321 | def test_global_changesets(self):
322 | event = self.load_event("GlobalChangesets")
323 | cmd = event_to_command(event)
324 | assert isinstance(cmd, FetchGlobalChangesets)
325 | assert cmd.checkpoint == 44
326 | assert cmd.limit == 120
327 |
328 | def test_global_changesets_without_explicit_limit(self):
329 | event = self.load_event("GlobalChangesets")
330 | del event["queryStringParameters"]["limit"]
331 | cmd = event_to_command(event)
332 | assert isinstance(cmd, FetchGlobalChangesets)
333 | assert cmd.checkpoint == 44
334 |         assert cmd.limit is None
335 |
336 | def test_global_changesets_without_explicit_checkpoint(self):
337 | event = self.load_event("GlobalChangesets")
338 | del event["queryStringParameters"]["checkpoint"]
339 | del event["queryStringParameters"]["limit"]
340 | cmd = event_to_command(event)
341 | assert isinstance(cmd, FetchGlobalChangesets)
342 | assert cmd.checkpoint == 0
343 |         assert cmd.limit is None
344 |
345 | def test_global_changesets_with_invalid_checkpoint(self):
346 | event = self.load_event("GlobalChangesets")
347 | event["queryStringParameters"]["checkpoint"] = "test"
348 | err = event_to_command(event)
349 | assert isinstance(err, Response)
350 | assert err.http_status == 400
351 | self.assertDictEqual(err.body, {
352 | "error": "INVALID_CHECKPOINT",
353 | "message": '"test" is an invalid checkpoint value. Expected a positive integer value.'
354 | })
355 |
356 | def test_global_changesets_with_invalid_checkpoint2(self):
357 | event = self.load_event("GlobalChangesets")
358 | event["queryStringParameters"]["checkpoint"] = "-2"
359 | err = event_to_command(event)
360 | assert isinstance(err, Response)
361 | assert err.http_status == 400
362 | self.assertDictEqual(err.body, {
363 | "error": "INVALID_CHECKPOINT",
364 | "message": '"-2" is an invalid checkpoint value. Expected a positive integer value.'
365 | })
366 |
367 | def test_global_changesets_with_invalid_limit(self):
368 | event = self.load_event("GlobalChangesets")
369 | event["queryStringParameters"]["limit"] = "test"
370 | err = event_to_command(event)
371 | assert isinstance(err, Response)
372 | assert err.http_status == 400
373 | self.assertDictEqual(err.body, {
374 | "error": "INVALID_LIMIT",
375 | "message": '"test" is an invalid limit value. Expected an integer value greater than 0.'
376 | })
377 |
378 | def test_global_changesets_with_invalid_limit2(self):
379 | event = self.load_event("GlobalChangesets")
380 | event["queryStringParameters"]["limit"] = "-2"
381 | err = event_to_command(event)
382 | assert isinstance(err, Response)
383 | assert err.http_status == 400
384 | self.assertDictEqual(err.body, {
385 | "error": "INVALID_LIMIT",
386 | "message": '"-2" is an invalid limit value. Expected an integer value greater than 0.'
387 | })
388 |
389 |     def test_global_changesets_with_zero_limit(self):
390 | event = self.load_event("GlobalChangesets")
391 | event["queryStringParameters"]["limit"] = "0"
392 | err = event_to_command(event)
393 | assert isinstance(err, Response)
394 | assert err.http_status == 400
395 | self.assertDictEqual(err.body, {
396 | "error": "INVALID_LIMIT",
397 | "message": '"0" is an invalid limit value. Expected an integer value greater than 0.'
398 | })
399 |
400 | def test_assign_global_index(self):
401 | event = self.load_event("AssignGlobalIndex")
402 | cmd = event_to_command(event)
403 | assert isinstance(cmd, AssignGlobalIndexes)
404 | self.assertListEqual(cmd.changesets, [
405 | { "stream_id": "99038933-e620-444d-9033-4128254f0cbd", "changeset_id": 2 },
406 | { "stream_id": "206bc1ed-8e67-4a64-a596-8b32c0c20a97", "changeset_id": 1 }
407 | ])
408 |
409 | def test_new_dynamodb_records(self):
410 | self.maxDiff = None
411 | event = self.load_event("AssignGlobalIndex")
412 | changesets = parse_dynamodb_new_records(event, None)
413 | self.assertListEqual(
414 | changesets,
415 | [
416 | CommitData(stream_id='99038933-e620-444d-9033-4128254f0cbd', changeset_id=2, metadata={'timestamp': '123123', 'command_id': '456346234', 'issued_by': 'test@test.com'}, events=[{'type': 'init', 'foo': 'bar'}, {'type': 'update', 'foo': 'baz'}], first_event_id=3, last_event_id=4, page=None, page_item=None),
417 | CommitData(stream_id='206bc1ed-8e67-4a64-a596-8b32c0c20a97', changeset_id=1, metadata={'timestamp': '123123', 'command_id': '456346234', 'issued_by': 'test@test.com'}, events=[{'type': 'init', 'foo': 'bar'}, {'type': 'update', 'foo': 'baz'}], first_event_id=1, last_event_id=2, page=None, page_item=None)
418 | ]
419 | )
420 |
421 | def load_event(self, name):
422 | return self.sample_events[name]
--------------------------------------------------------------------------------
/src/ees/infrastructure/dynamodb.py:
--------------------------------------------------------------------------------
1 | import boto3
2 | import botocore
3 | from datetime import datetime
4 | import json
5 | import logging
6 | from ees.model import CommitData, ConcurrencyException, GlobalCounter, GlobalIndex, CheckpointCalc, AnalysisState
7 |
8 | logger = logging.getLogger("ees.infrastructure.dynamodb")
9 |
10 | class DynamoDB:
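    """Data access for the event store, built on two DynamoDB tables.

    The events table is keyed by (stream_id, changeset_id); each item stores
    a changeset's events and metadata as JSON strings together with the range
    of event ids it covers. One reserved item (global_counter_key below)
    lives in the same table and tracks the global (page, page_item) counter
    used to enumerate changesets across all streams. The analysis table
    holds a versioned projection row.
    """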
11 | global_counter_key = '!!!RESERVED:GLOBAL-COUNTER!!!'
12 | global_counter_range = 0
13 |
14 | def __init__(self, events_table, analysis_table):
15 | self.events_table = events_table
16 | self.analysis_table = analysis_table
17 | self.dynamodb_ll = boto3.client('dynamodb')
18 | self.checkpoint_calc = CheckpointCalc()
19 |
20 | def append(self, commit):
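        # Optimistic concurrency: the Expected clause rejects the put if an
        # item with this (stream_id, changeset_id) already exists, and the
        # conditional failure is translated into a ConcurrencyException below.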
21 | item = {
22 | 'stream_id': { "S": commit.stream_id },
23 | 'changeset_id': { "N": str(commit.changeset_id) },
24 | 'metadata': { "S": json.dumps(commit.metadata) },
25 | 'events': { "S": json.dumps(commit.events) },
26 | 'first_event_id': { "N": str(commit.first_event_id) },
27 | 'last_event_id': { "N": str(commit.last_event_id) },
28 | 'timestamp': { "S": self.get_timestamp() }
29 | }
30 |
31 | condition = {
32 | 'stream_id': { "Exists": False },
33 | 'changeset_id': { "Exists": False },
34 | }
35 |
36 | try:
37 | self.dynamodb_ll.put_item(
38 | TableName=self.events_table, Item=item, Expected=condition
39 | )
40 | except botocore.exceptions.ClientError as e:
41 | if e.response['Error']['Code'] == 'ConditionalCheckFailedException':
42 | logger.debug(f"ConditionalCheckFailedException for {commit.stream_id}/{commit.changeset_id}")
43 | raise ConcurrencyException(commit.stream_id, commit.changeset_id)
44 | else:
45 | raise e
46 |
47 | def fetch_last_commit(self, stream_id, meta_only=False):
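        # Read the newest changeset of the stream: ScanIndexForward=False
        # walks the changeset_id range key in descending order and Limit=1
        # keeps only the head item.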
48 | projection = 'stream_id,changeset_id,events,metadata,first_event_id,last_event_id'
49 | if meta_only:
50 | projection = 'stream_id,changeset_id,first_event_id,last_event_id'
51 |
52 | response = self.dynamodb_ll.query(
53 | TableName=self.events_table,
54 | ProjectionExpression=projection,
55 | Limit=1,
56 | ScanIndexForward=False,
57 | KeyConditions={
58 | 'stream_id': {
59 | 'AttributeValueList': [
60 | {
61 | 'S': stream_id
62 | },
63 | ],
64 | 'ComparisonOperator': 'EQ'
65 | }
66 | }
67 | )
68 | if response["Count"] == 0:
69 | return None
70 |
71 | return DynamoDB.parse_commit(response["Items"][0])
72 |
73 | def fetch_stream_changesets(self,
74 | stream_id,
75 | from_changeset=None,
76 | to_changeset=None):
77 | if not from_changeset and not to_changeset:
78 | from_changeset = 1
79 |
80 | range_condition = None
81 | if from_changeset and to_changeset:
82 | range_condition = {
83 | 'AttributeValueList': [
84 | {
85 | 'N': str(from_changeset)
86 | },
87 | {
88 | 'N': str(to_changeset)
89 | }
90 | ],
91 | 'ComparisonOperator': 'BETWEEN'
92 | }
93 | elif from_changeset:
94 | range_condition = {
95 | 'AttributeValueList': [
96 | {
97 | 'N': str(from_changeset)
98 | }
99 | ],
100 | 'ComparisonOperator': 'GE'
101 | }
102 | elif to_changeset:
103 | range_condition = {
104 | 'AttributeValueList': [
105 | {
106 | 'N': str(to_changeset)
107 | }
108 | ],
109 | 'ComparisonOperator': 'LE'
110 | }
111 |
112 | response = self.dynamodb_ll.query(
113 | TableName=self.events_table,
114 | Select='ALL_ATTRIBUTES',
115 | ScanIndexForward=True,
116 | KeyConditions={
117 | 'stream_id': {
118 | 'AttributeValueList': [
119 | {
120 | 'S': stream_id
121 | },
122 | ],
123 | 'ComparisonOperator': 'EQ'
124 | },
125 | 'changeset_id': range_condition
126 | }
127 | )
128 |
129 | return [DynamoDB.parse_commit(r) for r in response["Items"]]
130 |
131 | def fetch_stream_by_events(self, stream_id, from_event=None, to_event=None):
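        # Event-range reads go through the LastEventId / FirstEventId
        # secondary indexes: a lower bound filters on last_event_id, an
        # upper bound on first_event_id, and a closed range is delegated to
        # fetch_changesets_by_events_range.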
132 | if not from_event and not to_event:
133 | from_event = 1
134 |
135 | index_name = None
136 | range_condition = None
137 | column = None
138 |
139 | if from_event and to_event and from_event == to_event:
140 | return [self.read_changeset_containing_event(stream_id, from_event)]
141 |
142 | if from_event and to_event:
143 | return self.fetch_changesets_by_events_range(stream_id, from_event, to_event)
144 |
145 | if from_event:
146 | index_name = 'LastEventId'
147 | column = 'last_event_id'
148 | range_condition = {
149 | 'AttributeValueList': [
150 | {
151 | 'N': str(from_event)
152 | }
153 | ],
154 | 'ComparisonOperator': 'GE'
155 | }
156 | elif to_event:
157 | index_name = 'FirstEventId'
158 | column = 'first_event_id'
159 | range_condition = {
160 | 'AttributeValueList': [
161 | {
162 | 'N': str(to_event)
163 | }
164 | ],
165 | 'ComparisonOperator': 'LE'
166 | }
167 |
168 | response = self.dynamodb_ll.query(
169 | TableName=self.events_table,
170 | Select='ALL_ATTRIBUTES',
171 | IndexName=index_name,
172 | ScanIndexForward=True,
173 | KeyConditions={
174 | 'stream_id': {
175 | 'AttributeValueList': [
176 | {
177 | 'S': stream_id
178 | },
179 | ],
180 | 'ComparisonOperator': 'EQ'
181 | },
182 | column: range_condition
183 | }
184 | )
185 |
186 | return [DynamoDB.parse_commit(r) for r in response["Items"]]
187 |
188 | def fetch_changesets_by_events_range(self, stream_id, from_event, to_event):
189 | first_changeset = self.read_changeset_containing_event(stream_id, from_event)
190 | if not first_changeset:
191 | return None
192 |
193 | if first_changeset.last_event_id >= to_event:
194 | return [first_changeset]
195 |
196 | response = self.dynamodb_ll.query(
197 | TableName=self.events_table,
198 | Select='ALL_ATTRIBUTES',
199 | IndexName="FirstEventId",
200 | ScanIndexForward=True,
201 | KeyConditions={
202 | 'stream_id': {
203 | 'AttributeValueList': [
204 | {
205 | 'S': stream_id
206 | },
207 | ],
208 | 'ComparisonOperator': 'EQ'
209 | },
210 | "first_event_id": {
211 | 'AttributeValueList': [
212 | {
213 | 'N': str(from_event)
214 | },
215 | {
216 | 'N': str(to_event)
217 | },
218 | ],
219 | 'ComparisonOperator': 'BETWEEN'
220 | }
221 | }
222 | )
223 |
224 | return [first_changeset] + [DynamoDB.parse_commit(r) for r in response["Items"]]
225 |
226 | def read_changeset_containing_event(self, stream_id, event_id):
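        # The changeset containing event_id is the first one, in ascending
        # last_event_id order, whose last_event_id >= event_id.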
227 | response = self.dynamodb_ll.query(
228 | TableName=self.events_table,
229 | Select='ALL_ATTRIBUTES',
230 | IndexName='LastEventId',
231 | ScanIndexForward=True,
232 | Limit=1,
233 | KeyConditions={
234 | 'stream_id': {
235 | 'AttributeValueList': [
236 | {
237 | 'S': stream_id
238 | },
239 | ],
240 | 'ComparisonOperator': 'EQ'
241 | },
242 | 'last_event_id': {
243 | 'AttributeValueList': [
244 | {
245 | 'N': str(event_id)
246 | }
247 | ],
248 | 'ComparisonOperator': 'GE'
249 | }
250 | }
251 | )
252 |
253 | changesets = [DynamoDB.parse_commit(r) for r in response["Items"]]
254 | return changesets[0] if changesets else None
255 |
256 | @classmethod
257 | def parse_commit(cls, record):
258 | logger.debug(f"Parsing DynamoDB record: {record}")
259 | stream_id = record["stream_id"]["S"]
260 | changeset_id = int(record["changeset_id"]["N"])
261 | first_event_id = int(record["first_event_id"]["N"])
262 | last_event_id = int(record["last_event_id"]["N"])
263 |
264 | events = None
265 |         if "events" in record:
266 | events_json = record["events"]["S"]
267 | events = json.loads(events_json)
268 |
269 | metadata = None
270 |         if "metadata" in record:
271 | metadata_json = record["metadata"]["S"]
272 | metadata = json.loads(metadata_json)
273 |
274 | page = None
275 | page_item = None
276 |         if "page" in record:
277 | page = int(record["page"]["N"])
278 | page_item = int(record["page_item"]["N"])
279 |
280 | return CommitData(stream_id, changeset_id, metadata, events,
281 | first_event_id, last_event_id, page, page_item)
282 |
283 | def get_timestamp(self):
284 | return datetime.utcnow().isoformat("T") + "Z"
285 |
286 | def get_global_counter(self):
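        # Lazily create the reserved counter item on first use, then re-read.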
287 | counter = self.__get_global_counter()
288 | if not counter:
289 | self.init_global_counter()
290 | counter = self.__get_global_counter()
291 | return counter
292 |
293 | def __get_global_counter(self):
294 | response = self.dynamodb_ll.query(
295 | TableName=self.events_table,
296 | ProjectionExpression='page,page_item,prev_stream_id,prev_changeset_id',
297 | Limit=1,
298 | ScanIndexForward=False,
299 | KeyConditions={
300 | 'stream_id': {
301 | 'AttributeValueList': [
302 | {
303 | 'S': self.global_counter_key
304 | },
305 | ],
306 | 'ComparisonOperator': 'EQ'
307 | },
308 | 'changeset_id': {
309 | 'AttributeValueList': [
310 | {
311 | 'N': str(self.global_counter_range)
312 | },
313 | ],
314 | 'ComparisonOperator': 'EQ'
315 | }
316 | }
317 | )
318 | if response["Count"] == 0:
319 | return None
320 |
321 | data = response["Items"][0]
322 | return GlobalCounter(int(data["page"]["N"]),
323 | int(data["page_item"]["N"]),
324 | data["prev_stream_id"]["S"],
325 | int(data["prev_changeset_id"]["N"]))
326 |
327 | def init_global_counter(self):
328 | item = {
329 | 'stream_id': { "S": self.global_counter_key },
330 | 'changeset_id': { "N": str(self.global_counter_range) },
331 | 'page': { "N": str(0) },
332 | 'page_item': { "N": str(-1) },
333 | 'prev_stream_id': { "S": "" },
334 | 'prev_changeset_id': { "N": str(0) }
335 | }
336 |
337 | condition = {
338 | 'stream_id': { "Exists": False },
339 | 'changeset_id': { "Exists": False },
340 | }
341 |
342 | try:
343 | self.dynamodb_ll.put_item(
344 | TableName=self.events_table, Item=item, Expected=condition
345 | )
346 | except botocore.exceptions.ClientError as e:
347 | if e.response['Error']['Code'] == 'ConditionalCheckFailedException':
348 | return
349 | else:
350 | raise e
351 |
352 | def update_global_counter(self, prev_value, new_value):
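        # Compare-and-set: the update only succeeds while (page, page_item)
        # still equal prev_value, so two concurrent indexers cannot both
        # advance the counter from the same position.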
353 | try:
354 | self.dynamodb_ll.update_item(
355 | TableName=self.events_table,
356 | Key={
357 | 'stream_id': { "S": self.global_counter_key },
358 | 'changeset_id': { "N": str(self.global_counter_range) }
359 | },
360 | AttributeUpdates={
361 | 'page': { "Value": { "N": str(new_value.page) } },
362 | 'page_item': { "Value": { "N": str(new_value.page_item) } },
363 | 'prev_stream_id': { "Value": { "S": new_value.prev_stream_id } },
364 |                     'prev_changeset_id': { "Value": { "N": str(new_value.prev_changeset_id) } }  # must match the attribute read back in __get_global_counter
365 | },
366 | Expected={
367 | 'page': { "Value": { "N": str(prev_value.page) } },
368 | 'page_item': { "Value": { "N": str(prev_value.page_item) } }
369 | }
370 | )
371 | except botocore.exceptions.ClientError as e:
372 | if e.response['Error']['Code'] == 'ConditionalCheckFailedException':
373 | raise ConcurrencyException(self.global_counter_key, self.global_counter_range)
374 | else:
375 | raise e
376 |
377 | def get_global_index_value(self, stream_id, changeset_id):
378 | response = self.dynamodb_ll.query(
379 | TableName=self.events_table,
380 | ProjectionExpression='page,page_item',
381 | Limit=1,
382 | ScanIndexForward=False,
383 | KeyConditions={
384 | 'stream_id': {
385 | 'AttributeValueList': [
386 | {
387 | 'S': stream_id
388 | },
389 | ],
390 | 'ComparisonOperator': 'EQ'
391 | },
392 | 'changeset_id': {
393 | 'AttributeValueList': [
394 | {
395 | 'N': str(changeset_id)
396 | },
397 | ],
398 | 'ComparisonOperator': 'EQ'
399 | }
400 | }
401 | )
402 | if response["Count"] == 0:
403 | return None
404 |
405 | data = response["Items"][0]
406 | page = data.get("page")
407 | page_item = data.get("page_item")
408 | if page:
409 | page = int(page["N"])
410 | if page_item:
411 | page_item = int(page_item["N"])
412 |
413 | return GlobalIndex(stream_id, changeset_id, page, page_item)
414 |
415 | def set_global_index(self, global_index):
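        # Stamp a changeset with its global (page, page_item) position; the
        # Expected clause ensures a position is assigned at most once.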
416 | stream_id = global_index.stream_id
417 | changeset_id = global_index.changeset_id
418 | page = global_index.page
419 | page_item = global_index.page_item
420 |
421 | try:
422 | self.dynamodb_ll.update_item(
423 | TableName=self.events_table,
424 | Key={
425 | 'stream_id': { "S": stream_id },
426 | 'changeset_id': { "N": str(changeset_id) }
427 | },
428 | AttributeUpdates={
429 | 'page': { "Value": { "N": str(page) } },
430 | 'page_item': { "Value": { "N": str(page_item) } }
431 | },
432 | Expected={
433 |                     'page': { "Exists": False },
434 |                     'page_item': { "Exists": False }
435 | }
436 | )
437 | except botocore.exceptions.ClientError as e:
438 | if e.response['Error']['Code'] == 'ConditionalCheckFailedException':
439 | raise ConcurrencyException(self.global_counter_key, self.global_counter_range)
440 | else:
441 | raise e
442 |
443 | def fetch_global_changesets(self, checkpoint, limit):
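        # Translate the checkpoint into a (page, page_item) start position,
        # then read page after page, skipping the reserved counter item,
        # until `limit` changesets are collected or a batch comes back empty.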
444 | def fetch_batch(page, since_item, limit):
445 | response = self.dynamodb_ll.query(
446 | TableName=self.events_table,
447 | Select='ALL_ATTRIBUTES',
448 | IndexName='EmumerationIndex',
449 | ScanIndexForward=True,
450 | Limit=limit,
451 | KeyConditions={
452 | 'page': {
453 | 'AttributeValueList': [
454 | {
455 | 'N': str(page)
456 | },
457 | ],
458 | 'ComparisonOperator': 'EQ'
459 | },
460 | 'page_item': {
461 | 'AttributeValueList': [
462 | {
463 | 'N': str(since_item)
464 | }
465 | ],
466 | 'ComparisonOperator': 'GE'
467 | }
468 | }
469 | )
470 | return [DynamoDB.parse_commit(r) for r in response["Items"] if r["stream_id"]["S"] != self.global_counter_key]
471 |
472 | (page, page_item) = self.checkpoint_calc.to_page_item(checkpoint)
473 |
474 | changesets_left = limit
475 | last_batch = None
476 | result = []
477 | while True:
478 | last_batch = fetch_batch(page, page_item, changesets_left)
479 | if len(last_batch) > 0:
480 | result.extend(last_batch)
481 | (page, page_item) = self.checkpoint_calc.next_page_and_item(page, page_item)
482 | changesets_left = changesets_left - len(last_batch)
483 | else:
484 | break
485 |
486 | if changesets_left <= 0:
487 | break
488 |
489 | return result
490 |
491 | def fetch_global_events(self, checkpoint, event_in_checkpoint, limit):
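        # Currently mirrors fetch_global_changesets: it returns whole
        # changesets starting at the checkpoint and does not yet use
        # event_in_checkpoint to skip into the middle of the first changeset.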
492 | def fetch_batch(page, since_item, limit):
493 | response = self.dynamodb_ll.query(
494 | TableName=self.events_table,
495 | Select='ALL_ATTRIBUTES',
496 | IndexName='EmumerationIndex',
497 | ScanIndexForward=True,
498 | Limit=limit,
499 | KeyConditions={
500 | 'page': {
501 | 'AttributeValueList': [
502 | {
503 | 'N': str(page)
504 | },
505 | ],
506 | 'ComparisonOperator': 'EQ'
507 | },
508 | 'page_item': {
509 | 'AttributeValueList': [
510 | {
511 | 'N': str(since_item)
512 | }
513 | ],
514 | 'ComparisonOperator': 'GE'
515 | }
516 | }
517 | )
518 | return [DynamoDB.parse_commit(r) for r in response["Items"] if r["stream_id"]["S"] != self.global_counter_key]
519 |
520 | (page, page_item) = self.checkpoint_calc.to_page_item(checkpoint)
521 |
522 | changesets_left = limit
523 | last_batch = None
524 | result = []
525 | while True:
526 | last_batch = fetch_batch(page, page_item, changesets_left)
527 | if len(last_batch) > 0:
528 | result.extend(last_batch)
529 | (page, page_item) = self.checkpoint_calc.next_page_and_item(page, page_item)
530 | changesets_left = changesets_left - len(last_batch)
531 | else:
532 | break
533 |
534 | if changesets_left <= 0:
535 | break
536 |
537 | return result
538 |
539 | def get_analysis_state(self):
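        # Same lazy-initialization pattern as the global counter: create the
        # projection row on first read, then fetch it again.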
540 | def fetch_state():
541 | projection = 'projection_id,proj_state,version'
542 |
543 | response = self.dynamodb_ll.query(
544 | TableName=self.analysis_table,
545 | ProjectionExpression=projection,
546 | Limit=1,
547 | ScanIndexForward=False,
548 | KeyConditions={
549 | 'projection_id': {
550 | 'AttributeValueList': [
551 | {
552 | 'S': "analysis_state"
553 | },
554 | ],
555 | 'ComparisonOperator': 'EQ'
556 | }
557 | }
558 | )
559 | if response["Count"] == 0:
560 | return None
561 |
562 | data = json.loads(response["Items"][0]["proj_state"]["S"])
563 | return AnalysisState(
564 | total_streams=data["total_streams"],
565 | total_changesets=data["total_changesets"],
566 | total_events=data["total_events"],
567 | max_stream_length=data["max_stream_length"],
568 | version=int(response["Items"][0]["version"]["N"])
569 | )
570 | result = fetch_state()
571 | if not result:
572 | self.init_analysis_state()
573 | result = fetch_state()
574 | return result
575 |
576 | def init_analysis_state(self):
577 | state_value = {
578 | "total_streams": 0,
579 | "total_changesets": 0,
580 | "total_events": 0,
581 | "max_stream_length": 0,
582 | "version": 0
583 | }
584 |
585 | item = {
586 | 'projection_id': { "S": "analysis_state" },
587 | 'proj_state': { "S": json.dumps(state_value) },
588 | 'version': { "N": str(0) }
589 | }
590 |
591 | condition = {
592 | 'projection_id': { "Exists": False }
593 | }
594 |
595 | try:
596 | self.dynamodb_ll.put_item(
597 | TableName=self.analysis_table, Item=item, Expected=condition
598 | )
599 | except botocore.exceptions.ClientError as e:
600 | if e.response['Error']['Code'] == 'ConditionalCheckFailedException':
601 |                 # means it already exists, no need to init
602 | return
603 | else:
604 | raise e
605 |
606 | def set_analysis_state(self, state, expected_version):
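        # Versioned write: the put only succeeds while the stored version
        # still equals expected_version, giving the analysis projection
        # optimistic locking.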
607 | state_value = {
608 | "total_streams": state.total_streams,
609 | "total_changesets": state.total_changesets,
610 | "total_events": state.total_events,
611 | "max_stream_length": state.max_stream_length,
612 | "version": state.version
613 | }
614 |
615 | item = {
616 | 'projection_id': { "S": "analysis_state" },
617 | 'proj_state': { "S": json.dumps(state_value) },
618 | 'version': { "N": str(state.version) }
619 | }
620 |
621 | condition = {
622 | 'version': { "Value": { "N": str(expected_version) } }
623 | }
624 |
625 | try:
626 | self.dynamodb_ll.put_item(
627 | TableName=self.analysis_table, Item=item, Expected=condition
628 | )
629 | except botocore.exceptions.ClientError as e:
630 | if e.response['Error']['Code'] == 'ConditionalCheckFailedException':
631 | logger.debug(f"ConditionalCheckFailedException for analysis model, expected version {expected_version}")
632 | raise ConcurrencyException("analysis_model", expected_version)
633 | else:
634 | raise e
--------------------------------------------------------------------------------
/src/tests/unit/events.json:
--------------------------------------------------------------------------------
1 | {
2 | "Version": {
3 | "resource": "/version",
4 | "path": "/version",
5 | "httpMethod": "GET",
6 | "headers": {
7 | "Accept": "*/*",
8 | "Accept-Encoding": "gzip, deflate",
9 | "CloudFront-Forwarded-Proto": "https",
10 | "CloudFront-Is-Desktop-Viewer": "true",
11 | "CloudFront-Is-Mobile-Viewer": "false",
12 | "CloudFront-Is-SmartTV-Viewer": "false",
13 | "CloudFront-Is-Tablet-Viewer": "false",
14 | "CloudFront-Viewer-Country": "IL",
15 | "Host": "varta1icqe.execute-api.us-east-1.amazonaws.com",
16 | "User-Agent": "python-requests/2.25.1",
17 | "Via": "1.1 d4e2a230c602065d2e7043c30b343ff6.cloudfront.net (CloudFront)",
18 | "X-Amz-Cf-Id": "NQpkeUf1r6tdyMWGlFptxKrai3jIOzNPhyyW8A780XZ-Cs16YhXk7w==",
19 | "X-Amzn-Trace-Id": "Root=1-6017cc0b-4b8789ee08ca022346f60bde",
20 | "X-Forwarded-For": "79.178.12.16, 130.176.1.147",
21 | "X-Forwarded-Port": "443",
22 | "X-Forwarded-Proto": "https"
23 | },
24 | "multiValueHeaders": {
25 | "Accept": [
26 | "*/*"
27 | ],
28 | "Accept-Encoding": [
29 | "gzip, deflate"
30 | ],
31 | "CloudFront-Forwarded-Proto": [
32 | "https"
33 | ],
34 | "CloudFront-Is-Desktop-Viewer": [
35 | "true"
36 | ],
37 | "CloudFront-Is-Mobile-Viewer": [
38 | "false"
39 | ],
40 | "CloudFront-Is-SmartTV-Viewer": [
41 | "false"
42 | ],
43 | "CloudFront-Is-Tablet-Viewer": [
44 | "false"
45 | ],
46 | "CloudFront-Viewer-Country": [
47 | "IL"
48 | ],
49 | "Host": [
50 | "varta1icqe.execute-api.us-east-1.amazonaws.com"
51 | ],
52 | "User-Agent": [
53 | "python-requests/2.25.1"
54 | ],
55 | "Via": [
56 | "1.1 d4e2a230c602065d2e7043c30b343ff6.cloudfront.net (CloudFront)"
57 | ],
58 | "X-Amz-Cf-Id": [
59 | "NQpkeUf1r6tdyMWGlFptxKrai3jIOzNPhyyW8A780XZ-Cs16YhXk7w=="
60 | ],
61 | "X-Amzn-Trace-Id": [
62 | "Root=1-6017cc0b-4b8789ee08ca022346f60bde"
63 | ],
64 | "X-Forwarded-For": [
65 | "79.178.12.16, 130.176.1.147"
66 | ],
67 | "X-Forwarded-Port": [
68 | "443"
69 | ],
70 | "X-Forwarded-Proto": [
71 | "https"
72 | ]
73 | },
74 | "queryStringParameters": null,
75 | "multiValueQueryStringParameters": null,
76 | "pathParameters": null,
77 | "stageVariables": null,
78 | "requestContext": {
79 | "resourceId": "00dbnb",
80 | "resourcePath": "/version",
81 | "httpMethod": "GET",
82 | "extendedRequestId": "aDzRxG99IAMF1nA=",
83 | "requestTime": "01/Feb/2021:09:38:19 +0000",
84 | "path": "/Prod/version",
85 | "accountId": "139683429663",
86 | "protocol": "HTTP/1.1",
87 | "stage": "Prod",
88 | "domainPrefix": "varta1icqe",
89 | "requestTimeEpoch": 1612172299274,
90 | "requestId": "047a23b4-d94d-458c-84d6-5575e0d2fcf7",
91 | "identity": {
92 | "cognitoIdentityPoolId": null,
93 | "accountId": null,
94 | "cognitoIdentityId": null,
95 | "caller": null,
96 | "sourceIp": "79.178.12.16",
97 | "principalOrgId": null,
98 | "accessKey": null,
99 | "cognitoAuthenticationType": null,
100 | "cognitoAuthenticationProvider": null,
101 | "userArn": null,
102 | "userAgent": "python-requests/2.25.1",
103 | "user": null
104 | },
105 | "domainName": "varta1icqe.execute-api.us-east-1.amazonaws.com",
106 | "apiId": "varta1icqe"
107 | },
108 | "body": null,
109 | "isBase64Encoded": false
110 | },
111 | "Commit": {
112 | "resource": "/streams/{stream_id}",
113 | "path": "/streams/7ef3c378-8c97-49fe-97ba-f5afe719ea1c",
114 | "httpMethod": "POST",
115 | "headers": {
116 | "Accept": "*/*",
117 | "Accept-Encoding": "gzip, deflate",
118 | "CloudFront-Forwarded-Proto": "https",
119 | "CloudFront-Is-Desktop-Viewer": "true",
120 | "CloudFront-Is-Mobile-Viewer": "false",
121 | "CloudFront-Is-SmartTV-Viewer": "false",
122 | "CloudFront-Is-Tablet-Viewer": "false",
123 | "CloudFront-Viewer-Country": "IL",
124 | "Content-Type": "application/json",
125 | "Host": "varta1icqe.execute-api.us-east-1.amazonaws.com",
126 | "User-Agent": "python-requests/2.25.1",
127 | "Via": "1.1 6c2d36902aa2beb329c88167d0ba006d.cloudfront.net (CloudFront)",
128 | "X-Amz-Cf-Id": "SkXu7r5WGv7iPmQJXv3bONnvJiBOSBRcaMGbSvIMkjTahV2-NUYAkA==",
129 | "X-Amzn-Trace-Id": "Root=1-6017cc06-0d02233a465ff990113a6fde",
130 | "X-Forwarded-For": "79.178.12.16, 130.176.1.157",
131 | "X-Forwarded-Port": "443",
132 | "X-Forwarded-Proto": "https"
133 | },
134 | "multiValueHeaders": {
135 | "Accept": [
136 | "*/*"
137 | ],
138 | "Accept-Encoding": [
139 | "gzip, deflate"
140 | ],
141 | "CloudFront-Forwarded-Proto": [
142 | "https"
143 | ],
144 | "CloudFront-Is-Desktop-Viewer": [
145 | "true"
146 | ],
147 | "CloudFront-Is-Mobile-Viewer": [
148 | "false"
149 | ],
150 | "CloudFront-Is-SmartTV-Viewer": [
151 | "false"
152 | ],
153 | "CloudFront-Is-Tablet-Viewer": [
154 | "false"
155 | ],
156 | "CloudFront-Viewer-Country": [
157 | "IL"
158 | ],
159 | "Content-Type": [
160 | "application/json"
161 | ],
162 | "Host": [
163 | "varta1icqe.execute-api.us-east-1.amazonaws.com"
164 | ],
165 | "User-Agent": [
166 | "python-requests/2.25.1"
167 | ],
168 | "Via": [
169 | "1.1 6c2d36902aa2beb329c88167d0ba006d.cloudfront.net (CloudFront)"
170 | ],
171 | "X-Amz-Cf-Id": [
172 | "SkXu7r5WGv7iPmQJXv3bONnvJiBOSBRcaMGbSvIMkjTahV2-NUYAkA=="
173 | ],
174 | "X-Amzn-Trace-Id": [
175 | "Root=1-6017cc06-0d02233a465ff990113a6fde"
176 | ],
177 | "X-Forwarded-For": [
178 | "79.178.12.16, 130.176.1.157"
179 | ],
180 | "X-Forwarded-Port": [
181 | "443"
182 | ],
183 | "X-Forwarded-Proto": [
184 | "https"
185 | ]
186 | },
187 | "queryStringParameters": {
188 | "expected_last_changeset": "7"
189 | },
190 | "multiValueQueryStringParameters": {
191 | "expected_last_changeset": [
192 | "7"
193 | ]
194 | },
195 | "pathParameters": {
196 | "stream_id": "7ef3c378-8c97-49fe-97ba-f5afe719ea1c"
197 | },
198 | "stageVariables": null,
199 | "requestContext": {
200 | "resourceId": "4tkg6b",
201 | "resourcePath": "/streams/{stream_id}",
202 | "httpMethod": "POST",
203 | "extendedRequestId": "aDzRGFhAoAMFlPg=",
204 | "requestTime": "01/Feb/2021:09:38:14 +0000",
205 | "path": "/Prod/streams/7ef3c378-8c97-49fe-97ba-f5afe719ea1c",
206 | "accountId": "139683429663",
207 | "protocol": "HTTP/1.1",
208 | "stage": "Prod",
209 | "domainPrefix": "varta1icqe",
210 | "requestTimeEpoch": 1612172294983,
211 | "requestId": "432aaec4-def7-4091-a580-918332dccf31",
212 | "identity": {
213 | "cognitoIdentityPoolId": null,
214 | "accountId": null,
215 | "cognitoIdentityId": null,
216 | "caller": null,
217 | "sourceIp": "79.178.12.16",
218 | "principalOrgId": null,
219 | "accessKey": null,
220 | "cognitoAuthenticationType": null,
221 | "cognitoAuthenticationProvider": null,
222 | "userArn": null,
223 | "userAgent": "python-requests/2.25.1",
224 | "user": null
225 | },
226 | "domainName": "varta1icqe.execute-api.us-east-1.amazonaws.com",
227 | "apiId": "varta1icqe"
228 | },
229 | "body": "{\"events\": [{\"type\": \"init\", \"foo\": \"bar\"}, {\"type\": \"update\", \"foo\": \"baz\"}], \"metadata\": {\"timestamp\": \"123123\", \"command_id\": \"456346234\", \"issued_by\": \"test@test.com\"}}",
230 | "isBase64Encoded": false
231 | },
232 | "StreamChangesets": {
233 | "resource": "/streams/{stream_id}/changesets",
234 | "path": "/streams/fe80eaef-90c3-41be-9bc0-3f85458b9a8e/changesets",
235 | "httpMethod": "GET",
236 | "headers": {
237 | "Accept": "*/*",
238 | "Accept-Encoding": "gzip, deflate",
239 | "CloudFront-Forwarded-Proto": "https",
240 | "CloudFront-Is-Desktop-Viewer": "true",
241 | "CloudFront-Is-Mobile-Viewer": "false",
242 | "CloudFront-Is-SmartTV-Viewer": "false",
243 | "CloudFront-Is-Tablet-Viewer": "false",
244 | "CloudFront-Viewer-Country": "IL",
245 | "Host": "varta1icqe.execute-api.us-east-1.amazonaws.com",
246 | "User-Agent": "python-requests/2.25.1",
247 | "Via": "1.1 d5d5fbb221d1e1e64574f5113ce6ed5c.cloudfront.net (CloudFront)",
248 | "X-Amz-Cf-Id": "kdTnHPsARMLeiSPzqFv0vwy4d3HFx7GDlvy-nQL4m1ImRftnVfRgFQ==",
249 | "X-Amzn-Trace-Id": "Root=1-6017cc0b-62a374ef408dfc0672b9509d",
250 | "X-Forwarded-For": "79.178.12.16, 130.176.1.89",
251 | "X-Forwarded-Port": "443",
252 | "X-Forwarded-Proto": "https"
253 | },
254 | "multiValueHeaders": {
255 | "Accept": [
256 | "*/*"
257 | ],
258 | "Accept-Encoding": [
259 | "gzip, deflate"
260 | ],
261 | "CloudFront-Forwarded-Proto": [
262 | "https"
263 | ],
264 | "CloudFront-Is-Desktop-Viewer": [
265 | "true"
266 | ],
267 | "CloudFront-Is-Mobile-Viewer": [
268 | "false"
269 | ],
270 | "CloudFront-Is-SmartTV-Viewer": [
271 | "false"
272 | ],
273 | "CloudFront-Is-Tablet-Viewer": [
274 | "false"
275 | ],
276 | "CloudFront-Viewer-Country": [
277 | "IL"
278 | ],
279 | "Host": [
280 | "varta1icqe.execute-api.us-east-1.amazonaws.com"
281 | ],
282 | "User-Agent": [
283 | "python-requests/2.25.1"
284 | ],
285 | "Via": [
286 | "1.1 d5d5fbb221d1e1e64574f5113ce6ed5c.cloudfront.net (CloudFront)"
287 | ],
288 | "X-Amz-Cf-Id": [
289 | "kdTnHPsARMLeiSPzqFv0vwy4d3HFx7GDlvy-nQL4m1ImRftnVfRgFQ=="
290 | ],
291 | "X-Amzn-Trace-Id": [
292 | "Root=1-6017cc0b-62a374ef408dfc0672b9509d"
293 | ],
294 | "X-Forwarded-For": [
295 | "79.178.12.16, 130.176.1.89"
296 | ],
297 | "X-Forwarded-Port": [
298 | "443"
299 | ],
300 | "X-Forwarded-Proto": [
301 | "https"
302 | ]
303 | },
304 | "queryStringParameters": {
305 | "from": "1",
306 | "to": "5"
307 | },
308 | "multiValueQueryStringParameters": {
309 |       "from": [
310 |         "1"
311 |       ],
312 |       "to": [
313 |         "5"
314 |       ]
315 | },
316 | "pathParameters": {
317 | "stream_id": "fe80eaef-90c3-41be-9bc0-3f85458b9a8e"
318 | },
319 | "stageVariables": null,
320 | "requestContext": {
321 | "resourceId": "s0drj9",
322 | "resourcePath": "/streams/{stream_id}/changesets",
323 | "httpMethod": "GET",
324 | "extendedRequestId": "aDzR4HzyIAMF2hg=",
325 | "requestTime": "01/Feb/2021:09:38:19 +0000",
326 | "path": "/Prod/streams/fe80eaef-90c3-41be-9bc0-3f85458b9a8e/changesets",
327 | "accountId": "139683429663",
328 | "protocol": "HTTP/1.1",
329 | "stage": "Prod",
330 | "domainPrefix": "varta1icqe",
331 | "requestTimeEpoch": 1612172299987,
332 | "requestId": "6b2de894-8e38-43d2-8125-931fd4711e6f",
333 | "identity": {
334 | "cognitoIdentityPoolId": null,
335 | "accountId": null,
336 | "cognitoIdentityId": null,
337 | "caller": null,
338 | "sourceIp": "79.178.12.16",
339 | "principalOrgId": null,
340 | "accessKey": null,
341 | "cognitoAuthenticationType": null,
342 | "cognitoAuthenticationProvider": null,
343 | "userArn": null,
344 | "userAgent": "python-requests/2.25.1",
345 | "user": null
346 | },
347 | "domainName": "varta1icqe.execute-api.us-east-1.amazonaws.com",
348 | "apiId": "varta1icqe"
349 | },
350 | "body": null,
351 | "isBase64Encoded": false
352 | },
353 | "StreamEvents": {
354 | "resource": "/streams/{stream_id}/events",
355 | "path": "/streams/d2333e6b-65a7-4a10-9886-2dd2fe873bed/events",
356 | "httpMethod": "GET",
357 | "headers": {
358 | "Accept": "*/*",
359 | "Accept-Encoding": "gzip, deflate",
360 | "CloudFront-Forwarded-Proto": "https",
361 | "CloudFront-Is-Desktop-Viewer": "true",
362 | "CloudFront-Is-Mobile-Viewer": "false",
363 | "CloudFront-Is-SmartTV-Viewer": "false",
364 | "CloudFront-Is-Tablet-Viewer": "false",
365 | "CloudFront-Viewer-Country": "IL",
366 | "Host": "varta1icqe.execute-api.us-east-1.amazonaws.com",
367 | "User-Agent": "python-requests/2.25.1",
368 | "Via": "1.1 9b3a0b2647b64bb06aa470977314bbb3.cloudfront.net (CloudFront)",
369 | "X-Amz-Cf-Id": "sKFw51rlf3H8dC7rDPabSsuS7EZOq8eiCPP_qn3ACjCVLkawH-quCw==",
370 | "X-Amzn-Trace-Id": "Root=1-6017cc12-6273e2734ad6ecbb59bfbd76",
371 | "X-Forwarded-For": "79.178.12.16, 130.176.1.73",
372 | "X-Forwarded-Port": "443",
373 | "X-Forwarded-Proto": "https"
374 | },
375 | "multiValueHeaders": {
376 | "Accept": [
377 | "*/*"
378 | ],
379 | "Accept-Encoding": [
380 | "gzip, deflate"
381 | ],
382 | "CloudFront-Forwarded-Proto": [
383 | "https"
384 | ],
385 | "CloudFront-Is-Desktop-Viewer": [
386 | "true"
387 | ],
388 | "CloudFront-Is-Mobile-Viewer": [
389 | "false"
390 | ],
391 | "CloudFront-Is-SmartTV-Viewer": [
392 | "false"
393 | ],
394 | "CloudFront-Is-Tablet-Viewer": [
395 | "false"
396 | ],
397 | "CloudFront-Viewer-Country": [
398 | "IL"
399 | ],
400 | "Host": [
401 | "varta1icqe.execute-api.us-east-1.amazonaws.com"
402 | ],
403 | "User-Agent": [
404 | "python-requests/2.25.1"
405 | ],
406 | "Via": [
407 | "1.1 9b3a0b2647b64bb06aa470977314bbb3.cloudfront.net (CloudFront)"
408 | ],
409 | "X-Amz-Cf-Id": [
410 | "sKFw51rlf3H8dC7rDPabSsuS7EZOq8eiCPP_qn3ACjCVLkawH-quCw=="
411 | ],
412 | "X-Amzn-Trace-Id": [
413 | "Root=1-6017cc12-6273e2734ad6ecbb59bfbd76"
414 | ],
415 | "X-Forwarded-For": [
416 | "79.178.12.16, 130.176.1.73"
417 | ],
418 | "X-Forwarded-Port": [
419 | "443"
420 | ],
421 | "X-Forwarded-Proto": [
422 | "https"
423 | ]
424 | },
425 | "queryStringParameters": {
426 | "from": "1",
427 | "to": "5"
428 | },
429 | "multiValueQueryStringParameters": {
430 |       "from": [
431 |         "1"
432 |       ],
433 |       "to": [
434 |         "5"
435 |       ]
436 | },
437 | "pathParameters": {
438 | "stream_id": "d2333e6b-65a7-4a10-9886-2dd2fe873bed"
439 | },
440 | "stageVariables": null,
441 | "requestContext": {
442 | "resourceId": "9vyo5s",
443 | "resourcePath": "/streams/{stream_id}/events",
444 | "httpMethod": "GET",
445 | "extendedRequestId": "aDzS5FePoAMF05A=",
446 | "requestTime": "01/Feb/2021:09:38:26 +0000",
447 | "path": "/Prod/streams/d2333e6b-65a7-4a10-9886-2dd2fe873bed/events",
448 | "accountId": "139683429663",
449 | "protocol": "HTTP/1.1",
450 | "stage": "Prod",
451 | "domainPrefix": "varta1icqe",
452 | "requestTimeEpoch": 1612172306464,
453 | "requestId": "85182715-5139-4452-b09a-d155ce29321c",
454 | "identity": {
455 | "cognitoIdentityPoolId": null,
456 | "accountId": null,
457 | "cognitoIdentityId": null,
458 | "caller": null,
459 | "sourceIp": "79.178.12.16",
460 | "principalOrgId": null,
461 | "accessKey": null,
462 | "cognitoAuthenticationType": null,
463 | "cognitoAuthenticationProvider": null,
464 | "userArn": null,
465 | "userAgent": "python-requests/2.25.1",
466 | "user": null
467 | },
468 | "domainName": "varta1icqe.execute-api.us-east-1.amazonaws.com",
469 | "apiId": "varta1icqe"
470 | },
471 | "body": null,
472 | "isBase64Encoded": false
473 | },
474 | "GlobalChangesets": {
475 | "resource": "/changesets",
476 | "path": "/changesets",
477 | "httpMethod": "GET",
478 | "headers": {
479 | "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
480 | "Accept-Encoding": "gzip, deflate, br",
481 | "Accept-Language": "en-GB,en-US;q=0.9,en;q=0.8,he;q=0.7,ru;q=0.6",
482 | "cache-control": "max-age=0",
483 | "CloudFront-Forwarded-Proto": "https",
484 | "CloudFront-Is-Desktop-Viewer": "true",
485 | "CloudFront-Is-Mobile-Viewer": "false",
486 | "CloudFront-Is-SmartTV-Viewer": "false",
487 | "CloudFront-Is-Tablet-Viewer": "false",
488 | "CloudFront-Viewer-Country": "IL",
489 | "Host": "varta1icqe.execute-api.us-east-1.amazonaws.com",
490 | "sec-ch-ua": "\"Chromium\";v=\"88\", \"Google Chrome\";v=\"88\", \";Not A Brand\";v=\"99\"",
491 | "sec-ch-ua-mobile": "?0",
492 | "sec-fetch-dest": "document",
493 | "sec-fetch-mode": "navigate",
494 | "sec-fetch-site": "none",
495 | "sec-fetch-user": "?1",
496 | "upgrade-insecure-requests": "1",
497 | "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_1_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.96 Safari/537.36",
498 | "Via": "2.0 93a53b2e958fc95f0ea2869862c37e58.cloudfront.net (CloudFront)",
499 | "X-Amz-Cf-Id": "GCxa_knyfy4m50_U60PgK93uqFzRPTjocpGCR_5m1TSltAL-c-1c1Q==",
500 | "X-Amzn-Trace-Id": "Root=1-6017d00f-15062c722a883a612f60c1cb",
501 | "X-Forwarded-For": "79.178.12.16, 130.176.1.140",
502 | "X-Forwarded-Port": "443",
503 | "X-Forwarded-Proto": "https"
504 | },
505 | "multiValueHeaders": {
506 | "Accept": [
507 | "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9"
508 | ],
509 | "Accept-Encoding": [
510 | "gzip, deflate, br"
511 | ],
512 | "Accept-Language": [
513 | "en-GB,en-US;q=0.9,en;q=0.8,he;q=0.7,ru;q=0.6"
514 | ],
515 | "cache-control": [
516 | "max-age=0"
517 | ],
518 | "CloudFront-Forwarded-Proto": [
519 | "https"
520 | ],
521 | "CloudFront-Is-Desktop-Viewer": [
522 | "true"
523 | ],
524 | "CloudFront-Is-Mobile-Viewer": [
525 | "false"
526 | ],
527 | "CloudFront-Is-SmartTV-Viewer": [
528 | "false"
529 | ],
530 | "CloudFront-Is-Tablet-Viewer": [
531 | "false"
532 | ],
533 | "CloudFront-Viewer-Country": [
534 | "IL"
535 | ],
536 | "Host": [
537 | "varta1icqe.execute-api.us-east-1.amazonaws.com"
538 | ],
539 | "sec-ch-ua": [
540 | "\"Chromium\";v=\"88\", \"Google Chrome\";v=\"88\", \";Not A Brand\";v=\"99\""
541 | ],
542 | "sec-ch-ua-mobile": [
543 | "?0"
544 | ],
545 | "sec-fetch-dest": [
546 | "document"
547 | ],
548 | "sec-fetch-mode": [
549 | "navigate"
550 | ],
551 | "sec-fetch-site": [
552 | "none"
553 | ],
554 | "sec-fetch-user": [
555 | "?1"
556 | ],
557 | "upgrade-insecure-requests": [
558 | "1"
559 | ],
560 | "User-Agent": [
561 | "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_1_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.96 Safari/537.36"
562 | ],
563 | "Via": [
564 | "2.0 93a53b2e958fc95f0ea2869862c37e58.cloudfront.net (CloudFront)"
565 | ],
566 | "X-Amz-Cf-Id": [
567 | "GCxa_knyfy4m50_U60PgK93uqFzRPTjocpGCR_5m1TSltAL-c-1c1Q=="
568 | ],
569 | "X-Amzn-Trace-Id": [
570 | "Root=1-6017d00f-15062c722a883a612f60c1cb"
571 | ],
572 | "X-Forwarded-For": [
573 | "79.178.12.16, 130.176.1.140"
574 | ],
575 | "X-Forwarded-Port": [
576 | "443"
577 | ],
578 | "X-Forwarded-Proto": [
579 | "https"
580 | ]
581 | },
582 | "queryStringParameters": {
583 | "checkpoint": "44",
584 | "limit": "120"
585 | },
586 | "multiValueQueryStringParameters": {
587 | "checkpoint": [
588 | "44"
589 | ],
590 | "limit": [
591 | "120"
592 | ]
593 | },
594 | "pathParameters": null,
595 | "stageVariables": null,
596 | "requestContext": {
597 | "resourceId": "cw7vp2",
598 | "resourcePath": "/changesets",
599 | "httpMethod": "GET",
600 | "extendedRequestId": "aD1ygHNToAMFjcw=",
601 | "requestTime": "01/Feb/2021:09:55:27 +0000",
602 | "path": "/Prod/changesets",
603 | "accountId": "139683429663",
604 | "protocol": "HTTP/1.1",
605 | "stage": "Prod",
606 | "domainPrefix": "varta1icqe",
607 | "requestTimeEpoch": 1612173327956,
608 | "requestId": "eb5ba836-e3d4-440b-a044-ee51dbeb58e2",
609 | "identity": {
610 | "cognitoIdentityPoolId": null,
611 | "accountId": null,
612 | "cognitoIdentityId": null,
613 | "caller": null,
614 | "sourceIp": "79.178.12.16",
615 | "principalOrgId": null,
616 | "accessKey": null,
617 | "cognitoAuthenticationType": null,
618 | "cognitoAuthenticationProvider": null,
619 | "userArn": null,
620 | "userAgent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_1_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.96 Safari/537.36",
621 | "user": null
622 | },
623 | "domainName": "varta1icqe.execute-api.us-east-1.amazonaws.com",
624 | "apiId": "varta1icqe"
625 | },
626 | "body": null,
627 | "isBase64Encoded": false
628 | },
629 | "GlobalEvents": {
630 | "resource": "/events",
631 | "path": "/events",
632 | "httpMethod": "GET",
633 | "headers": {
634 | "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
635 | "Accept-Encoding": "gzip, deflate, br",
636 | "Accept-Language": "en-GB,en-US;q=0.9,en;q=0.8,he;q=0.7,ru;q=0.6",
637 | "cache-control": "max-age=0",
638 | "CloudFront-Forwarded-Proto": "https",
639 | "CloudFront-Is-Desktop-Viewer": "true",
640 | "CloudFront-Is-Mobile-Viewer": "false",
641 | "CloudFront-Is-SmartTV-Viewer": "false",
642 | "CloudFront-Is-Tablet-Viewer": "false",
643 | "CloudFront-Viewer-Country": "IL",
644 | "Host": "varta1icqe.execute-api.us-east-1.amazonaws.com",
645 | "sec-ch-ua": "\"Chromium\";v=\"88\", \"Google Chrome\";v=\"88\", \";Not A Brand\";v=\"99\"",
646 | "sec-ch-ua-mobile": "?0",
647 | "sec-fetch-dest": "document",
648 | "sec-fetch-mode": "navigate",
649 | "sec-fetch-site": "none",
650 | "sec-fetch-user": "?1",
651 | "upgrade-insecure-requests": "1",
652 | "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_1_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.96 Safari/537.36",
653 | "Via": "2.0 93a53b2e958fc95f0ea2869862c37e58.cloudfront.net (CloudFront)",
654 | "X-Amz-Cf-Id": "GCxa_knyfy4m50_U60PgK93uqFzRPTjocpGCR_5m1TSltAL-c-1c1Q==",
655 | "X-Amzn-Trace-Id": "Root=1-6017d00f-15062c722a883a612f60c1cb",
656 | "X-Forwarded-For": "79.178.12.16, 130.176.1.140",
657 | "X-Forwarded-Port": "443",
658 | "X-Forwarded-Proto": "https"
659 | },
660 | "multiValueHeaders": {
661 | "Accept": [
662 | "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9"
663 | ],
664 | "Accept-Encoding": [
665 | "gzip, deflate, br"
666 | ],
667 | "Accept-Language": [
668 | "en-GB,en-US;q=0.9,en;q=0.8,he;q=0.7,ru;q=0.6"
669 | ],
670 | "cache-control": [
671 | "max-age=0"
672 | ],
673 | "CloudFront-Forwarded-Proto": [
674 | "https"
675 | ],
676 | "CloudFront-Is-Desktop-Viewer": [
677 | "true"
678 | ],
679 | "CloudFront-Is-Mobile-Viewer": [
680 | "false"
681 | ],
682 | "CloudFront-Is-SmartTV-Viewer": [
683 | "false"
684 | ],
685 | "CloudFront-Is-Tablet-Viewer": [
686 | "false"
687 | ],
688 | "CloudFront-Viewer-Country": [
689 | "IL"
690 | ],
691 | "Host": [
692 | "varta1icqe.execute-api.us-east-1.amazonaws.com"
693 | ],
694 | "sec-ch-ua": [
695 | "\"Chromium\";v=\"88\", \"Google Chrome\";v=\"88\", \";Not A Brand\";v=\"99\""
696 | ],
697 | "sec-ch-ua-mobile": [
698 | "?0"
699 | ],
700 | "sec-fetch-dest": [
701 | "document"
702 | ],
703 | "sec-fetch-mode": [
704 | "navigate"
705 | ],
706 | "sec-fetch-site": [
707 | "none"
708 | ],
709 | "sec-fetch-user": [
710 | "?1"
711 | ],
712 | "upgrade-insecure-requests": [
713 | "1"
714 | ],
715 | "User-Agent": [
716 | "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_1_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.96 Safari/537.36"
717 | ],
718 | "Via": [
719 | "2.0 93a53b2e958fc95f0ea2869862c37e58.cloudfront.net (CloudFront)"
720 | ],
721 | "X-Amz-Cf-Id": [
722 | "GCxa_knyfy4m50_U60PgK93uqFzRPTjocpGCR_5m1TSltAL-c-1c1Q=="
723 | ],
724 | "X-Amzn-Trace-Id": [
725 | "Root=1-6017d00f-15062c722a883a612f60c1cb"
726 | ],
727 | "X-Forwarded-For": [
728 | "79.178.12.16, 130.176.1.140"
729 | ],
730 | "X-Forwarded-Port": [
731 | "443"
732 | ],
733 | "X-Forwarded-Proto": [
734 | "https"
735 | ]
736 | },
737 | "queryStringParameters": {
738 | "checkpoint": "44.2",
739 | "limit": "120"
740 | },
741 | "multiValueQueryStringParameters": {
742 | "checkpoint": [
743 | "44.2"
744 | ],
745 | "limit": [
746 | "120"
747 | ]
748 | },
749 | "pathParameters": null,
750 | "stageVariables": null,
751 | "requestContext": {
752 | "resourceId": "cw7vp2",
753 | "resourcePath": "/events",
754 | "httpMethod": "GET",
755 | "extendedRequestId": "aD1ygHNToAMFjcw=",
756 | "requestTime": "01/Feb/2021:09:55:27 +0000",
757 |         "path": "/Prod/events",
758 | "accountId": "139683429663",
759 | "protocol": "HTTP/1.1",
760 | "stage": "Prod",
761 | "domainPrefix": "varta1icqe",
762 | "requestTimeEpoch": 1612173327956,
763 | "requestId": "eb5ba836-e3d4-440b-a044-ee51dbeb58e2",
764 | "identity": {
765 | "cognitoIdentityPoolId": null,
766 | "accountId": null,
767 | "cognitoIdentityId": null,
768 | "caller": null,
769 | "sourceIp": "79.178.12.16",
770 | "principalOrgId": null,
771 | "accessKey": null,
772 | "cognitoAuthenticationType": null,
773 | "cognitoAuthenticationProvider": null,
774 | "userArn": null,
775 | "userAgent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 11_1_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.96 Safari/537.36",
776 | "user": null
777 | },
778 | "domainName": "varta1icqe.execute-api.us-east-1.amazonaws.com",
779 | "apiId": "varta1icqe"
780 | },
781 | "body": null,
782 | "isBase64Encoded": false
783 | },
784 | "Indexer": {
785 | "Records": [
786 | {
787 | "eventID": "ba015b129643176b8d3daf022527ee91",
788 | "eventName": "MODIFY",
789 | "eventVersion": "1.1",
790 | "eventSource": "aws:dynamodb",
791 | "awsRegion": "us-east-1",
792 | "dynamodb": {
793 | "ApproximateCreationDateTime": 1612172303,
794 | "Keys": {
795 | "changeset_id": {
796 | "N": "0"
797 | },
798 | "stream_id": {
799 | "S": "!!!RESERVED:GLOBAL-COUNTER!!!"
800 | }
801 | },
802 | "NewImage": {
803 | "changeset_id": {
804 | "N": "0"
805 | },
806 | "stream_id": {
807 | "S": "!!!RESERVED:GLOBAL-COUNTER!!!"
808 | },
809 | "page_item": {
810 | "N": "76"
811 | },
812 | "page": {
813 | "N": "0"
814 | },
815 | "prev_stream_id": {
816 | "S": "a98663d9-9927-49a3-a7c0-23cd91a2d1bc"
817 | },
818 | "prev_changeset_id": {
819 | "N": "0"
820 | },
821 | "prev_stream_changeset_id": {
822 | "N": "1"
823 | }
824 | },
825 | "SequenceNumber": "2371800000000033050948264",
826 | "SizeBytes": 212,
827 | "StreamViewType": "NEW_IMAGE"
828 | },
829 | "eventSourceARN": "arn:aws:dynamodb:us-east-1:139683429663:table/ees_db_dev_eventstore/stream/2021-01-31T18:10:36.698"
830 | },
831 | {
832 | "eventID": "6777d5478b055c2b5dd699fa74808a42",
833 | "eventName": "MODIFY",
834 | "eventVersion": "1.1",
835 | "eventSource": "aws:dynamodb",
836 | "awsRegion": "us-east-1",
837 | "dynamodb": {
838 | "ApproximateCreationDateTime": 1612172303,
839 | "Keys": {
840 | "changeset_id": {
841 | "N": "1"
842 | },
843 | "stream_id": {
844 | "S": "a98663d9-9927-49a3-a7c0-23cd91a2d1bc"
845 | }
846 | },
847 | "NewImage": {
848 | "changeset_id": {
849 | "N": "1"
850 | },
851 | "metadata": {
852 | "S": "{\"metadata\": \"goes here\"}"
853 | },
854 | "stream_id": {
855 | "S": "a98663d9-9927-49a3-a7c0-23cd91a2d1bc"
856 | },
857 | "page_item": {
858 | "N": "76"
859 | },
860 | "first_event_id": {
861 | "N": "1"
862 | },
863 | "last_event_id": {
864 | "N": "2"
865 | },
866 | "page": {
867 | "N": "0"
868 | },
869 | "events": {
870 | "S": "[{\"type\": \"init\"}, {\"type\": \"update\"}]"
871 | },
872 | "timestamp": {
873 | "S": "2021-02-01T09:38:21.882911Z"
874 | }
875 | },
876 | "SequenceNumber": "2371900000000033050948265",
877 | "SizeBytes": 278,
878 | "StreamViewType": "NEW_IMAGE"
879 | },
880 | "eventSourceARN": "arn:aws:dynamodb:us-east-1:139683429663:table/ees_db_dev_eventstore/stream/2021-01-31T18:10:36.698"
881 | },
882 | {
883 | "eventID": "7de65620722282f83dbf1ee6f0cefec6",
884 | "eventName": "INSERT",
885 | "eventVersion": "1.1",
886 | "eventSource": "aws:dynamodb",
887 | "awsRegion": "us-east-1",
888 | "dynamodb": {
889 | "ApproximateCreationDateTime": 1612172303,
890 | "Keys": {
891 | "changeset_id": {
892 | "N": "1"
893 | },
894 | "stream_id": {
895 | "S": "96167a05-0c84-439f-84cd-df1dea34fad1"
896 | }
897 | },
898 | "NewImage": {
899 | "changeset_id": {
900 | "N": "1"
901 | },
902 | "metadata": {
903 | "S": "{\"metadata\": \"goes here\"}"
904 | },
905 | "stream_id": {
906 | "S": "96167a05-0c84-439f-84cd-df1dea34fad1"
907 | },
908 | "first_event_id": {
909 | "N": "1"
910 | },
911 | "last_event_id": {
912 | "N": "2"
913 | },
914 | "events": {
915 | "S": "[{\"type\": \"init\"}, {\"type\": \"update\"}]"
916 | },
917 | "timestamp": {
918 | "S": "2021-02-01T09:38:23.811180Z"
919 | }
920 | },
921 | "SequenceNumber": "2372000000000033050948308",
922 | "SizeBytes": 262,
923 | "StreamViewType": "NEW_IMAGE"
924 | },
925 | "eventSourceARN": "arn:aws:dynamodb:us-east-1:139683429663:table/ees_db_dev_eventstore/stream/2021-01-31T18:10:36.698"
926 | },
927 | {
928 | "eventID": "7b8bdfff78a604828bf4a4dc597e9f20",
929 | "eventName": "MODIFY",
930 | "eventVersion": "1.1",
931 | "eventSource": "aws:dynamodb",
932 | "awsRegion": "us-east-1",
933 | "dynamodb": {
934 | "ApproximateCreationDateTime": 1612172304,
935 | "Keys": {
936 | "changeset_id": {
937 | "N": "0"
938 | },
939 | "stream_id": {
940 | "S": "!!!RESERVED:GLOBAL-COUNTER!!!"
941 | }
942 | },
943 | "NewImage": {
944 | "changeset_id": {
945 | "N": "0"
946 | },
947 | "stream_id": {
948 | "S": "!!!RESERVED:GLOBAL-COUNTER!!!"
949 | },
950 | "page_item": {
951 | "N": "77"
952 | },
953 | "page": {
954 | "N": "0"
955 | },
956 | "prev_stream_id": {
957 | "S": "a98663d9-9927-49a3-a7c0-23cd91a2d1bc"
958 | },
959 | "prev_changeset_id": {
960 | "N": "0"
961 | },
962 | "prev_stream_changeset_id": {
963 | "N": "2"
964 | }
965 | },
966 | "SequenceNumber": "2372100000000033050948406",
967 | "SizeBytes": 212,
968 | "StreamViewType": "NEW_IMAGE"
969 | },
970 | "eventSourceARN": "arn:aws:dynamodb:us-east-1:139683429663:table/ees_db_dev_eventstore/stream/2021-01-31T18:10:36.698"
971 | },
972 | {
973 | "eventID": "e50596a66c4ee43a4e84b03b87aaa875",
974 | "eventName": "MODIFY",
975 | "eventVersion": "1.1",
976 | "eventSource": "aws:dynamodb",
977 | "awsRegion": "us-east-1",
978 | "dynamodb": {
979 | "ApproximateCreationDateTime": 1612172304,
980 | "Keys": {
981 | "changeset_id": {
982 | "N": "2"
983 | },
984 | "stream_id": {
985 | "S": "a98663d9-9927-49a3-a7c0-23cd91a2d1bc"
986 | }
987 | },
988 | "NewImage": {
989 | "changeset_id": {
990 | "N": "2"
991 | },
992 | "metadata": {
993 | "S": "{\"metadata\": \"goes here 2\"}"
994 | },
995 | "stream_id": {
996 | "S": "a98663d9-9927-49a3-a7c0-23cd91a2d1bc"
997 | },
998 | "page_item": {
999 | "N": "77"
1000 | },
1001 | "first_event_id": {
1002 | "N": "3"
1003 | },
1004 | "last_event_id": {
1005 | "N": "4"
1006 | },
1007 | "page": {
1008 | "N": "0"
1009 | },
1010 | "events": {
1011 | "S": "[{\"type\": \"update2\"}, {\"type\": \"delete\"}]"
1012 | },
1013 | "timestamp": {
1014 | "S": "2021-02-01T09:38:22.525914Z"
1015 | }
1016 | },
1017 | "SequenceNumber": "2372200000000033050948407",
1018 | "SizeBytes": 283,
1019 | "StreamViewType": "NEW_IMAGE"
1020 | },
1021 | "eventSourceARN": "arn:aws:dynamodb:us-east-1:139683429663:table/ees_db_dev_eventstore/stream/2021-01-31T18:10:36.698"
1022 | },
1023 | {
1024 | "eventID": "15bbf8b7a05a78637f6ac09abb0b35ce",
1025 | "eventName": "MODIFY",
1026 | "eventVersion": "1.1",
1027 | "eventSource": "aws:dynamodb",
1028 | "awsRegion": "us-east-1",
1029 | "dynamodb": {
1030 | "ApproximateCreationDateTime": 1612172304,
1031 | "Keys": {
1032 | "changeset_id": {
1033 | "N": "0"
1034 | },
1035 | "stream_id": {
1036 | "S": "!!!RESERVED:GLOBAL-COUNTER!!!"
1037 | }
1038 | },
1039 | "NewImage": {
1040 | "changeset_id": {
1041 | "N": "0"
1042 | },
1043 | "stream_id": {
1044 | "S": "!!!RESERVED:GLOBAL-COUNTER!!!"
1045 | },
1046 | "page_item": {
1047 | "N": "78"
1048 | },
1049 | "page": {
1050 | "N": "0"
1051 | },
1052 | "prev_stream_id": {
1053 | "S": "7caf0438-c1f3-4ec8-bdfb-246bd2af85c5"
1054 | },
1055 | "prev_changeset_id": {
1056 | "N": "0"
1057 | },
1058 | "prev_stream_changeset_id": {
1059 | "N": "1"
1060 | }
1061 | },
1062 | "SequenceNumber": "2372300000000033050948553",
1063 | "SizeBytes": 212,
1064 | "StreamViewType": "NEW_IMAGE"
1065 | },
1066 | "eventSourceARN": "arn:aws:dynamodb:us-east-1:139683429663:table/ees_db_dev_eventstore/stream/2021-01-31T18:10:36.698"
1067 | },
1068 | {
1069 | "eventID": "d5490fdf45a47a3c88b197e3a893196d",
1070 | "eventName": "MODIFY",
1071 | "eventVersion": "1.1",
1072 | "eventSource": "aws:dynamodb",
1073 | "awsRegion": "us-east-1",
1074 | "dynamodb": {
1075 | "ApproximateCreationDateTime": 1612172304,
1076 | "Keys": {
1077 | "changeset_id": {
1078 | "N": "1"
1079 | },
1080 | "stream_id": {
1081 | "S": "7caf0438-c1f3-4ec8-bdfb-246bd2af85c5"
1082 | }
1083 | },
1084 | "NewImage": {
1085 | "changeset_id": {
1086 | "N": "1"
1087 | },
1088 | "metadata": {
1089 | "S": "{\"timestamp\": \"123123\", \"command_id\": \"456346234\", \"issued_by\": \"test@test.com\"}"
1090 | },
1091 | "stream_id": {
1092 | "S": "7caf0438-c1f3-4ec8-bdfb-246bd2af85c5"
1093 | },
1094 | "page_item": {
1095 | "N": "78"
1096 | },
1097 | "first_event_id": {
1098 | "N": "1"
1099 | },
1100 | "last_event_id": {
1101 | "N": "2"
1102 | },
1103 | "page": {
1104 | "N": "0"
1105 | },
1106 | "events": {
1107 | "S": "[{\"type\": \"init\", \"foo\": \"bar\"}, {\"type\": \"update\", \"foo\": \"baz\"}]"
1108 | },
1109 | "timestamp": {
1110 | "S": "2021-02-01T09:38:23.035535Z"
1111 | }
1112 | },
1113 | "SequenceNumber": "2372400000000033050948554",
1114 | "SizeBytes": 361,
1115 | "StreamViewType": "NEW_IMAGE"
1116 | },
1117 | "eventSourceARN": "arn:aws:dynamodb:us-east-1:139683429663:table/ees_db_dev_eventstore/stream/2021-01-31T18:10:36.698"
1118 | }
1119 | ]
1120 | },
1121 | "AssignGlobalIndex": {
1122 | "Records": [
1123 | {
1124 | "eventID": "7f3725dcbce56f6848802c05e6a4e989",
1125 | "eventName": "INSERT",
1126 | "eventVersion": "1.1",
1127 | "eventSource": "aws:dynamodb",
1128 | "awsRegion": "us-east-1",
1129 | "dynamodb": {
1130 | "ApproximateCreationDateTime": 1612188486,
1131 | "Keys": {
1132 | "changeset_id": {
1133 | "N": "2"
1134 | },
1135 | "stream_id": {
1136 | "S": "99038933-e620-444d-9033-4128254f0cbd"
1137 | }
1138 | },
1139 | "NewImage": {
1140 | "changeset_id": {
1141 | "N": "2"
1142 | },
1143 | "metadata": {
1144 | "S": "{\"timestamp\": \"123123\", \"command_id\": \"456346234\", \"issued_by\": \"test@test.com\"}"
1145 | },
1146 | "stream_id": {
1147 | "S": "99038933-e620-444d-9033-4128254f0cbd"
1148 | },
1149 | "first_event_id": {
1150 | "N": "3"
1151 | },
1152 | "last_event_id": {
1153 | "N": "4"
1154 | },
1155 | "events": {
1156 | "S": "[{\"type\": \"init\", \"foo\": \"bar\"}, {\"type\": \"update\", \"foo\": \"baz\"}]"
1157 | },
1158 | "timestamp": {
1159 | "S": "2021-02-01T14:08:06.368556Z"
1160 | }
1161 | },
1162 | "SequenceNumber": "3254400000000008621225018",
1163 | "SizeBytes": 345,
1164 | "StreamViewType": "NEW_IMAGE"
1165 | },
1166 | "eventSourceARN": "arn:aws:dynamodb:us-east-1:139683429663:table/ees_db_dev_eventstore/stream/2021-01-31T18:10:36.698"
1167 | },
1168 | {
1169 | "eventID": "1daf56e1ae940ebf96fab135f902b1e0",
1170 | "eventName": "INSERT",
1171 | "eventVersion": "1.1",
1172 | "eventSource": "aws:dynamodb",
1173 | "awsRegion": "us-east-1",
1174 | "dynamodb": {
1175 | "ApproximateCreationDateTime": 1612188487,
1176 | "Keys": {
1177 | "changeset_id": {
1178 | "N": "1"
1179 | },
1180 | "stream_id": {
1181 | "S": "206bc1ed-8e67-4a64-a596-8b32c0c20a97"
1182 | }
1183 | },
1184 | "NewImage": {
1185 | "changeset_id": {
1186 | "N": "1"
1187 | },
1188 | "metadata": {
1189 | "S": "{\"timestamp\": \"123123\", \"command_id\": \"456346234\", \"issued_by\": \"test@test.com\"}"
1190 | },
1191 | "stream_id": {
1192 | "S": "206bc1ed-8e67-4a64-a596-8b32c0c20a97"
1193 | },
1194 | "first_event_id": {
1195 | "N": "1"
1196 | },
1197 | "last_event_id": {
1198 | "N": "2"
1199 | },
1200 | "events": {
1201 | "S": "[{\"type\": \"init\", \"foo\": \"bar\"}, {\"type\": \"update\", \"foo\": \"baz\"}]"
1202 | },
1203 | "timestamp": {
1204 | "S": "2021-02-01T14:08:07.149329Z"
1205 | }
1206 | },
1207 | "SequenceNumber": "3254500000000008621225552",
1208 | "SizeBytes": 345,
1209 | "StreamViewType": "NEW_IMAGE"
1210 | },
1211 | "eventSourceARN": "arn:aws:dynamodb:us-east-1:139683429663:table/ees_db_dev_eventstore/stream/2021-01-31T18:10:36.698"
1212 | },
1213 | {
1214 | "eventID": "7f3725dcace56f6848802c05e6a4e989",
1215 | "eventName": "MODIFY",
1216 | "eventVersion": "1.1",
1217 | "eventSource": "aws:dynamodb",
1218 | "awsRegion": "us-east-1",
1219 | "dynamodb": {
1220 | "ApproximateCreationDateTime": 1612188486,
1221 | "Keys": {
1222 | "changeset_id": {
1223 | "N": "3"
1224 | },
1225 | "stream_id": {
1226 | "S": "990389a3-e620-444d-9033-4128254f0cbd"
1227 | }
1228 | },
1229 | "NewImage": {
1230 | "changeset_id": {
1231 | "N": "3"
1232 | },
1233 | "metadata": {
1234 | "S": "{\"timestamp\": \"123123\", \"command_id\": \"456346234\", \"issued_by\": \"test@test.com\"}"
1235 | },
1236 | "stream_id": {
1237 | "S": "990389a3-e620-444d-9033-4128254f0cbd"
1238 | },
1239 | "first_event_id": {
1240 | "N": "3"
1241 | },
1242 | "last_event_id": {
1243 | "N": "4"
1244 | },
1245 | "events": {
1246 | "S": "[{\"type\": \"init\", \"foo\": \"bar\"}, {\"type\": \"update\", \"foo\": \"baz\"}]"
1247 | },
1248 | "timestamp": {
1249 | "S": "2021-02-01T14:08:06.368556Z"
1250 | }
1251 | },
1252 | "SequenceNumber": "3254400000000008621225018",
1253 | "SizeBytes": 345,
1254 | "StreamViewType": "NEW_IMAGE"
1255 | },
1256 | "eventSourceARN": "arn:aws:dynamodb:us-east-1:139683429663:table/ees_db_dev_eventstore/stream/2021-01-31T18:10:36.698"
1257 | }
1258 | ]
1259 | }
1260 | }
--------------------------------------------------------------------------------
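The fixtures above ("Indexer", "AssignGlobalIndex") mirror the batch shape DynamoDB Streams delivers to a Lambda handler: a top-level "Records" array whose entries carry "eventName", "dynamodb.Keys", and "dynamodb.NewImage". A minimal sketch of how a unit test might pull one named fixture out of this file and sanity-check it — the helper name `load_fixture` and the test are illustrative assumptions, not the repo's actual test code:

# Hypothetical fixture-loading sketch, for illustration only.
import json
import os

def load_fixture(name):
    """Load one named event fixture (e.g. "Indexer") from events.json."""
    path = os.path.join(os.path.dirname(__file__), "events.json")
    with open(path) as f:
        return json.load(f)[name]

def test_indexer_fixture_is_a_dynamodb_stream_batch():
    event = load_fixture("Indexer")
    # Every record in the fixture should look like a DynamoDB stream record.
    assert all(r["eventSource"] == "aws:dynamodb" for r in event["Records"])
    assert all("NewImage" in r["dynamodb"] for r in event["Records"])

Keeping the captured batches in one JSON file keyed by scenario name lets each unit test (e.g. test_parsing_lambda_events.py) select exactly the event shape it exercises without duplicating the verbose stream payloads inline.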