├── tests ├── __init__.py ├── conftest.py ├── aws_client_mockers │ ├── kinesis_client_mocker.py │ └── cw_client_mocker.py ├── test_kinesis_upscaler.py └── test_kinesis_downscaler.py ├── kinesis_autoscaler ├── __init__.py ├── constants.py ├── models │ └── autoscaler_log.py ├── kinesis_upscaler.py ├── kinesis_downscaler.py └── kinesis_autoscaler.py ├── .gitignore ├── architecture.png ├── .github ├── CODEOWNERS └── workflows │ └── ci.yml ├── setup.cfg ├── package.json ├── pyproject.toml ├── handler.py ├── serverless.yml ├── README.md ├── examples └── stream_subscription.yml ├── LICENSE └── poetry.lock /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /kinesis_autoscaler/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .vscode 2 | .serverless 3 | node_modules 4 | __pycache__ 5 | -------------------------------------------------------------------------------- /architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/epsagon/kinesis-autoscaler/main/architecture.png -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # Each line is a file pattern followed by one or more owners. 2 | * @epsagon/the-fabulous-team 3 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [flake8] 2 | # default max length of black formatter 3 | max-line-length = 88 4 | exclude= 5 | .git, 6 | .serverless, 7 | node_modules 8 | -------------------------------------------------------------------------------- /kinesis_autoscaler/constants.py: -------------------------------------------------------------------------------- 1 | """ 2 | Service constants 3 | """ 4 | import os 5 | 6 | DEFAULT_REGION = "us-east-1" 7 | REGION = os.getenv("AWS_REGION", DEFAULT_REGION) 8 | 9 | DEFAULT_STAGE = "dev" 10 | STAGE = os.getenv("STAGE", DEFAULT_STAGE) 11 | 12 | LOGS_RETENTION_DAYS = 14 13 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "kinesis-autoscaler", 3 | "version": "0.1.0", 4 | "author": "Epsagon ", 5 | "husky": { 6 | "hooks": { 7 | "pre-commit": "lint-staged" 8 | } 9 | }, 10 | "lint-staged": { 11 | "*.py": [ 12 | "flake8", 13 | "black" 14 | ] 15 | }, 16 | "dependencies": {}, 17 | "devDependencies": { 18 | "husky": "^4.3.8", 19 | "lint-staged": "^11.2.5", 20 | "serverless": "^2.64.1", 21 | "serverless-python-requirements": "^5.1.1" 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "kinesis-autoscaler" 3 | version = "0.1.0" 4 | description = "Kinesis data stream autoscaler" 5 | authors = ["Epsagon "] 6 | 7 | [tool.poetry.dependencies] 8 | python = "^3.9" 9 | boto3 = "^1.19.3" 10 | pynamodb = "^5.1.0" 11 | 12 | 
[tool.poetry.dev-dependencies] 13 | black = "^21.9b0" 14 | flake8 = "^4.0.1" 15 | pytest = "^6.2.5" 16 | moto = {extras = ["dynamodb2"], version = "^2.2.15"} 17 | freezegun = "^1.1.0" 18 | pytest-mock = "^3.6.1" 19 | 20 | [build-system] 21 | requires = ["poetry-core>=1.0.0"] 22 | build-backend = "poetry.core.masonry.api" 23 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: PR CI 2 | on: pull_request 3 | 4 | jobs: 5 | ci: 6 | runs-on: ubuntu-latest 7 | steps: 8 | - uses: actions/checkout@v2 9 | - name: Set up Python 3.9 10 | uses: actions/setup-python@v2 11 | with: 12 | python-version: 3.9 13 | - name: Install poetry 14 | run: pip install poetry 15 | - name: Install dependencies 16 | run: poetry install 17 | - name: Run formatter 18 | run: poetry run black --check . 19 | - name: Run linter 20 | run: poetry run flake8 . 21 | - name: Run tests 22 | run: poetry run pytest tests/ 23 | -------------------------------------------------------------------------------- /kinesis_autoscaler/models/autoscaler_log.py: -------------------------------------------------------------------------------- 1 | """ 2 | Autoscaling event log DynamoDB model 3 | """ 4 | from pynamodb.models import Model 5 | from pynamodb.attributes import ( 6 | TTLAttribute, 7 | NumberAttribute, 8 | UnicodeAttribute, 9 | UTCDateTimeAttribute, 10 | ) 11 | from kinesis_autoscaler.constants import REGION, STAGE 12 | 13 | 14 | class KinesisAutoscalerLog(Model): 15 | """ 16 | Represents Kinesis autoscaling event log 17 | """ 18 | 19 | class Meta: 20 | """ 21 | Table details 22 | """ 23 | 24 | table_name = f"kinesis-autoscaler-logs-{STAGE}" 25 | region = REGION 26 | 27 | stream_name = UnicodeAttribute(hash_key=True) 28 | scaling_datetime = UTCDateTimeAttribute(range_key=True) 29 | shard_count = NumberAttribute() 30 | target_shard_count = NumberAttribute() 31 | scaling_type = UnicodeAttribute() 32 | expiration_datetime = TTLAttribute() 33 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | """ 2 | Shared tests configuration 3 | """ 4 | import time 5 | from typing import Iterator 6 | import pytest 7 | from moto import mock_dynamodb2 8 | from pynamodb.models import Model 9 | from kinesis_autoscaler.models.autoscaler_log import KinesisAutoscalerLog 10 | 11 | 12 | def recreate_model_table(model: Model) -> None: 13 | """ 14 | Recreates a table for a given PynamoDB model 15 | :param model: the PynamoDB model to recreate the table for 16 | """ 17 | if model.exists(): 18 | model.delete_table() 19 | while model.exists(): 20 | time.sleep(0.1) 21 | 22 | model.create_table(wait=True) 23 | 24 | 25 | @pytest.fixture(autouse=True) 26 | def autoscaler_log_model() -> Iterator[None]: 27 | """ 28 | Sets up and tears down the autoscaler log model 29 | """ 30 | with mock_dynamodb2(): 31 | recreate_model_table(KinesisAutoscalerLog) 32 | yield 33 | KinesisAutoscalerLog.delete_table() 34 | -------------------------------------------------------------------------------- /kinesis_autoscaler/kinesis_upscaler.py: -------------------------------------------------------------------------------- 1 | """ 2 | Kinesis stream upscaler 3 | """ 4 | import math 5 | from kinesis_autoscaler.kinesis_autoscaler import KinesisAutoscaler 6 | 7 | 8 | class KinesisUpscaler(KinesisAutoscaler): 9 | """ 10 
| Kinesis stream upscaler 11 | """ 12 | 13 | scaling_type = "SCALE_UP" 14 | 15 | def get_target_shard_count(self, current_shard_count: int) -> int: 16 | """ 17 | Calculates the scale-up operation target shard count. 18 | This is done in increments of at least 25% (larger steps for smaller streams) 19 | for a faster scaling operation (as recommended by AWS in the UpdateShardCount API docs). 20 | :param current_shard_count: the current shard count of the stream 21 | :return: the shard count the stream should scale to 22 | """ 23 | scale_up_pct = 25 24 | if current_shard_count <= 3: 25 | scale_up_pct = 100 26 | elif current_shard_count <= 50: 27 | scale_up_pct = 50 28 | 29 | return math.ceil(current_shard_count * (1 + scale_up_pct / 100)) 30 | -------------------------------------------------------------------------------- /tests/aws_client_mockers/kinesis_client_mocker.py: -------------------------------------------------------------------------------- 1 | """ 2 | Kinesis client mocker 3 | """ 4 | from pytest_mock import MockerFixture 5 | 6 | 7 | class KinesisClientMocker: 8 | """ 9 | Kinesis client mocker. 10 | Used for stubbing API responses and asserting calls against mocks. 11 | """ 12 | 13 | def __init__(self, client, mocker: MockerFixture): 14 | self.client = client 15 | self.mocker = mocker 16 | 17 | def describe_stream_summary(self, open_shard_count: int) -> MockerFixture: 18 | return self.mocker.patch.object( 19 | self.client, 20 | "describe_stream_summary", 21 | return_value={ 22 | "StreamDescriptionSummary": {"OpenShardCount": open_shard_count} 23 | }, 24 | ) 25 | 26 | def update_shard_count( 27 | self, 28 | stream_name: str, 29 | current_shard_count: int, 30 | target_shard_count: int, 31 | ) -> MockerFixture: 32 | return self.mocker.patch.object( 33 | self.client, 34 | "update_shard_count", 35 | return_value={ 36 | "StreamName": stream_name, 37 | "CurrentShardCount": current_shard_count, 38 | "TargetShardCount": target_shard_count, 39 | }, 40 | ) 41 | -------------------------------------------------------------------------------- /handler.py: -------------------------------------------------------------------------------- 1 | """ 2 | Handler functions for Kinesis autoscaler 3 | """ 4 | 5 | import json 6 | import logging 7 | from kinesis_autoscaler.kinesis_upscaler import KinesisUpscaler 8 | from kinesis_autoscaler.kinesis_downscaler import KinesisDownscaler 9 | 10 | logging.getLogger().setLevel(logging.INFO) 11 | 12 | 13 | def parse_sns_message(event: dict) -> dict: 14 | """ 15 | Parses the SNS record and returns its message. 16 | :param event: SNS event that triggered the lambda 17 | :return: parsed SNS message 18 | """ 19 | return json.loads(event["Records"][0]["Sns"]["Message"]) 20 | 21 | 22 | def scale_up(event: dict, _context) -> None: 23 | """ 24 | Lambda handler for scaling up Kinesis streams. 25 | :param event: Lambda triggering event 26 | """ 27 | try: 28 | event_message = parse_sns_message(event) 29 | KinesisUpscaler(event_message).scale() 30 | except Exception: 31 | logging.exception("stream scale-up process failed") 32 | raise 33 | 34 | 35 | def scale_down(event: dict, _context) -> None: 36 | """ 37 | Lambda handler for scaling down Kinesis streams.
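A trimmed, illustrative example of the SNS-wrapped alarm event this handler expects (shape per parse_sns_message and the tests; the inner Message is a JSON string carrying AlarmName and Trigger.Metrics): {"Records": [{"Sns": {"Message": "{\"AlarmName\": ..., \"Trigger\": {\"Metrics\": [...]}}"}}]}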
38 | :param event: Lambda triggering event 39 | """ 40 | try: 41 | event_message = parse_sns_message(event) 42 | KinesisDownscaler(event_message).scale() 43 | except Exception: 44 | logging.exception("stream scale-down process failed") 45 | raise 46 | -------------------------------------------------------------------------------- /tests/aws_client_mockers/cw_client_mocker.py: -------------------------------------------------------------------------------- 1 | """ 2 | CloudWatch client mocker 3 | """ 4 | from typing import List 5 | from pytest_mock import MockerFixture 6 | 7 | 8 | class CloudWatchClientMocker: 9 | """ 10 | CloudWatch client mocker. 11 | Used for stubbing API responses and asserting calls against mocks. 12 | """ 13 | 14 | def __init__(self, client, mocker: MockerFixture): 15 | self.client = client 16 | self.mocker = mocker 17 | 18 | def describe_alarms(self, alarm_names: List[str]) -> MockerFixture: 19 | return self.mocker.patch.object( 20 | self.client, 21 | "describe_alarms", 22 | return_value={ 23 | "MetricAlarms": [ 24 | { 25 | "AlarmName": alarm, 26 | "Metrics": [{"Id": "shardCount"}], 27 | } 28 | for alarm in alarm_names 29 | ] 30 | }, 31 | ) 32 | 33 | def put_metric_alarm(self) -> MockerFixture: 34 | return self.mocker.patch.object(self.client, "put_metric_alarm") 35 | 36 | def set_alarm_state(self) -> MockerFixture: 37 | return self.mocker.patch.object(self.client, "set_alarm_state") 38 | 39 | def get_metric_data(self, metric_data_results: List[float]) -> MockerFixture: 40 | return self.mocker.patch.object( 41 | self.client, 42 | "get_metric_data", 43 | return_value={"MetricDataResults": [{"Values": metric_data_results}]}, 44 | ) 45 | -------------------------------------------------------------------------------- /serverless.yml: -------------------------------------------------------------------------------- 1 | service: kinesis-autoscaler 2 | 3 | custom: 4 | scaleUpTopicName: ${self:service}-scale-up-${self:provider.stage} 5 | scaleDownTopicName: ${self:service}-scale-down-${self:provider.stage} 6 | autoscalerLogsTableName: ${self:service}-logs-${self:provider.stage} 7 | 8 | provider: 9 | name: aws 10 | runtime: python3.9 11 | region: ${opt:region, 'us-east-1'} 12 | stage: ${opt:stage, 'dev'} 13 | memorySize: 1024 14 | timeout: 30 15 | environment: 16 | STAGE: ${self:provider.stage} 17 | 18 | iamRoleStatements: 19 | - Effect: Allow 20 | Action: 21 | - kinesis:UpdateShardCount 22 | - kinesis:DescribeStreamSummary 23 | Resource: '*' 24 | - Effect: Allow 25 | Action: 26 | - cloudwatch:DescribeAlarms 27 | - cloudwatch:PutMetricAlarm 28 | - cloudwatch:SetAlarmState 29 | - cloudwatch:GetMetricData 30 | Resource: '*' 31 | - Effect: Allow 32 | Action: 33 | - dynamodb:PutItem 34 | - dynamodb:DescribeTable 35 | Resource: 36 | - Fn::GetAtt: 37 | - AutoscalerLogsTable 38 | - Arn 39 | 40 | functions: 41 | scale-up: 42 | description: 'Scales up Kinesis data stream' 43 | handler: handler.scale_up 44 | events: 45 | - sns: 46 | arn: 47 | Ref: ScaleUpTopic 48 | topicName: ${self:custom.scaleUpTopicName} 49 | 50 | scale-down: 51 | description: 'Scales down Kinesis data stream' 52 | handler: handler.scale_down 53 | events: 54 | - sns: 55 | arn: 56 | Ref: ScaleDownTopic 57 | topicName: ${self:custom.scaleDownTopicName} 58 | 59 | resources: 60 | Resources: 61 | ScaleUpTopic: 62 | Type: AWS::SNS::Topic 63 | Properties: 64 | DisplayName: ${self:custom.scaleUpTopicName} 65 | TopicName: ${self:custom.scaleUpTopicName} 66 | 67 | ScaleDownTopic: 68 | Type: AWS::SNS::Topic 69 | Properties: 
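# Mirrors ScaleUpTopic above; both topic ARNs are exported under Outputs so that subscriber stacks (e.g. examples/stream_subscription.yml) can reference them.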
70 | DisplayName: ${self:custom.scaleDownTopicName} 71 | TopicName: ${self:custom.scaleDownTopicName} 72 | 73 | AutoscalerLogsTable: 74 | Type: AWS::DynamoDB::Table 75 | Properties: 76 | TableName: ${self:custom.autoscalerLogsTableName} 77 | BillingMode: PAY_PER_REQUEST 78 | AttributeDefinitions: 79 | - AttributeName: stream_name 80 | AttributeType: S 81 | - AttributeName: scaling_datetime 82 | AttributeType: S 83 | KeySchema: 84 | - AttributeName: stream_name 85 | KeyType: HASH 86 | - AttributeName: scaling_datetime 87 | KeyType: RANGE 88 | TimeToLiveSpecification: 89 | AttributeName: expiration_datetime 90 | Enabled: true 91 | 92 | Outputs: 93 | ScaleUpTopicArn: 94 | Value: 95 | Ref: ScaleUpTopic 96 | Export: 97 | Name: ScaleUpTopicArn-${self:provider.stage} 98 | 99 | ScaleDownTopicArn: 100 | Value: 101 | Ref: ScaleDownTopic 102 | Export: 103 | Name: ScaleDownTopicArn-${self:provider.stage} 104 | 105 | plugins: 106 | - serverless-python-requirements 107 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Kinesis Autoscaler 2 | 3 | An easy-to-deploy autoscaling solution for Kinesis data streams. 4 | Read more about the motivation, design and comparison to the native on-demand mode on the [Epsagon](https://epsagon.com/observability/autoscaling-kinesis-data-streams-in-epsagon/) and [Cisco](https://techblog.cisco.com/blog/autoscaling-kinesis-data-streams-in-epsagon/) blogs. 5 | 6 | ## Introduction 7 | 8 | The AWS Kinesis data streams service doesn't have native autoscaling support. 9 | This solution's purpose is to fill that gap by automatically handling the scaling of Kinesis data streams in order to prevent data loss, avoid manual developer intervention, and save cost. 10 | 11 | ## Solution Architecture 12 | 13 | ![architecture](architecture.png) 14 | 15 | The suggested solution consists of 3 stages: triggers, delivery and execution. 16 | 17 | - **CloudWatch alarms** - Defined for each stream that is subscribed to the autoscaling service. Responsible for starting the scaling operation by publishing to the relevant autoscaling service SNS topic. 18 | - **SNS topics** - Exported by the autoscaling service and used as the entry point to the service. Responsible for invoking the relevant scaling lambda when receiving a message from the scaling alarms. 19 | - **Scaling lambdas** - Responsible for calculating the target shard count, updating the stream and its alarms accordingly, and writing a result log to a DynamoDB table. 20 | 21 | ## Usage 22 | 23 | - [Deploy](#deployment) the autoscaling service CloudFormation stack. 24 | - Create 2 alarms (scale-up/scale-down) for each subscribed stream. 25 | - Wire the alarms to the relevant exported SNS topics of the autoscaling service. 26 | 27 | An example of how to create the alarms and subscribe a stream to the autoscaler service can be found [here](https://github.com/epsagon/kinesis-autoscaler/blob/main/examples/stream_subscription.yml). 28 | 29 | ## Deployment 30 | 31 | ### Prerequisites 32 | 33 | - Node 12+ (+npm) 34 | - [Serverless Framework](https://www.serverless.com/) 2+ 35 | - Python 3.9+ 36 | - [Poetry](https://python-poetry.org/) 1+ 37 | 38 | ### Service Deployment 39 | 40 | The service is deployed using the [Serverless Framework](https://www.serverless.com/). 41 | Make sure to configure your AWS credentials (as environment variables or AWS profile) before running any command that interacts with AWS.
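For example, exporting `AWS_PROFILE` (or `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`) in your shell is enough for the Serverless Framework to pick the credentials up.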
42 | 43 | From the project root: 44 | 45 | - `npm install` - Installs the Serverless Framework plugins 46 | - `serverless deploy` - Deploys the service to AWS as a CF stack 47 | 48 | ### Region and Stage 49 | 50 | By default, the service is deployed to `us-east-1` as a `dev` stage. 51 | Overriding that default configuration can be done by passing the stage and region flags to the Serverless Framework deploy command (e.g. `sls deploy --region eu-west-1 --stage production`). 52 | 53 | ## Usage Remarks and (current) Limitations 54 | 55 | - A stream's scale-up and scale-down alarm names must be identical except for the `scale-up` / `scale-down` part of the name. 56 | For example, `example-stream-scale-up-alarm` and `example-stream-scale-down-alarm`. 57 | - Each alarm must have a math expression with the id `shardCount` that evaluates to the current shard count of the stream (updated automatically on each scaling operation). 58 | - Each alarm must include the `incomingBytes` and/or `incomingRecords` metrics, as the stream name is parsed from their dimensions. 59 | -------------------------------------------------------------------------------- /tests/test_kinesis_upscaler.py: -------------------------------------------------------------------------------- 1 | """ 2 | Kinesis upscaler tests 3 | """ 4 | from unittest.mock import call 5 | from datetime import datetime, timedelta, timezone 6 | from freezegun import freeze_time 7 | from pytest_mock import MockerFixture 8 | from kinesis_autoscaler.kinesis_upscaler import KinesisUpscaler 9 | from kinesis_autoscaler.models.autoscaler_log import KinesisAutoscalerLog 10 | from kinesis_autoscaler.kinesis_autoscaler import ( 11 | CW_CLIENT, 12 | KINESIS_CLIENT, 13 | LOGS_RETENTION_DAYS, 14 | ) 15 | from tests.aws_client_mockers.cw_client_mocker import CloudWatchClientMocker 16 | from tests.aws_client_mockers.kinesis_client_mocker import KinesisClientMocker 17 | 18 | 19 | @freeze_time("2021-11-16") 20 | def test_upscale_operation(mocker: MockerFixture) -> None: 21 | """ 22 | Basic sanity test to ensure the scale-up calculation is correct 23 | and that the required aws requests are sent as expected.
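The 2 -> 4 expectation exercises the upscaler's 100% scale-up step, which applies to streams with at most 3 shards.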
24 | """ 25 | stream_name = "subscribed-stream" 26 | scale_up_alarm_name = f"{stream_name}-scale-up" 27 | scale_down_alarm_name = f"{stream_name}-scale-down" 28 | current_shard_count = 2 29 | expected_target_shard_count = 4 30 | 31 | cw_client_mock = CloudWatchClientMocker(CW_CLIENT, mocker) 32 | kinesis_client_mock = KinesisClientMocker(KINESIS_CLIENT, mocker) 33 | 34 | kinesis_client_mock.describe_stream_summary(current_shard_count) 35 | update_shard_count_mock = kinesis_client_mock.update_shard_count( 36 | stream_name, current_shard_count, expected_target_shard_count 37 | ) 38 | cw_client_mock.describe_alarms( 39 | alarm_names=[scale_up_alarm_name, scale_down_alarm_name] 40 | ) 41 | put_metric_alarm_mock = cw_client_mock.put_metric_alarm() 42 | set_alarm_state_mock = cw_client_mock.set_alarm_state() 43 | 44 | event_message = { 45 | "AlarmName": scale_up_alarm_name, 46 | "Trigger": { 47 | "Metrics": [ 48 | { 49 | "Id": "shardCount", 50 | "Expression": f"{current_shard_count}", 51 | }, 52 | { 53 | "Id": "incomingBytes", 54 | "MetricStat": { 55 | "Metric": { 56 | "Dimensions": [{"value": stream_name}], 57 | }, 58 | }, 59 | }, 60 | ], 61 | }, 62 | } 63 | 64 | KinesisUpscaler(event_message).scale() 65 | 66 | update_shard_count_mock.assert_called_once_with( 67 | StreamName=stream_name, 68 | ScalingType="UNIFORM_SCALING", 69 | TargetShardCount=expected_target_shard_count, 70 | ) 71 | put_metric_alarm_mock.assert_has_calls( 72 | [ 73 | call( 74 | AlarmName=scale_up_alarm_name, 75 | Metrics=[ 76 | {"Id": "shardCount", "Expression": f"{expected_target_shard_count}"} 77 | ], 78 | ActionsEnabled=True, 79 | ), 80 | call( 81 | AlarmName=scale_down_alarm_name, 82 | Metrics=[ 83 | {"Id": "shardCount", "Expression": f"{expected_target_shard_count}"} 84 | ], 85 | ActionsEnabled=True, 86 | ), 87 | ] 88 | ) 89 | set_alarm_state_mock.assert_has_calls( 90 | [ 91 | call( 92 | AlarmName=scale_up_alarm_name, 93 | StateValue="INSUFFICIENT_DATA", 94 | StateReason="Shard count metric updated", 95 | ), 96 | call( 97 | AlarmName=scale_down_alarm_name, 98 | StateValue="INSUFFICIENT_DATA", 99 | StateReason="Shard count metric updated", 100 | ), 101 | ] 102 | ) 103 | 104 | logs = list(KinesisAutoscalerLog.scan()) 105 | assert len(logs) == 1 106 | 107 | log = logs[0] 108 | assert log.stream_name == stream_name 109 | assert log.shard_count == current_shard_count 110 | assert log.target_shard_count == expected_target_shard_count 111 | assert log.scaling_type == "SCALE_UP" 112 | 113 | frozen_datetime = datetime.utcnow().replace(tzinfo=timezone.utc) 114 | assert log.scaling_datetime == frozen_datetime 115 | assert log.expiration_datetime == frozen_datetime + timedelta( 116 | days=LOGS_RETENTION_DAYS 117 | ) 118 | -------------------------------------------------------------------------------- /tests/test_kinesis_downscaler.py: -------------------------------------------------------------------------------- 1 | """ 2 | Kinesis downscaler tests 3 | """ 4 | from unittest.mock import call 5 | from datetime import datetime, timedelta, timezone 6 | from freezegun import freeze_time 7 | from pytest_mock import MockerFixture 8 | from kinesis_autoscaler.kinesis_downscaler import KinesisDownscaler 9 | from kinesis_autoscaler.models.autoscaler_log import KinesisAutoscalerLog 10 | from kinesis_autoscaler.kinesis_autoscaler import ( 11 | CW_CLIENT, 12 | KINESIS_CLIENT, 13 | LOGS_RETENTION_DAYS, 14 | ) 15 | from tests.aws_client_mockers.cw_client_mocker import CloudWatchClientMocker 16 | from 
tests.aws_client_mockers.kinesis_client_mocker import KinesisClientMocker 17 | 18 | 19 | @freeze_time("2021-11-16") 20 | def test_downscale_operation(mocker: MockerFixture) -> None: 21 | """ 22 | Basic sanity test to ensure the scale-down calculation is correct 23 | and that the required aws requests are sent as expected. 24 | """ 25 | stream_name = "subscribed-stream" 26 | scale_up_alarm_name = f"{stream_name}-scale-up" 27 | scale_down_alarm_name = f"{stream_name}-scale-down" 28 | current_shard_count = 10 29 | expected_target_shard_count = 8 30 | 31 | cw_client_mock = CloudWatchClientMocker(CW_CLIENT, mocker) 32 | kinesis_client_mock = KinesisClientMocker(KINESIS_CLIENT, mocker) 33 | 34 | kinesis_client_mock.describe_stream_summary(current_shard_count) 35 | update_shard_count_mock = kinesis_client_mock.update_shard_count( 36 | stream_name, current_shard_count, expected_target_shard_count 37 | ) 38 | cw_client_mock.describe_alarms( 39 | alarm_names=[scale_up_alarm_name, scale_down_alarm_name] 40 | ) 41 | put_metric_alarm_mock = cw_client_mock.put_metric_alarm() 42 | set_alarm_state_mock = cw_client_mock.set_alarm_state() 43 | cw_client_mock.get_metric_data(metric_data_results=[0.3, 0.4, 0.2]) 44 | 45 | event_message = { 46 | "AlarmName": scale_down_alarm_name, 47 | "Trigger": { 48 | "Metrics": [ 49 | { 50 | "Id": "shardCount", 51 | "Expression": f"{current_shard_count}", 52 | }, 53 | { 54 | "Id": "incomingBytes", 55 | "MetricStat": { 56 | "Metric": { 57 | "Dimensions": [{"value": stream_name}], 58 | }, 59 | }, 60 | }, 61 | ], 62 | }, 63 | } 64 | 65 | KinesisDownscaler(event_message).scale() 66 | 67 | update_shard_count_mock.assert_called_once_with( 68 | StreamName=stream_name, 69 | ScalingType="UNIFORM_SCALING", 70 | TargetShardCount=expected_target_shard_count, 71 | ) 72 | put_metric_alarm_mock.assert_has_calls( 73 | [ 74 | call( 75 | AlarmName=scale_up_alarm_name, 76 | Metrics=[ 77 | {"Id": "shardCount", "Expression": f"{expected_target_shard_count}"} 78 | ], 79 | ActionsEnabled=True, 80 | ), 81 | call( 82 | AlarmName=scale_down_alarm_name, 83 | Metrics=[ 84 | {"Id": "shardCount", "Expression": f"{expected_target_shard_count}"} 85 | ], 86 | ActionsEnabled=True, 87 | ), 88 | ] 89 | ) 90 | set_alarm_state_mock.assert_has_calls( 91 | [ 92 | call( 93 | AlarmName=scale_up_alarm_name, 94 | StateValue="INSUFFICIENT_DATA", 95 | StateReason="Shard count metric updated", 96 | ), 97 | call( 98 | AlarmName=scale_down_alarm_name, 99 | StateValue="INSUFFICIENT_DATA", 100 | StateReason="Shard count metric updated", 101 | ), 102 | ] 103 | ) 104 | 105 | logs = list(KinesisAutoscalerLog.scan()) 106 | assert len(logs) == 1 107 | 108 | log = logs[0] 109 | assert log.stream_name == stream_name 110 | assert log.shard_count == current_shard_count 111 | assert log.target_shard_count == expected_target_shard_count 112 | assert log.scaling_type == "SCALE_DOWN" 113 | 114 | frozen_datetime = datetime.utcnow().replace(tzinfo=timezone.utc) 115 | assert log.scaling_datetime == frozen_datetime 116 | assert log.expiration_datetime == frozen_datetime + timedelta( 117 | days=LOGS_RETENTION_DAYS 118 | ) 119 | -------------------------------------------------------------------------------- /kinesis_autoscaler/kinesis_downscaler.py: -------------------------------------------------------------------------------- 1 | """ 2 | Kinesis stream downscaler 3 | """ 4 | import math 5 | from datetime import datetime, timedelta 6 | from kinesis_autoscaler.kinesis_autoscaler import KinesisAutoscaler, CW_CLIENT 7 | 8 | 9 | class 
KinesisDownscaler(KinesisAutoscaler): 10 | """ 11 | Kinesis stream downscaler 12 | """ 13 | 14 | scaling_type = "SCALE_DOWN" 15 | 16 | def get_target_shard_count(self, current_shard_count: int) -> int: 17 | """ 18 | Calculates the scale-down operation target shard count. 19 | This is done by querying for the max usage factor and calculating 20 | the shard count that will result in a usage factor of 50% at the end 21 | of the scaling operation (and never below half the current count, the minimum the UpdateShardCount API allows per operation). 22 | :param current_shard_count: the current shard count of the stream 23 | :return: the shard count the stream should scale to 24 | """ 25 | max_usage_factor = self.get_max_usage_factor(current_shard_count) 26 | used_shard_count = current_shard_count * max_usage_factor 27 | target_shard_count = math.ceil(used_shard_count * 2) 28 | min_possible_shard_count = math.ceil(current_shard_count / 2) 29 | return max(min_possible_shard_count, target_shard_count) 30 | 31 | def get_max_usage_factor(self, current_shard_count: int) -> float: 32 | """ 33 | Queries for the stream's max usage factor over the last 24 hours 34 | (5m aggregation) and returns the max data point value. 35 | :param current_shard_count: the current shard count of the stream 36 | :return: the maximum usage factor of the stream 37 | """ 38 | current_datetime = datetime.now() 39 | 40 | response = CW_CLIENT.get_metric_data( 41 | StartTime=current_datetime - timedelta(days=1), 42 | EndTime=current_datetime, 43 | MetricDataQueries=[ 44 | { 45 | "Id": "shardCount", 46 | "Expression": str(current_shard_count), 47 | "ReturnData": False, 48 | }, 49 | { 50 | "Id": "incomingBytes", 51 | "MetricStat": { 52 | "Metric": { 53 | "Namespace": "AWS/Kinesis", 54 | "MetricName": "IncomingBytes", 55 | "Dimensions": [ 56 | {"Name": "StreamName", "Value": self.stream_name}, 57 | ], 58 | }, 59 | "Period": 300, 60 | "Stat": "Sum", 61 | }, 62 | "ReturnData": False, 63 | }, 64 | { 65 | "Id": "incomingRecords", 66 | "MetricStat": { 67 | "Metric": { 68 | "Namespace": "AWS/Kinesis", 69 | "MetricName": "IncomingRecords", 70 | "Dimensions": [ 71 | {"Name": "StreamName", "Value": self.stream_name}, 72 | ], 73 | }, 74 | "Period": 300, 75 | "Stat": "Sum", 76 | }, 77 | "ReturnData": False, 78 | }, 79 | { 80 | "Id": "incomingBytesFilledWithZeroForMissingDataPoints", 81 | "Expression": "FILL(incomingBytes,0)", 82 | "ReturnData": False, 83 | }, 84 | { 85 | "Id": "incomingRecordsFilledWithZeroForMissingDataPoints", 86 | "Expression": "FILL(incomingRecords,0)", 87 | "ReturnData": False, 88 | }, 89 | { 90 | "Id": "incomingBytesUsageFactor", 91 | "Expression": "incomingBytesFilledWithZeroForMissingDataPoints/(1024*1024*60*5*shardCount)", # noqa: E501 92 | "ReturnData": False, 93 | }, 94 | { 95 | "Id": "incomingRecordsUsageFactor", 96 | "Expression": "incomingRecordsFilledWithZeroForMissingDataPoints/(1000*60*5*shardCount)", # noqa: E501 97 | "ReturnData": False, 98 | }, 99 | { 100 | "Id": "maxIncomingUsageFactor", 101 | "Expression": "MAX([incomingBytesUsageFactor,incomingRecordsUsageFactor])", # noqa: E501 102 | "ReturnData": True, 103 | }, 104 | ], 105 | ) 106 | 107 | max_usage_factor = max(response["MetricDataResults"][0]["Values"]) 108 | return max_usage_factor 109 | -------------------------------------------------------------------------------- /examples/stream_subscription.yml: -------------------------------------------------------------------------------- 1 | service: stream-subscription-example 2 | 3 | custom: 4 | shardCount: 1 5 | streamName: subscription-example-${self:provider.stage} 6 | scaleUpAlarmName:
subscription-example-scale-up-${self:provider.stage} 7 | scaleDownAlarmName: subscription-example-scale-down-${self:provider.stage} 8 | 9 | provider: 10 | name: aws 11 | region: ${opt:region, 'us-east-1'} 12 | stage: ${opt:stage, 'dev'} 13 | 14 | resources: 15 | Resources: 16 | AutoscalerExampleStream: 17 | Type: AWS::Kinesis::Stream 18 | Properties: 19 | Name: ${self:custom.streamName} 20 | ShardCount: ${self:custom.shardCount, '1'} 21 | 22 | ScaleUpAlarm: 23 | Type: AWS::CloudWatch::Alarm 24 | Properties: 25 | AlarmName: ${self:custom.scaleUpAlarmName} 26 | AlarmActions: 27 | - ${cf:kinesis-autoscaler-${self:provider.stage}.ScaleUpTopicArn} 28 | Threshold: 0.75 29 | ComparisonOperator: GreaterThanOrEqualToThreshold 30 | TreatMissingData: ignore 31 | EvaluationPeriods: 1 32 | Metrics: 33 | - Id: shardCount 34 | Expression: ${self:custom.shardCount, '1'} 35 | ReturnData: false 36 | - Id: incomingBytes 37 | MetricStat: 38 | Metric: 39 | Namespace: AWS/Kinesis 40 | MetricName: IncomingBytes 41 | Dimensions: 42 | - Name: StreamName 43 | Value: ${self:custom.streamName} 44 | Period: 300 45 | Stat: Sum 46 | ReturnData: false 47 | - Id: incomingRecords 48 | MetricStat: 49 | Metric: 50 | Namespace: AWS/Kinesis 51 | MetricName: IncomingRecords 52 | Dimensions: 53 | - Name: StreamName 54 | Value: ${self:custom.streamName} 55 | Period: 300 56 | Stat: Sum 57 | ReturnData: false 58 | - Id: incomingBytesFilledWithZeroForMissingDataPoints 59 | Expression: FILL(incomingBytes,0) 60 | ReturnData: false 61 | - Id: incomingRecordsFilledWithZeroForMissingDataPoints 62 | Expression: FILL(incomingRecords,0) 63 | ReturnData: false 64 | - Id: incomingBytesUsageFactor 65 | Expression: incomingBytesFilledWithZeroForMissingDataPoints/(1024*1024*60*5*shardCount) 66 | ReturnData: false 67 | - Id: incomingRecordsUsageFactor 68 | Expression: incomingRecordsFilledWithZeroForMissingDataPoints/(1000*60*5*shardCount) 69 | ReturnData: false 70 | - Id: maxIncomingUsageFactor 71 | Expression: MAX([incomingBytesUsageFactor,incomingRecordsUsageFactor]) 72 | ReturnData: true 73 | 74 | ScaleDownAlarm: 75 | Type: AWS::CloudWatch::Alarm 76 | Properties: 77 | AlarmName: ${self:custom.scaleDownAlarmName} 78 | AlarmActions: 79 | - ${cf:kinesis-autoscaler-${self:provider.stage}.ScaleDownTopicArn} 80 | Threshold: 0.25 81 | ComparisonOperator: LessThanOrEqualToThreshold 82 | TreatMissingData: ignore 83 | EvaluationPeriods: 288 # 86400 (day in seconds) / 300 (metric period in seconds) 84 | Metrics: 85 | - Id: shardCount 86 | Expression: ${self:custom.shardCount, '1'} 87 | ReturnData: false 88 | - Id: incomingBytes 89 | MetricStat: 90 | Metric: 91 | Namespace: AWS/Kinesis 92 | MetricName: IncomingBytes 93 | Dimensions: 94 | - Name: StreamName 95 | Value: ${self:custom.streamName} 96 | Period: 300 97 | Stat: Sum 98 | ReturnData: false 99 | - Id: incomingRecords 100 | MetricStat: 101 | Metric: 102 | Namespace: AWS/Kinesis 103 | MetricName: IncomingRecords 104 | Dimensions: 105 | - Name: StreamName 106 | Value: ${self:custom.streamName} 107 | Period: 300 108 | Stat: Sum 109 | ReturnData: false 110 | - Id: incomingBytesFilledWithZeroForMissingDataPoints 111 | Expression: FILL(incomingBytes,0) 112 | ReturnData: false 113 | - Id: incomingRecordsFilledWithZeroForMissingDataPoints 114 | Expression: FILL(incomingRecords,0) 115 | ReturnData: false 116 | - Id: incomingBytesUsageFactor 117 | Expression: incomingBytesFilledWithZeroForMissingDataPoints/(1024*1024*60*5*shardCount) 118 | ReturnData: false 119 | - Id: incomingRecordsUsageFactor 120 | 
Expression: incomingRecordsFilledWithZeroForMissingDataPoints/(1000*60*5*shardCount) 121 | ReturnData: false 122 | - Id: maxIncomingUsageFactor 123 | Expression: MAX([incomingBytesUsageFactor,incomingRecordsUsageFactor]) 124 | ReturnData: true 125 | -------------------------------------------------------------------------------- /kinesis_autoscaler/kinesis_autoscaler.py: -------------------------------------------------------------------------------- 1 | """ 2 | Kinesis stream base autoscaler 3 | """ 4 | import logging 5 | from datetime import datetime, timezone, timedelta 6 | from abc import ABC, abstractmethod 7 | import boto3 8 | from kinesis_autoscaler.models.autoscaler_log import KinesisAutoscalerLog 9 | from kinesis_autoscaler.constants import REGION, LOGS_RETENTION_DAYS 10 | 11 | CW_CLIENT = boto3.client("cloudwatch", region_name=REGION) 12 | KINESIS_CLIENT = boto3.client("kinesis", region_name=REGION) 13 | 14 | 15 | class KinesisAutoscaler(ABC): 16 | """ 17 | Kinesis stream base autoscaler 18 | """ 19 | 20 | def __init__(self, event_message: dict): 21 | """ 22 | Initializes base KinesisAutoscaler instance. 23 | :param event_message: triggering alarm event 24 | """ 25 | self.stream_name = None 26 | self.event_message = event_message 27 | 28 | def scale(self) -> None: 29 | """ 30 | Scales Kinesis stream according to the triggered alarm. 31 | """ 32 | self.stream_name = self.parse_stream_name() 33 | logging.info(f"Started stream scaling process. stream={self.stream_name}") 34 | 35 | alarm_shard_count = self.parse_alarm_shard_count() 36 | current_shard_count = self.get_current_shard_count() 37 | if alarm_shard_count != current_shard_count: 38 | logging.info("Alarm shard count out of sync. Syncing alarms") 39 | self.update_stream_alarms(current_shard_count) 40 | return 41 | 42 | target_shard_count = self.get_target_shard_count(current_shard_count) 43 | if current_shard_count == target_shard_count: 44 | logging.info( 45 | "Current and target shard counts are equal. Autoscaling canceled. " 46 | f"shard_count={current_shard_count}" 47 | ) 48 | return 49 | 50 | self.update_shard_count(target_shard_count) 51 | self.update_stream_alarms(target_shard_count) 52 | self.write_scaling_log_to_db(current_shard_count, target_shard_count) 53 | logging.info( 54 | f"Scaling process finished successfully. stream={self.stream_name}" 55 | ) 56 | 57 | def parse_stream_name(self) -> str: 58 | """ 59 | Parses the stream name from the required metric definitions of the alarm. 60 | :return: name of the stream to scale 61 | """ 62 | alarm_metrics = self.event_message["Trigger"]["Metrics"] 63 | for metric in alarm_metrics: 64 | if metric["Id"] in ("incomingBytes", "incomingRecords"): 65 | return metric["MetricStat"]["Metric"]["Dimensions"][0]["value"] 66 | 67 | raise ValueError("Could not parse stream name from alarm metrics") 68 | 69 | def parse_alarm_shard_count(self) -> int: 70 | """ 71 | Parses the alarm shard count from the required math definition of the alarm. 72 | :return: stream's current shard count 73 | """ 74 | alarm_metrics = self.event_message["Trigger"]["Metrics"] 75 | for metric in alarm_metrics: 76 | if metric["Id"] == "shardCount": 77 | return int(metric["Expression"]) 78 | 79 | raise ValueError("Could not parse current shard count from alarm metrics") 80 | 81 | def get_current_shard_count(self) -> int: 82 | """ 83 | Queries and returns the current open shard count of the stream. 
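Note that open shards exclude parent shards closed by previous resharding operations, so this count can differ from the stream's total shard count.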
84 | :return: stream's open shard count 85 | """ 86 | response = KINESIS_CLIENT.describe_stream_summary(StreamName=self.stream_name) 87 | return response["StreamDescriptionSummary"]["OpenShardCount"] 88 | 89 | def update_stream_alarms(self, target_shard_count: int) -> None: 90 | """ 91 | Updates the scaled stream alarms (scale-up and scale-down). 92 | Required after each scaling operation in order to sync the alarm 93 | thresholds, which are based on the current shard count of the stream. 94 | :param target_shard_count: the stream target shard count after the scale 95 | """ 96 | alarm_names = self.get_alarm_names() 97 | response = CW_CLIENT.describe_alarms(AlarmNames=list(alarm_names.values())) 98 | 99 | if len(response["MetricAlarms"]) != 2: 100 | found_alarm_names = [alarm["AlarmName"] for alarm in response["MetricAlarms"]] 101 | raise ValueError( 102 | "Expected to update 2 scaling alarms. " 103 | f"Found {len(found_alarm_names)} alarms. alarm_names={found_alarm_names}" 104 | ) 105 | 106 | for alarm in response["MetricAlarms"]: 107 | self.update_existing_alarm(alarm, target_shard_count) 108 | self.reset_alarm_state(alarm["AlarmName"]) 109 | 110 | def get_alarm_names(self) -> dict: 111 | """ 112 | Returns the stream alarm names based on the integration requirement 113 | that the alarms have identical names apart from the alarm type 114 | (scale-up/scale-down) part of the name. 115 | :return: dict containing the stream alarm names 116 | """ 117 | triggered_alarm_name = self.event_message["AlarmName"] 118 | if "scale-up" in triggered_alarm_name: 119 | return { 120 | "scale_up": triggered_alarm_name, 121 | "scale_down": triggered_alarm_name.replace("scale-up", "scale-down"), 122 | } 123 | elif "scale-down" in triggered_alarm_name: 124 | return { 125 | "scale_up": triggered_alarm_name.replace("scale-down", "scale-up"), 126 | "scale_down": triggered_alarm_name, 127 | } 128 | 129 | raise ValueError( 130 | "Triggered alarm should contain scale-up/scale-down in its name" 131 | ) 132 | 133 | def update_existing_alarm(self, alarm: dict, target_shard_count: int) -> None: 134 | """ 135 | Updates a stream alarm with the new shard count and disables 136 | attached actions from being invoked when unnecessary. 137 | :param alarm: stream alarm configuration 138 | :param target_shard_count: the stream target shard count after the scale 139 | """ 140 | updated_alarm = self.copy_updateable_alarm_fields(alarm) 141 | 142 | should_disable_alarm = ( 143 | "scale-down" in updated_alarm["AlarmName"] and target_shard_count == 1 144 | ) 145 | updated_alarm["ActionsEnabled"] = not should_disable_alarm 146 | 147 | for metric in updated_alarm["Metrics"]: 148 | if metric["Id"] == "shardCount": 149 | metric["Expression"] = str(target_shard_count) 150 | 151 | CW_CLIENT.put_metric_alarm(**updated_alarm) 152 | logging.info(f"Updated stream alarm. alarm={alarm['AlarmName']}") 153 | 154 | @staticmethod 155 | def copy_updateable_alarm_fields(alarm: dict) -> dict: 156 | """ 157 | Creates a dict containing all relevant alarm fields for alarm update. 158 | This is done because AWS doesn't support updating a single field in an 159 | existing alarm; the alarm must be updated the same way it is created.
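(PutMetricAlarm overwrites the entire alarm definition, so every field that should survive the update must be passed again.)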
160 | :param alarm: stream alarm configuration as returned from the describe operation 161 | :return: stream alarm configuration as required for the alarm update operation 162 | """ 163 | alarm_keys_to_copy = ( 164 | "AlarmName", 165 | "AlarmDescription", 166 | "ActionsEnabled", 167 | "OKActions", 168 | "AlarmActions", 169 | "InsufficientDataActions", 170 | "Unit", 171 | "EvaluationPeriods", 172 | "DatapointsToAlarm", 173 | "Threshold", 174 | "ComparisonOperator", 175 | "TreatMissingData", 176 | "EvaluateLowSampleCountPercentile", 177 | "Metrics", 178 | "Tags", 179 | "ThresholdMetricId", 180 | ) 181 | return {key: alarm[key] for key in alarm_keys_to_copy if key in alarm} 182 | 183 | @staticmethod 184 | def reset_alarm_state(alarm_name: str) -> None: 185 | """ 186 | Temporarily resets an alarm state. 187 | The alarm should be back to its actual state within moments; this is done 188 | because an alarm in ALARM state won't be triggered twice even if it should be. 189 | For example, if a stream is scaled up (scale-up alarm is triggered) but 190 | after the scale-up it is still above the threshold, it won't be triggered 191 | again without resetting its state. In other words, an alarm doesn't invoke its 192 | actions twice without a state change. 193 | :param alarm_name: name of the alarm whose state should be reset 194 | """ 195 | CW_CLIENT.set_alarm_state( 196 | AlarmName=alarm_name, 197 | StateValue="INSUFFICIENT_DATA", 198 | StateReason="Shard count metric updated", 199 | ) 200 | 201 | @abstractmethod 202 | def get_target_shard_count(self, current_shard_count: int) -> int: 203 | """ 204 | Calculates and returns the target shard count the stream should scale to. 205 | :param current_shard_count: the stream's current shard count 206 | :return: the shard count the stream should scale to 207 | """ 208 | pass 209 | 210 | def update_shard_count(self, target_shard_count: int) -> None: 211 | """ 212 | Updates the stream shard count using the UpdateShardCount API. 213 | :param target_shard_count: the shard count the stream should scale to 214 | """ 215 | response = KINESIS_CLIENT.update_shard_count( 216 | StreamName=self.stream_name, 217 | TargetShardCount=target_shard_count, 218 | ScalingType="UNIFORM_SCALING", 219 | ) 220 | logging.info( 221 | f"Updated shard count successfully. stream={response['StreamName']} " 222 | f"current_count={response['CurrentShardCount']} " 223 | f"target_count={response['TargetShardCount']}" 224 | ) 225 | 226 | def write_scaling_log_to_db( 227 | self, current_shard_count: int, target_shard_count: int 228 | ) -> None: 229 | """ 230 | Writes a scaling log to the DB. 231 | :param current_shard_count: the stream current shard count 232 | :param target_shard_count: the stream target shard count after the scale 233 | """ 234 | KinesisAutoscalerLog( 235 | stream_name=self.stream_name, 236 | scaling_datetime=datetime.utcnow().replace(tzinfo=timezone.utc), 237 | shard_count=current_shard_count, 238 | target_shard_count=target_shard_count, 239 | scaling_type=self.scaling_type, 240 | expiration_datetime=timedelta(days=LOGS_RETENTION_DAYS), 241 | ).save() 242 | 243 | @property 244 | @abstractmethod 245 | def scaling_type(self) -> str: 246 | """ 247 | The scaling type of the operation. 248 | Used for writing the scaling type in the DB logs.
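Concrete subclasses return "SCALE_UP" or "SCALE_DOWN".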
249 | """ 250 | pass 251 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 
61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 
179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | [[package]] 2 | name = "atomicwrites" 3 | version = "1.4.0" 4 | description = "Atomic file writes." 5 | category = "dev" 6 | optional = false 7 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 8 | 9 | [[package]] 10 | name = "attrs" 11 | version = "21.2.0" 12 | description = "Classes Without Boilerplate" 13 | category = "dev" 14 | optional = false 15 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 16 | 17 | [package.extras] 18 | dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit"] 19 | docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] 20 | tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"] 21 | tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"] 22 | 23 | [[package]] 24 | name = "black" 25 | version = "21.9b0" 26 | description = "The uncompromising code formatter." 
27 | category = "dev" 28 | optional = false 29 | python-versions = ">=3.6.2" 30 | 31 | [package.dependencies] 32 | click = ">=7.1.2" 33 | mypy-extensions = ">=0.4.3" 34 | pathspec = ">=0.9.0,<1" 35 | platformdirs = ">=2" 36 | regex = ">=2020.1.8" 37 | tomli = ">=0.2.6,<2.0.0" 38 | typing-extensions = [ 39 | {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}, 40 | {version = "!=3.10.0.1", markers = "python_version >= \"3.10\""}, 41 | ] 42 | 43 | [package.extras] 44 | colorama = ["colorama (>=0.4.3)"] 45 | d = ["aiohttp (>=3.6.0)", "aiohttp-cors (>=0.4.0)"] 46 | jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] 47 | python2 = ["typed-ast (>=1.4.2)"] 48 | uvloop = ["uvloop (>=0.15.2)"] 49 | 50 | [[package]] 51 | name = "boto3" 52 | version = "1.19.3" 53 | description = "The AWS SDK for Python" 54 | category = "main" 55 | optional = false 56 | python-versions = ">= 3.6" 57 | 58 | [package.dependencies] 59 | botocore = ">=1.22.3,<1.23.0" 60 | jmespath = ">=0.7.1,<1.0.0" 61 | s3transfer = ">=0.5.0,<0.6.0" 62 | 63 | [package.extras] 64 | crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] 65 | 66 | [[package]] 67 | name = "botocore" 68 | version = "1.22.3" 69 | description = "Low-level, data-driven core of boto 3." 70 | category = "main" 71 | optional = false 72 | python-versions = ">= 3.6" 73 | 74 | [package.dependencies] 75 | jmespath = ">=0.7.1,<1.0.0" 76 | python-dateutil = ">=2.1,<3.0.0" 77 | urllib3 = ">=1.25.4,<1.27" 78 | 79 | [package.extras] 80 | crt = ["awscrt (==0.12.5)"] 81 | 82 | [[package]] 83 | name = "certifi" 84 | version = "2021.10.8" 85 | description = "Python package for providing Mozilla's CA Bundle." 86 | category = "dev" 87 | optional = false 88 | python-versions = "*" 89 | 90 | [[package]] 91 | name = "cffi" 92 | version = "1.15.0" 93 | description = "Foreign Function Interface for Python calling C code." 94 | category = "dev" 95 | optional = false 96 | python-versions = "*" 97 | 98 | [package.dependencies] 99 | pycparser = "*" 100 | 101 | [[package]] 102 | name = "charset-normalizer" 103 | version = "2.0.7" 104 | description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 105 | category = "dev" 106 | optional = false 107 | python-versions = ">=3.5.0" 108 | 109 | [package.extras] 110 | unicode_backport = ["unicodedata2"] 111 | 112 | [[package]] 113 | name = "click" 114 | version = "8.0.3" 115 | description = "Composable command line interface toolkit" 116 | category = "dev" 117 | optional = false 118 | python-versions = ">=3.6" 119 | 120 | [package.dependencies] 121 | colorama = {version = "*", markers = "platform_system == \"Windows\""} 122 | 123 | [[package]] 124 | name = "colorama" 125 | version = "0.4.4" 126 | description = "Cross-platform colored terminal text." 127 | category = "dev" 128 | optional = false 129 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 130 | 131 | [[package]] 132 | name = "cryptography" 133 | version = "35.0.0" 134 | description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
135 | category = "dev" 136 | optional = false 137 | python-versions = ">=3.6" 138 | 139 | [package.dependencies] 140 | cffi = ">=1.12" 141 | 142 | [package.extras] 143 | docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] 144 | docstest = ["doc8", "pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"] 145 | pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] 146 | sdist = ["setuptools_rust (>=0.11.4)"] 147 | ssh = ["bcrypt (>=3.1.5)"] 148 | test = ["pytest (>=6.2.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"] 149 | 150 | [[package]] 151 | name = "docker" 152 | version = "5.0.3" 153 | description = "A Python library for the Docker Engine API." 154 | category = "dev" 155 | optional = false 156 | python-versions = ">=3.6" 157 | 158 | [package.dependencies] 159 | pywin32 = {version = "227", markers = "sys_platform == \"win32\""} 160 | requests = ">=2.14.2,<2.18.0 || >2.18.0" 161 | websocket-client = ">=0.32.0" 162 | 163 | [package.extras] 164 | ssh = ["paramiko (>=2.4.2)"] 165 | tls = ["pyOpenSSL (>=17.5.0)", "cryptography (>=3.4.7)", "idna (>=2.0.0)"] 166 | 167 | [[package]] 168 | name = "flake8" 169 | version = "4.0.1" 170 | description = "the modular source code checker: pep8 pyflakes and co" 171 | category = "dev" 172 | optional = false 173 | python-versions = ">=3.6" 174 | 175 | [package.dependencies] 176 | mccabe = ">=0.6.0,<0.7.0" 177 | pycodestyle = ">=2.8.0,<2.9.0" 178 | pyflakes = ">=2.4.0,<2.5.0" 179 | 180 | [[package]] 181 | name = "freezegun" 182 | version = "1.1.0" 183 | description = "Let your Python tests travel through time" 184 | category = "dev" 185 | optional = false 186 | python-versions = ">=3.5" 187 | 188 | [package.dependencies] 189 | python-dateutil = ">=2.7" 190 | 191 | [[package]] 192 | name = "idna" 193 | version = "3.3" 194 | description = "Internationalized Domain Names in Applications (IDNA)" 195 | category = "dev" 196 | optional = false 197 | python-versions = ">=3.5" 198 | 199 | [[package]] 200 | name = "iniconfig" 201 | version = "1.1.1" 202 | description = "iniconfig: brain-dead simple config-ini parsing" 203 | category = "dev" 204 | optional = false 205 | python-versions = "*" 206 | 207 | [[package]] 208 | name = "jinja2" 209 | version = "3.0.3" 210 | description = "A very fast and expressive template engine." 211 | category = "dev" 212 | optional = false 213 | python-versions = ">=3.6" 214 | 215 | [package.dependencies] 216 | MarkupSafe = ">=2.0" 217 | 218 | [package.extras] 219 | i18n = ["Babel (>=2.7)"] 220 | 221 | [[package]] 222 | name = "jmespath" 223 | version = "0.10.0" 224 | description = "JSON Matching Expressions" 225 | category = "main" 226 | optional = false 227 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" 228 | 229 | [[package]] 230 | name = "markupsafe" 231 | version = "2.0.1" 232 | description = "Safely add untrusted strings to HTML/XML markup." 
233 | category = "dev" 234 | optional = false 235 | python-versions = ">=3.6" 236 | 237 | [[package]] 238 | name = "mccabe" 239 | version = "0.6.1" 240 | description = "McCabe checker, plugin for flake8" 241 | category = "dev" 242 | optional = false 243 | python-versions = "*" 244 | 245 | [[package]] 246 | name = "more-itertools" 247 | version = "8.11.0" 248 | description = "More routines for operating on iterables, beyond itertools" 249 | category = "dev" 250 | optional = false 251 | python-versions = ">=3.5" 252 | 253 | [[package]] 254 | name = "moto" 255 | version = "2.2.15" 256 | description = "A library that allows your python tests to easily mock out the boto library" 257 | category = "dev" 258 | optional = false 259 | python-versions = "*" 260 | 261 | [package.dependencies] 262 | boto3 = ">=1.9.201" 263 | botocore = ">=1.12.201" 264 | cryptography = ">=3.3.1" 265 | docker = {version = ">=2.5.1", optional = true, markers = "extra == \"dynamodb2\""} 266 | Jinja2 = ">=2.10.1" 267 | MarkupSafe = "!=2.0.0a1" 268 | more-itertools = "*" 269 | python-dateutil = ">=2.1,<3.0.0" 270 | pytz = "*" 271 | requests = ">=2.5" 272 | responses = ">=0.9.0" 273 | werkzeug = "*" 274 | xmltodict = "*" 275 | 276 | [package.extras] 277 | all = ["PyYAML (>=5.1)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "ecdsa (<0.15)", "docker (>=2.5.1)", "jsondiff (>=1.1.2)", "aws-xray-sdk (>=0.93,!=0.96)", "idna (>=2.5,<4)", "cfn-lint (>=0.4.0)", "sshpubkeys (>=3.1.0)", "setuptools"] 278 | apigateway = ["python-jose[cryptography] (>=3.1.0,<4.0.0)", "ecdsa (<0.15)"] 279 | awslambda = ["docker (>=2.5.1)"] 280 | batch = ["docker (>=2.5.1)"] 281 | cloudformation = ["docker (>=2.5.1)", "PyYAML (>=5.1)", "cfn-lint (>=0.4.0)"] 282 | cognitoidp = ["python-jose[cryptography] (>=3.1.0,<4.0.0)", "ecdsa (<0.15)"] 283 | ds = ["sshpubkeys (>=3.1.0)"] 284 | dynamodb2 = ["docker (>=2.5.1)"] 285 | dynamodbstreams = ["docker (>=2.5.1)"] 286 | ec2 = ["sshpubkeys (>=3.1.0)"] 287 | efs = ["sshpubkeys (>=3.1.0)"] 288 | iotdata = ["jsondiff (>=1.1.2)"] 289 | s3 = ["PyYAML (>=5.1)"] 290 | server = ["PyYAML (>=5.1)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "ecdsa (<0.15)", "docker (>=2.5.1)", "jsondiff (>=1.1.2)", "aws-xray-sdk (>=0.93,!=0.96)", "idna (>=2.5,<4)", "cfn-lint (>=0.4.0)", "sshpubkeys (>=3.1.0)", "setuptools", "flask", "flask-cors"] 291 | ssm = ["PyYAML (>=5.1)", "dataclasses"] 292 | xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] 293 | 294 | [[package]] 295 | name = "mypy-extensions" 296 | version = "0.4.3" 297 | description = "Experimental type system extensions for programs checked with the mypy typechecker." 298 | category = "dev" 299 | optional = false 300 | python-versions = "*" 301 | 302 | [[package]] 303 | name = "packaging" 304 | version = "21.2" 305 | description = "Core utilities for Python packages" 306 | category = "dev" 307 | optional = false 308 | python-versions = ">=3.6" 309 | 310 | [package.dependencies] 311 | pyparsing = ">=2.0.2,<3" 312 | 313 | [[package]] 314 | name = "pathspec" 315 | version = "0.9.0" 316 | description = "Utility library for gitignore style pattern matching of file paths." 317 | category = "dev" 318 | optional = false 319 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" 320 | 321 | [[package]] 322 | name = "platformdirs" 323 | version = "2.4.0" 324 | description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
325 | category = "dev" 326 | optional = false 327 | python-versions = ">=3.6" 328 | 329 | [package.extras] 330 | docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] 331 | test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] 332 | 333 | [[package]] 334 | name = "pluggy" 335 | version = "1.0.0" 336 | description = "plugin and hook calling mechanisms for python" 337 | category = "dev" 338 | optional = false 339 | python-versions = ">=3.6" 340 | 341 | [package.extras] 342 | dev = ["pre-commit", "tox"] 343 | testing = ["pytest", "pytest-benchmark"] 344 | 345 | [[package]] 346 | name = "py" 347 | version = "1.11.0" 348 | description = "library with cross-python path, ini-parsing, io, code, log facilities" 349 | category = "dev" 350 | optional = false 351 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 352 | 353 | [[package]] 354 | name = "pycodestyle" 355 | version = "2.8.0" 356 | description = "Python style guide checker" 357 | category = "dev" 358 | optional = false 359 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 360 | 361 | [[package]] 362 | name = "pycparser" 363 | version = "2.21" 364 | description = "C parser in Python" 365 | category = "dev" 366 | optional = false 367 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 368 | 369 | [[package]] 370 | name = "pyflakes" 371 | version = "2.4.0" 372 | description = "passive checker of Python programs" 373 | category = "dev" 374 | optional = false 375 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 376 | 377 | [[package]] 378 | name = "pynamodb" 379 | version = "5.1.0" 380 | description = "A Pythonic Interface to DynamoDB" 381 | category = "main" 382 | optional = false 383 | python-versions = ">=3.6" 384 | 385 | [package.dependencies] 386 | botocore = ">=1.12.54" 387 | 388 | [package.extras] 389 | signals = ["blinker (>=1.3,<2.0)"] 390 | 391 | [[package]] 392 | name = "pyparsing" 393 | version = "2.4.7" 394 | description = "Python parsing module" 395 | category = "dev" 396 | optional = false 397 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" 398 | 399 | [[package]] 400 | name = "pytest" 401 | version = "6.2.5" 402 | description = "pytest: simple powerful testing with Python" 403 | category = "dev" 404 | optional = false 405 | python-versions = ">=3.6" 406 | 407 | [package.dependencies] 408 | atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} 409 | attrs = ">=19.2.0" 410 | colorama = {version = "*", markers = "sys_platform == \"win32\""} 411 | iniconfig = "*" 412 | packaging = "*" 413 | pluggy = ">=0.12,<2.0" 414 | py = ">=1.8.2" 415 | toml = "*" 416 | 417 | [package.extras] 418 | testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] 419 | 420 | [[package]] 421 | name = "pytest-mock" 422 | version = "3.6.1" 423 | description = "Thin-wrapper around the mock package for easier use with pytest" 424 | category = "dev" 425 | optional = false 426 | python-versions = ">=3.6" 427 | 428 | [package.dependencies] 429 | pytest = ">=5.0" 430 | 431 | [package.extras] 432 | dev = ["pre-commit", "tox", "pytest-asyncio"] 433 | 434 | [[package]] 435 | name = "python-dateutil" 436 | version = "2.8.2" 437 | description = "Extensions to the standard Python datetime module" 438 | category = "main" 439 | optional = false 440 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" 441 | 442 | [package.dependencies] 443 | six = ">=1.5" 
444 | 445 | [[package]] 446 | name = "pytz" 447 | version = "2021.3" 448 | description = "World timezone definitions, modern and historical" 449 | category = "dev" 450 | optional = false 451 | python-versions = "*" 452 | 453 | [[package]] 454 | name = "pywin32" 455 | version = "227" 456 | description = "Python for Window Extensions" 457 | category = "dev" 458 | optional = false 459 | python-versions = "*" 460 | 461 | [[package]] 462 | name = "regex" 463 | version = "2021.10.23" 464 | description = "Alternative regular expression module, to replace re." 465 | category = "dev" 466 | optional = false 467 | python-versions = "*" 468 | 469 | [[package]] 470 | name = "requests" 471 | version = "2.26.0" 472 | description = "Python HTTP for Humans." 473 | category = "dev" 474 | optional = false 475 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" 476 | 477 | [package.dependencies] 478 | certifi = ">=2017.4.17" 479 | charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} 480 | idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} 481 | urllib3 = ">=1.21.1,<1.27" 482 | 483 | [package.extras] 484 | socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] 485 | use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] 486 | 487 | [[package]] 488 | name = "responses" 489 | version = "0.15.0" 490 | description = "A utility library for mocking out the `requests` Python library." 491 | category = "dev" 492 | optional = false 493 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 494 | 495 | [package.dependencies] 496 | requests = ">=2.0" 497 | six = "*" 498 | urllib3 = ">=1.25.10" 499 | 500 | [package.extras] 501 | tests = ["coverage (>=3.7.1,<6.0.0)", "pytest-cov", "pytest-localserver", "flake8", "types-mock", "types-requests", "types-six", "pytest (>=4.6,<5.0)", "pytest (>=4.6)", "mypy"] 502 | 503 | [[package]] 504 | name = "s3transfer" 505 | version = "0.5.0" 506 | description = "An Amazon S3 Transfer Manager" 507 | category = "main" 508 | optional = false 509 | python-versions = ">= 3.6" 510 | 511 | [package.dependencies] 512 | botocore = ">=1.12.36,<2.0a.0" 513 | 514 | [package.extras] 515 | crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] 516 | 517 | [[package]] 518 | name = "six" 519 | version = "1.16.0" 520 | description = "Python 2 and 3 compatibility utilities" 521 | category = "main" 522 | optional = false 523 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" 524 | 525 | [[package]] 526 | name = "toml" 527 | version = "0.10.2" 528 | description = "Python Library for Tom's Obvious, Minimal Language" 529 | category = "dev" 530 | optional = false 531 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" 532 | 533 | [[package]] 534 | name = "tomli" 535 | version = "1.2.2" 536 | description = "A lil' TOML parser" 537 | category = "dev" 538 | optional = false 539 | python-versions = ">=3.6" 540 | 541 | [[package]] 542 | name = "typing-extensions" 543 | version = "3.10.0.2" 544 | description = "Backported and Experimental Type Hints for Python 3.5+" 545 | category = "dev" 546 | optional = false 547 | python-versions = "*" 548 | 549 | [[package]] 550 | name = "urllib3" 551 | version = "1.26.7" 552 | description = "HTTP library with thread-safe connection pooling, file post, and more." 
553 | category = "main" 554 | optional = false 555 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" 556 | 557 | [package.extras] 558 | brotli = ["brotlipy (>=0.6.0)"] 559 | secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] 560 | socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] 561 | 562 | [[package]] 563 | name = "websocket-client" 564 | version = "1.2.1" 565 | description = "WebSocket client for Python with low level API options" 566 | category = "dev" 567 | optional = false 568 | python-versions = ">=3.6" 569 | 570 | [package.extras] 571 | optional = ["python-socks", "wsaccel"] 572 | test = ["websockets"] 573 | 574 | [[package]] 575 | name = "werkzeug" 576 | version = "2.0.2" 577 | description = "The comprehensive WSGI web application library." 578 | category = "dev" 579 | optional = false 580 | python-versions = ">=3.6" 581 | 582 | [package.extras] 583 | watchdog = ["watchdog"] 584 | 585 | [[package]] 586 | name = "xmltodict" 587 | version = "0.12.0" 588 | description = "Makes working with XML feel like you are working with JSON" 589 | category = "dev" 590 | optional = false 591 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 592 | 593 | [metadata] 594 | lock-version = "1.1" 595 | python-versions = "^3.9" 596 | content-hash = "bdce609d387b8e759f5535dcc01c75b96621089544c8f6d440e0f7bd1dec749b" 597 | 598 | [metadata.files] 599 | atomicwrites = [ 600 | {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, 601 | {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, 602 | ] 603 | attrs = [ 604 | {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, 605 | {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, 606 | ] 607 | black = [ 608 | {file = "black-21.9b0-py3-none-any.whl", hash = "sha256:380f1b5da05e5a1429225676655dddb96f5ae8c75bdf91e53d798871b902a115"}, 609 | {file = "black-21.9b0.tar.gz", hash = "sha256:7de4cfc7eb6b710de325712d40125689101d21d25283eed7e9998722cf10eb91"}, 610 | ] 611 | boto3 = [ 612 | {file = "boto3-1.19.3-py3-none-any.whl", hash = "sha256:2dfc8cf34d6dfbdfca4c88e8fddf9fe95cde489fb83144fe35f989ec6790e325"}, 613 | {file = "boto3-1.19.3.tar.gz", hash = "sha256:e36ffaf9969648e2f435aa1f0029956fea3aac52466eef3bcb43bde498a182dd"}, 614 | ] 615 | botocore = [ 616 | {file = "botocore-1.22.3-py3-none-any.whl", hash = "sha256:aacdb9b8e09e356515966251d1e08d9929575a76af504992bfb941553dee59c2"}, 617 | {file = "botocore-1.22.3.tar.gz", hash = "sha256:53ca22aeac9b53fe5ec1f40b8ca9620ffe8b054458abfeb9ab74bbe9e0b0ecfa"}, 618 | ] 619 | certifi = [ 620 | {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, 621 | {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, 622 | ] 623 | cffi = [ 624 | {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"}, 625 | {file = "cffi-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0"}, 626 | {file = "cffi-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = 
"sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14"}, 627 | {file = "cffi-1.15.0-cp27-cp27m-win32.whl", hash = "sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474"}, 628 | {file = "cffi-1.15.0-cp27-cp27m-win_amd64.whl", hash = "sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6"}, 629 | {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27"}, 630 | {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023"}, 631 | {file = "cffi-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2"}, 632 | {file = "cffi-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e"}, 633 | {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7"}, 634 | {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3"}, 635 | {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c"}, 636 | {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962"}, 637 | {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382"}, 638 | {file = "cffi-1.15.0-cp310-cp310-win32.whl", hash = "sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55"}, 639 | {file = "cffi-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0"}, 640 | {file = "cffi-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e"}, 641 | {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39"}, 642 | {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc"}, 643 | {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032"}, 644 | {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8"}, 645 | {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605"}, 646 | {file = "cffi-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e"}, 647 | {file = "cffi-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc"}, 648 | {file = "cffi-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636"}, 649 | {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4"}, 650 | {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997"}, 651 | {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b"}, 652 | {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2"}, 653 | {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7"}, 654 | {file = "cffi-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66"}, 655 | {file = "cffi-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029"}, 656 | {file = "cffi-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880"}, 657 | {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20"}, 658 | {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024"}, 659 | {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e"}, 660 | {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728"}, 661 | {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6"}, 662 | {file = "cffi-1.15.0-cp38-cp38-win32.whl", hash = "sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c"}, 663 | {file = "cffi-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443"}, 664 | {file = "cffi-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a"}, 665 | {file = "cffi-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37"}, 666 | {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a"}, 667 | {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e"}, 668 | {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796"}, 669 | {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df"}, 670 | {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8"}, 671 | {file = "cffi-1.15.0-cp39-cp39-win32.whl", hash = "sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a"}, 672 | 
{file = "cffi-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139"}, 673 | {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"}, 674 | ] 675 | charset-normalizer = [ 676 | {file = "charset-normalizer-2.0.7.tar.gz", hash = "sha256:e019de665e2bcf9c2b64e2e5aa025fa991da8720daa3c1138cadd2fd1856aed0"}, 677 | {file = "charset_normalizer-2.0.7-py3-none-any.whl", hash = "sha256:f7af805c321bfa1ce6714c51f254e0d5bb5e5834039bc17db7ebe3a4cec9492b"}, 678 | ] 679 | click = [ 680 | {file = "click-8.0.3-py3-none-any.whl", hash = "sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3"}, 681 | {file = "click-8.0.3.tar.gz", hash = "sha256:410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b"}, 682 | ] 683 | colorama = [ 684 | {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, 685 | {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, 686 | ] 687 | cryptography = [ 688 | {file = "cryptography-35.0.0-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:d57e0cdc1b44b6cdf8af1d01807db06886f10177469312fbde8f44ccbb284bc9"}, 689 | {file = "cryptography-35.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:ced40344e811d6abba00295ced98c01aecf0c2de39481792d87af4fa58b7b4d6"}, 690 | {file = "cryptography-35.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:54b2605e5475944e2213258e0ab8696f4f357a31371e538ef21e8d61c843c28d"}, 691 | {file = "cryptography-35.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:7b7ceeff114c31f285528ba8b390d3e9cfa2da17b56f11d366769a807f17cbaa"}, 692 | {file = "cryptography-35.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d69645f535f4b2c722cfb07a8eab916265545b3475fdb34e0be2f4ee8b0b15e"}, 693 | {file = "cryptography-35.0.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a2d0e0acc20ede0f06ef7aa58546eee96d2592c00f450c9acb89c5879b61992"}, 694 | {file = "cryptography-35.0.0-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:07bb7fbfb5de0980590ddfc7f13081520def06dc9ed214000ad4372fb4e3c7f6"}, 695 | {file = "cryptography-35.0.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7eba2cebca600a7806b893cb1d541a6e910afa87e97acf2021a22b32da1df52d"}, 696 | {file = "cryptography-35.0.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:18d90f4711bf63e2fb21e8c8e51ed8189438e6b35a6d996201ebd98a26abbbe6"}, 697 | {file = "cryptography-35.0.0-cp36-abi3-win32.whl", hash = "sha256:c10c797ac89c746e488d2ee92bd4abd593615694ee17b2500578b63cad6b93a8"}, 698 | {file = "cryptography-35.0.0-cp36-abi3-win_amd64.whl", hash = "sha256:7075b304cd567694dc692ffc9747f3e9cb393cc4aa4fb7b9f3abd6f5c4e43588"}, 699 | {file = "cryptography-35.0.0-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a688ebcd08250eab5bb5bca318cc05a8c66de5e4171a65ca51db6bd753ff8953"}, 700 | {file = "cryptography-35.0.0-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d99915d6ab265c22873f1b4d6ea5ef462ef797b4140be4c9d8b179915e0985c6"}, 701 | {file = "cryptography-35.0.0-pp36-pypy36_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:928185a6d1ccdb816e883f56ebe92e975a262d31cc536429041921f8cb5a62fd"}, 702 | {file = "cryptography-35.0.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash 
= "sha256:ebeddd119f526bcf323a89f853afb12e225902a24d29b55fe18dd6fcb2838a76"}, 703 | {file = "cryptography-35.0.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:22a38e96118a4ce3b97509443feace1d1011d0571fae81fc3ad35f25ba3ea999"}, 704 | {file = "cryptography-35.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb80e8a1f91e4b7ef8b33041591e6d89b2b8e122d787e87eeb2b08da71bb16ad"}, 705 | {file = "cryptography-35.0.0-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:abb5a361d2585bb95012a19ed9b2c8f412c5d723a9836418fab7aaa0243e67d2"}, 706 | {file = "cryptography-35.0.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:1ed82abf16df40a60942a8c211251ae72858b25b7421ce2497c2eb7a1cee817c"}, 707 | {file = "cryptography-35.0.0.tar.gz", hash = "sha256:9933f28f70d0517686bd7de36166dda42094eac49415459d9bdf5e7df3e0086d"}, 708 | ] 709 | docker = [ 710 | {file = "docker-5.0.3-py2.py3-none-any.whl", hash = "sha256:7a79bb439e3df59d0a72621775d600bc8bc8b422d285824cb37103eab91d1ce0"}, 711 | {file = "docker-5.0.3.tar.gz", hash = "sha256:d916a26b62970e7c2f554110ed6af04c7ccff8e9f81ad17d0d40c75637e227fb"}, 712 | ] 713 | flake8 = [ 714 | {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, 715 | {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, 716 | ] 717 | freezegun = [ 718 | {file = "freezegun-1.1.0-py2.py3-none-any.whl", hash = "sha256:2ae695f7eb96c62529f03a038461afe3c692db3465e215355e1bb4b0ab408712"}, 719 | {file = "freezegun-1.1.0.tar.gz", hash = "sha256:177f9dd59861d871e27a484c3332f35a6e3f5d14626f2bf91be37891f18927f3"}, 720 | ] 721 | idna = [ 722 | {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, 723 | {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, 724 | ] 725 | iniconfig = [ 726 | {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, 727 | {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, 728 | ] 729 | jinja2 = [ 730 | {file = "Jinja2-3.0.3-py3-none-any.whl", hash = "sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8"}, 731 | {file = "Jinja2-3.0.3.tar.gz", hash = "sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7"}, 732 | ] 733 | jmespath = [ 734 | {file = "jmespath-0.10.0-py2.py3-none-any.whl", hash = "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f"}, 735 | {file = "jmespath-0.10.0.tar.gz", hash = "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9"}, 736 | ] 737 | markupsafe = [ 738 | {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, 739 | {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, 740 | {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, 741 | {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, 742 | {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, 743 | {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, 744 | {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, 745 | {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, 746 | {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, 747 | {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, 748 | {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, 749 | {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, 750 | {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, 751 | {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, 752 | {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, 753 | {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, 754 | {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, 755 | {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, 756 | {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, 757 | {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, 758 | {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, 759 | {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, 760 | {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, 761 | {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, 762 | {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, 763 | {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = 
"sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, 764 | {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, 765 | {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, 766 | {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, 767 | {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, 768 | {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, 769 | {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, 770 | {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, 771 | {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, 772 | {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, 773 | {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, 774 | {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, 775 | {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, 776 | {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, 777 | {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, 778 | {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, 779 | {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, 780 | {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, 781 | {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, 782 | {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, 783 | {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, 784 | {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, 785 | {file = 
"MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, 786 | {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, 787 | {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, 788 | {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, 789 | {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, 790 | {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, 791 | {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, 792 | {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, 793 | {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, 794 | {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, 795 | {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, 796 | {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, 797 | {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, 798 | {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, 799 | {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, 800 | {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, 801 | {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, 802 | {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, 803 | {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, 804 | {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, 805 | {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, 806 | {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, 807 | ] 808 | mccabe = [ 809 | {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = 
"sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, 810 | {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, 811 | ] 812 | more-itertools = [ 813 | {file = "more-itertools-8.11.0.tar.gz", hash = "sha256:0a2fd25d343c08d7e7212071820e7e7ea2f41d8fb45d6bc8a00cd6ce3b7aab88"}, 814 | {file = "more_itertools-8.11.0-py3-none-any.whl", hash = "sha256:88afff98d83d08fe5e4049b81e2b54c06ebb6b3871a600040865c7a592061cbb"}, 815 | ] 816 | moto = [ 817 | {file = "moto-2.2.15-py2.py3-none-any.whl", hash = "sha256:5f743a8fa312b57d6ad72a17c134cf81a21e7d7cd2e602c5062d352390114af2"}, 818 | {file = "moto-2.2.15.tar.gz", hash = "sha256:c5692058863803d8a17e422cea48f6f745895760a4be05cd479c0b481cc8e3b0"}, 819 | ] 820 | mypy-extensions = [ 821 | {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, 822 | {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, 823 | ] 824 | packaging = [ 825 | {file = "packaging-21.2-py3-none-any.whl", hash = "sha256:14317396d1e8cdb122989b916fa2c7e9ca8e2be9e8060a6eff75b6b7b4d8a7e0"}, 826 | {file = "packaging-21.2.tar.gz", hash = "sha256:096d689d78ca690e4cd8a89568ba06d07ca097e3306a4381635073ca91479966"}, 827 | ] 828 | pathspec = [ 829 | {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, 830 | {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, 831 | ] 832 | platformdirs = [ 833 | {file = "platformdirs-2.4.0-py3-none-any.whl", hash = "sha256:8868bbe3c3c80d42f20156f22e7131d2fb321f5bc86a2a345375c6481a67021d"}, 834 | {file = "platformdirs-2.4.0.tar.gz", hash = "sha256:367a5e80b3d04d2428ffa76d33f124cf11e8fff2acdaa9b43d545f5c7d661ef2"}, 835 | ] 836 | pluggy = [ 837 | {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, 838 | {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, 839 | ] 840 | py = [ 841 | {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, 842 | {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, 843 | ] 844 | pycodestyle = [ 845 | {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, 846 | {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, 847 | ] 848 | pycparser = [ 849 | {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, 850 | {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, 851 | ] 852 | pyflakes = [ 853 | {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, 854 | {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, 855 | ] 856 | pynamodb = [ 857 | {file = "pynamodb-5.1.0-py3-none-any.whl", hash = "sha256:8a38fa76522878ef1a8a0b62e6dcc1f883af73182a6c30a050481d316f589d34"}, 858 | {file = "pynamodb-5.1.0.tar.gz", hash 
= "sha256:7f351d70b9f4da95ea2d7e50299640e4c46c83b7b24bea5daf110acd2e5aef2b"}, 859 | ] 860 | pyparsing = [ 861 | {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, 862 | {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, 863 | ] 864 | pytest = [ 865 | {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, 866 | {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, 867 | ] 868 | pytest-mock = [ 869 | {file = "pytest-mock-3.6.1.tar.gz", hash = "sha256:40217a058c52a63f1042f0784f62009e976ba824c418cced42e88d5f40ab0e62"}, 870 | {file = "pytest_mock-3.6.1-py3-none-any.whl", hash = "sha256:30c2f2cc9759e76eee674b81ea28c9f0b94f8f0445a1b87762cadf774f0df7e3"}, 871 | ] 872 | python-dateutil = [ 873 | {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, 874 | {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, 875 | ] 876 | pytz = [ 877 | {file = "pytz-2021.3-py2.py3-none-any.whl", hash = "sha256:3672058bc3453457b622aab7a1c3bfd5ab0bdae451512f6cf25f64ed37f5b87c"}, 878 | {file = "pytz-2021.3.tar.gz", hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"}, 879 | ] 880 | pywin32 = [ 881 | {file = "pywin32-227-cp27-cp27m-win32.whl", hash = "sha256:371fcc39416d736401f0274dd64c2302728c9e034808e37381b5e1b22be4a6b0"}, 882 | {file = "pywin32-227-cp27-cp27m-win_amd64.whl", hash = "sha256:4cdad3e84191194ea6d0dd1b1b9bdda574ff563177d2adf2b4efec2a244fa116"}, 883 | {file = "pywin32-227-cp35-cp35m-win32.whl", hash = "sha256:f4c5be1a293bae0076d93c88f37ee8da68136744588bc5e2be2f299a34ceb7aa"}, 884 | {file = "pywin32-227-cp35-cp35m-win_amd64.whl", hash = "sha256:a929a4af626e530383a579431b70e512e736e9588106715215bf685a3ea508d4"}, 885 | {file = "pywin32-227-cp36-cp36m-win32.whl", hash = "sha256:300a2db938e98c3e7e2093e4491439e62287d0d493fe07cce110db070b54c0be"}, 886 | {file = "pywin32-227-cp36-cp36m-win_amd64.whl", hash = "sha256:9b31e009564fb95db160f154e2aa195ed66bcc4c058ed72850d047141b36f3a2"}, 887 | {file = "pywin32-227-cp37-cp37m-win32.whl", hash = "sha256:47a3c7551376a865dd8d095a98deba954a98f326c6fe3c72d8726ca6e6b15507"}, 888 | {file = "pywin32-227-cp37-cp37m-win_amd64.whl", hash = "sha256:31f88a89139cb2adc40f8f0e65ee56a8c585f629974f9e07622ba80199057511"}, 889 | {file = "pywin32-227-cp38-cp38-win32.whl", hash = "sha256:7f18199fbf29ca99dff10e1f09451582ae9e372a892ff03a28528a24d55875bc"}, 890 | {file = "pywin32-227-cp38-cp38-win_amd64.whl", hash = "sha256:7c1ae32c489dc012930787f06244426f8356e129184a02c25aef163917ce158e"}, 891 | {file = "pywin32-227-cp39-cp39-win32.whl", hash = "sha256:c054c52ba46e7eb6b7d7dfae4dbd987a1bb48ee86debe3f245a2884ece46e295"}, 892 | {file = "pywin32-227-cp39-cp39-win_amd64.whl", hash = "sha256:f27cec5e7f588c3d1051651830ecc00294f90728d19c3bf6916e6dba93ea357c"}, 893 | ] 894 | regex = [ 895 | {file = "regex-2021.10.23-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:45b65d6a275a478ac2cbd7fdbf7cc93c1982d613de4574b56fd6972ceadb8395"}, 896 | {file = "regex-2021.10.23-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74d071dbe4b53c602edd87a7476ab23015a991374ddb228d941929ad7c8c922e"}, 897 | {file = 
"regex-2021.10.23-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:34d870f9f27f2161709054d73646fc9aca49480617a65533fc2b4611c518e455"}, 898 | {file = "regex-2021.10.23-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fb698037c35109d3c2e30f2beb499e5ebae6e4bb8ff2e60c50b9a805a716f79"}, 899 | {file = "regex-2021.10.23-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb46b542133999580ffb691baf67410306833ee1e4f58ed06b6a7aaf4e046952"}, 900 | {file = "regex-2021.10.23-cp310-cp310-win32.whl", hash = "sha256:5e9c9e0ce92f27cef79e28e877c6b6988c48b16942258f3bc55d39b5f911df4f"}, 901 | {file = "regex-2021.10.23-cp310-cp310-win_amd64.whl", hash = "sha256:ab7c5684ff3538b67df3f93d66bd3369b749087871ae3786e70ef39e601345b0"}, 902 | {file = "regex-2021.10.23-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:de557502c3bec8e634246588a94e82f1ee1b9dfcfdc453267c4fb652ff531570"}, 903 | {file = "regex-2021.10.23-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee684f139c91e69fe09b8e83d18b4d63bf87d9440c1eb2eeb52ee851883b1b29"}, 904 | {file = "regex-2021.10.23-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5095a411c8479e715784a0c9236568ae72509450ee2226b649083730f3fadfc6"}, 905 | {file = "regex-2021.10.23-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b568809dca44cb75c8ebb260844ea98252c8c88396f9d203f5094e50a70355f"}, 906 | {file = "regex-2021.10.23-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:eb672217f7bd640411cfc69756ce721d00ae600814708d35c930930f18e8029f"}, 907 | {file = "regex-2021.10.23-cp36-cp36m-win32.whl", hash = "sha256:a7a986c45d1099a5de766a15de7bee3840b1e0e1a344430926af08e5297cf666"}, 908 | {file = "regex-2021.10.23-cp36-cp36m-win_amd64.whl", hash = "sha256:6d7722136c6ed75caf84e1788df36397efdc5dbadab95e59c2bba82d4d808a4c"}, 909 | {file = "regex-2021.10.23-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9f665677e46c5a4d288ece12fdedf4f4204a422bb28ff05f0e6b08b7447796d1"}, 910 | {file = "regex-2021.10.23-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:450dc27483548214314640c89a0f275dbc557968ed088da40bde7ef8fb52829e"}, 911 | {file = "regex-2021.10.23-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:129472cd06062fb13e7b4670a102951a3e655e9b91634432cfbdb7810af9d710"}, 912 | {file = "regex-2021.10.23-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a940ca7e7189d23da2bfbb38973832813eab6bd83f3bf89a977668c2f813deae"}, 913 | {file = "regex-2021.10.23-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:530fc2bbb3dc1ebb17f70f7b234f90a1dd43b1b489ea38cea7be95fb21cdb5c7"}, 914 | {file = "regex-2021.10.23-cp37-cp37m-win32.whl", hash = "sha256:ded0c4a3eee56b57fcb2315e40812b173cafe79d2f992d50015f4387445737fa"}, 915 | {file = "regex-2021.10.23-cp37-cp37m-win_amd64.whl", hash = "sha256:391703a2abf8013d95bae39145d26b4e21531ab82e22f26cd3a181ee2644c234"}, 916 | {file = "regex-2021.10.23-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be04739a27be55631069b348dda0c81d8ea9822b5da10b8019b789e42d1fe452"}, 917 | {file = 
"regex-2021.10.23-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13ec99df95003f56edcd307db44f06fbeb708c4ccdcf940478067dd62353181e"}, 918 | {file = "regex-2021.10.23-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8d1cdcda6bd16268316d5db1038965acf948f2a6f43acc2e0b1641ceab443623"}, 919 | {file = "regex-2021.10.23-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c186691a7995ef1db61205e00545bf161fb7b59cdb8c1201c89b333141c438a"}, 920 | {file = "regex-2021.10.23-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2b20f544cbbeffe171911f6ce90388ad36fe3fad26b7c7a35d4762817e9ea69c"}, 921 | {file = "regex-2021.10.23-cp38-cp38-win32.whl", hash = "sha256:c0938ddd60cc04e8f1faf7a14a166ac939aac703745bfcd8e8f20322a7373019"}, 922 | {file = "regex-2021.10.23-cp38-cp38-win_amd64.whl", hash = "sha256:56f0c81c44638dfd0e2367df1a331b4ddf2e771366c4b9c5d9a473de75e3e1c7"}, 923 | {file = "regex-2021.10.23-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:80bb5d2e92b2258188e7dcae5b188c7bf868eafdf800ea6edd0fbfc029984a88"}, 924 | {file = "regex-2021.10.23-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1dae12321b31059a1a72aaa0e6ba30156fe7e633355e445451e4021b8e122b6"}, 925 | {file = "regex-2021.10.23-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1f2b59c28afc53973d22e7bc18428721ee8ca6079becf1b36571c42627321c65"}, 926 | {file = "regex-2021.10.23-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d134757a37d8640f3c0abb41f5e68b7cf66c644f54ef1cb0573b7ea1c63e1509"}, 927 | {file = "regex-2021.10.23-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0dcc0e71118be8c69252c207630faf13ca5e1b8583d57012aae191e7d6d28b84"}, 928 | {file = "regex-2021.10.23-cp39-cp39-win32.whl", hash = "sha256:a30513828180264294953cecd942202dfda64e85195ae36c265daf4052af0464"}, 929 | {file = "regex-2021.10.23-cp39-cp39-win_amd64.whl", hash = "sha256:0f7552429dd39f70057ac5d0e897e5bfe211629652399a21671e53f2a9693a4e"}, 930 | {file = "regex-2021.10.23.tar.gz", hash = "sha256:f3f9a91d3cc5e5b0ddf1043c0ae5fa4852f18a1c0050318baf5fc7930ecc1f9c"}, 931 | ] 932 | requests = [ 933 | {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"}, 934 | {file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"}, 935 | ] 936 | responses = [ 937 | {file = "responses-0.15.0-py2.py3-none-any.whl", hash = "sha256:5955ad3468fe8eb5fb736cdab4943457b7768f8670fa3624b4e26ff52dfe20c0"}, 938 | {file = "responses-0.15.0.tar.gz", hash = "sha256:866757987d1962aa908d9c8b3185739faefd72a359e95459de0c2e4e5369c9b2"}, 939 | ] 940 | s3transfer = [ 941 | {file = "s3transfer-0.5.0-py3-none-any.whl", hash = "sha256:9c1dc369814391a6bda20ebbf4b70a0f34630592c9aa520856bf384916af2803"}, 942 | {file = "s3transfer-0.5.0.tar.gz", hash = "sha256:50ed823e1dc5868ad40c8dc92072f757aa0e653a192845c94a3b676f4a62da4c"}, 943 | ] 944 | six = [ 945 | {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, 946 | {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, 947 | ] 
948 | toml = [ 949 | {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, 950 | {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, 951 | ] 952 | tomli = [ 953 | {file = "tomli-1.2.2-py3-none-any.whl", hash = "sha256:f04066f68f5554911363063a30b108d2b5a5b1a010aa8b6132af78489fe3aade"}, 954 | {file = "tomli-1.2.2.tar.gz", hash = "sha256:c6ce0015eb38820eaf32b5db832dbc26deb3dd427bd5f6556cf0acac2c214fee"}, 955 | ] 956 | typing-extensions = [ 957 | {file = "typing_extensions-3.10.0.2-py2-none-any.whl", hash = "sha256:d8226d10bc02a29bcc81df19a26e56a9647f8b0a6d4a83924139f4a8b01f17b7"}, 958 | {file = "typing_extensions-3.10.0.2-py3-none-any.whl", hash = "sha256:f1d25edafde516b146ecd0613dabcc61409817af4766fbbcfb8d1ad4ec441a34"}, 959 | {file = "typing_extensions-3.10.0.2.tar.gz", hash = "sha256:49f75d16ff11f1cd258e1b988ccff82a3ca5570217d7ad8c5f48205dd99a677e"}, 960 | ] 961 | urllib3 = [ 962 | {file = "urllib3-1.26.7-py2.py3-none-any.whl", hash = "sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844"}, 963 | {file = "urllib3-1.26.7.tar.gz", hash = "sha256:4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece"}, 964 | ] 965 | websocket-client = [ 966 | {file = "websocket-client-1.2.1.tar.gz", hash = "sha256:8dfb715d8a992f5712fff8c843adae94e22b22a99b2c5e6b0ec4a1a981cc4e0d"}, 967 | {file = "websocket_client-1.2.1-py2.py3-none-any.whl", hash = "sha256:0133d2f784858e59959ce82ddac316634229da55b498aac311f1620567a710ec"}, 968 | ] 969 | werkzeug = [ 970 | {file = "Werkzeug-2.0.2-py3-none-any.whl", hash = "sha256:63d3dc1cf60e7b7e35e97fa9861f7397283b75d765afcaefd993d6046899de8f"}, 971 | {file = "Werkzeug-2.0.2.tar.gz", hash = "sha256:aa2bb6fc8dee8d6c504c0ac1e7f5f7dc5810a9903e793b6f715a9f015bdadb9a"}, 972 | ] 973 | xmltodict = [ 974 | {file = "xmltodict-0.12.0-py2.py3-none-any.whl", hash = "sha256:8bbcb45cc982f48b2ca8fe7e7827c5d792f217ecf1792626f808bf41c3b86051"}, 975 | {file = "xmltodict-0.12.0.tar.gz", hash = "sha256:50d8c638ed7ecb88d90561beedbf720c9b4e851a9fa6c47ebd64e99d166d8a21"}, 976 | ] 977 | --------------------------------------------------------------------------------