├── app ├── __init__.py ├── helpers │ ├── __init__.py │ ├── twilio.py │ ├── sentry.py │ ├── settings.py │ ├── sqs.py │ ├── sqs_test.py │ ├── messages.py │ └── messages_test.py ├── retry_processor_test.py ├── retry_processor.py ├── webhook_processor.py ├── webhook_processor_test.py ├── fax_processor_test.py └── fax_processor.py ├── pyproject.toml ├── package.json ├── .travis.yml ├── Pipfile ├── LICENSE ├── .gitignore ├── deployment-policy.json ├── README.md ├── serverless.yml └── Pipfile.lock /app/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/helpers/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.isort] 2 | multi_line_output = 3 3 | include_trailing_comma = true 4 | force_grid_wrap = 0 5 | use_parentheses = true 6 | line_length = 88 7 | -------------------------------------------------------------------------------- /app/helpers/twilio.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from twilio.rest import Client # type: ignore 4 | 5 | account_sid = os.environ["TWILIO_SID"] 6 | auth_token = os.environ["TWILIO_AUTH_TOKEN"] 7 | 8 | PHONE_NUMBER = os.environ["TWILIO_PHONE_NUMBER"] 9 | 10 | client = Client(account_sid, auth_token) 11 | -------------------------------------------------------------------------------- /app/helpers/sentry.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import sentry_sdk 4 | from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration 5 | 6 | sentry_sdk.init( 7 | dsn=os.environ["SENTRY_DSN"], 8 | environment=os.environ["SENTRY_ENVIRONMENT"], 9 | 
# Move a fax record from the retry queue back onto the fax queue so it
# gets another delivery attempt.
def handler(event: Any, context: Any) -> Any:
    """Re-enqueue one fax message from the retry queue onto the fax queue."""
    records = event["Records"]
    # The SQS trigger is configured with batchSize=1, so each invocation
    # receives exactly one record.
    assert len(records) == 1
    (record,) = records

    enqueue_fax(Fax.json_loads(record["body"]))
# Deliver one webhook notification from the queue to the calling
# application's callback URL.
def handler(event: Any, context: Any) -> Any:
    """POST the webhook payload as JSON to the callback URL.

    A non-2xx response raises, failing the lambda so SQS re-delivers the
    message and the callback is retried later.
    """
    # The SQS trigger uses batchSize=1, so exactly one record arrives.
    assert len(event["Records"]) == 1
    (record,) = event["Records"]

    hook = Webhook.json_loads(record["body"])

    response = requests.post(
        hook.callback_url,
        data=hook.payload.json_dumps(),
        headers={"Content-Type": "application/json"},
    )
    response.raise_for_status()
branch: master 21 | # deploy v* tags to prod 22 | - provider: script 23 | skip_cleanup: true 24 | script: yarn sls deploy -s prod 25 | on: 26 | tags: true 27 | condition: $TRAVIS_TAG =~ ^v[0-9.]+$ 28 | -------------------------------------------------------------------------------- /Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | name = "pypi" 3 | url = "https://pypi.org/simple" 4 | verify_ssl = true 5 | 6 | [dev-packages] 7 | black = "*" 8 | mypy = "*" 9 | autoflake = "*" 10 | isort = "*" 11 | pytest = "*" 12 | freezegun = "*" 13 | pytest-mock = "*" 14 | responses = "*" 15 | 16 | [packages] 17 | boto3 = "*" 18 | twilio = "*" 19 | requests = "*" 20 | sentry-sdk = "*" 21 | dataclasses-json = "*" 22 | 23 | [requires] 24 | python_version = "3.7" 25 | 26 | [pipenv] 27 | allow_prereleases = true 28 | 29 | [scripts] 30 | autoflake = "autoflake --remove-unused-variables --remove-all-unused-imports --ignore-init-module-imports -i --recursive app" 31 | isort = "isort --recursive app" 32 | black = "black app" 33 | mypy = "mypy app" 34 | pytest = "bash -c 'TWILIO_SID=abc123 TWILIO_AUTH_TOKEN=def456 TWILIO_PHONE_NUMBER=+16175551234 MAX_FAX_ATTEMPTS=10 BACKOFF_DELAY=100 QUEUE_URL_FAX=fax-queue-url QUEUE_URL_WEBHOOK=webhook-queue-url QUEUE_URL_RETRY=retry-queue-url pytest app'" 35 | test = "bash -c 'pipenv run mypy && pipenv run pytest'" 36 | format = "bash -c 'pipenv run autoflake && pipenv run isort && pipenv run black'" 37 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 VoteAmerica 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, 
def _enqueue_regular(queue_url: str, body: str, delay_seconds: Optional[int] = 0):
    """Send a message to a standard (non-FIFO) SQS queue.

    delay_seconds postpones delivery; retry backoff is implemented this way.
    """
    client.send_message(
        QueueUrl=queue_url, MessageBody=body, DelaySeconds=delay_seconds
    )


def _enqueue_fifo(queue_url: str, body: str, message_group: str, dedup_id: str):
    """Send a message to a FIFO SQS queue.

    FIFO queues require a MessageDeduplicationId unless content-based
    deduplication is enabled on the queue. Passing an explicit ID makes the
    send valid either way and prevents duplicate deliveries if the same
    message is enqueued twice within the deduplication window. (The test
    suite in sqs_test.py already expects this parameter to be sent.)
    """
    client.send_message(
        QueueUrl=queue_url,
        MessageBody=body,
        MessageGroupId=message_group,
        MessageDeduplicationId=dedup_id,
    )


def enqueue_fax(fax: Fax):
    """Enqueue a fax for sending.

    The message group is the destination number, so only one fax at a time
    is in flight to any given number; the fax_id deduplicates repeated
    enqueues of the same fax.
    """
    _enqueue_fifo(
        FAX_QUEUE_URL, fax.json_dumps(), message_group=fax.to, dedup_id=fax.fax_id
    )


def enqueue_retry(fax: Fax):
    """Enqueue a failed fax for another attempt after BACKOFF_DELAY seconds."""
    _enqueue_regular(RETRY_QUEUE_URL, fax.json_dumps(), delay_seconds=BACKOFF_DELAY)


def enqueue_webhook(webhook: Webhook):
    """Enqueue a webhook notification for delivery to the application."""
    _enqueue_regular(WEBHOOK_QUEUE_URL, webhook.json_dumps())
"""
Type-safe dataclasses for every kind of message we send over SQS.

The from_json/to_json methods generated by dataclasses_json are untyped, so
each class wraps them in json_loads/json_dumps with precise signatures:
https://github.com/lidatong/dataclasses-json/issues/23
"""

import time
from dataclasses import dataclass, field
from enum import Enum

from dataclasses_json import dataclass_json


@dataclass_json
@dataclass
class Fax:
    """A fax to send: destination, document, callback, and attempt count."""

    fax_id: str
    to: str
    pdf_url: str
    callback_url: str
    retry_count: int = 0

    @classmethod
    def json_loads(cls, json: str) -> "Fax":
        """Parse a Fax from its JSON representation."""
        return cls.from_json(json)  # type: ignore

    def json_dumps(self) -> str:
        """Serialize this Fax to a JSON string."""
        return self.to_json()  # type: ignore


class FaxStatus(Enum):
    """Outcome reported to the application via webhook."""

    SENT = "sent"
    TEMPORARY_FAILURE = "tmp_fail"
    PERMANENT_FAILURE = "perm_fail"


@dataclass_json
@dataclass
class WebhookPayload:
    """The JSON body POSTed to the application's callback URL."""

    fax_id: str
    status: FaxStatus
    message: str
    # Defaults to "now" at construction time so every notification carries
    # a timestamp the receiver can use to order out-of-order deliveries.
    timestamp: int = field(default_factory=lambda: int(time.time()))

    @classmethod
    def json_loads(cls, json: str) -> "WebhookPayload":
        """Parse a WebhookPayload from its JSON representation."""
        return cls.from_json(json)  # type: ignore

    def json_dumps(self) -> str:
        """Serialize this WebhookPayload to a JSON string."""
        return self.to_json()  # type: ignore


@dataclass_json
@dataclass
class Webhook:
    """A pending webhook delivery: where to POST and what to send."""

    callback_url: str
    payload: WebhookPayload

    @classmethod
    def json_loads(cls, json: str) -> "Webhook":
        """Parse a Webhook from its JSON representation."""
        return cls.from_json(json)  # type: ignore

    def json_dumps(self) -> str:
        """Serialize this Webhook to a JSON string."""
        return self.to_json()  # type: ignore
wheels/ 24 | pip-wheel-metadata/ 25 | share/python-wheels/ 26 | *.egg-info/ 27 | .installed.cfg 28 | *.egg 29 | MANIFEST 30 | 31 | # PyInstaller 32 | # Usually these files are written by a python script from a template 33 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 34 | *.manifest 35 | *.spec 36 | 37 | # Installer logs 38 | pip-log.txt 39 | pip-delete-this-directory.txt 40 | 41 | # Unit test / coverage reports 42 | htmlcov/ 43 | .tox/ 44 | .nox/ 45 | .coverage 46 | .coverage.* 47 | .cache 48 | nosetests.xml 49 | coverage.xml 50 | *.cover 51 | *.py,cover 52 | .hypothesis/ 53 | .pytest_cache/ 54 | 55 | # Translations 56 | *.mo 57 | *.pot 58 | 59 | # Django stuff: 60 | *.log 61 | local_settings.py 62 | db.sqlite3 63 | db.sqlite3-journal 64 | 65 | # Flask stuff: 66 | instance/ 67 | .webassets-cache 68 | 69 | # Scrapy stuff: 70 | .scrapy 71 | 72 | # Sphinx documentation 73 | docs/_build/ 74 | 75 | # PyBuilder 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | .python-version 87 | 88 | # pipenv 89 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 90 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 91 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 92 | # install all needed dependencies. 93 | #Pipfile.lock 94 | 95 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 96 | __pypackages__/ 97 | 98 | # Celery stuff 99 | celerybeat-schedule 100 | celerybeat.pid 101 | 102 | # SageMath parsed files 103 | *.sage.py 104 | 105 | # Environments 106 | .env 107 | .venv 108 | env/ 109 | venv/ 110 | ENV/ 111 | env.bak/ 112 | venv.bak/ 113 | 114 | # Spyder project settings 115 | .spyderproject 116 | .spyproject 117 | 118 | # Rope project settings 119 | .ropeproject 120 | 121 | # mkdocs documentation 122 | /site 123 | 124 | # mypy 125 | .mypy_cache/ 126 | .dmypy.json 127 | dmypy.json 128 | 129 | # Pyre type checker 130 | .pyre/ 131 | 132 | # Node 133 | node_modules 134 | 135 | # Serverless directories 136 | .serverless 137 | 138 | 139 | # Local testing 140 | /util 141 | -------------------------------------------------------------------------------- /deployment-policy.json: -------------------------------------------------------------------------------- 1 | { 2 | "Version": "2012-10-17", 3 | "Statement": [ 4 | { 5 | "Effect": "Allow", 6 | "Action": [ 7 | "cloudformation:ValidateTemplate" 8 | ], 9 | "Resource": [ 10 | "*" 11 | ] 12 | }, 13 | { 14 | "Effect": "Allow", 15 | "Action": [ 16 | "cloudformation:CreateStack", 17 | "cloudformation:CreateUploadBucket", 18 | "cloudformation:DeleteStack", 19 | "cloudformation:Describe*", 20 | "cloudformation:UpdateStack", 21 | "cloudformation:ListStackResources" 22 | ], 23 | "Resource": [ 24 | "arn:aws:cloudformation:us-west-2:*:stack/fax-gateway-*/*" 25 | ] 26 | }, 27 | { 28 | "Effect": "Allow", 29 | "Action": [ 30 | "s3:*" 31 | ], 32 | "Resource": [ 33 | "arn:aws:s3:::fax-gateway*serverlessdeploy*", 34 | "arn:aws:s3:::fax-gateway*serverlessdeploy*/*" 35 | ] 36 | }, 37 | { 38 | "Effect": "Allow", 39 | "Action": [ 40 | "lambda:AddPermission", 41 | "lambda:CreateAlias", 42 | "lambda:DeleteFunction", 43 | "lambda:InvokeFunction", 44 | "lambda:PublishVersion", 45 | "lambda:RemovePermission", 46 | "lambda:PutProvisionedConcurrencyConfig", 47 | "lambda:Update*", 48 | 
"lambda:List*", 49 | "lambda:Get*" 50 | ], 51 | "Resource": [ 52 | "arn:aws:lambda:us-west-2:*:function:fax-gateway-*-*" 53 | ] 54 | }, 55 | { 56 | "Effect": "Allow", 57 | "Action": [ 58 | "iam:PassRole", 59 | "iam:GetRole" 60 | ], 61 | "Resource": [ 62 | "arn:aws:iam::*:role/fax-gateway-*" 63 | ] 64 | }, 65 | { 66 | "Effect": "Allow", 67 | "Action": [ 68 | "cloudwatch:GetMetricStatistics" 69 | ], 70 | "Resource": [ 71 | "*" 72 | ] 73 | }, 74 | { 75 | "Effect": "Allow", 76 | "Action": [ 77 | "ssm:DescribeParameters", 78 | "ssm:GetParameter", 79 | "ssm:GetParameters" 80 | ], 81 | "Resource": "arn:aws:ssm:us-west-2:*:parameter/fax_gateway.*" 82 | }, 83 | { 84 | "Effect": "Allow", 85 | "Action": [ 86 | "*" 87 | ], 88 | "Resource": "arn:aws:sqs:us-west-2:*:fax-gateway*" 89 | }, 90 | { 91 | "Action": [ 92 | "ec2:DescribeSecurityGroups", 93 | "ec2:DescribeSecurityGroupReferences", 94 | "ec2:DescribeStaleSecurityGroups", 95 | "ec2:DescribeVpcs", 96 | "ec2:DescribeSubnets" 97 | ], 98 | "Effect": "Allow", 99 | "Resource": "*" 100 | } 101 | ] 102 | } 103 | -------------------------------------------------------------------------------- /app/helpers/messages_test.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | from freezegun import freeze_time # type: ignore 4 | 5 | from .messages import Fax, FaxStatus, Webhook, WebhookPayload 6 | 7 | 8 | def test_fax(): 9 | # load 10 | assert ( 11 | Fax.json_loads( 12 | """ 13 | { 14 | "fax_id": "a", 15 | "to": "b", 16 | "pdf_url": "c", 17 | "callback_url": "d", 18 | "retry_count": 5 19 | } 20 | """ 21 | ) 22 | == Fax(fax_id="a", to="b", pdf_url="c", callback_url="d", retry_count=5) 23 | ) 24 | 25 | # load, default retry count 26 | assert ( 27 | Fax.json_loads( 28 | """ 29 | { 30 | "fax_id": "a", 31 | "to": "b", 32 | "pdf_url": "c", 33 | "callback_url": "d" 34 | } 35 | """ 36 | ) 37 | == Fax(fax_id="a", to="b", pdf_url="c", callback_url="d", retry_count=0) 38 | ) 39 | 40 | # dump 
41 | assert json.loads( 42 | Fax( 43 | fax_id="a", to="b", pdf_url="c", callback_url="d", retry_count=5 44 | ).json_dumps() 45 | ) == { 46 | "fax_id": "a", 47 | "to": "b", 48 | "pdf_url": "c", 49 | "callback_url": "d", 50 | "retry_count": 5, 51 | } 52 | 53 | # symmetry 54 | fax = Fax(fax_id="a", to="b", pdf_url="c", callback_url="d", retry_count=5) 55 | assert fax == Fax.json_loads(fax.json_dumps()) 56 | 57 | 58 | @freeze_time("2012-01-14 05:06:07 UTC") 59 | def test_webhook(): 60 | now = 1326517567 61 | 62 | # load 63 | assert ( 64 | Webhook.json_loads( 65 | """ 66 | { 67 | "callback_url": "a", 68 | "payload": { 69 | "fax_id": "b", 70 | "status": "sent", 71 | "message": "c", 72 | "timestamp": 1590590198 73 | } 74 | } 75 | """ 76 | ) 77 | == Webhook( 78 | callback_url="a", 79 | payload=WebhookPayload( 80 | fax_id="b", status=FaxStatus.SENT, message="c", timestamp=1590590198 81 | ), 82 | ) 83 | ) 84 | 85 | # load, default timestamp 86 | assert ( 87 | Webhook.json_loads( 88 | """ 89 | { 90 | "callback_url": "a", 91 | "payload": { 92 | "fax_id": "b", 93 | "status": "tmp_fail", 94 | "message": "c" 95 | } 96 | } 97 | """ 98 | ) 99 | == Webhook( 100 | callback_url="a", 101 | payload=WebhookPayload( 102 | fax_id="b", 103 | status=FaxStatus.TEMPORARY_FAILURE, 104 | message="c", 105 | timestamp=now, 106 | ), 107 | ) 108 | ) 109 | 110 | # dump 111 | assert json.loads( 112 | Webhook( 113 | callback_url="a", 114 | payload=WebhookPayload( 115 | fax_id="b", status=FaxStatus.PERMANENT_FAILURE, message="c" 116 | ), 117 | ).json_dumps() 118 | ) == { 119 | "callback_url": "a", 120 | "payload": { 121 | "fax_id": "b", 122 | "status": "perm_fail", 123 | "message": "c", 124 | "timestamp": now, 125 | }, 126 | } 127 | 128 | # symmetry 129 | webhook = Webhook( 130 | callback_url="a", 131 | payload=WebhookPayload( 132 | fax_id="b", status=FaxStatus.SENT, message="c", timestamp=1590590198 133 | ), 134 | ) 135 | assert webhook == Webhook.json_loads(webhook.json_dumps()) 136 | 
-------------------------------------------------------------------------------- /app/fax_processor_test.py: -------------------------------------------------------------------------------- 1 | from unittest import mock 2 | 3 | from freezegun import freeze_time # type: ignore 4 | 5 | from .fax_processor import handler, poll_until_fax_delivered 6 | from .helpers.messages import Fax, FaxStatus, Webhook, WebhookPayload 7 | 8 | 9 | def mock_client(mocker, statuses): 10 | next_status = 0 11 | 12 | def mock_fetch(): 13 | nonlocal next_status 14 | 15 | ret = mock.MagicMock() 16 | ret.status = statuses[next_status] 17 | 18 | next_status += 1 19 | 20 | return ret 21 | 22 | def mock_faxes(fax_sid): 23 | assert fax_sid == "mock-sid" 24 | 25 | fax = mock.MagicMock() 26 | fax.fetch.side_effect = mock_fetch 27 | 28 | return fax 29 | 30 | mock_client = mocker.patch("app.fax_processor.client") 31 | mock_client.fax.faxes.side_effect = mock_faxes 32 | mock_client.fax.faxes.create.return_value = mock.Mock(sid="mock-sid") 33 | 34 | return mock_client 35 | 36 | 37 | def test_poll_until_fax_delivered_success(mocker): 38 | statuses = ["queued", "processing", "sending", "delivered"] 39 | client = mock_client(mocker, statuses) 40 | 41 | mock_time = mocker.patch("app.fax_processor.time") 42 | 43 | assert poll_until_fax_delivered("mock-sid") == "delivered" 44 | mock_time.sleep.assert_has_calls([mock.call(15), mock.call(15), mock.call(15)]) 45 | 46 | 47 | def test_poll_until_fax_delivered_failure(mocker): 48 | statuses = ["queued", "processing", "sending", "busy"] 49 | client = mock_client(mocker, statuses) 50 | 51 | mock_time = mocker.patch("app.fax_processor.time") 52 | 53 | assert poll_until_fax_delivered("mock-sid") == "busy" 54 | mock_time.sleep.assert_has_calls([mock.call(15), mock.call(15), mock.call(15)]) 55 | 56 | 57 | def test_fax_success(mocker): 58 | mock_client(mocker, ["delivered"]) 59 | mock_enqueue_webhook = mocker.patch("app.fax_processor.enqueue_webhook") 60 | 61 | handler( 
62 | { 63 | "Records": [ 64 | { 65 | "body": Fax( 66 | fax_id="a", to="b", pdf_url="c", callback_url="d", retry_count=0 67 | ).json_dumps() 68 | } 69 | ] 70 | }, 71 | {}, 72 | ) 73 | 74 | mock_enqueue_webhook.assert_called_with( 75 | Webhook( 76 | callback_url="d", 77 | payload=WebhookPayload( 78 | fax_id="a", status=FaxStatus.SENT, message="Fax sent successfully" 79 | ), 80 | ) 81 | ) 82 | 83 | 84 | @freeze_time("2012-01-14 05:06:07 UTC") 85 | def test_fax_temp_failure(mocker): 86 | mock_client(mocker, ["failed"]) 87 | mock_enqueue_webhook = mocker.patch("app.fax_processor.enqueue_webhook") 88 | mock_enqueue_retry = mocker.patch("app.fax_processor.enqueue_retry") 89 | 90 | handler( 91 | { 92 | "Records": [ 93 | { 94 | "body": Fax( 95 | fax_id="a", to="b", pdf_url="c", callback_url="d", retry_count=8 96 | ).json_dumps() 97 | } 98 | ] 99 | }, 100 | {}, 101 | ) 102 | 103 | mock_enqueue_webhook.assert_called_with( 104 | Webhook( 105 | callback_url="d", 106 | payload=WebhookPayload( 107 | fax_id="a", 108 | status=FaxStatus.TEMPORARY_FAILURE, 109 | message="Failed to deliver fax (attempt 9 of 10). Fax status: failed", 110 | ), 111 | ) 112 | ) 113 | 114 | mock_enqueue_retry.assert_called_with( 115 | Fax(fax_id="a", to="b", pdf_url="c", callback_url="d", retry_count=9) 116 | ) 117 | 118 | 119 | @freeze_time("2012-01-14 05:06:07 UTC") 120 | def test_fax_perm_failure(mocker): 121 | mock_client(mocker, ["busy"]) 122 | mock_enqueue_webhook = mocker.patch("app.fax_processor.enqueue_webhook") 123 | 124 | handler( 125 | { 126 | "Records": [ 127 | { 128 | "body": Fax( 129 | fax_id="a", to="b", pdf_url="c", callback_url="d", retry_count=9 130 | ).json_dumps() 131 | } 132 | ] 133 | }, 134 | {}, 135 | ) 136 | 137 | mock_enqueue_webhook.assert_called_with( 138 | Webhook( 139 | callback_url="d", 140 | payload=WebhookPayload( 141 | fax_id="a", 142 | status=FaxStatus.PERMANENT_FAILURE, 143 | message="Failed to deliver fax after 10 tries. 
import logging
import time
from typing import Any

from sentry_sdk import capture_exception

from app.helpers.messages import Fax, FaxStatus, Webhook, WebhookPayload
from app.helpers.settings import MAX_FAX_ATTEMPTS
from app.helpers.sqs import enqueue_retry, enqueue_webhook
from app.helpers.twilio import PHONE_NUMBER, client

# Twilio fax status codes indicating still-in-progress or success -- all other
# codes are considered failures.
#
# https://www.twilio.com/docs/fax/api/fax-resource
TWILIO_STATUS_PENDING = ("queued", "processing", "sending")
TWILIO_STATUS_SUCCESS = ("delivered",)

# How frequently to poll for fax status, in seconds
TWILIO_POLL_INTERVAL = 15


def poll_until_fax_delivered(fax_sid: str) -> Any:
    """Poll Twilio until the fax reaches a terminal status, and return it.

    Transient errors while fetching the status are reported to Sentry and
    polling continues.
    """
    while True:
        try:
            fax = client.fax.faxes(fax_sid).fetch()
        except Exception as e:
            # If there was an error getting the fax status, just log it and
            # keep polling -- we don't want to let a transient error cause
            # the whole lambda function to fail.
            #
            # BUGFIX: previously this fell through and read `fax`, which is
            # unbound when the very first fetch fails (UnboundLocalError)
            # and stale on later failures. Skip straight to the next poll.
            capture_exception(e)
            logging.exception("Error while polling for fax status")
            time.sleep(TWILIO_POLL_INTERVAL)
            continue

        if fax.status not in TWILIO_STATUS_PENDING:
            return fax.status

        print(f"Fax has pending status: {fax.status}, waiting")
        time.sleep(TWILIO_POLL_INTERVAL)


# Take a fax record from the queue and send it to Twilio. Poll for
# success/failure.
def handler(event: Any, context: Any) -> Any:
    """Send one fax via Twilio, then enqueue webhook/retry messages.

    Outcomes:
      - delivered: enqueue a success webhook (best-effort; a webhook
        enqueue failure is logged rather than raised so the fax is not
        duplicate-sent by an SQS retry of this function).
      - failed with attempts remaining: enqueue a temporary-failure
        webhook, then a retry message.
      - failed on the last attempt: enqueue a permanent-failure webhook.
    """
    # We set batchSize to 1 so there should always be 1 record
    assert len(event["Records"]) == 1
    record = event["Records"][0]
    fax_record = Fax.json_loads(record["body"])

    print("Sending fax", fax_record)

    # Send the fax to Twilio
    twilio_fax = client.fax.faxes.create(
        from_=PHONE_NUMBER,
        to=fax_record.to,
        media_url=fax_record.pdf_url,
        # Our faxes can contain PII -- instruct Twilio to not retain a copy of
        # the PDF.
        store_media=False,
        # Fail fast (after 5 minutes) rather than leaving the fax queued -- we
        # have our own retry logic, and this lambda function times out after 15
        # minutes so we'd rather gracefully handle the failure ourselves rather
        # than have Twilio hold it in their queue for a long time.
        ttl=5,
    )

    # Wait for status
    fax_status = poll_until_fax_delivered(twilio_fax.sid)
    print(f"Fax final status: {fax_status}")

    if fax_status in TWILIO_STATUS_SUCCESS:
        # Fax was sent! Queue a webhook to deliver the success notification
        print("Fax successful; enqueueing success webhook")
        try:
            enqueue_webhook(
                Webhook(
                    callback_url=fax_record.callback_url,
                    payload=WebhookPayload(
                        fax_id=fax_record.fax_id,
                        status=FaxStatus.SENT,
                        message="Fax sent successfully",
                    ),
                )
            )
        except Exception as e:
            # If there was an error queueing the webhook notification, we
            # just log that and don't error -- we don't want to fail, because
            # that will retry and re-send the fax. We'd rather just fail to
            # send the webhook than duplicate-send faxes.
            capture_exception(e)
            logging.exception("Error enqueueing webhook")
    elif fax_record.retry_count + 1 >= MAX_FAX_ATTEMPTS:
        # We're out of retry attempts. Report a failure to the application.
        print("Fax failed and no more retries available; enqueueing failure webhook")
        enqueue_webhook(
            Webhook(
                callback_url=fax_record.callback_url,
                payload=WebhookPayload(
                    fax_id=fax_record.fax_id,
                    status=FaxStatus.PERMANENT_FAILURE,
                    message=f"Failed to deliver fax after {MAX_FAX_ATTEMPTS} tries. Last attempt status: {fax_status}",
                ),
            )
        )
    else:
        # Fax failed to send, but nevertheless we persist. Queue a webhook to
        # deliver a failure notification and also queue up a retry.
        #
        # We enqueue the retry *second* -- if we did it first, and then failed
        # to enqueue the webhook, we'd end up with a duplicate because this
        # function would error, the job would be retried by the queue, *and*
        # we'd have written the job to the retry queue.
        print(
            f"Fax failed (attempt {fax_record.retry_count + 1} of {MAX_FAX_ATTEMPTS}); enqueuing failure webhook and retry"
        )

        enqueue_webhook(
            Webhook(
                callback_url=fax_record.callback_url,
                payload=WebhookPayload(
                    fax_id=fax_record.fax_id,
                    status=FaxStatus.TEMPORARY_FAILURE,
                    message=f"Failed to deliver fax (attempt {fax_record.retry_count + 1} of {MAX_FAX_ATTEMPTS}). Fax status: {fax_status}",
                ),
            )
        )

        enqueue_retry(
            Fax(
                fax_id=fax_record.fax_id,
                to=fax_record.to,
                pdf_url=fax_record.pdf_url,
                callback_url=fax_record.callback_url,
                retry_count=fax_record.retry_count + 1,
            )
        )
4 | 5 | Learn more about the Fax Gateway in [this Twilio Blog post](https://www.twilio.com/blog/reliable-fax-pipeline-twilio-aws-expand-access-ballot-box). 6 | 7 | ## Problem Statement 8 | 9 | The fax gateway sites in front of the Twilio programmable fax API to handle a number of reliability concerns: 10 | 11 | - Most fax numbers can only receive one fax at a time. So if someone else is faxing that number, you'll get a busy signal and the fax with fail. 12 | 13 | - Similarly, you don't usually want to send more than one fax to a particular number at the same time, or you'll be competing with yourself and get more busy signals. 14 | 15 | - If the receiving fax machine is out of paper (and doesn't have available memory) or is off, your fax won't go through. 16 | 17 | - Faxes are quite slow (about 1 minute per page), so if the receiving fax line is unreliable your fax may have trouble getting through. 18 | 19 | ## Usage 20 | 21 | The Fax Gateway is a set of AWS Simple Queue Service queues, and lambda functions to process from these queues. 22 | 23 | To send a fax, you write a message to the fax queue with the following format: 24 | 25 | ```js 26 | { 27 | "fax_id": "abc123", // A unique ID for this fax 28 | "to": "+16175551234", // E.164-formatted number to send the fax to 29 | "pdf_url": "https://some-url/file.pdf", // URL of the PDF file to save (e.g. must be readable by Twilio -- e.g. a presigned S3 URL) 30 | "callback_url": "https://some-url/endpoint" // HTTP callback URL (see below) 31 | } 32 | ``` 33 | 34 | The Fax Gateway will send the fax, retrying if the receiver is busy or disconnected. The SQS queue will ensure that only one fax is being sent to a particular destination number at a time (but multiple faxes might be sent at a time if they're to different destination numbers.) 35 | 36 | ### Callbacks 37 | 38 | As the Fax Gateway runs, it will send you notifications of fax progress at the provided callback URL. 
You'll get a POST request with a JSON body when the fax is sent successfully, when an unsuccessful attempt is made (and will be retried), and when the fax gateway gives up permanently due to too many failures. So you may receive multiple callbacks for the same fax -- one for each failed attempt, plus a final one with the final success/failure. 39 | 40 | The payload for a successful send is: 41 | 42 | ```js 43 | { 44 | "fax_id": "abc123", // The ID you specified when you enqueued the fax to be sent 45 | "status": "sent", 46 | "message": "Fax sent successfully", 47 | "timestamp": 123456, // UNIX timestamp (seconds since the UNIX epoch) 48 | } 49 | ``` 50 | 51 | The payload for an unsuccessful attempt that will be retried is: 52 | 53 | ```js 54 | { 55 | "fax_id": "abc123", // The ID you specified when you enqueued the fax to be sent 56 | "status": "tmp_fail", 57 | "message": "Failed to deliver fax (attempt 7 of 20). Fax status: busy", 58 | "timestamp": 123456, // UNIX timestamp (seconds since the UNIX epoch) 59 | } 60 | ``` 61 | 62 | And the payload for a final unsuccessful attempt it: 63 | 64 | ```js 65 | { 66 | "fax_id": "abc123", // The ID you specified when you enqueued the fax to be sent 67 | "status": "perm_fail", 68 | "message": "Failed to deliver fax after 20 tries. Last attempt status: busy", 69 | "timestamp": 123456, // UNIX timestamp (seconds since the UNIX epoch) 70 | } 71 | ``` 72 | 73 | Callbacks will typically be delivered in-order, but they can sometimes arrive out of order. You can check the timestamp of the message to determine the correct ordering. 74 | 75 | You must return a 2xx status code. If you return a 4xx or 5xx status code, the Fax Gateway will retry sending the callback later. 76 | 77 | ## Configuration & Deployment 78 | 79 | First, run `yarn install` and `pipenv install` to get your environment set up. 80 | 81 | Then, read through the comments in `serverless.yml` to learn about how to configure Fax Gateway. 
At the very least, you'll need to provide your own Twilio credentials and Twilio phone numbers. There are also a number of other parameters than can be tuned depending on your needs and faxing workload. 82 | 83 | Fax Gateway is deployed with [Serverless](https://www.serverless.com/). Just run `yarn sls deploy` to deploy to the `local` environment. To deploy to another environment (e.g. `staging`, `prod`, or whatever you want to name your environments), run `yarn sls deploy -s prod`. 84 | 85 | If you use a CI system or other limited-privilege system to deploy Fax Gateway, you can use the `deployment-policy.json` policy from this repo, which has the necessary IAM privileges to deploy Fax Gateway. If you change the service name or AWS region in `serverless.yml`, you'll have to update this policy accordingly. 86 | 87 | ### Dead-Letter Queues 88 | 89 | If there's a bug or infrastructure failure (SQS, Lambda, or Twilio outage), the Fax Gateway will retry processing messages several times. If the error persists, the queue message will eventually be kicked over to a dead-letter queue (DLQ). Each of the three queues that Fax Gateway uses has its own DLQ. Messages will *not* end up here if the receiving fax machine is offline, busy, or malfunctioning -- that type of failure is expected and will be handled via the callback URLs as described above. 90 | 91 | However, if there's a problem with Fax Gateway, the underlying AWS services it depends on, or the application that's supposed to receive the callbacks, messages may end up in these DLQs. Fax Gateway does not do any handling of messages in the DLQs; you should configure your monitoring system to alert you if any messages end up in any of the DLQs. 92 | 93 | ## Design & Tradeoffs 94 | 95 | The Fax Gateway is designed for sending relatively short faxes (<5 pages) at high volume with high reliability. A number of design decision and tradeoffs have been made with this goal in mind. 
96 | 97 | The most critical tradeoff is that when we read a fax from the queue, the lambda function that's responsible for sending that fax continues to run, polling Twilio for the fax's status, until the fax is sent or fails. The has some advantages over returning quickly and using the Twilio callback URL: 98 | 99 | - We get end-to-end retry guarantees: if any part of sending the fax fails, including the Lambda functions, SQS will re-deliver the message and we'll retry. This makes it very unlikely that we'll drop a fax due to a transient error. 100 | 101 | - We don't need to configure or secure an API Gateway interface. All network communications are outbound from the Fax Gateway. 102 | 103 | - It works better with the SQS FIFO queue: we don't want to acknowledge the message until the fax is sent, because we're using SQS message groups to ensure only one fax is sent to a particular recipient at a time (using the recipient fax number as the message group ID). If the lambda function returned success after sending the fax to Twilio, and relied on Twilio's callbacks, then as soon as one fax was sent to Twilio, the queue could deliver another message from the same message group -- so we'd send another fax to the same number without waiting for the first one to succeed. We could get around this by *always* returning an error from the Lambda function so the SQS-Lambda integration doesn't delete the message automatically, and then calling the `DeleteMessage` SQS API endpoints manually from the Twilio callback handler, but that would mean we don't get a lot of useful information from standard Lambda monitoring tools, because it would be more difficult to differentiate between real errors from the Lambda function, and errors that we're throwing to get around the Lambda-SQS integration behavior. 
104 | 105 | However, this trade-off has one major disadvantage: *all faxes must take less than 15 minutes to deliver*, or the lambda function will time out, and we'll end up retrying and re-sending the fax. Faxes typically take less than 1 minute per page, so this shouldn't be a problem as long as your faxes are no more than a few pages long. 106 | 107 | ### Queues 108 | 109 | A quick overview of the queues and how messages flow through them. 110 | 111 | #### Fax Queue 112 | 113 | The primary queue that holds faxes that we want to send. To send a message via the Fax Gateway, upstream applications write to this queue. The is a FIFO queue, with the destination phone number as the message group ID -- so faxes will be delivered in-order and there won't ever be more than one fax to a particular destination number in-flight at a time. 114 | 115 | We read messages off this queue in the Fax Processor lambda function. This function sends the fax to Twilio and then polls for the result, not returning until the message is delivered or failed. If the message is delivered successfully, this function queues up a success webhook. If the message is not delivered successfully, the function either writes the message to the Retry Queue and queues up a temporary-failure webhook (if there are retries remaining), or just queues up a permanent-failure webhook (if this fax has exhausted all the retries allowed -- by default, 20 retries). 116 | 117 | #### Retry Queue 118 | 119 | When a fax is not sent successfully, the Fax Processor moves the message from the Fax Queue to the Retry Queue. We use the DelaySeconds parameter on the message in the Retry Queue to delay retrying sending the fax. 120 | 121 | The Retry Processor reads messages from this queue and just moves them back into the Fax Queue. 122 | 123 | #### Webhook Queue 124 | 125 | When a fax attempt is made (successful or unsuccessful), the Fax Processor writes a webhook notification to the Webhook Queue. 
The Webhook Processor reads from this queue and delivers the POST request, erroring if it doesn't get a 2xx response code. 126 | 127 | This queue is the most likely to end up with messages going to the dead-letter queue -- unlike the Fax Processor and Retry Processor, which only depend on robust external systems like SQS and Twilio, the Webhook Processor will error if the message's `callback_url` returns an error. By default, we retry delivering the webhook 20 times, so messages will only end up in the DLQ if there's a long outage of whatever's handling the `callback_url`. 128 | -------------------------------------------------------------------------------- /serverless.yml: -------------------------------------------------------------------------------- 1 | service: fax-gateway 2 | 3 | provider: 4 | name: aws 5 | runtime: python3.7 6 | 7 | # The default stage is the "local" stage; CI can deploy other stages (dev and prod) 8 | stage: local 9 | 10 | # We use us-west-2 as our AWS region 11 | region: us-west-2 12 | 13 | # 30-second timeout by default 14 | timeout: 30 15 | 16 | # We deploy Fax Gateway into a VPC. You can remove this stanza to deploy it outside your VPC. 17 | vpc: 18 | securityGroupIds: 19 | - sg-09408d97ce1a97fc7 20 | subnetIds: 21 | - subnet-0d904f9f8e4e33de7 22 | - subnet-0eecb686d77fa9fea 23 | - subnet-037c3114da0477f7e 24 | - subnet-0ec8e635476a780ad 25 | 26 | # Tags that will be added to each of the deployment resources 27 | tags: 28 | env: ${self:custom.stage} 29 | 30 | # Environment variables 31 | environment: 32 | # Sentry integration: sends Lambda errors to Sentry. 
Remove this if you don't want to 33 | # use Sentry 34 | SENTRY_DSN: ${ssm:fax_gateway.common.sentry_dsn~true} 35 | SENTRY_ENVIRONMENT: ${self:custom.stage} 36 | 37 | # Your twilio auth parameters 38 | TWILIO_SID: ${ssm:fax_gateway.common.twilio_sid~true} 39 | TWILIO_AUTH_TOKEN: ${ssm:fax_gateway.common.twilio_auth_token~true} 40 | 41 | # The outgoing phone number the faxes will be sent from. This must be a 42 | # fax-capable number in your Twilio account. 43 | TWILIO_PHONE_NUMBER: ${ssm:fax_gateway.${self:custom.stage}.twilio_phone_number~true} 44 | 45 | # How many times to try sending a fax before giving up. Faxes can fail because 46 | # the receiver is busy or out of paper, so you should set this pretty 47 | # generously 48 | MAX_FAX_ATTEMPTS: "20" 49 | 50 | # How many seconds to wait between retries 51 | BACKOFF_DELAY: "600" 52 | 53 | # Pass the queue URLs to the function 54 | QUEUE_URL_FAX: { "Ref" : "FaxQueue" } 55 | QUEUE_URL_WEBHOOK: { "Ref" : "WebhookQueue" } 56 | QUEUE_URL_RETRY: { "Ref" : "RetryQueue" } 57 | 58 | 59 | # Memory allocated to each lambda function 60 | memorySize: 512 61 | 62 | # Allow the lambda functions to access the SQS queues 63 | iamRoleStatements: 64 | - Effect: Allow 65 | Action: 66 | - sqs:* 67 | Resource: 68 | - "Fn::GetAtt": [ FaxQueue, Arn ] 69 | - "Fn::GetAtt": [ WebhookQueue, Arn ] 70 | - "Fn::GetAtt": [ RetryQueue, Arn ] 71 | - "Fn::GetAtt": [ FaxDLQ, Arn ] 72 | - "Fn::GetAtt": [ WebhookDLQ, Arn ] 73 | - "Fn::GetAtt": [ RetryDLQ, Arn ] 74 | 75 | package: 76 | exclude: 77 | - 'node_modules/**' 78 | - '.vscode/**' 79 | - '.mypy_cache/**' 80 | - 'package.json' 81 | - 'yarn.lock' 82 | 83 | 84 | 85 | plugins: 86 | # This plugin installs our python dependencies, using docker to properly compile 87 | # them for Lambda 88 | - serverless-python-requirements 89 | 90 | # Clean up old versions so we don't exceed our code storage quota 91 | - serverless-prune-plugin 92 | 93 | # Datadog integration: remove this if you don't use datadog 94 | 
- serverless-plugin-datadog 95 | 96 | custom: 97 | # Specify how to build our python dependencies, and which ones are 98 | # already available in Lambda and don't need to be bundled with the 99 | # application 100 | pythonRequirements: 101 | dockerizePip: true 102 | noDeploy: [ 103 | 'boto3', 104 | 'botocore', 105 | 'docutils', 106 | 'jmespath', 107 | 'python-dateutil', 108 | 's3transfer', 109 | 'six', 110 | 'pip', 111 | 'setuptools' 112 | ] 113 | 114 | # Make stage/region accessible to other parts of the config 115 | stage: ${opt:stage, self:provider.stage} 116 | region: ${opt:region, self:provider.region} 117 | 118 | # Configure DataDog integration. If you've removed serverless-plugin-datadog 119 | # above, you can remove this. flushMetricsToLogs requires that you have 120 | # the datadog forwarder installed; see 121 | # https://github.com/DataDog/datadog-serverless-functions/tree/master/aws/logs_monitoring#installation 122 | # for details. 123 | datadog: 124 | flushMetricsToLogs: true 125 | 126 | # Automatically remove old function versions to avoid filling up your lambda code storage 127 | # quota. 128 | prune: 129 | automatic: true 130 | number: 3 131 | 132 | # SQS queue names 133 | queueNames: 134 | fax: fax-gateway-${self:custom.stage}-fax.fifo 135 | retry: fax-gateway-${self:custom.stage}-retry 136 | webhook: fax-gateway-${self:custom.stage}-webhook 137 | faxDLQ: fax-gateway-${self:custom.stage}-fax-dlq.fifo 138 | retryDLQ: fax-gateway-${self:custom.stage}-retry-dlq 139 | webhookDLQ: fax-gateway-${self:custom.stage}-webhook-dlq 140 | 141 | resources: 142 | Resources: 143 | # The fax queue is where incoming requests are written to. It's 144 | # FIFO queue that uses the destination phone number as the message 145 | # group, so we won't fax the same number twice at the same time. 
146 | FaxQueue: 147 | Type: AWS::SQS::Queue 148 | Properties: 149 | FifoQueue: true 150 | QueueName: ${self:custom.queueNames.fax} 151 | ContentBasedDeduplication: true 152 | # This controls how long between retries if the fax lambda fails. 153 | # The fax lambda just send the request to Twilio -- it should never 154 | # fail unless Twilio is down (it won't fail if the fax is undeliverable). 155 | # 156 | # This VisibilityTimeout *must* be higher than the Lambda timeout -- 157 | # otherwise messages will be retried before the lambda can finish! The 158 | # fax sending lambda has a 15-minute timeout, because it doesn't return 159 | # until the fax is done sending, and faxes can take a long time (~1 minute 160 | # per page). 161 | # 162 | # So we set this pretty high -- we wait 20 minutes between retries, and give 163 | # it 3 retries before we give up 164 | VisibilityTimeout: 1200 165 | RedrivePolicy: 166 | deadLetterTargetArn: 167 | "Fn::GetAtt": [ FaxDLQ, Arn ] 168 | maxReceiveCount: 3 169 | 170 | # The webhook queue stores the callbacks we need to make to the calling 171 | # application, letting it know about success or failure of sending the 172 | # fax. 173 | WebhookQueue: 174 | Type: AWS::SQS::Queue 175 | Properties: 176 | QueueName: ${self:custom.queueNames.webhook} 177 | 178 | # This controls how long between retries if the webhook lambda fails. 179 | # This lambda can fail if the calling application can't receive the 180 | # webhook, so we use pretty aggressive retry behavior -- we include 181 | # an ID the receiver can use for deduplication/idempotence. 182 | # 183 | # We wait 3 minutes between retries, and will attempt to send the 184 | # webhook up to 20 times. 185 | VisibilityTimeout: 180 186 | RedrivePolicy: 187 | deadLetterTargetArn: 188 | "Fn::GetAtt": [ WebhookDLQ, Arn ] 189 | maxReceiveCount: 20 190 | 191 | # The retry queue stores faxes that failed to send (because the receiver was 192 | # busy, offline, or out of paper, for example). 
It's a regular queue rather 193 | # than a FIFO queue so that we can use per-message delays to implement 194 | # exponential backoff. 195 | RetryQueue: 196 | Type: AWS::SQS::Queue 197 | Properties: 198 | QueueName: ${self:custom.queueNames.retry} 199 | 200 | # This controls how long between retries if the retry lambda fails. 201 | # The retry lambda just enqueues the message in the fax queue (it 202 | # doesn't send it directly, because we need to fax queue's FIFO 203 | # behavior to ensure we're not sending to the same fax multiple times 204 | # at once). So we shouldn't need much retry -- this will only fail if 205 | # lambda or SQS is down: retry every 3 minutes and give up after 3 tries. 206 | VisibilityTimeout: 180 207 | RedrivePolicy: 208 | deadLetterTargetArn: 209 | "Fn::GetAtt": [ RetryDLQ, Arn ] 210 | maxReceiveCount: 3 211 | 212 | # These two queues are dead-letter queues (DLQ): if messages can't be processed 213 | # from the above queues, they end up in here after maxReceiveCount tries. 214 | # 215 | # This *does not* include expected failures to send the fax itself -- it 216 | # only includes failures due to errors in our lambda handles, or outages 217 | # of Twilio, SQS, or the calling application. 218 | # 219 | # We never read out of these queues; they're just here to store failures 220 | # for later inspection. You should monitor these queues and examine any 221 | # messages that end up in them (for example, you might want to set up a 222 | # DataDog or CloudWatch alert if the queue size is ever greater than 0). 
223 | FaxDLQ: 224 | Type: AWS::SQS::Queue 225 | Properties: 226 | FifoQueue: true 227 | ContentBasedDeduplication: true 228 | QueueName: ${self:custom.queueNames.faxDLQ} 229 | MessageRetentionPeriod: 1209600 230 | RetryDLQ: 231 | Type: AWS::SQS::Queue 232 | Properties: 233 | QueueName: ${self:custom.queueNames.retryDLQ} 234 | MessageRetentionPeriod: 1209600 235 | WebhookDLQ: 236 | Type: AWS::SQS::Queue 237 | Properties: 238 | QueueName: ${self:custom.queueNames.webhookDLQ} 239 | MessageRetentionPeriod: 1209600 240 | 241 | 242 | functions: 243 | fax_processor: 244 | handler: app.fax_processor.handler 245 | timeout: 900 246 | events: 247 | - sqs: 248 | # Only accept one fax at a time -- we only have 15 minutes to 249 | # send the fax, so we can't count on delivering a bunch of faxes 250 | # within that timeframe. 251 | batchSize: 1 252 | 253 | # Don't use lambda's built-in retry -- keep it simple and just use 254 | # SQS for retry 255 | maximumRetryAttempts: 0 256 | arn: 257 | 'Fn::GetAtt': 258 | - FaxQueue 259 | - Arn 260 | 261 | retry_processor: 262 | handler: app.retry_processor.handler 263 | events: 264 | - sqs: 265 | batchSize: 1 266 | maximumRetryAttempts: 0 267 | arn: 268 | 'Fn::GetAtt': 269 | - RetryQueue 270 | - Arn 271 | 272 | webhook_processor: 273 | handler: app.webhook_processor.handler 274 | events: 275 | - sqs: 276 | batchSize: 1 277 | maximumRetryAttempts: 0 278 | arn: 279 | 'Fn::GetAtt': 280 | - WebhookQueue 281 | - Arn 282 | 283 | 284 | 285 | 286 | -------------------------------------------------------------------------------- /Pipfile.lock: -------------------------------------------------------------------------------- 1 | { 2 | "_meta": { 3 | "hash": { 4 | "sha256": "c6e50b250d4b69c7d0ed35271ec84f6e482ed302f385f460316ecd265cc41380" 5 | }, 6 | "pipfile-spec": 6, 7 | "requires": { 8 | "python_version": "3.7" 9 | }, 10 | "sources": [ 11 | { 12 | "name": "pypi", 13 | "url": "https://pypi.org/simple", 14 | "verify_ssl": true 15 | } 16 | ] 17 | }, 18 
| "default": { 19 | "boto3": { 20 | "hashes": [ 21 | "sha256:1bdab4f87ff39d5aab59b0aae69965bf604fa5608984c673877f4c62c1f16240", 22 | "sha256:2b4924ccc1603d562969b9f3c8c74ff4a1f3bdbafe857c990422c73d8e2e229e" 23 | ], 24 | "index": "pypi", 25 | "version": "==1.13.18" 26 | }, 27 | "botocore": { 28 | "hashes": [ 29 | "sha256:93574cf95a64c71d35c12c93a23f6214cf2f4b461be3bda3a436381cbe126a84", 30 | "sha256:e65eb27cae262a510e335bc0c0e286e9e42381b1da0aafaa79fa13c1d8d74a95" 31 | ], 32 | "version": "==1.16.18" 33 | }, 34 | "certifi": { 35 | "hashes": [ 36 | "sha256:1d987a998c75633c40847cc966fcf5904906c920a7f17ef374f5aa4282abd304", 37 | "sha256:51fcb31174be6e6664c5f69e3e1691a2d72a1a12e90f872cbdb1567eb47b6519" 38 | ], 39 | "version": "==2020.4.5.1" 40 | }, 41 | "chardet": { 42 | "hashes": [ 43 | "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", 44 | "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" 45 | ], 46 | "version": "==3.0.4" 47 | }, 48 | "dataclasses-json": { 49 | "hashes": [ 50 | "sha256:175a30bdbd10d85022bb8684c7e0749217547d842692b2617f982ce197ab6121", 51 | "sha256:6c022dc5598162972253c197a3af16d08c0f9eb30630da383ac165a3903a4d11" 52 | ], 53 | "index": "pypi", 54 | "version": "==0.4.3" 55 | }, 56 | "docutils": { 57 | "hashes": [ 58 | "sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0", 59 | "sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827", 60 | "sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99" 61 | ], 62 | "version": "==0.15.2" 63 | }, 64 | "idna": { 65 | "hashes": [ 66 | "sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb", 67 | "sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa" 68 | ], 69 | "version": "==2.9" 70 | }, 71 | "jmespath": { 72 | "hashes": [ 73 | "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9", 74 | "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f" 
75 | ], 76 | "version": "==0.10.0" 77 | }, 78 | "marshmallow": { 79 | "hashes": [ 80 | "sha256:c2673233aa21dde264b84349dc2fd1dce5f30ed724a0a00e75426734de5b84ab", 81 | "sha256:f88fe96434b1f0f476d54224d59333eba8ca1a203a2695683c1855675c4049a7" 82 | ], 83 | "version": "==3.6.0" 84 | }, 85 | "marshmallow-enum": { 86 | "hashes": [ 87 | "sha256:38e697e11f45a8e64b4a1e664000897c659b60aa57bfa18d44e226a9920b6e58", 88 | "sha256:57161ab3dbfde4f57adeb12090f39592e992b9c86d206d02f6bd03ebec60f072" 89 | ], 90 | "version": "==1.5.1" 91 | }, 92 | "mypy-extensions": { 93 | "hashes": [ 94 | "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d", 95 | "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8" 96 | ], 97 | "version": "==0.4.3" 98 | }, 99 | "pyjwt": { 100 | "hashes": [ 101 | "sha256:5c6eca3c2940464d106b99ba83b00c6add741c9becaec087fb7ccdefea71350e", 102 | "sha256:8d59a976fb773f3e6a39c85636357c4f0e242707394cadadd9814f5cbaa20e96" 103 | ], 104 | "version": "==1.7.1" 105 | }, 106 | "python-dateutil": { 107 | "hashes": [ 108 | "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c", 109 | "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a" 110 | ], 111 | "version": "==2.8.1" 112 | }, 113 | "pytz": { 114 | "hashes": [ 115 | "sha256:a494d53b6d39c3c6e44c3bec237336e14305e4f29bbf800b599253057fbb79ed", 116 | "sha256:c35965d010ce31b23eeb663ed3cc8c906275d6be1a34393a1d73a41febf4a048" 117 | ], 118 | "version": "==2020.1" 119 | }, 120 | "requests": { 121 | "hashes": [ 122 | "sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee", 123 | "sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6" 124 | ], 125 | "index": "pypi", 126 | "version": "==2.23.0" 127 | }, 128 | "s3transfer": { 129 | "hashes": [ 130 | "sha256:2482b4259524933a022d59da830f51bd746db62f047d6eb213f2f8855dcb8a13", 131 | "sha256:921a37e2aefc64145e7b73d50c71bb4f26f46e4c9f414dc648c6245ff92cf7db" 132 | ], 133 | "version": 
"==0.3.3" 134 | }, 135 | "sentry-sdk": { 136 | "hashes": [ 137 | "sha256:0e5e947d0f7a969314aa23669a94a9712be5a688ff069ff7b9fc36c66adc160c", 138 | "sha256:799a8bf76b012e3030a881be00e97bc0b922ce35dde699c6537122b751d80e2c" 139 | ], 140 | "index": "pypi", 141 | "version": "==0.14.4" 142 | }, 143 | "six": { 144 | "hashes": [ 145 | "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", 146 | "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" 147 | ], 148 | "version": "==1.15.0" 149 | }, 150 | "stringcase": { 151 | "hashes": [ 152 | "sha256:48a06980661908efe8d9d34eab2b6c13aefa2163b3ced26972902e3bdfd87008" 153 | ], 154 | "version": "==1.2.0" 155 | }, 156 | "twilio": { 157 | "hashes": [ 158 | "sha256:7c6329118583852bb06a2065dd2987a012310e5dfd834ef821d736b059bd1c74" 159 | ], 160 | "index": "pypi", 161 | "version": "==6.41.0" 162 | }, 163 | "typing-extensions": { 164 | "hashes": [ 165 | "sha256:6e95524d8a547a91e08f404ae485bbb71962de46967e1b71a0cb89af24e761c5", 166 | "sha256:79ee589a3caca649a9bfd2a8de4709837400dfa00b6cc81962a1e6a1815969ae", 167 | "sha256:f8d2bd89d25bc39dabe7d23df520442fa1d8969b82544370e03d88b5a591c392" 168 | ], 169 | "version": "==3.7.4.2" 170 | }, 171 | "typing-inspect": { 172 | "hashes": [ 173 | "sha256:3b98390df4d999a28cf5b35d8b333425af5da2ece8a4ea9e98f71e7591347b4f", 174 | "sha256:8f1b1dd25908dbfd81d3bebc218011531e7ab614ba6e5bf7826d887c834afab7", 175 | "sha256:de08f50a22955ddec353876df7b2545994d6df08a2f45d54ac8c05e530372ca0" 176 | ], 177 | "version": "==0.6.0" 178 | }, 179 | "urllib3": { 180 | "hashes": [ 181 | "sha256:3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527", 182 | "sha256:88206b0eb87e6d677d424843ac5209e3fb9d0190d0ee169599165ec25e9d9115" 183 | ], 184 | "markers": "python_version != '3.4'", 185 | "version": "==1.25.9" 186 | } 187 | }, 188 | "develop": { 189 | "appdirs": { 190 | "hashes": [ 191 | "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41", 192 | 
"sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128" 193 | ], 194 | "version": "==1.4.4" 195 | }, 196 | "attrs": { 197 | "hashes": [ 198 | "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", 199 | "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" 200 | ], 201 | "version": "==19.3.0" 202 | }, 203 | "autoflake": { 204 | "hashes": [ 205 | "sha256:680cb9dade101ed647488238ccb8b8bfb4369b53d58ba2c8cdf7d5d54e01f95b" 206 | ], 207 | "index": "pypi", 208 | "version": "==1.3.1" 209 | }, 210 | "black": { 211 | "hashes": [ 212 | "sha256:1b30e59be925fafc1ee4565e5e08abef6b03fe455102883820fe5ee2e4734e0b", 213 | "sha256:c2edb73a08e9e0e6f65a0e6af18b059b8b1cdd5bef997d7a0b181df93dc81539" 214 | ], 215 | "index": "pypi", 216 | "version": "==19.10b0" 217 | }, 218 | "certifi": { 219 | "hashes": [ 220 | "sha256:1d987a998c75633c40847cc966fcf5904906c920a7f17ef374f5aa4282abd304", 221 | "sha256:51fcb31174be6e6664c5f69e3e1691a2d72a1a12e90f872cbdb1567eb47b6519" 222 | ], 223 | "version": "==2020.4.5.1" 224 | }, 225 | "chardet": { 226 | "hashes": [ 227 | "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", 228 | "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" 229 | ], 230 | "version": "==3.0.4" 231 | }, 232 | "click": { 233 | "hashes": [ 234 | "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a", 235 | "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc" 236 | ], 237 | "version": "==7.1.2" 238 | }, 239 | "freezegun": { 240 | "hashes": [ 241 | "sha256:82c757a05b7c7ca3e176bfebd7d6779fd9139c7cb4ef969c38a28d74deef89b2", 242 | "sha256:e2062f2c7f95cc276a834c22f1a17179467176b624cc6f936e8bc3be5535ad1b" 243 | ], 244 | "index": "pypi", 245 | "version": "==0.3.15" 246 | }, 247 | "idna": { 248 | "hashes": [ 249 | "sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb", 250 | 
"sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa" 251 | ], 252 | "version": "==2.9" 253 | }, 254 | "importlib-metadata": { 255 | "hashes": [ 256 | "sha256:2a688cbaa90e0cc587f1df48bdc97a6eadccdcd9c35fb3f976a09e3b5016d90f", 257 | "sha256:34513a8a0c4962bc66d35b359558fd8a5e10cd472d37aec5f66858addef32c1e" 258 | ], 259 | "markers": "python_version < '3.8'", 260 | "version": "==1.6.0" 261 | }, 262 | "isort": { 263 | "hashes": [ 264 | "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1", 265 | "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd" 266 | ], 267 | "index": "pypi", 268 | "version": "==4.3.21" 269 | }, 270 | "more-itertools": { 271 | "hashes": [ 272 | "sha256:558bb897a2232f5e4f8e2399089e35aecb746e1f9191b6584a151647e89267be", 273 | "sha256:7818f596b1e87be009031c7653d01acc46ed422e6656b394b0f765ce66ed4982" 274 | ], 275 | "version": "==8.3.0" 276 | }, 277 | "mypy": { 278 | "hashes": [ 279 | "sha256:15b948e1302682e3682f11f50208b726a246ab4e6c1b39f9264a8796bb416aa2", 280 | "sha256:219a3116ecd015f8dca7b5d2c366c973509dfb9a8fc97ef044a36e3da66144a1", 281 | "sha256:3b1fc683fb204c6b4403a1ef23f0b1fac8e4477091585e0c8c54cbdf7d7bb164", 282 | "sha256:3beff56b453b6ef94ecb2996bea101a08f1f8a9771d3cbf4988a61e4d9973761", 283 | "sha256:7687f6455ec3ed7649d1ae574136835a4272b65b3ddcf01ab8704ac65616c5ce", 284 | "sha256:7ec45a70d40ede1ec7ad7f95b3c94c9cf4c186a32f6bacb1795b60abd2f9ef27", 285 | "sha256:86c857510a9b7c3104cf4cde1568f4921762c8f9842e987bc03ed4f160925754", 286 | "sha256:8a627507ef9b307b46a1fea9513d5c98680ba09591253082b4c48697ba05a4ae", 287 | "sha256:8dfb69fbf9f3aeed18afffb15e319ca7f8da9642336348ddd6cab2713ddcf8f9", 288 | "sha256:a34b577cdf6313bf24755f7a0e3f3c326d5c1f4fe7422d1d06498eb25ad0c600", 289 | "sha256:a8ffcd53cb5dfc131850851cc09f1c44689c2812d0beb954d8138d4f5fc17f65", 290 | "sha256:b90928f2d9eb2f33162405f32dde9f6dcead63a0971ca8a1b50eb4ca3e35ceb8", 291 | 
"sha256:c56ffe22faa2e51054c5f7a3bc70a370939c2ed4de308c690e7949230c995913", 292 | "sha256:f91c7ae919bbc3f96cd5e5b2e786b2b108343d1d7972ea130f7de27fdd547cf3" 293 | ], 294 | "index": "pypi", 295 | "version": "==0.770" 296 | }, 297 | "mypy-extensions": { 298 | "hashes": [ 299 | "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d", 300 | "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8" 301 | ], 302 | "version": "==0.4.3" 303 | }, 304 | "packaging": { 305 | "hashes": [ 306 | "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8", 307 | "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181" 308 | ], 309 | "version": "==20.4" 310 | }, 311 | "pathspec": { 312 | "hashes": [ 313 | "sha256:7d91249d21749788d07a2d0f94147accd8f845507400749ea19c1ec9054a12b0", 314 | "sha256:da45173eb3a6f2a5a487efba21f050af2b41948be6ab52b6a1e3ff22bb8b7061" 315 | ], 316 | "version": "==0.8.0" 317 | }, 318 | "pluggy": { 319 | "hashes": [ 320 | "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0", 321 | "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d" 322 | ], 323 | "version": "==0.13.1" 324 | }, 325 | "py": { 326 | "hashes": [ 327 | "sha256:5e27081401262157467ad6e7f851b7aa402c5852dbcb3dae06768434de5752aa", 328 | "sha256:c20fdd83a5dbc0af9efd622bee9a5564e278f6380fffcacc43ba6f43db2813b0" 329 | ], 330 | "version": "==1.8.1" 331 | }, 332 | "pyflakes": { 333 | "hashes": [ 334 | "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92", 335 | "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8" 336 | ], 337 | "version": "==2.2.0" 338 | }, 339 | "pyparsing": { 340 | "hashes": [ 341 | "sha256:67199f0c41a9c702154efb0e7a8cc08accf830eb003b4d9fa42c4059002e2492", 342 | "sha256:700d17888d441604b0bd51535908dcb297561b040819cccde647a92439db5a2a" 343 | ], 344 | "version": "==3.0.0a1" 345 | }, 346 | "pytest": { 347 | "hashes": [ 348 | 
"sha256:95c710d0a72d91c13fae35dce195633c929c3792f54125919847fdcdf7caa0d3", 349 | "sha256:eb2b5e935f6a019317e455b6da83dd8650ac9ffd2ee73a7b657a30873d67a698" 350 | ], 351 | "index": "pypi", 352 | "version": "==5.4.2" 353 | }, 354 | "pytest-mock": { 355 | "hashes": [ 356 | "sha256:997729451dfc36b851a9accf675488c7020beccda15e11c75632ee3d1b1ccd71", 357 | "sha256:ce610831cedeff5331f4e2fc453a5dd65384303f680ab34bee2c6533855b431c" 358 | ], 359 | "index": "pypi", 360 | "version": "==3.1.0" 361 | }, 362 | "python-dateutil": { 363 | "hashes": [ 364 | "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c", 365 | "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a" 366 | ], 367 | "version": "==2.8.1" 368 | }, 369 | "regex": { 370 | "hashes": [ 371 | "sha256:1386e75c9d1574f6aa2e4eb5355374c8e55f9aac97e224a8a5a6abded0f9c927", 372 | "sha256:27ff7325b297fb6e5ebb70d10437592433601c423f5acf86e5bc1ee2919b9561", 373 | "sha256:329ba35d711e3428db6b45a53b1b13a0a8ba07cbbcf10bbed291a7da45f106c3", 374 | "sha256:3a9394197664e35566242686d84dfd264c07b20f93514e2e09d3c2b3ffdf78fe", 375 | "sha256:51f17abbe973c7673a61863516bdc9c0ef467407a940f39501e786a07406699c", 376 | "sha256:579ea215c81d18da550b62ff97ee187b99f1b135fd894a13451e00986a080cad", 377 | "sha256:70c14743320a68c5dac7fc5a0f685be63bc2024b062fe2aaccc4acc3d01b14a1", 378 | "sha256:7e61be8a2900897803c293247ef87366d5df86bf701083b6c43119c7c6c99108", 379 | "sha256:8044d1c085d49673aadb3d7dc20ef5cb5b030c7a4fa253a593dda2eab3059929", 380 | "sha256:89d76ce33d3266173f5be80bd4efcbd5196cafc34100fdab814f9b228dee0fa4", 381 | "sha256:99568f00f7bf820c620f01721485cad230f3fb28f57d8fbf4a7967ec2e446994", 382 | "sha256:a7c37f048ec3920783abab99f8f4036561a174f1314302ccfa4e9ad31cb00eb4", 383 | "sha256:c2062c7d470751b648f1cacc3f54460aebfc261285f14bc6da49c6943bd48bdd", 384 | "sha256:c9bce6e006fbe771a02bda468ec40ffccbf954803b470a0345ad39c603402577", 385 | "sha256:ce367d21f33e23a84fb83a641b3834dd7dd8e9318ad8ff677fbfae5915a239f7", 386 | 
"sha256:ce450ffbfec93821ab1fea94779a8440e10cf63819be6e176eb1973a6017aff5", 387 | "sha256:ce5cc53aa9fbbf6712e92c7cf268274eaff30f6bd12a0754e8133d85a8fb0f5f", 388 | "sha256:d466967ac8e45244b9dfe302bbe5e3337f8dc4dec8d7d10f5e950d83b140d33a", 389 | "sha256:d881c2e657c51d89f02ae4c21d9adbef76b8325fe4d5cf0e9ad62f850f3a98fd", 390 | "sha256:e565569fc28e3ba3e475ec344d87ed3cd8ba2d575335359749298a0899fe122e", 391 | "sha256:ea55b80eb0d1c3f1d8d784264a6764f931e172480a2f1868f2536444c5f01e01" 392 | ], 393 | "version": "==2020.5.14" 394 | }, 395 | "requests": { 396 | "hashes": [ 397 | "sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee", 398 | "sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6" 399 | ], 400 | "index": "pypi", 401 | "version": "==2.23.0" 402 | }, 403 | "responses": { 404 | "hashes": [ 405 | "sha256:1a78bc010b20a5022a2c0cb76b8ee6dc1e34d887972615ebd725ab9a166a4960", 406 | "sha256:3d596d0be06151330cb230a2d630717ab20f7a81f205019481e206eb5db79915" 407 | ], 408 | "index": "pypi", 409 | "version": "==0.10.14" 410 | }, 411 | "six": { 412 | "hashes": [ 413 | "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", 414 | "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" 415 | ], 416 | "version": "==1.15.0" 417 | }, 418 | "toml": { 419 | "hashes": [ 420 | "sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f", 421 | "sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88" 422 | ], 423 | "version": "==0.10.1" 424 | }, 425 | "typed-ast": { 426 | "hashes": [ 427 | "sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355", 428 | "sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919", 429 | "sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa", 430 | "sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652", 431 | "sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75", 432 | 
"sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01", 433 | "sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d", 434 | "sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1", 435 | "sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907", 436 | "sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c", 437 | "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3", 438 | "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b", 439 | "sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614", 440 | "sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb", 441 | "sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b", 442 | "sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41", 443 | "sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6", 444 | "sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34", 445 | "sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe", 446 | "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4", 447 | "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7" 448 | ], 449 | "version": "==1.4.1" 450 | }, 451 | "typing-extensions": { 452 | "hashes": [ 453 | "sha256:6e95524d8a547a91e08f404ae485bbb71962de46967e1b71a0cb89af24e761c5", 454 | "sha256:79ee589a3caca649a9bfd2a8de4709837400dfa00b6cc81962a1e6a1815969ae", 455 | "sha256:f8d2bd89d25bc39dabe7d23df520442fa1d8969b82544370e03d88b5a591c392" 456 | ], 457 | "version": "==3.7.4.2" 458 | }, 459 | "urllib3": { 460 | "hashes": [ 461 | "sha256:3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527", 462 | "sha256:88206b0eb87e6d677d424843ac5209e3fb9d0190d0ee169599165ec25e9d9115" 463 | ], 464 | "markers": "python_version != '3.4'", 465 | "version": "==1.25.9" 466 | }, 467 | "wcwidth": { 468 | "hashes": [ 469 
| "sha256:cafe2186b3c009a04067022ce1dcd79cb38d8d65ee4f4791b8888d6599d1bbe1", 470 | "sha256:ee73862862a156bf77ff92b09034fc4825dd3af9cf81bc5b360668d425f3c5f1" 471 | ], 472 | "version": "==0.1.9" 473 | }, 474 | "zipp": { 475 | "hashes": [ 476 | "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b", 477 | "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96" 478 | ], 479 | "version": "==3.1.0" 480 | } 481 | } 482 | } 483 | --------------------------------------------------------------------------------