├── pytest.ini ├── package.json ├── redhook ├── __init__.py ├── test_firehose.py ├── config.py ├── test_transforms.py ├── transforms.py ├── firehose.py ├── test_lambda_utils.py └── lambda_utils.py ├── Pipfile ├── setup.py ├── handlers.py ├── LICENSE ├── bin ├── transform-event └── railroad ├── .gitignore ├── samples ├── event.b64-json-body.json ├── event.json-body.json ├── event.url-body.json └── event.b64-url-body.json ├── README.md ├── infrastructure ├── redhook │ └── main.tf └── modules │ └── firehose-to-redshift │ ├── iam.tf │ └── main.tf ├── serverless.example.yml └── Pipfile.lock /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | env = 3 | BASIC_AUTH_USERNAME=elizabeth 4 | BASIC_AUTH_PASSWORD=warren 5 | DELIVERY_STREAM_NAME=test 6 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "redhook", 3 | "description": "", 4 | "version": "0.1.0", 5 | "dependencies": {}, 6 | "devDependencies": { 7 | "serverless": "^1.60.5", 8 | "serverless-domain-manager": "^3.3.0", 9 | "serverless-plugin-aws-alerts": "^1.4.0", 10 | "serverless-python-requirements": "^5.0.1" 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /redhook/__init__.py: -------------------------------------------------------------------------------- 1 | # core/__init__.py 2 | 3 | import logging 4 | 5 | root = logging.getLogger() 6 | 7 | if root.handlers: 8 | for handler in root.handlers: 9 | root.removeHandler(handler) 10 | 11 | logging.basicConfig( 12 | level=logging.INFO, 13 | format="[%(asctime)s] [%(name)s] [%(levelname)s] [%(funcName)s] [line: %(lineno)s] - %(message)s", 14 | ) 15 | -------------------------------------------------------------------------------- /Pipfile: -------------------------------------------------------------------------------- 1 
| [[source]] 2 | name = "pypi" 3 | url = "https://pypi.org/simple" 4 | verify_ssl = true 5 | 6 | [dev-packages] 7 | pylint = "*" 8 | black = "*" 9 | pytest = "*" 10 | boto3 = "==1.9.177" 11 | requests = "*" 12 | redhook = {editable = true,path = "."} 13 | pytest-env = "*" 14 | docopt = "*" 15 | 16 | [packages] 17 | cached-property = "==1.5.1" 18 | 19 | [requires] 20 | python_version = "3.7" 21 | 22 | [pipenv] 23 | allow_prereleases = true 24 | 25 | [scripts] 26 | test="pytest -vv" 27 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import setuptools 2 | 3 | with open("README.md", "r") as fh: 4 | long_description = fh.read() 5 | 6 | setuptools.setup( 7 | name="redhook", 8 | version="0.0.1", 9 | author="Elizabeth Warren", 10 | author_email="pstein@elizabethwarren.com", 11 | description="Redhook Codebase", 12 | long_description=long_description, 13 | long_description_content_type="text/markdown", 14 | url="https://www.elizabethwarren.com", 15 | packages=setuptools.find_packages(), 16 | python_requires=">=3.7", 17 | ) 18 | -------------------------------------------------------------------------------- /handlers.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import datetime 4 | 5 | from redhook import transforms 6 | from redhook.firehose import add_record 7 | from redhook.lambda_utils import ( 8 | from_body, 9 | handle_response, 10 | with_basic_auth, 11 | ) 12 | 13 | 14 | @handle_response 15 | @from_body 16 | def json(data): 17 | return add_record(transforms.base(data)) 18 | 19 | 20 | @handle_response 21 | @with_basic_auth 22 | @from_body 23 | def json_with_basic_auth(data): 24 | return add_record(transforms.base(data)) 25 | 26 | 27 | if __name__ == "__main__": 28 | print("Check imports...") 29 | 
-------------------------------------------------------------------------------- /redhook/test_firehose.py: -------------------------------------------------------------------------------- 1 | # redhook/test_firehose.py 2 | 3 | from redhook.firehose import Firehose 4 | 5 | 6 | class FakeClient: 7 | def __init__(self): 8 | self.first = True 9 | self.call_count = 0 10 | 11 | def put_record(self, **kwargs): 12 | self.call_count += 1 13 | if self.first: 14 | self.first = False 15 | raise Exception() 16 | return True 17 | 18 | 19 | def test_firehose_retries(): 20 | fake_client = FakeClient() 21 | firehose = Firehose(client=fake_client) 22 | assert firehose.add_record({"Hello": "World"}) 23 | assert fake_client.call_count == 2 24 | -------------------------------------------------------------------------------- /redhook/config.py: -------------------------------------------------------------------------------- 1 | # core/config.py 2 | 3 | import os 4 | import os.path 5 | 6 | from cached_property import cached_property 7 | 8 | 9 | class Config: 10 | @cached_property 11 | def project_root(self): 12 | """Where is the redhook directory?""" 13 | return os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) 14 | 15 | @cached_property 16 | def delivery_stream(self): 17 | """Where are we sending the data?""" 18 | return os.environ["DELIVERY_STREAM_NAME"] 19 | 20 | @cached_property 21 | def basic_auth_username(self): 22 | return os.environ["BASIC_AUTH_USERNAME"] 23 | 24 | @cached_property 25 | def basic_auth_password(self): 26 | return os.environ["BASIC_AUTH_PASSWORD"] 27 | 28 | config = Config() 29 | -------------------------------------------------------------------------------- /redhook/test_transforms.py: -------------------------------------------------------------------------------- 1 | # tests/transforms.py 2 | 3 | import os.path 4 | from json import dumps, loads 5 | 6 | 7 | from . 
import transforms 8 | from .config import config 9 | 10 | 11 | def test_value(): 12 | value = {"hello": "world"} 13 | result = transforms.transform()(value) 14 | 15 | assert len(result) == 2 16 | assert result["__raw"] == dumps(value) 17 | assert result["hello"] == "world" 18 | 19 | 20 | def test_flatten(): 21 | obj = { 22 | "hello": {"world": {"a": 1, "b": 2, "c": 3}}, 23 | "goodbye": {"this": {"that": [0, 1]}, "foo": {"bar": "whoops"}}, 24 | } 25 | 26 | results = transforms.base(obj) 27 | 28 | assert len(results) == 6 29 | assert "__raw" in results 30 | assert results["hello_world_a"] == 1 31 | assert results["hello_world_b"] == 2 32 | assert results["hello_world_c"] == 3 33 | assert results["goodbye_this_that"] == [0, 1] 34 | assert results["goodbye_foo_bar"] == "whoops" 35 | 36 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Elizabeth-Warren 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /redhook/transforms.py: -------------------------------------------------------------------------------- 1 | # core/transforms.py 2 | 3 | from copy import deepcopy as clone 4 | from json import dumps 5 | 6 | 7 | def merge(root, *others): 8 | """Combine a bunch of dictionaries and return the result""" 9 | cp = clone(root) 10 | [cp.update(other) for other in others] 11 | return cp 12 | 13 | 14 | class transform: 15 | """compose a series of transformation functions""" 16 | 17 | @staticmethod 18 | def __recursively_flatten(current, key, result): 19 | if isinstance(current, dict): 20 | for k in current: 21 | flat_key = f"{key}_{k.lower()}" if key else k 22 | transform.__recursively_flatten(current[k], flat_key, result) 23 | else: 24 | result[key] = current 25 | return result 26 | 27 | @staticmethod 28 | def flatten_dict(value): 29 | return transform.__recursively_flatten(value, "", {}) 30 | 31 | def __init__(self): 32 | self.__chain = [] 33 | 34 | def flatten(self): 35 | """When invoked, the transform will flatten deeply nested dictionaries""" 36 | self.__chain.append(transform.flatten_dict) 37 | return self 38 | 39 | def __call__(self, value): 40 | """When invoked, this will pass the given value through the configured set of transformations""" 41 | result = clone(value) 42 | for fn in reversed(self.__chain): 43 | result = fn(result) 44 | result["__raw"] = dumps(value) 45 | return result 46 | 47 | 48 | base = transform().flatten() 49 | -------------------------------------------------------------------------------- /redhook/firehose.py: 
-------------------------------------------------------------------------------- 1 | # redhook/firehose.py 2 | 3 | import logging 4 | from json import dumps 5 | from os import environ 6 | from random import random 7 | from time import sleep 8 | 9 | import boto3 10 | 11 | from cached_property import cached_property 12 | 13 | from .config import config 14 | 15 | 16 | class Firehose: 17 | @staticmethod 18 | def randomized_sleep(base, fudge_factor=2): 19 | return base + (fudge_factor * random()) 20 | 21 | def __init__(self, client=None): 22 | self.__client = client 23 | 24 | @cached_property 25 | def client(self): 26 | """The firehose client""" 27 | return self.__client or boto3.client("firehose") 28 | 29 | def add_record(self, data, retries=10, sleep_time=2): 30 | """Deliver a JSON-serializable dictionary to the firehose""" 31 | # We are not going to backoff at all here, it should be able to handle 32 | # it. 33 | try: 34 | return self.client.put_record( 35 | DeliveryStreamName=config.delivery_stream, 36 | Record={"Data": dumps(data) + "\n"}, 37 | ) 38 | except Exception as e: 39 | logging.error("Failed writing data to firehose.") 40 | if not retries: 41 | raise e 42 | 43 | actual_sleep_time = Firehose.randomized_sleep(sleep_time) 44 | sleep(actual_sleep_time) 45 | logging.warning( 46 | f"Slept for {actual_sleep_time}s. Retrying (retries remaining: {retries})" 47 | ) 48 | return self.add_record(data, retries=retries - 1, sleep_time=sleep_time) 49 | 50 | 51 | add_record = Firehose().add_record 52 | -------------------------------------------------------------------------------- /bin/transform-event: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """Transform Event. 3 | 4 | Usage: 5 | transform-event --transform= --filepath= 6 | transform-event (-h | --help) 7 | transform-event --version 8 | 9 | Options: 10 | -h --help Show this screen. 11 | --version Show version. 
12 | --transform= What endpoint are we railroading? 13 | --filepath= Location of a file containing some sample data 14 | """ 15 | 16 | from json import dumps, loads 17 | from sys import argv 18 | 19 | from docopt import docopt 20 | from redhook import transforms 21 | 22 | 23 | def print_transformed_sample(transformed_sample): 24 | print("\n# transform ---------------------------------------------------\n") 25 | print(dumps(transformed_sample, indent=2)) 26 | print("\n# Columns -----------------------------------------------------\n") 27 | print("\n".join(transformed_sample.keys())) 28 | 29 | 30 | def transform_event(transformation, filepath): 31 | fn = getattr(transforms, transformation, None) 32 | 33 | if not fn: 34 | print("Could not find converter for '{transformation}'") 35 | exit(1) 36 | 37 | with open(filepath, "r") as f: 38 | transformed = fn(loads(f.read())) 39 | if not isinstance(transformed, list): 40 | transform = [transform] 41 | [print_transformed_sample(sample) for sample in transformed] 42 | 43 | 44 | if __name__ == "__main__": 45 | arguments = arguments = docopt(__doc__, version="Railroad 0.1.0") 46 | transformation = arguments["--transform"] 47 | filepath = arguments["--filepath"] 48 | transform_event(transformation, filepath) 49 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Compiled source # 2 | ################### 3 | *.com 4 | *.class 5 | *.dll 6 | *.exe 7 | *.o 8 | *.so 9 | *.pyc 10 | 11 | # Packages # 12 | ############ 13 | # it's better to unpack these files and commit the raw source 14 | # git has its own built in compression methods 15 | *.7z 16 | *.dmg 17 | *.gz 18 | *.iso 19 | *.jar 20 | *.rar 21 | *.tar 22 | *.zip 23 | 24 | # Logs and databases # 25 | ###################### 26 | *.log 27 | *.sql 28 | *.sqlite 29 | 30 | # OS generated files # 31 | ###################### 32 | .vagrant 33 | .DS_Store 34 | 
.DS_Store? 35 | ._* 36 | .Spotlight-V100 37 | .Trashes 38 | ehthumbs.db 39 | Thumbs.db 40 | 41 | # Ruby related files # 42 | ###################### 43 | *.gem 44 | *.rbc 45 | .bundle 46 | .config 47 | coverage 48 | InstalledFiles 49 | lib/bundler/man 50 | pkg 51 | rdoc 52 | spec/reports 53 | test/tmp 54 | test/version_tmp 55 | tmp 56 | 57 | # YARD artifacts # 58 | ################## 59 | .yardoc 60 | _yardoc 61 | doc/ 62 | 63 | # JS/NPM related files # 64 | ######################## 65 | .*.swp 66 | .lock-wscript 67 | .wafpickle-* 68 | CVS 69 | npm-debug.log 70 | node_modules 71 | 72 | # Byte-compiled / optimized / DLL files # 73 | ######################################### 74 | __pycache__/ 75 | *.py[cod] 76 | 77 | # C extensions # 78 | ################ 79 | *.so 80 | 81 | # Distribution / packaging # 82 | ############################ 83 | .Python 84 | env/ 85 | build/ 86 | develop-eggs/ 87 | dist/ 88 | eggs/ 89 | lib/ 90 | lib64/ 91 | parts/ 92 | sdist/ 93 | var/ 94 | *.egg-info/ 95 | .installed.cfg 96 | *.egg 97 | 98 | # Installer logs # 99 | ################## 100 | pip-log.txt 101 | pip-delete-this-directory.txt 102 | -------------------------------------------------------------------------------- /bin/railroad: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """Railroad. 3 | 4 | Usage: 5 | railroad --endpoint= --filepath= [--api-key=] [--username=] [--password=] 6 | railroad (-h | --help) 7 | railroad --version 8 | 9 | Options: 10 | -h --help Show this screen. 11 | --version Show version. 12 | --endpoint= What endpoint are we railroading? 13 | --filepath= Where's some ndjson to send at this thing? 14 | --api-key= The API Key to use against the endpoint. Takes precedence. 
15 | --username= Basic Auth: username 16 | --password= Basic Auth: password 17 | """ 18 | 19 | from base64 import b64encode 20 | from json import dumps, loads 21 | 22 | from requests import post 23 | 24 | from docopt import docopt 25 | 26 | 27 | def railroad(endpoint, filepath, headers): 28 | with open(filepath, "r") as f: 29 | for l in f: 30 | response = post(endpoint, headers=headers, json=loads(l)).text 31 | print(f"Response was: '{response}'") 32 | 33 | 34 | if __name__ == "__main__": 35 | arguments = arguments = docopt(__doc__, version="Railroad 0.1.0") 36 | endpoint = arguments["--endpoint"] 37 | filepath = arguments["--filepath"] 38 | api_key = arguments["--api-key"] 39 | username = arguments["--username"] 40 | password = arguments["--password"] 41 | 42 | if api_key: 43 | railroad(endpoint, filepath, {"x-api-key": api_key}) 44 | elif username and password: 45 | token = b64encode(bytes(f"{username}:{password}", "utf-8")).decode("utf-8") 46 | railroad(endpoint, filepath, {"Authorization": f"Basic {token}"}) 47 | else: 48 | print( 49 | "You must provide either an API Key or a username/password combination..." 
50 | ) 51 | exit(1) 52 | -------------------------------------------------------------------------------- /samples/event.b64-json-body.json: -------------------------------------------------------------------------------- 1 | { 2 | "httpMethod": "GET", 3 | "body": "eyJuYW1lIjogIlNhbSJ9", 4 | "path": "/users", 5 | "resource": "/{proxy+}", 6 | "isBase64Encoded": true, 7 | "queryStringParameters": {}, 8 | "pathParameters": { 9 | "proxy": "users" 10 | }, 11 | "headers": { 12 | "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8", 13 | "Accept-Encoding": "gzip, deflate, sdch, br", 14 | "Accept-Language": "en-US,en;q=0.8", 15 | "CloudFront-Forwarded-Proto": "https", 16 | "CloudFront-Is-Desktop-Viewer": "true", 17 | "CloudFront-Is-Mobile-Viewer": "false", 18 | "CloudFront-Is-SmartTV-Viewer": "false", 19 | "CloudFront-Is-Tablet-Viewer": "false", 20 | "CloudFront-Viewer-Country": "US", 21 | "Content-Type": "application/json", 22 | "Host": "xxxxxxxxxx.execute-api.us-east-1.amazonaws.com", 23 | "Upgrade-Insecure-Requests": "1", 24 | "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36", 25 | "Via": "1.1 xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx.cloudfront.net (CloudFront)", 26 | "X-Amz-Cf-Id": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx_xxxxxxxxxxx_xxxx==", 27 | "X-Forwarded-For": "11.111.111.111, 11.111.111.111", 28 | "X-Forwarded-Port": "111", 29 | "X-Forwarded-Proto": "http" 30 | }, 31 | "requestContext": { 32 | "accountId": "111111111111", 33 | "resourceId": "xxxxxx", 34 | "stage": "prod", 35 | "requestId": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", 36 | "identity": { 37 | "cognitoIdentityPoolId": "", 38 | "accountId": "", 39 | "cognitoIdentityId": "", 40 | "caller": "", 41 | "apiKey": "", 42 | "sourceIp": "11.111.111.111", 43 | "cognitoAuthenticationType": "", 44 | "cognitoAuthenticationProvider": "", 45 | "userArn": "", 46 | "userAgent": "Mozilla/5.0 (Macintosh; Intel 
Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36", 47 | "user": "" 48 | }, 49 | "resourcePath": "/{proxy+}", 50 | "httpMethod": "GET", 51 | "apiId": "xxxxxxxxxx" 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /samples/event.json-body.json: -------------------------------------------------------------------------------- 1 | { 2 | "httpMethod": "GET", 3 | "body": "{\"name\": \"Sam\"}", 4 | "path": "/users", 5 | "isBase64Encoded": false, 6 | "resource": "/{proxy+}", 7 | "queryStringParameters": {}, 8 | "pathParameters": { 9 | "proxy": "users" 10 | }, 11 | "headers": { 12 | "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8", 13 | "Accept-Encoding": "gzip, deflate, sdch, br", 14 | "Accept-Language": "en-US,en;q=0.8", 15 | "CloudFront-Forwarded-Proto": "https", 16 | "CloudFront-Is-Desktop-Viewer": "true", 17 | "CloudFront-Is-Mobile-Viewer": "false", 18 | "CloudFront-Is-SmartTV-Viewer": "false", 19 | "CloudFront-Is-Tablet-Viewer": "false", 20 | "CloudFront-Viewer-Country": "US", 21 | "Content-Type": "application/json", 22 | "Host": "xxxxxxxxxx.execute-api.us-east-1.amazonaws.com", 23 | "Upgrade-Insecure-Requests": "1", 24 | "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36", 25 | "Via": "1.1 xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx.cloudfront.net (CloudFront)", 26 | "X-Amz-Cf-Id": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx_xxxxxxxxxxx_xxxx==", 27 | "X-Forwarded-For": "11.111.111.111, 11.111.111.111", 28 | "X-Forwarded-Port": "111", 29 | "X-Forwarded-Proto": "http" 30 | }, 31 | "requestContext": { 32 | "accountId": "111111111111", 33 | "resourceId": "xxxxxx", 34 | "stage": "prod", 35 | "requestId": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", 36 | "identity": { 37 | "cognitoIdentityPoolId": "", 38 | "accountId": "", 39 | "cognitoIdentityId": "", 40 | "caller": "", 41 | 
"apiKey": "", 42 | "sourceIp": "11.111.111.111", 43 | "cognitoAuthenticationType": "", 44 | "cognitoAuthenticationProvider": "", 45 | "userArn": "", 46 | "userAgent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36", 47 | "user": "" 48 | }, 49 | "resourcePath": "/{proxy+}", 50 | "httpMethod": "GET", 51 | "apiId": "xxxxxxxxxx" 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /samples/event.url-body.json: -------------------------------------------------------------------------------- 1 | { 2 | "httpMethod": "GET", 3 | "body": "name=Sam", 4 | "path": "/users", 5 | "resource": "/{proxy+}", 6 | "isBase64Encoded": false, 7 | "queryStringParameters": {}, 8 | "pathParameters": { 9 | "proxy": "users" 10 | }, 11 | "headers": { 12 | "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8", 13 | "Accept-Encoding": "gzip, deflate, sdch, br", 14 | "Accept-Language": "en-US,en;q=0.8", 15 | "CloudFront-Forwarded-Proto": "https", 16 | "CloudFront-Is-Desktop-Viewer": "true", 17 | "CloudFront-Is-Mobile-Viewer": "false", 18 | "CloudFront-Is-SmartTV-Viewer": "false", 19 | "CloudFront-Is-Tablet-Viewer": "false", 20 | "CloudFront-Viewer-Country": "US", 21 | "Content-Type": "application/x-www-form-urlencoded", 22 | "Host": "xxxxxxxxxx.execute-api.us-east-1.amazonaws.com", 23 | "Upgrade-Insecure-Requests": "1", 24 | "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36", 25 | "Via": "1.1 xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx.cloudfront.net (CloudFront)", 26 | "X-Amz-Cf-Id": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx_xxxxxxxxxxx_xxxx==", 27 | "X-Forwarded-For": "11.111.111.111, 11.111.111.111", 28 | "X-Forwarded-Port": "111", 29 | "X-Forwarded-Proto": "http" 30 | }, 31 | "requestContext": { 32 | "accountId": "111111111111", 33 | "resourceId": "xxxxxx", 34 | 
"stage": "prod", 35 | "requestId": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", 36 | "identity": { 37 | "cognitoIdentityPoolId": "", 38 | "accountId": "", 39 | "cognitoIdentityId": "", 40 | "caller": "", 41 | "apiKey": "", 42 | "sourceIp": "11.111.111.111", 43 | "cognitoAuthenticationType": "", 44 | "cognitoAuthenticationProvider": "", 45 | "userArn": "", 46 | "userAgent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36", 47 | "user": "" 48 | }, 49 | "resourcePath": "/{proxy+}", 50 | "httpMethod": "GET", 51 | "apiId": "xxxxxxxxxx" 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /samples/event.b64-url-body.json: -------------------------------------------------------------------------------- 1 | { 2 | "httpMethod": "GET", 3 | "body": "bmFtZT1TYW0=", 4 | "path": "/users", 5 | "resource": "/{proxy+}", 6 | "isBase64Encoded": true, 7 | "queryStringParameters": {}, 8 | "pathParameters": { 9 | "proxy": "users" 10 | }, 11 | "headers": { 12 | "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8", 13 | "Accept-Encoding": "gzip, deflate, sdch, br", 14 | "Accept-Language": "en-US,en;q=0.8", 15 | "CloudFront-Forwarded-Proto": "https", 16 | "CloudFront-Is-Desktop-Viewer": "true", 17 | "CloudFront-Is-Mobile-Viewer": "false", 18 | "CloudFront-Is-SmartTV-Viewer": "false", 19 | "CloudFront-Is-Tablet-Viewer": "false", 20 | "CloudFront-Viewer-Country": "US", 21 | "Content-Type": "application/x-www-form-urlencoded", 22 | "Host": "xxxxxxxxxx.execute-api.us-east-1.amazonaws.com", 23 | "Upgrade-Insecure-Requests": "1", 24 | "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36", 25 | "Via": "1.1 xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx.cloudfront.net (CloudFront)", 26 | "X-Amz-Cf-Id": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx_xxxxxxxxxxx_xxxx==", 27 | 
"X-Forwarded-For": "11.111.111.111, 11.111.111.111", 28 | "X-Forwarded-Port": "111", 29 | "X-Forwarded-Proto": "http" 30 | }, 31 | "requestContext": { 32 | "accountId": "111111111111", 33 | "resourceId": "xxxxxx", 34 | "stage": "prod", 35 | "requestId": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", 36 | "identity": { 37 | "cognitoIdentityPoolId": "", 38 | "accountId": "", 39 | "cognitoIdentityId": "", 40 | "caller": "", 41 | "apiKey": "", 42 | "sourceIp": "11.111.111.111", 43 | "cognitoAuthenticationType": "", 44 | "cognitoAuthenticationProvider": "", 45 | "userArn": "", 46 | "userAgent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36", 47 | "user": "" 48 | }, 49 | "resourcePath": "/{proxy+}", 50 | "httpMethod": "GET", 51 | "apiId": "xxxxxxxxxx" 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Redhook 2 | 3 | Redhook is a generic webhook interface for landing data in [Kinesis Firehose](https://aws.amazon.com/kinesis/data-firehose/). On Elizabeth Warren's presidential campaign, Redhook was responsible for ingesting real-time data delivered to the campaign via webhooks and forwarding those data to [Redshift](https://aws.amazon.com/redshift/). We used [Civis](https://www.civisanalytics.com/)'s data platform on the campaign so Civis owned and managed the AWS account hosting our Redshift cluster. While you cannot configure a cross-account Kinesis Firehose in the AWS Console (yet), you can setup cross-account Firehoses (as we did here). 4 | 5 | During the campaign, Redhook was responsible for delivering all financial data and web analytics to the data teams. It operated at a near-zero cost and experienced no downtime -- though data delivery was delayed on occassion due to upstream problems. 
6 | 7 | The code and configuration here is simple because it solves a simple problem: moving some data around. Our intention in open sourcing it is to demonstrate that some problems campaigns face do not require vendor tools and are solved reasonably effectively and efficiently with a tiny bit of code. 8 | 9 | ## Installation 10 | 11 | pipenv install -d 12 | npm install -g serverless@1.51.0 13 | npm install 14 | 15 | ## Deploy 16 | 17 | And example `serverless.yml` is provided here. The production configuration was somewhat more complicated but not particularly so. 18 | 19 | If a domain does not yet exist for your application stage 20 | 21 | sls create_domain --stage 22 | 23 | To deploy the code 24 | 25 | sls deploy --stage 26 | 27 | ## Infrastructure 28 | 29 | Sample [Terraform](https://www.terraform.io/) modules are included here that could be modified to stand-up the infrastructure required to operate Redhook. High-level, it needs a Kinesis Firehose for each table to which you want to write. We shared configuration between the infrastructure we managed with Terraform and the lambdas we deployed with [serverless](https://serverless.com/). The configuration defined in `serverless.example.yml` assumes that SSM parameters are named as they are in `infrastructure/`. 30 | 31 | ## API Keys 32 | 33 | Apis marked as private in `serverless.yml` require an api key. The `apiKeys` block in the provider definition in `serverless.yml` defines the set of API keys that get generated automatically. If the endpoint is marked as private, one of those API keys must be used as the `x-api-key` header to send data to the webhook. 
34 | 35 | ## Scripts 36 | 37 | - `bin/railroad`: Fire sample data from a new-line delimited json file at your api 38 | 39 | - `bin/transform-event`: Apply the given transformation to a sample event in some file to see what the transformation does to it 40 | -------------------------------------------------------------------------------- /infrastructure/redhook/main.tf: -------------------------------------------------------------------------------- 1 | # Standup Firehoses for Redhook 2 | 3 | # Entry Point 4 | terraform { 5 | required_version = ">= 0.12.13" 6 | required_providers { 7 | aws = "2.44.0" 8 | } 9 | } 10 | 11 | 12 | # Provider 13 | provider "aws" { 14 | profile = "default" 15 | region = "us-east-1" 16 | } 17 | 18 | 19 | # In practice, these were all stored in SSM and fetched in terraform using the 20 | # "data" facilities it provides. Because this is all privileged information, it's 21 | # best to avoid stashing it in files unless you are quite confident in the security 22 | # of your deploy machines. 
23 | locals { 24 | env = "dev" 25 | redshift_dsn = "redshift.com" 26 | redshift_username = "ew" 27 | redshift_password = "my-password" 28 | redshift_schema = "redhook" 29 | json_table = "json" 30 | basicauth_table = "json_with_basic_auth" 31 | alarm_sns_arn = "arn:aws:sns:us-east-1::houston-we-have-a-problem" 32 | } 33 | 34 | 35 | # Fake Deploy Bucket 36 | resource "aws_s3_bucket" "deploys" { 37 | bucket = "${local.env}.deploys" 38 | acl = "private" 39 | } 40 | 41 | resource "aws_ssm_parameter" "bucket_name" { 42 | name = "/${local.env}/s3/deploys/bucket/name" 43 | value = aws_s3_bucket.deploys.id 44 | type = "SecureString" 45 | } 46 | 47 | resource "aws_ssm_parameter" "bucket_arn" { 48 | name = "/${local.env}/s3/deploys/bucket/arn" 49 | value = aws_s3_bucket.deploys.arn 50 | type = "SecureString" 51 | } 52 | 53 | 54 | # Fake Alarms SNS Topic 55 | resource "aws_sns_topic" "alarms" { 56 | name = "${local.env}-alarms" 57 | } 58 | 59 | resource "aws_ssm_parameter" "sns_topic_alarms_arn" { 60 | name = "/${local.env}/sns/alarms/arn" 61 | type = "SecureString" 62 | value = aws_sns_topic.alarms.arn 63 | } 64 | 65 | resource "aws_ssm_parameter" "sns_topic_alarms_name" { 66 | name = "/${local.env}/sns/alarms/name" 67 | type = "SecureString" 68 | value = aws_sns_topic.alarms.name 69 | } 70 | 71 | 72 | # Firehoses 73 | module "redhook-json" { 74 | source = "../modules/firehose-to-redshift" 75 | name = "redhook-json" 76 | env = local.env 77 | redshift_jdbcurl = "jdbc:${local.redshift_dsn}" 78 | redshift_username = local.redshift_username 79 | redshift_password = local.redshift_password 80 | table_name = local.json_table 81 | # BUG FIX: `aws_ssm_parameter.alarms` does not exist; pass the SNS topic ARN directly. 82 | alarm_sns_arn = aws_sns_topic.alarms.arn 83 | } 84 | 85 | module "redhook-json-with-basic-auth" { 86 | source = "../modules/firehose-to-redshift" 87 | name = "redhook-json-with-basic-auth" 88 | env = local.env 89 | redshift_jdbcurl = "jdbc:${local.redshift_dsn}" 90 | redshift_username = local.redshift_username 91 | redshift_password =
local.redshift_password 91 | table_name = local.json_with_basic_auth 92 | alarm_sns_arn = aws_ssm_parameter.alarms.arn 93 | } 94 | -------------------------------------------------------------------------------- /redhook/test_lambda_utils.py: -------------------------------------------------------------------------------- 1 | # tests/lambda_utils.py 2 | 3 | import os.path 4 | from base64 import b64encode 5 | from json import loads 6 | 7 | import pytest 8 | 9 | from . import lambda_utils 10 | from .config import config 11 | 12 | 13 | @pytest.fixture 14 | def sample_json_body(): 15 | with open( 16 | os.path.join(config.project_root, "samples/event.json-body.json"), "r" 17 | ) as f: 18 | return loads(f.read()) 19 | 20 | 21 | @pytest.fixture 22 | def sample_b64_json_body(): 23 | with open( 24 | os.path.join(config.project_root, "samples/event.b64-json-body.json"), "r" 25 | ) as f: 26 | return loads(f.read()) 27 | 28 | 29 | @pytest.fixture 30 | def sample_url_body(): 31 | with open( 32 | os.path.join(config.project_root, "samples/event.url-body.json"), "r" 33 | ) as f: 34 | return loads(f.read()) 35 | 36 | 37 | @pytest.fixture 38 | def sample_b64_url_body(): 39 | with open( 40 | os.path.join(config.project_root, "samples/event.b64-url-body.json"), "r" 41 | ) as f: 42 | return loads(f.read()) 43 | 44 | 45 | def test_json_body(sample_json_body): 46 | data = lambda_utils.data_from_event(sample_json_body) 47 | assert len(data) == 1 48 | assert data["name"] == "Sam" 49 | 50 | 51 | def test_b64_json_body(sample_b64_json_body): 52 | data = lambda_utils.data_from_event(sample_b64_json_body) 53 | assert len(data) == 1 54 | assert data["name"] == "Sam" 55 | 56 | 57 | def test_url_body(sample_url_body): 58 | data = lambda_utils.data_from_event(sample_url_body) 59 | assert len(data) == 1 60 | assert data["name"] == "Sam" 61 | 62 | 63 | def test_b64_url_body(sample_b64_url_body): 64 | data = lambda_utils.data_from_event(sample_b64_url_body) 65 | print(data) 66 | assert len(data) == 
1 67 | assert data["name"] == "Sam" 68 | 69 | 70 | def test_check_basic_auth(): 71 | def make_fake_event(auth_header): 72 | return {"headers": {"Authorization": auth_header}} 73 | 74 | with pytest.raises(lambda_utils.BasicAuthException, match="Missing Header"): 75 | lambda_utils.check_basic_auth({"headers": {}}) 76 | 77 | with pytest.raises( 78 | lambda_utils.BasicAuthException, match="'Basic' Not in Authorization" 79 | ): 80 | lambda_utils.check_basic_auth(make_fake_event("fail")) 81 | 82 | with pytest.raises(lambda_utils.BasicAuthException, match="Decode Failure"): 83 | lambda_utils.check_basic_auth(make_fake_event("Basic Garbage")) 84 | 85 | with pytest.raises(lambda_utils.BasicAuthException, match="Too Many Chunks"): 86 | token = b64encode(b"will:not:work").decode("utf-8") 87 | lambda_utils.check_basic_auth(make_fake_event(f"Basic {token}")) 88 | 89 | with pytest.raises(lambda_utils.BasicAuthException, match="Invalid Username"): 90 | token = b64encode(b"bailey:warren").decode("utf-8") 91 | lambda_utils.check_basic_auth(make_fake_event(f"Basic {token}")) 92 | 93 | with pytest.raises(lambda_utils.BasicAuthException, match="Invalid Password"): 94 | token = b64encode(b"elizabeth:bailey").decode("utf-8") 95 | lambda_utils.check_basic_auth(make_fake_event(f"Basic {token}")) 96 | 97 | token = b64encode(b"elizabeth:warren").decode("utf-8") 98 | assert lambda_utils.check_basic_auth(make_fake_event(f"Basic {token}")) is True 99 | -------------------------------------------------------------------------------- /serverless.example.yml: -------------------------------------------------------------------------------- 1 | service: redhook 2 | 3 | frameworkVersion: ">=1.1.0 <2.0.0" 4 | 5 | custom: 6 | stage: ${opt:stage, env:STAGE, "dev"} 7 | region: ${opt:region, env:REGION, "us-east-1"} 8 | 9 | # These SSM lookups match "infrastructure" 10 | deploy: 11 | bucket_name: ${ssm:/${self:custom.stage}/s3/deploys/bucket/name~true} 12 | bucket_arn: 
      ${ssm:/${self:custom.stage}/s3/deploys/bucket/arn~true}

  customDomain:
    domainName: "${self:custom.stage}-EXAMPLE-SUBDOMAIN.EXAMPLE-DOMAIN.com"
    stage: ${self:custom.stage}
    certificateName: "*.EXAMPLE-DOMAIN.com"
    createRoute53Record: true
    securityPolicy: tls_1_2 # strictest policy API Gateway offers at present

  pythonRequirements:
    dockerizePip: true
    slim: true
    layer: true
    useDownloadCache: false
    useStaticCache: false

  alerts:
    nameTemplate: $[functionName]-$[metricName]-Alarm
    topics:
      alarm:
        # This SSM lookup matches "infrastructure"
        topic: ${ssm:/${self:custom.stage}/sns/alarms/name~true}
    definitions:
      functionInvocations:
        threshold: 1000
      functionDuration:
        threshold: 6000
    alarms:
      - functionThrottles
      - functionErrors
      - functionInvocations
      - functionDuration

package:
  excludeDevDependencies: true

provider:
  name: aws
  runtime: python3.7
  region: ${self:custom.region}
  stage: ${self:custom.stage}

  deploymentBucket:
    name: ${self:custom.deploy.bucket_name}
    blockPublicAccess: true

  apiKeys:
    - "${self:custom.stage}-default"

  iamRoleStatements:
    - Effect: Allow
      Action:
        - "firehose:PutRecord"
      Resource:
        # These SSM lookups match "infrastructure"
        - ${ssm:/${self:custom.stage}/kinesisfirehose/json/arn~true}
        - ${ssm:/${self:custom.stage}/kinesisfirehose/json-with-basic-auth/arn~true}

functions:
  json:
    name: ${self:custom.stage}-redhook-json
    handler: handlers.json
    memorySize: 256
    timeout: 30
    layers:
      - {Ref: PythonRequirementsLambdaLayer}
    environment:
      STAGE: ${self:custom.stage}
      # This SSM lookup matches "infrastructure"
      DELIVERY_STREAM_NAME: ${ssm:/${self:custom.stage}/kinesisfirehose/json/name~true}
    events:
      - http:
          path: json
          method: post
          private: true

  json-with-basic-auth:
    name: ${self:custom.stage}-redhook-json-with-basic-auth
    handler: handlers.json_with_basic_auth
    memorySize: 256
    timeout: 30
    layers:
      - {Ref: PythonRequirementsLambdaLayer}
    environment:
      STAGE: ${self:custom.stage}
      # This SSM lookup matches "infrastructure"
      DELIVERY_STREAM_NAME: ${ssm:/${self:custom.stage}/kinesisfirehose/json-with-basic-auth/name~true}
      # Best to stash these in SSM as well
      BASIC_AUTH_USERNAME: EXAMPLE-USERNAME
      BASIC_AUTH_PASSWORD: EXAMPLE-PASSWORD
    events:
      - http:
          path: json-with-basic-auth
          method: post
          # NOTE(review): unlike "json" this endpoint is not `private: true`;
          # presumably intentional since it is guarded by basic auth instead
          # of an API key — confirm before deploying.

plugins:
  - serverless-python-requirements
  - serverless-domain-manager
  - serverless-plugin-aws-alerts
--------------------------------------------------------------------------------
/infrastructure/modules/firehose-to-redshift/iam.tf:
--------------------------------------------------------------------------------
# IAM Setup for Firehose

data "aws_caller_identity" "current" {}

data "aws_region" "current" {}

data "aws_iam_policy_document" "main" {
  statement {
    effect = "Allow"
    actions = [
      "glue:GetTable",
      "glue:GetTableVersion",
      "glue:GetTableVersions"
    ]
    resources = ["*"]
  }

  statement {
    effect = "Allow"
    actions = [
      "s3:AbortMultipartUpload",
      "s3:GetBucketLocation",
      "s3:GetObject",
      "s3:ListBucket",
      "s3:ListBucketMultipartUploads",
      "s3:PutObject"
    ]
    resources = [
      aws_s3_bucket.this.arn,
      "${aws_s3_bucket.this.arn}/*"
    ]
  }

  statement {
    effect = "Allow"
    actions = [
      "lambda:InvokeFunction",
      "lambda:GetFunctionConfiguration"
    ]
    resources = [
      # "DISABLED" is the sentinel for "no processing lambda configured"
      for arn in [var.processing_fn_qualified_arn,
"arn:aws:lambda:${data.aws_region.current.name}:${data.aws_caller_identity.current.account_id}:function:%FIREHOSE_DEFAULT_FUNCTION%:%FIREHOSE_DEFAULT_VERSION%"] : 42 | arn 43 | if arn != "DISABLED" 44 | ] 45 | } 46 | 47 | statement { 48 | effect = "Allow" 49 | actions = [ 50 | "s3:AbortMultipartUpload", 51 | "s3:GetBucketLocation", 52 | "s3:GetObject", 53 | "s3:ListBucket", 54 | "s3:ListBucketMultipartUploads", 55 | "s3:PutObject" 56 | ] 57 | resources = [ 58 | aws_s3_bucket.this.arn, 59 | "${aws_s3_bucket.this.arn}/*" 60 | ] 61 | } 62 | 63 | statement { 64 | effect = "Allow" 65 | actions = [ 66 | "kinesis:DescribeStream", 67 | "kinesis:GetShardIterator", 68 | "kinesis:GetRecords" 69 | ] 70 | resources = [ 71 | "arn:aws:kinesis:${data.aws_region.current.name}:${data.aws_caller_identity.current.account_id}:stream/%FIREHOSE_STREAM_NAME%" 72 | ] 73 | } 74 | 75 | statement { 76 | effect = "Allow" 77 | actions = [ 78 | "logs:PutLogEvents" 79 | ] 80 | resources = [ 81 | "arn:aws:logs:${data.aws_region.current.name}:${data.aws_caller_identity.current.account_id}:log-group:${aws_cloudwatch_log_group.this.name}:log-stream:*" 82 | ] 83 | } 84 | 85 | statement { 86 | effect = "Allow" 87 | actions = [ 88 | "kms:Decrypt" 89 | ] 90 | resources = [ 91 | "arn:aws:kms:${data.aws_region.current.name}:${data.aws_caller_identity.current.account_id}:key/%SSE_KEY_ID%" 92 | ] 93 | 94 | condition { 95 | test = "StringEquals" 96 | variable = "kms:ViaService" 97 | values = [ 98 | "kinesis.${data.aws_caller_identity.current.name}.amazonaws.com" 99 | ] 100 | } 101 | 102 | condition { 103 | test = "StringLike" 104 | variable = "kms:EncryptionContext:aws:kinesis:arn" 105 | values = [ 106 | "arn:aws:kinesis:${data.aws_caller_identity.current.name}:${data.aws_caller_identity.current.account_id}:stream/%FIREHOSE_STREAM_NAME%" 107 | ] 108 | } 109 | } 110 | } 111 | 112 | data "aws_iam_policy_document" "assumptions" { 113 | statement { 114 | effect = "Allow" 115 | actions = ["sts:AssumeRole"] 116 | 
principals { 117 | type = "Service" 118 | identifiers = ["firehose.amazonaws.com"] 119 | } 120 | } 121 | } 122 | 123 | resource "aws_iam_role" "firehose" { 124 | name = "${var.env}-firehose-role-${var.name}" 125 | assume_role_policy = data.aws_iam_policy_document.assumptions.json 126 | tags = var.tags 127 | } 128 | 129 | resource "aws_iam_role_policy" "main" { 130 | name = "${var.env}-firehose-role-policy-${var.name}" 131 | role = aws_iam_role.firehose.name 132 | policy = data.aws_iam_policy_document.main.json 133 | } 134 | -------------------------------------------------------------------------------- /redhook/lambda_utils.py: -------------------------------------------------------------------------------- 1 | # core/lambda_utils.py 2 | 3 | import logging 4 | from base64 import b64decode 5 | from functools import wraps 6 | from json import dumps, loads 7 | from os import environ 8 | from urllib import parse 9 | 10 | from .config import config 11 | 12 | 13 | class BasicAuthException(Exception): 14 | """Helper to emit when basic-auth check fail""" 15 | 16 | pass 17 | 18 | 19 | class StripeSignatureException(Exception): 20 | """Helper to emit when stripe sig check fail""" 21 | 22 | pass 23 | 24 | 25 | def error(): 26 | """Returns the representation of an error""" 27 | return { 28 | "statusCode": 500, 29 | "body": dumps({"message": "FAIL"}), 30 | "headers": {"Content-Type": "application/json"}, 31 | } 32 | 33 | 34 | def ok(): 35 | """Returns the representation of an OK message""" 36 | return { 37 | "statusCode": 200, 38 | "body": dumps({"message": "OK"}), 39 | "headers": {"Content-Type": "application/json"}, 40 | } 41 | 42 | 43 | def basic_auth_error(): 44 | return { 45 | "statusCode": 401, 46 | "body": "Could not verify your access level for that URL. 
You have to login with proper credentials", 47 | "headers": {"WWW-Authenticate": 'Basic realm="Login Required"'}, 48 | } 49 | 50 | 51 | def handle_response(f): 52 | """Decorates the passed function such that it will respond with the 53 | apporpriate response to the API gateway and send messages to cloudwatch 54 | on uncaught exceptions 55 | """ 56 | 57 | @wraps(f) 58 | def wrapped(*args, **kwargs): 59 | try: 60 | f(*args, **kwargs) 61 | except BasicAuthException: 62 | logging.error(f"{f.__name__} - basic auth check failed...") 63 | return basic_auth_error() 64 | except Exception: 65 | logging.exception(f"{f.__name__} - handler failed...") 66 | return error() 67 | else: 68 | return ok() 69 | 70 | return wrapped 71 | 72 | 73 | decoders = { 74 | "json": loads, 75 | "application/x-www-form-urlencoded": lambda x: dict(parse.parse_qsl(x)), 76 | } 77 | 78 | 79 | def get_decoder(content_type): 80 | """Get the decoder from the above map, default to json loads""" 81 | return decoders.get(content_type, loads) 82 | 83 | 84 | def data_from_event(event): 85 | # At this point, we assume that the event input is a valid api gateway 86 | # object. If it isn't, we're just going to blow up anyways 87 | body = event["body"] 88 | is_base64 = event.get("isBase64Encoded", False) 89 | content_type = event.get("headers", {}).get("Content-Type", "No Content Type") 90 | decode = get_decoder(content_type) 91 | 92 | if is_base64: 93 | body = b64decode(body).decode("utf-8") 94 | 95 | try: 96 | data = decode(body) 97 | except Exception: 98 | s = f"Failed decoding message. 
(Content-Type: {content_type}, isBase64?: {is_base64})" 99 | logging.exception(s) 100 | raise Exception(s) 101 | else: 102 | return data 103 | 104 | 105 | def from_body(f): 106 | """Decorates a function to extract data from the body of a message posted 107 | to redhook 108 | """ 109 | 110 | @wraps(f) 111 | def wrapped(event, context): 112 | return f(data_from_event(event)) 113 | 114 | return wrapped 115 | 116 | 117 | def check_basic_auth(event): 118 | auth = event.get("headers", {}).get("Authorization") 119 | if not auth: 120 | raise BasicAuthException("Missing Header") 121 | 122 | if not auth.startswith("Basic "): 123 | raise BasicAuthException("'Basic' Not in Authorization") 124 | 125 | try: 126 | token = auth.split(" ")[1] 127 | decoded = b64decode(token).decode("utf-8").split(":") 128 | except Exception: 129 | logging.exception("Failed decoding Basic Auth token") 130 | raise BasicAuthException("Decode Failure") 131 | 132 | if len(decoded) != 2: 133 | raise BasicAuthException("Too Many Chunks") 134 | 135 | if not decoded[0] == config.basic_auth_username: 136 | raise BasicAuthException("Invalid Username") 137 | 138 | if not decoded[1] == config.basic_auth_password: 139 | raise BasicAuthException("Invalid Password") 140 | 141 | return True 142 | 143 | 144 | def with_basic_auth(f): 145 | """Use basic authentication on the endpoint""" 146 | 147 | @wraps(f) 148 | def wrapped(event, context): 149 | check_basic_auth(event) 150 | return f(event, context) 151 | 152 | return wrapped 153 | -------------------------------------------------------------------------------- /infrastructure/modules/firehose-to-redshift/main.tf: -------------------------------------------------------------------------------- 1 | # Module to create firehose delivery streams 2 | 3 | # Required 4 | variable "name" { type = string } 5 | variable "redshift_jdbcurl" { type = string } 6 | variable "redshift_username" { type = string } 7 | variable "redshift_password" { type = string } 8 | variable 
"table_name" { type = string } 9 | variable "alarm_sns_arn" { type = string } 10 | 11 | # Optionals 12 | variable "env" { default = "envunset" } 13 | variable "buffer_size" { default = 1 } 14 | variable "buffer_interval" { default = 60 } 15 | variable "compression_format" { default = "GZIP" } 16 | variable "copy_options" { default = "json 'auto' gzip" } 17 | variable "tags" { default = {} } 18 | variable "log_retention_days" { default = 30 } 19 | variable "alarms_enabled" { default = true } 20 | variable "s3_alarm_success_min" { default = 0.99 } 21 | variable "s3_alarm_evaluation_periods" { default = 1 } 22 | variable "redshift_alarm_success_min" { default = 0.99 } 23 | variable "redshift_alarm_evaluation_periods" { default = 1 } 24 | variable "redshift_retry_duration" { default = 3600 } 25 | 26 | # Main 27 | resource "aws_s3_bucket" "this" { 28 | bucket = "${var.env}.firehose.intermediate.${var.name}" 29 | acl = "private" 30 | tags = var.tags 31 | } 32 | 33 | resource "aws_cloudwatch_log_group" "this" { 34 | name = "/${var.env}/kinesisfirehose/${var.name}" 35 | retention_in_days = var.log_retention_days 36 | tags = var.tags 37 | } 38 | 39 | resource "aws_cloudwatch_log_stream" "s3" { 40 | name = "S3Delivery" 41 | log_group_name = aws_cloudwatch_log_group.this.name 42 | } 43 | 44 | resource "aws_cloudwatch_log_stream" "s3_backup" { 45 | name = "S3Backup" 46 | log_group_name = aws_cloudwatch_log_group.this.name 47 | } 48 | 49 | resource "aws_cloudwatch_log_stream" "redshift" { 50 | name = "RedshiftDelivery" 51 | log_group_name = aws_cloudwatch_log_group.this.name 52 | } 53 | 54 | resource "aws_kinesis_firehose_delivery_stream" "this" { 55 | name = "${var.env}-firehose-delivery-stream-${var.name}" 56 | destination = "redshift" 57 | tags = var.tags 58 | 59 | s3_configuration { 60 | role_arn = aws_iam_role.firehose.arn 61 | bucket_arn = aws_s3_bucket.this.arn 62 | buffer_size = var.buffer_size 63 | buffer_interval = var.buffer_interval 64 | compression_format = 
var.compression_format 65 | 66 | cloudwatch_logging_options { 67 | enabled = true 68 | log_group_name = aws_cloudwatch_log_group.this.name 69 | log_stream_name = aws_cloudwatch_log_stream.s3.name 70 | } 71 | } 72 | 73 | redshift_configuration { 74 | role_arn = aws_iam_role.firehose.arn 75 | cluster_jdbcurl = var.redshift_jdbcurl 76 | username = var.redshift_username 77 | password = var.redshift_password 78 | data_table_name = var.table_name 79 | copy_options = var.copy_options 80 | retry_duration = var.redshift_retry_duration 81 | s3_backup_mode = "Enabled" 82 | 83 | cloudwatch_logging_options { 84 | enabled = true 85 | log_group_name = aws_cloudwatch_log_group.this.name 86 | log_stream_name = aws_cloudwatch_log_stream.redshift.name 87 | } 88 | 89 | s3_backup_configuration { 90 | role_arn = aws_iam_role.firehose.arn 91 | bucket_arn = aws_s3_bucket.this.arn 92 | buffer_size = var.buffer_size 93 | buffer_interval = var.buffer_interval 94 | compression_format = var.compression_format 95 | 96 | cloudwatch_logging_options { 97 | enabled = true 98 | log_group_name = aws_cloudwatch_log_group.this.name 99 | log_stream_name = aws_cloudwatch_log_stream.s3_backup.name 100 | } 101 | } 102 | } 103 | } 104 | 105 | # Alarms 106 | # We can monitor successful delivery of records to S3 and Redshift. Cloudwatch 107 | # alarms are, unfortunately, ill-suited to monitor incoming records for us since 108 | # we won't generally get many (any?) overnight. 109 | resource "aws_cloudwatch_metric_alarm" "s3_success" { 110 | count = var.alarms_enabled ? 
1 : 0 111 | alarm_name = "${aws_kinesis_firehose_delivery_stream.this.name}-alarm-S3Success" 112 | comparison_operator = "LessThanOrEqualToThreshold" 113 | evaluation_periods = var.s3_alarm_evaluation_periods 114 | metric_name = "DeliveryToS3.Success" 115 | namespace = "AWS/Firehose" 116 | period = "60" 117 | statistic = "Sum" 118 | threshold = var.s3_alarm_success_min 119 | alarm_description = "Firehose S3 Successful Delivery Alarm for ${aws_kinesis_firehose_delivery_stream.this.name} (<= ${var.s3_alarm_success_min * 100}% succeeded)" 120 | alarm_actions = [var.alarm_sns_arn] 121 | # ok_actions = [var.alarm_sns_arn] 122 | tags = var.tags 123 | 124 | dimensions = { 125 | DeliveryStreamName = "${aws_kinesis_firehose_delivery_stream.this.name}" 126 | } 127 | } 128 | 129 | resource "aws_cloudwatch_metric_alarm" "redshift_success" { 130 | count = var.alarms_enabled ? 1 : 0 131 | alarm_name = "${aws_kinesis_firehose_delivery_stream.this.name}-alarm-RedshiftSuccess" 132 | comparison_operator = "LessThanOrEqualToThreshold" 133 | evaluation_periods = var.redshift_alarm_evaluation_periods 134 | metric_name = "DeliveryToRedshift.Success" 135 | namespace = "AWS/Firehose" 136 | period = "60" 137 | statistic = "Sum" 138 | threshold = var.redshift_alarm_success_min 139 | alarm_description = "Firehose Redshift Successful Delivery Alarm for ${aws_kinesis_firehose_delivery_stream.this.name} (<= ${var.s3_alarm_success_min * 100}% succeeded)" 140 | alarm_actions = [var.alarm_sns_arn] 141 | # ok_actions = [var.alarm_sns_arn] 142 | tags = var.tags 143 | 144 | dimensions = { 145 | DeliveryStreamName = aws_kinesis_firehose_delivery_stream.this.name 146 | } 147 | } 148 | 149 | # SSM Parameters 150 | # We share configuration between Terraform and SLS using SSM parameters. 
If we need 151 | # to add more please do, but the basic requirements here are to have 152 | resource "aws_ssm_parameter" "firehose_name" { 153 | name = "/${var.env}/kinesisfirehose/${var.name}/name" 154 | type = "SecureString" 155 | value = aws_kinesis_firehose_delivery_stream.this.name 156 | tags = var.tags 157 | } 158 | 159 | resource "aws_ssm_parameter" "firehose_arn" { 160 | name = "/${var.env}/kinesisfirehose/${var.name}/arn" 161 | type = "SecureString" 162 | value = aws_kinesis_firehose_delivery_stream.this.arn 163 | tags = var.tags 164 | } 165 | 166 | resource "aws_ssm_parameter" "log_group" { 167 | name = "/${var.env}/kinesisfirehose/${var.name}/log/group" 168 | type = "SecureString" 169 | value = aws_cloudwatch_log_group.this.name 170 | tags = var.tags 171 | } 172 | 173 | resource "aws_ssm_parameter" "s3_log_name" { 174 | name = "/${var.env}/kinesisfirehose/${var.name}/log/s3/name" 175 | type = "SecureString" 176 | value = aws_cloudwatch_log_stream.s3.name 177 | tags = var.tags 178 | } 179 | 180 | resource "aws_ssm_parameter" "redshift_log_name" { 181 | name = "/${var.env}/kinesisfirehose/${var.name}/log/redshift/name" 182 | type = "SecureString" 183 | value = aws_cloudwatch_log_stream.redshift.name 184 | tags = var.tags 185 | } 186 | 187 | # Outputs 188 | # We can reference these in tf topologies that use this modules. Add more as required. 
189 | output "delivery_stream_name" { 190 | value = aws_kinesis_firehose_delivery_stream.this.name 191 | } 192 | 193 | output "delivery_stream_arn" { 194 | value = aws_kinesis_firehose_delivery_stream.this.arn 195 | } 196 | 197 | output "intermediary_bucket_arn" { 198 | value = aws_s3_bucket.this.arn 199 | } 200 | 201 | output "firehose_role_arn" { 202 | value = aws_iam_role.firehose.arn 203 | } 204 | -------------------------------------------------------------------------------- /Pipfile.lock: -------------------------------------------------------------------------------- 1 | { 2 | "_meta": { 3 | "hash": { 4 | "sha256": "05a132dc51ba37f01195d450945f30f165d7ca9301002db676d463060bb0ff06" 5 | }, 6 | "pipfile-spec": 6, 7 | "requires": { 8 | "python_version": "3.7" 9 | }, 10 | "sources": [ 11 | { 12 | "name": "pypi", 13 | "url": "https://pypi.org/simple", 14 | "verify_ssl": true 15 | } 16 | ] 17 | }, 18 | "default": { 19 | "cached-property": { 20 | "hashes": [ 21 | "sha256:3a026f1a54135677e7da5ce819b0c690f156f37976f3e30c5430740725203d7f", 22 | "sha256:9217a59f14a5682da7c4b8829deadbfc194ac22e9908ccf7c8820234e80a1504" 23 | ], 24 | "index": "pypi", 25 | "version": "==1.5.1" 26 | } 27 | }, 28 | "develop": { 29 | "appdirs": { 30 | "hashes": [ 31 | "sha256:9e5896d1372858f8dd3344faf4e5014d21849c756c8d5701f78f8a103b372d92", 32 | "sha256:d8b24664561d0d34ddfaec54636d502d7cea6e29c3eaf68f3df6180863e2166e" 33 | ], 34 | "version": "==1.4.3" 35 | }, 36 | "astroid": { 37 | "hashes": [ 38 | "sha256:71ea07f44df9568a75d0f354c49143a4575d90645e9fead6dfb52c26a85ed13a", 39 | "sha256:840947ebfa8b58f318d42301cf8c0a20fd794a33b61cc4638e28e9e61ba32f42" 40 | ], 41 | "version": "==2.3.3" 42 | }, 43 | "attrs": { 44 | "hashes": [ 45 | "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", 46 | "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" 47 | ], 48 | "version": "==19.3.0" 49 | }, 50 | "black": { 51 | "hashes": [ 52 | 
"sha256:1b30e59be925fafc1ee4565e5e08abef6b03fe455102883820fe5ee2e4734e0b", 53 | "sha256:c2edb73a08e9e0e6f65a0e6af18b059b8b1cdd5bef997d7a0b181df93dc81539" 54 | ], 55 | "index": "pypi", 56 | "version": "==19.10b0" 57 | }, 58 | "boto3": { 59 | "hashes": [ 60 | "sha256:a1b44a93ec385230aceaf6ebd0f46622bfef026e93fa6abc4387903131cf2b2e", 61 | "sha256:e489d8d083dbfac7c2bf44cfdce065e57f97c9aacb7684a14a1d12e70bb08d2f" 62 | ], 63 | "index": "pypi", 64 | "version": "==1.9.177" 65 | }, 66 | "botocore": { 67 | "hashes": [ 68 | "sha256:3baf129118575602ada9926f5166d82d02273c250d0feb313fc270944b27c48b", 69 | "sha256:dc080aed4f9b220a9e916ca29ca97a9d37e8e1d296fe89cbaeef929bf0c8066b" 70 | ], 71 | "version": "==1.12.253" 72 | }, 73 | "certifi": { 74 | "hashes": [ 75 | "sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3", 76 | "sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f" 77 | ], 78 | "version": "==2019.11.28" 79 | }, 80 | "chardet": { 81 | "hashes": [ 82 | "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", 83 | "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" 84 | ], 85 | "version": "==3.0.4" 86 | }, 87 | "click": { 88 | "hashes": [ 89 | "sha256:8a18b4ea89d8820c5d0c7da8a64b2c324b4dabb695804dbfea19b9be9d88c0cc", 90 | "sha256:e345d143d80bf5ee7534056164e5e112ea5e22716bbb1ce727941f4c8b471b9a" 91 | ], 92 | "version": "==7.1.1" 93 | }, 94 | "docopt": { 95 | "hashes": [ 96 | "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491" 97 | ], 98 | "index": "pypi", 99 | "version": "==0.6.2" 100 | }, 101 | "docutils": { 102 | "hashes": [ 103 | "sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0", 104 | "sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827", 105 | "sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99" 106 | ], 107 | "version": "==0.15.2" 108 | }, 109 | "idna": { 110 | "hashes": [ 111 | 
"sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb", 112 | "sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa" 113 | ], 114 | "version": "==2.9" 115 | }, 116 | "importlib-metadata": { 117 | "hashes": [ 118 | "sha256:06f5b3a99029c7134207dd882428a66992a9de2bef7c2b699b5641f9886c3302", 119 | "sha256:b97607a1a18a5100839aec1dc26a1ea17ee0d93b20b0f008d80a5a050afb200b" 120 | ], 121 | "markers": "python_version < '3.8'", 122 | "version": "==1.5.0" 123 | }, 124 | "isort": { 125 | "hashes": [ 126 | "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1", 127 | "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd" 128 | ], 129 | "version": "==4.3.21" 130 | }, 131 | "jmespath": { 132 | "hashes": [ 133 | "sha256:695cb76fa78a10663425d5b73ddc5714eb711157e52704d69be03b1a02ba4fec", 134 | "sha256:cca55c8d153173e21baa59983015ad0daf603f9cb799904ff057bfb8ff8dc2d9" 135 | ], 136 | "version": "==0.9.5" 137 | }, 138 | "lazy-object-proxy": { 139 | "hashes": [ 140 | "sha256:0c4b206227a8097f05c4dbdd323c50edf81f15db3b8dc064d08c62d37e1a504d", 141 | "sha256:194d092e6f246b906e8f70884e620e459fc54db3259e60cf69a4d66c3fda3449", 142 | "sha256:1be7e4c9f96948003609aa6c974ae59830a6baecc5376c25c92d7d697e684c08", 143 | "sha256:4677f594e474c91da97f489fea5b7daa17b5517190899cf213697e48d3902f5a", 144 | "sha256:48dab84ebd4831077b150572aec802f303117c8cc5c871e182447281ebf3ac50", 145 | "sha256:5541cada25cd173702dbd99f8e22434105456314462326f06dba3e180f203dfd", 146 | "sha256:59f79fef100b09564bc2df42ea2d8d21a64fdcda64979c0fa3db7bdaabaf6239", 147 | "sha256:8d859b89baf8ef7f8bc6b00aa20316483d67f0b1cbf422f5b4dc56701c8f2ffb", 148 | "sha256:9254f4358b9b541e3441b007a0ea0764b9d056afdeafc1a5569eee1cc6c1b9ea", 149 | "sha256:9651375199045a358eb6741df3e02a651e0330be090b3bc79f6d0de31a80ec3e", 150 | "sha256:97bb5884f6f1cdce0099f86b907aa41c970c3c672ac8b9c8352789e103cf3156", 151 | "sha256:9b15f3f4c0f35727d3a0fba4b770b3c4ebbb1fa907dbcc046a1d2799f3edd142", 
152 | "sha256:a2238e9d1bb71a56cd710611a1614d1194dc10a175c1e08d75e1a7bcc250d442", 153 | "sha256:a6ae12d08c0bf9909ce12385803a543bfe99b95fe01e752536a60af2b7797c62", 154 | "sha256:ca0a928a3ddbc5725be2dd1cf895ec0a254798915fb3a36af0964a0a4149e3db", 155 | "sha256:cb2c7c57005a6804ab66f106ceb8482da55f5314b7fcb06551db1edae4ad1531", 156 | "sha256:d74bb8693bf9cf75ac3b47a54d716bbb1a92648d5f781fc799347cfc95952383", 157 | "sha256:d945239a5639b3ff35b70a88c5f2f491913eb94871780ebfabb2568bd58afc5a", 158 | "sha256:eba7011090323c1dadf18b3b689845fd96a61ba0a1dfbd7f24b921398affc357", 159 | "sha256:efa1909120ce98bbb3777e8b6f92237f5d5c8ea6758efea36a473e1d38f7d3e4", 160 | "sha256:f3900e8a5de27447acbf900b4750b0ddfd7ec1ea7fbaf11dfa911141bc522af0" 161 | ], 162 | "version": "==1.4.3" 163 | }, 164 | "mccabe": { 165 | "hashes": [ 166 | "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", 167 | "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f" 168 | ], 169 | "version": "==0.6.1" 170 | }, 171 | "more-itertools": { 172 | "hashes": [ 173 | "sha256:5dd8bcf33e5f9513ffa06d5ad33d78f31e1931ac9a18f33d37e77a180d393a7c", 174 | "sha256:b1ddb932186d8a6ac451e1d95844b382f55e12686d51ca0c68b6f61f2ab7a507" 175 | ], 176 | "version": "==8.2.0" 177 | }, 178 | "packaging": { 179 | "hashes": [ 180 | "sha256:3c292b474fda1671ec57d46d739d072bfd495a4f51ad01a055121d81e952b7a3", 181 | "sha256:82f77b9bee21c1bafbf35a84905d604d5d1223801d639cf3ed140bd651c08752" 182 | ], 183 | "version": "==20.3" 184 | }, 185 | "pathspec": { 186 | "hashes": [ 187 | "sha256:163b0632d4e31cef212976cf57b43d9fd6b0bac6e67c26015d611a647d5e7424", 188 | "sha256:562aa70af2e0d434367d9790ad37aed893de47f1693e4201fd1d3dca15d19b96" 189 | ], 190 | "version": "==0.7.0" 191 | }, 192 | "pluggy": { 193 | "hashes": [ 194 | "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0", 195 | "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d" 196 | ], 197 | "version": "==0.13.1" 198 | }, 199 
| "py": { 200 | "hashes": [ 201 | "sha256:5e27081401262157467ad6e7f851b7aa402c5852dbcb3dae06768434de5752aa", 202 | "sha256:c20fdd83a5dbc0af9efd622bee9a5564e278f6380fffcacc43ba6f43db2813b0" 203 | ], 204 | "version": "==1.8.1" 205 | }, 206 | "pylint": { 207 | "hashes": [ 208 | "sha256:3db5468ad013380e987410a8d6956226963aed94ecb5f9d3a28acca6d9ac36cd", 209 | "sha256:886e6afc935ea2590b462664b161ca9a5e40168ea99e5300935f6591ad467df4" 210 | ], 211 | "index": "pypi", 212 | "version": "==2.4.4" 213 | }, 214 | "pyparsing": { 215 | "hashes": [ 216 | "sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f", 217 | "sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec" 218 | ], 219 | "version": "==2.4.6" 220 | }, 221 | "pytest": { 222 | "hashes": [ 223 | "sha256:0d5fe9189a148acc3c3eb2ac8e1ac0742cb7618c084f3d228baaec0c254b318d", 224 | "sha256:ff615c761e25eb25df19edddc0b970302d2a9091fbce0e7213298d85fb61fef6" 225 | ], 226 | "index": "pypi", 227 | "version": "==5.3.5" 228 | }, 229 | "pytest-env": { 230 | "hashes": [ 231 | "sha256:7e94956aef7f2764f3c147d216ce066bf6c42948bb9e293169b1b1c880a580c2" 232 | ], 233 | "index": "pypi", 234 | "version": "==0.6.2" 235 | }, 236 | "python-dateutil": { 237 | "hashes": [ 238 | "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c", 239 | "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a" 240 | ], 241 | "markers": "python_version >= '2.7'", 242 | "version": "==2.8.1" 243 | }, 244 | "redhook": { 245 | "editable": true, 246 | "path": "." 
247 | }, 248 | "regex": { 249 | "hashes": [ 250 | "sha256:01b2d70cbaed11f72e57c1cfbaca71b02e3b98f739ce33f5f26f71859ad90431", 251 | "sha256:046e83a8b160aff37e7034139a336b660b01dbfe58706f9d73f5cdc6b3460242", 252 | "sha256:113309e819634f499d0006f6200700c8209a2a8bf6bd1bdc863a4d9d6776a5d1", 253 | "sha256:200539b5124bc4721247a823a47d116a7a23e62cc6695744e3eb5454a8888e6d", 254 | "sha256:25f4ce26b68425b80a233ce7b6218743c71cf7297dbe02feab1d711a2bf90045", 255 | "sha256:269f0c5ff23639316b29f31df199f401e4cb87529eafff0c76828071635d417b", 256 | "sha256:5de40649d4f88a15c9489ed37f88f053c15400257eeb18425ac7ed0a4e119400", 257 | "sha256:7f78f963e62a61e294adb6ff5db901b629ef78cb2a1cfce3cf4eeba80c1c67aa", 258 | "sha256:82469a0c1330a4beb3d42568f82dffa32226ced006e0b063719468dcd40ffdf0", 259 | "sha256:8c2b7fa4d72781577ac45ab658da44c7518e6d96e2a50d04ecb0fd8f28b21d69", 260 | "sha256:974535648f31c2b712a6b2595969f8ab370834080e00ab24e5dbb9d19b8bfb74", 261 | "sha256:99272d6b6a68c7ae4391908fc15f6b8c9a6c345a46b632d7fdb7ef6c883a2bbb", 262 | "sha256:9b64a4cc825ec4df262050c17e18f60252cdd94742b4ba1286bcfe481f1c0f26", 263 | "sha256:9e9624440d754733eddbcd4614378c18713d2d9d0dc647cf9c72f64e39671be5", 264 | "sha256:9ff16d994309b26a1cdf666a6309c1ef51ad4f72f99d3392bcd7b7139577a1f2", 265 | "sha256:b33ebcd0222c1d77e61dbcd04a9fd139359bded86803063d3d2d197b796c63ce", 266 | "sha256:bba52d72e16a554d1894a0cc74041da50eea99a8483e591a9edf1025a66843ab", 267 | "sha256:bed7986547ce54d230fd8721aba6fd19459cdc6d315497b98686d0416efaff4e", 268 | "sha256:c7f58a0e0e13fb44623b65b01052dae8e820ed9b8b654bb6296bc9c41f571b70", 269 | "sha256:d58a4fa7910102500722defbde6e2816b0372a4fcc85c7e239323767c74f5cbc", 270 | "sha256:f1ac2dc65105a53c1c2d72b1d3e98c2464a133b4067a51a3d2477b28449709a0" 271 | ], 272 | "version": "==2020.2.20" 273 | }, 274 | "requests": { 275 | "hashes": [ 276 | "sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee", 277 | "sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6" 278 | 
], 279 | "index": "pypi", 280 | "version": "==2.23.0" 281 | }, 282 | "s3transfer": { 283 | "hashes": [ 284 | "sha256:6efc926738a3cd576c2a79725fed9afde92378aa5c6a957e3af010cb019fac9d", 285 | "sha256:b780f2411b824cb541dbcd2c713d0cb61c7d1bcadae204cdddda2b35cef493ba" 286 | ], 287 | "version": "==0.2.1" 288 | }, 289 | "six": { 290 | "hashes": [ 291 | "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a", 292 | "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c" 293 | ], 294 | "version": "==1.14.0" 295 | }, 296 | "toml": { 297 | "hashes": [ 298 | "sha256:229f81c57791a41d65e399fc06bf0848bab550a9dfd5ed66df18ce5f05e73d5c", 299 | "sha256:235682dd292d5899d361a811df37e04a8828a5b1da3115886b73cf81ebc9100e" 300 | ], 301 | "version": "==0.10.0" 302 | }, 303 | "typed-ast": { 304 | "hashes": [ 305 | "sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355", 306 | "sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919", 307 | "sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa", 308 | "sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652", 309 | "sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75", 310 | "sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01", 311 | "sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d", 312 | "sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1", 313 | "sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907", 314 | "sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c", 315 | "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3", 316 | "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b", 317 | "sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614", 318 | "sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb", 319 | 
"sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b", 320 | "sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41", 321 | "sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6", 322 | "sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34", 323 | "sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe", 324 | "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4", 325 | "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7" 326 | ], 327 | "markers": "implementation_name == 'cpython' and python_version < '3.8'", 328 | "version": "==1.4.1" 329 | }, 330 | "urllib3": { 331 | "hashes": [ 332 | "sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc", 333 | "sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc" 334 | ], 335 | "markers": "python_version >= '3.4'", 336 | "version": "==1.25.8" 337 | }, 338 | "wcwidth": { 339 | "hashes": [ 340 | "sha256:8fd29383f539be45b20bd4df0dc29c20ba48654a41e661925e612311e9f3c603", 341 | "sha256:f28b3e8a6483e5d49e7f8949ac1a78314e740333ae305b4ba5defd3e74fb37a8" 342 | ], 343 | "version": "==0.1.8" 344 | }, 345 | "wrapt": { 346 | "hashes": [ 347 | "sha256:565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1" 348 | ], 349 | "version": "==1.11.2" 350 | }, 351 | "zipp": { 352 | "hashes": [ 353 | "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b", 354 | "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96" 355 | ], 356 | "version": "==3.1.0" 357 | } 358 | } 359 | } 360 | --------------------------------------------------------------------------------