├── .gitmodules
├── app
│   ├── __init__.py
│   ├── handler.py
│   └── translator.py
├── package.json
├── Makefile
├── tox.ini
├── setup.py
├── LICENSE
├── Dockerfile
├── .gitignore
├── README.md
├── scripts
│   └── create_jobs.py
└── serverless.yml

/.gitmodules:
--------------------------------------------------------------------------------
[submodule "lambda"]
	path = lambda
	url = https://github.com/PDAL/lambda.git
--------------------------------------------------------------------------------
/app/__init__.py:
--------------------------------------------------------------------------------
"""app."""

import pkg_resources

version = pkg_resources.get_distribution(__package__).version
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
{
  "name": "pdal-watchbot",
  "version": "1.0.0",
  "description": "",
  "devDependencies": {
    "serverless": "^1.23.0"
  },
  "author": ""
}
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------

SHELL = /bin/bash

build:
	# This step should be removed once https://github.com/PDAL/lambda/issues/6 is resolved
	cd lambda; docker build --tag lambda:pdal .; cd ..
	docker build --tag lambda:latest .
	docker run --name lambda -itd lambda:latest /bin/bash
	docker cp lambda:/tmp/package.zip package.zip
	docker stop lambda
	docker rm lambda
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
[flake8]
ignore = D203
exclude = .git,__pycache__,docs/source/conf.py,old,build,dist
max-complexity = 10
max-line-length = 90

[tox]
envlist = py36,py37

[testenv]
extras = test
commands=
    python -m pytest --cov app --cov-report term-missing --ignore=venv
deps=
    numpy

[testenv:black]
basepython = python3
skip_install = true
deps =
    black
commands =
    black app scripts
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
"""Setup."""

from setuptools import setup, find_packages

inst_reqs = [
    "rio-cogeo>=1.1.5",
    "wget",
]
extra_reqs = {"test": ["pytest", "pytest-cov"]}

setup(
    name="app",
    version="0.0.2",
    description=u"pdal watchbot",
    python_requires=">=3",
    keywords="AWS-Lambda Python",
    packages=find_packages(exclude=["ez_setup", "examples", "tests"]),
    include_package_data=True,
    zip_safe=False,
    install_requires=inst_reqs,
    extras_require=extra_reqs,
)
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2019 Development Seed

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
--------------------------------------------------------------------------------
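The Makefile above builds the Lambda artifact inside Docker and copies out `/tmp/package.zip`. Before deploying, a quick sanity check that the `app` package actually landed in the zip could look like this sketch:

```python
# Sanity-check the artifact produced by `make build` — a sketch that verifies
# the app package made it into package.zip before `sls deploy`.
import zipfile

with zipfile.ZipFile("package.zip") as z:
    names = z.namelist()

assert any(n.startswith("app/") for n in names), "app package missing from zip"
print(f"{len(names)} files in package.zip, ok")
```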
/Dockerfile:
--------------------------------------------------------------------------------
FROM lambda:pdal
# The base image reference should be changed once https://github.com/PDAL/lambda/issues/6 is resolved.

ENV \
  LANG=en_US.UTF-8 \
  LC_ALL=en_US.UTF-8 \
  CFLAGS="--std=c99"

RUN \
  pip3.7 install pip -U \
  && pip3.7 install cython numpy --no-binary numpy

ENV PACKAGE_PREFIX=/tmp/package

################################################################################
#                                CREATE PACKAGE                                #
################################################################################
COPY app app
COPY setup.py setup.py

RUN pip3.7 install . --no-binary rasterio,numpy -t $PACKAGE_PREFIX

# Keep module precompiles for faster Lambda startup
RUN find ${PACKAGE_PREFIX}/ -type f -name '*.pyc' | while read f; do n=$(echo $f | sed 's/__pycache__\///' | sed 's/.cpython-[2-3][0-9]//'); cp $f $n; done;
RUN find ${PACKAGE_PREFIX}/ -type d -a -name '__pycache__' -print0 | xargs -0 rm -rf
RUN find ${PACKAGE_PREFIX}/ -type f -a -name '*.py' -print0 | xargs -0 rm -f

RUN cd $PACKAGE_PREFIX && zip -r9q /tmp/package.zip *
--------------------------------------------------------------------------------
/app/handler.py:
--------------------------------------------------------------------------------
"""Watchbot-light Worker."""

import os
import json

from . import translator


def process(message):
    """Map step: create a COG from one source file."""
    if isinstance(message, str):
        message = json.loads(message)

    src_path = message["src_path"]
    dst_prefix = message["dst_prefix"]

    bname = os.path.splitext(os.path.basename(src_path))[0]
    out_key = os.path.join(dst_prefix, f"{bname}.tif")
    translator.process(
        src_path,
        os.environ["COG_BUCKET"],
        out_key,
        resolution=message.get("resolution", 0.25),
        output=message.get("output", "min,max"),
        profile=message["profile_name"],
        profile_options=message["profile_options"],
        **message["options"]
    )

    return True


def _parse_message(message):
    """Unwrap the SNS notification from the SQS record, if present."""
    if not message.get("Records"):
        return message
    record = message["Records"][0]
    body = json.loads(record["body"])
    return body["Message"]


def main(event, context):
    """
    Handle events.

    Events:
    - SQS queue (MAP)

    """
    message = _parse_message(event)
    return process(message)
--------------------------------------------------------------------------------
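The handler expects the SQS-wraps-SNS envelope produced by the stack in `serverless.yml`. A minimal local smoke test could look like the sketch below; the bucket name and `src_path` are placeholders, and running it end-to-end also requires the PDAL binary from the Lambda layer:

```python
# Sketch of a local invocation of app.handler.main, assuming COG_BUCKET is set.
import json
import os

os.environ["COG_BUCKET"] = "my-bucket"  # placeholder bucket name

from app import handler

# One job message, in the schema built by scripts/create_jobs.py.
job = {
    "src_path": "s3://my-bucket/lidar/tile_001.laz",  # placeholder source
    "dst_prefix": "cogs",
    "resolution": 0.25,
    "output": "min,max",
    "profile_name": "deflate",
    "profile_options": {},
    "options": {},
}

# SQS record wrapping an SNS notification — the shape _parse_message unwraps.
event = {"Records": [{"body": json.dumps({"Message": json.dumps(job)})}]}

handler.main(event, None)
```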
/.gitignore:
--------------------------------------------------------------------------------
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
*.c

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# pyenv
.python-version

# celery beat schedule file
celerybeat-schedule

# SageMath parsed files
*.sage.py

# dotenv
.env

# virtualenv
.venv
venv/
ENV/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/


.pytest_cache

package.zip

.serverless
mtl.json.gz
list_files.txt
indexlidar2015.csv
--------------------------------------------------------------------------------
/app/translator.py:
--------------------------------------------------------------------------------
"""translator."""

import os
import uuid
from urllib.parse import urlparse

import wget

from boto3.session import Session as boto3_session

from rio_cogeo.cogeo import cog_translate
from rio_cogeo.profiles import cog_profiles

REGION_NAME = os.environ.get("AWS_REGION", "us-east-1")


def _s3_download(path, key):
    """Download an s3:// source to a local path."""
    session = boto3_session(region_name=REGION_NAME)
    s3 = session.client("s3")
    url_info = urlparse(path.strip())
    s3_bucket = url_info.netloc
    s3_key = url_info.path.strip("/")
    s3.download_file(s3_bucket, s3_key, key)
    return True


def _upload(path, bucket, key):
    """Upload a local file to S3."""
    session = boto3_session(region_name=REGION_NAME)
    s3 = session.client("s3")
    with open(path, "rb") as data:
        s3.upload_fileobj(data, bucket, key)
    return True


def to_tiff(src_path, dst_path, resolution=0.25, output="min,max"):
    """Run PDAL translate to rasterize the point cloud's Z dimension."""
    status = os.system(
        f"/opt/bin/pdal translate {src_path} {dst_path} "
        "--writers.gdal.dimension=Z "
        f"--writers.gdal.resolution={resolution} "
        "--writers.gdal.gdalopts=COMPRESS=DEFLATE "
        f"--writers.gdal.output_type={output} "
        "--nostream"
    )
    if status != 0:
        raise Exception(f"pdal translate failed with exit status {status}")
    return True


def to_cog(src_path, dst_path, profile="deflate", profile_options={}, **options):
    """Convert image to COG."""
    output_profile = cog_profiles.get(profile)
    output_profile.update(dict(BIGTIFF="IF_SAFER"))
    output_profile.update(profile_options)

    config = dict(
        GDAL_NUM_THREADS="ALL_CPUS",
        GDAL_TIFF_INTERNAL_MASK=True,
        GDAL_TIFF_OVR_BLOCKSIZE="128",
    )

    cog_translate(
        src_path,
        dst_path,
        output_profile,
        config=config,
        in_memory=False,
        quiet=True,
        allow_intermediate_compression=True,
        **options,
    )
    return True


def process(
    url,
    out_bucket,
    out_key,
    resolution=0.25,
    output="min,max",
    profile="deflate",
    profile_options={},
    **options
):
    """Download, convert and upload."""
    # Start from a clean /tmp (it persists between warm Lambda invocations).
    os.system("rm -rf /tmp/*.tif")

    url_info = urlparse(url.strip())
    if url_info.scheme not in ["http", "https", "s3"]:
        raise Exception(f"Unsupported scheme {url_info.scheme}")

    src_path = "/tmp/" + os.path.basename(url_info.path)
    if url_info.scheme.startswith("http"):
        wget.download(url, src_path)
    elif url_info.scheme == "s3":
        _s3_download(url, src_path)

    uid = str(uuid.uuid4())

    dst_path = f"/tmp/{uid}.tif"
    to_tiff(src_path, dst_path, resolution=resolution, output=output)
    os.remove(src_path)

    cog_path = f"/tmp/{uid}_cog.tif"
    to_cog(
        dst_path,
        cog_path,
        profile=profile,
        profile_options=profile_options,
        **options
    )

    _upload(cog_path, out_bucket, out_key)

    # cleanup
    os.system("rm -rf /tmp/*.tif")

    return True
--------------------------------------------------------------------------------
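`to_tiff` drives PDAL through CLI flags; the same rasterization can also be expressed as a PDAL pipeline, which is easier to extend with filters. The sketch below is an equivalent formulation, not what the Lambda actually runs; it assumes the same `/opt/bin/pdal` binary:

```python
# Equivalent rasterization written as a PDAL pipeline instead of CLI flags —
# a sketch under the assumption that /opt/bin/pdal is available.
import json
import subprocess


def to_tiff_pipeline(src_path, dst_path, resolution=0.25, output="min,max"):
    """Rasterize the Z dimension with writers.gdal, mirroring to_tiff()."""
    pipeline = {
        "pipeline": [
            src_path,  # reader is inferred from the file extension
            {
                "type": "writers.gdal",
                "filename": dst_path,
                "dimension": "Z",
                "resolution": resolution,
                "output_type": output,
                "gdalopts": "COMPRESS=DEFLATE",
            },
        ]
    }
    # check=True raises on a non-zero exit status instead of failing silently.
    subprocess.run(
        ["/opt/bin/pdal", "pipeline", "--stdin"],
        input=json.dumps(pipeline).encode(),
        check=True,
    )
    return True
```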
profile="deflate", profile_options={}, **options): 44 | """Convert image to COG.""" 45 | output_profile = cog_profiles.get(profile) 46 | output_profile.update(dict(BIGTIFF="IF_SAFER")) 47 | output_profile.update(profile_options) 48 | 49 | config = dict( 50 | GDAL_NUM_THREADS="ALL_CPUS", 51 | GDAL_TIFF_INTERNAL_MASK=True, 52 | GDAL_TIFF_OVR_BLOCKSIZE="128", 53 | ) 54 | 55 | cog_translate( 56 | src_path, 57 | dst_path, 58 | output_profile, 59 | config=config, 60 | in_memory=False, 61 | quiet=True, 62 | allow_intermediate_compression=True, 63 | **options, 64 | ) 65 | return True 66 | 67 | 68 | def process( 69 | url, 70 | out_bucket, 71 | out_key, 72 | resolution=0.25, 73 | output="min,max", 74 | profile="deflate", 75 | profile_options={}, 76 | **options 77 | ): 78 | """Download, convert and upload.""" 79 | os.system("rm -rf /tmp/*.tif") 80 | 81 | url_info = urlparse(url.strip()) 82 | if url_info.scheme not in ["http", "https", "s3"]: 83 | raise Exception(f"Unsuported scheme {url_info.scheme}") 84 | 85 | src_path = "/tmp/" + os.path.basename(url_info.path) 86 | if url_info.scheme.startswith("http"): 87 | wget.download(url, src_path) 88 | elif url_info.scheme == "s3": 89 | _s3_download(url, src_path) 90 | 91 | uid = str(uuid.uuid4()) 92 | 93 | dst_path = f"/tmp/{uid}.tif" 94 | to_tiff(src_path, dst_path, resolution=resolution, output=output) 95 | os.remove(src_path) 96 | 97 | cog_path = f"/tmp/{uid}_cog.tif" 98 | to_cog( 99 | dst_path, 100 | cog_path, 101 | profile=profile, 102 | profile_options=profile_options, 103 | **options 104 | ) 105 | 106 | _upload(cog_path, out_bucket, out_key) 107 | 108 | # cleanup 109 | os.system("rm -rf /tmp/*.tif") 110 | 111 | return True 112 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # pdal-watchbot 2 | 3 | ![](https://user-images.githubusercontent.com/10407788/68695040-4e707b00-0548-11ea-89a6-d81a97c4b62c.jpg) 4 | 5 | 6 | A PDAL fork of https://github.com/developmentseed/cogeo-watchbot-light to convert `.Laz` file to COG at scale. 7 | 8 | # What is this 9 | 10 | This repo host the code for a serverless architecture enabling creation of Cloud Optimized GeoTIFF at scale. 11 | 12 | ## Architecture 13 | 14 | ![](https://user-images.githubusercontent.com/10407788/66224855-f3c04580-e6a4-11e9-8903-8319c9a89875.png) 15 | 16 | 17 | # Deploy 18 | 19 | ### Requirements 20 | - serverless 21 | - docker 22 | - aws account 23 | 24 | 25 | 1. Install and configure serverless 26 | ```bash 27 | # Install and Configure serverless (https://serverless.com/framework/docs/providers/aws/guide/credentials/) 28 | $ npm install serverless -g 29 | ``` 30 | 31 | 2. (Temporary) Clone PDAL/lambda 32 | 33 | ```bash 34 | # In pointcloud-to-cog/ directory 35 | git clone https://github.com/PDAL/lambda 36 | cd lambda && git checkout 6eddf86 37 | ``` 38 | 39 | 2. Create Lambda package 40 | 41 | ```bash 42 | # In pointcloud-to-cog/ directory 43 | $ make build 44 | ``` 45 | 46 | 3. Deploy the Serverless stack 47 | 48 | ```bash 49 | $ sls deploy --stage production --bucket my-bucket --region us-east-1 50 | 51 | # Get Stack info 52 | $ sls info --bucket my-bucket --verbose 53 | ``` 54 | 55 | 56 | 57 | # How To 58 | 59 | ### Example (Montreal Open Data) 60 | 61 | 1. 
/scripts/create_jobs.py:
--------------------------------------------------------------------------------
"""create_jobs: Feed the SQS queue."""

import json
from functools import partial
from concurrent import futures
from collections import Counter
from urllib.parse import urlparse

import click

from boto3.session import Session as boto3_session

from rasterio.rio import options
from rio_tiler.utils import _chunks
from rio_cogeo.profiles import cog_profiles


def sources_callback(ctx, param, value):
    """
    Validate scheme and uniqueness of sources.

    From: https://github.com/mapbox/pxm-manifest-specification/blob/master/manifest.py#L157-L179

    Notes
    -----
    The callback takes a fileobj, but then converts it to a sequence
    of strings.

    Returns
    -------
    list

    """
    sources = list([name.strip() for name in value])

    # Validate scheme.
    schemes = [urlparse(name.strip()).scheme for name in sources]
    invalid_schemes = [
        scheme for scheme in schemes if scheme not in ["s3", "http", "https"]
    ]
    if len(invalid_schemes):
        raise click.BadParameter(
            "Schemes {!r} are not valid and should be one of 's3/http/https'.".format(
                invalid_schemes
            )
        )

    # Identify duplicate sources.
    dupes = [name for (name, count) in Counter(sources).items() if count > 1]
    if len(dupes) > 0:
        raise click.BadParameter(
            "Duplicated sources {!r} cannot be processed.".format(dupes)
        )

    return sources


def aws_send_message(message, topic, client=None):
    """Send one SNS message."""
    if not client:
        session = boto3_session()
        client = session.client("sns")
    return client.publish(Message=json.dumps(message), TargetArn=topic)


def sns_worker(messages, topic, subject=None):
    """Send a batch of SNS messages, reusing one client per worker."""
    session = boto3_session()
    client = session.client("sns")
    for message in messages:
        aws_send_message(message, topic, client=client)
    return True


@click.command()
@click.argument("sources", default="-", type=click.File("r"), callback=sources_callback)
@click.option(
    "--cog-profile",
    "-p",
    "cogeo_profile",
    type=click.Choice(cog_profiles.keys()),
    default="deflate",
    help="CloudOptimized GeoTIFF profile (default: deflate).",
)
@options.creation_options
@click.option(
    "--options",
    "--op",
    "options",
    metavar="NAME=VALUE",
    multiple=True,
    callback=options._cb_key_val,
    help="rio_cogeo.cogeo.cog_translate input options.",
)
@click.option(
    "--prefix",
    type=str,
    default="cogs",
    help="AWS S3 key prefix."
)
@click.option(
    "--topic",
    type=str,
    required=True,
    help="SNS topic",
)
@click.option(
    "--resolution",
    type=float,
    default=0.25,
    help="Length of raster cell edges in X/Y units. (default: 0.25)",
)
@click.option(
    "--layers",
    type=str,
    default="min,max",
    help="A comma-separated list of statistics for which to produce raster layers. (default: 'min,max')",
)
def cli(
    sources,
    cogeo_profile,
    creation_options,
    options,
    prefix,
    topic,
    resolution,
    layers,
):
    """Create pdal-watchbot jobs."""
    def _create_message(source):
        message = {
            "src_path": source,
            "dst_prefix": prefix,
            "resolution": resolution,
            "output": layers,
            "profile_name": cogeo_profile,
            "profile_options": creation_options,
            "options": options,
        }
        return message

    messages = [_create_message(source) for source in sources]
    parts = _chunks(messages, 50)
    _send_message = partial(sns_worker, topic=topic)
    with futures.ThreadPoolExecutor(max_workers=50) as executor:
        executor.map(_send_message, parts)


if __name__ == "__main__":
    cli()
--------------------------------------------------------------------------------
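Note that `_chunks` is a private helper imported from `rio_tiler.utils`, so it can move or disappear between releases. If you want to drop that import, an equivalent stand-in is a few lines, as in this sketch:

```python
# Drop-in replacement for rio_tiler.utils._chunks, in case that private helper
# changes; yields successive fixed-size slices of a list.
def chunks(items, size):
    for i in range(0, len(items), size):
        yield items[i : i + size]


assert list(chunks(list(range(5)), 2)) == [[0, 1], [2, 3], [4]]
```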
/serverless.yml:
--------------------------------------------------------------------------------
service: pdal-watchbot

provider:
  name: aws
  runtime: python3.7
  stage: ${opt:stage, 'production'}
  region: ${opt:region, 'us-east-1'}

  deploymentBucket: ${opt:bucket}

  # Add Tags to resources
  # stackTags:
  #   project: labs

  iamRoleStatements:
    - Effect: "Allow"
      Action:
        - "*"
      Resource:
        - "arn:aws:s3:::${opt:bucket}*"

    - Effect: "Allow"
      Action:
        - "sns:Publish"
      Resource:
        - !Ref WatchbotTopic
        - !Ref WatchbotDLQTopic

package:
  artifact: package.zip

functions:
  translator:
    handler: app.handler.main
    memorySize: 3008
    timeout: 900
    reservedConcurrency: 50
    layers:
      - arn:aws:lambda:us-east-1:163178234892:layer:pdal:15
    environment:
      COG_BUCKET: ${opt:bucket}
      GDAL_CACHEMAX: 25%
      GDAL_DATA: /opt/share/gdal
      PROJ_LIB: /opt/share/proj
      PYTHONWARNINGS: ignore
    onError: !Ref WatchbotDLQTopic
    events:
      - sqs:
          arn: !GetAtt
            - WatchbotQueue
            - Arn
          batchSize: 1
resources:
  Resources:
    # From https://www.jeremydaly.com/how-to-use-sns-and-sqs-to-distribute-and-throttle-events/
    # Create SNS Topic
    WatchbotTopic:
      Type: AWS::SNS::Topic
      Properties:
        TopicName: ${self:service}-${self:provider.stage}-WatchbotTopic

    # Create our SQS queue
    WatchbotQueue:
      Type: "AWS::SQS::Queue"
      Properties:
        VisibilityTimeout: 900
        QueueName: ${self:service}-${self:provider.stage}-WatchbotQueue
        RedrivePolicy:
          deadLetterTargetArn: !GetAtt
            - WatchbotDeadLetterQueue
            - Arn
          maxReceiveCount: 1

    # Create SQS Queue Policies
    snsWatchbotQueuePolicy:
      Type: AWS::SQS::QueuePolicy
      Properties:
        PolicyDocument:
          Version: "2012-10-17"
          Statement:
            - Sid: "allow-sns-messages"
              Effect: Allow
              Principal: "*"
              Resource: !GetAtt
                - WatchbotQueue
                - Arn
              Action: "SQS:SendMessage"
              Condition:
                ArnEquals:
                  "aws:SourceArn": !Ref WatchbotTopic
        Queues:
          - Ref: WatchbotQueue

    # Create the subscription to the 'WatchbotQueue'
    WatchbotQueueSubscription:
      Type: 'AWS::SNS::Subscription'
      Properties:
        TopicArn: !Ref WatchbotTopic
        Endpoint: !GetAtt
          - WatchbotQueue
          - Arn
        Protocol: sqs
        RawMessageDelivery: 'false'

    WatchbotDLQTopic:
      Type: AWS::SNS::Topic
      Properties:
        TopicName: ${self:service}-${self:provider.stage}-WatchbotDLQTopic
    # Create our dead-letter SQS queue
    WatchbotDeadLetterQueue:
      Type: AWS::SQS::Queue
      Properties:
        QueueName: ${self:service}-${self:provider.stage}-WatchbotDeadLetterQueue

    # Create SQS Queue Policies
    snsWatchbotDLQPolicy:
      Type: AWS::SQS::QueuePolicy
      Properties:
        PolicyDocument:
          Version: "2012-10-17"
          Statement:
            - Sid: "allow-sns-messages"
              Effect: Allow
              Principal: "*"
              Resource: !GetAtt
                - WatchbotDeadLetterQueue
                - Arn
              Action: "SQS:SendMessage"
              Condition:
                ArnEquals:
                  "aws:SourceArn": !Ref WatchbotDLQTopic
        Queues:
          - Ref: WatchbotDeadLetterQueue

    # Create the subscription to the 'WatchbotDeadLetterQueue'
    WatchbotDLQSubscription:
      Type: 'AWS::SNS::Subscription'
      Properties:
        TopicArn: !Ref WatchbotDLQTopic
        Endpoint: !GetAtt
          - WatchbotDeadLetterQueue
          - Arn
        Protocol: sqs
        RawMessageDelivery: 'false'

  Outputs:
    WatchbotTopic:
      Value:
        "Ref": WatchbotTopic
      Export:
        Name: ${self:service}-${self:provider.stage}-WatchbotTopic

    WatchbotDLQTopic:
      Value:
        "Ref": WatchbotDLQTopic
      Export:
        Name: ${self:service}-${self:provider.stage}-WatchbotDLQTopic
--------------------------------------------------------------------------------
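Failed jobs end up on the dead-letter queue, either via the queue's redrive policy or the function's `onError` topic. A sketch for pulling a few of them back for inspection follows; the queue name assumes the `production` stage, so adjust it for your deployment:

```python
# Peek at failed jobs on the dead-letter queue — a sketch; the exact message
# shape depends on whether it arrived via redrive or the onError SNS topic.
import json

import boto3

sqs = boto3.client("sqs", region_name="us-east-1")
queue_url = sqs.get_queue_url(
    QueueName="pdal-watchbot-production-WatchbotDeadLetterQueue"
)["QueueUrl"]

resp = sqs.receive_message(
    QueueUrl=queue_url, MaxNumberOfMessages=10, WaitTimeSeconds=2
)
for msg in resp.get("Messages", []):
    body = json.loads(msg["Body"])
    # SNS-wrapped records keep the original job under "Message".
    print(body.get("Message", body))
```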