├── .gitignore ├── README.md ├── architecture.png ├── requirements.txt ├── scheduler ├── __init__.py ├── consumer.py ├── date_utils.py ├── dynamo.py ├── emitter.py ├── event_loader.py ├── handler.py ├── settings.py └── sqs.py ├── scripts └── init_services.py └── serverless.yml /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by https://www.toptal.com/developers/gitignore/api/python,serverless,node 2 | # Edit at https://www.toptal.com/developers/gitignore?templates=python,serverless,node 3 | 4 | ### Node ### 5 | # Logs 6 | logs 7 | *.log 8 | npm-debug.log* 9 | yarn-debug.log* 10 | yarn-error.log* 11 | lerna-debug.log* 12 | .pnpm-debug.log* 13 | 14 | # Diagnostic reports (https://nodejs.org/api/report.html) 15 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 16 | 17 | # Runtime data 18 | pids 19 | *.pid 20 | *.seed 21 | *.pid.lock 22 | 23 | # Directory for instrumented libs generated by jscoverage/JSCover 24 | lib-cov 25 | 26 | # Coverage directory used by tools like istanbul 27 | coverage 28 | *.lcov 29 | 30 | # nyc test coverage 31 | .nyc_output 32 | 33 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 34 | .grunt 35 | 36 | # Bower dependency directory (https://bower.io/) 37 | bower_components 38 | 39 | # node-waf configuration 40 | .lock-wscript 41 | 42 | # Compiled binary addons (https://nodejs.org/api/addons.html) 43 | build/Release 44 | 45 | # Dependency directories 46 | node_modules/ 47 | jspm_packages/ 48 | 49 | # Snowpack dependency directory (https://snowpack.dev/) 50 | web_modules/ 51 | 52 | # TypeScript cache 53 | *.tsbuildinfo 54 | 55 | # Optional npm cache directory 56 | .npm 57 | 58 | # Optional eslint cache 59 | .eslintcache 60 | 61 | # Optional stylelint cache 62 | .stylelintcache 63 | 64 | # Microbundle cache 65 | .rpt2_cache/ 66 | .rts2_cache_cjs/ 67 | .rts2_cache_es/ 68 | .rts2_cache_umd/ 69 | 70 | # Optional REPL history 71 | .node_repl_history 72 | 73 | # 
Output of 'npm pack' 74 | *.tgz 75 | 76 | # Yarn Integrity file 77 | .yarn-integrity 78 | 79 | # dotenv environment variable files 80 | .env 81 | .env.development.local 82 | .env.test.local 83 | .env.production.local 84 | .env.local 85 | 86 | # parcel-bundler cache (https://parceljs.org/) 87 | .cache 88 | .parcel-cache 89 | 90 | # Next.js build output 91 | .next 92 | out 93 | 94 | # Nuxt.js build / generate output 95 | .nuxt 96 | dist 97 | 98 | # Gatsby files 99 | .cache/ 100 | # Comment in the public line in if your project uses Gatsby and not Next.js 101 | # https://nextjs.org/blog/next-9-1#public-directory-support 102 | # public 103 | 104 | # vuepress build output 105 | .vuepress/dist 106 | 107 | # vuepress v2.x temp and cache directory 108 | .temp 109 | 110 | # Docusaurus cache and generated files 111 | .docusaurus 112 | 113 | # Serverless directories 114 | .serverless/ 115 | 116 | # FuseBox cache 117 | .fusebox/ 118 | 119 | # DynamoDB Local files 120 | .dynamodb/ 121 | 122 | # TernJS port file 123 | .tern-port 124 | 125 | # Stores VSCode versions used for testing VSCode extensions 126 | .vscode-test 127 | 128 | # yarn v2 129 | .yarn/cache 130 | .yarn/unplugged 131 | .yarn/build-state.yml 132 | .yarn/install-state.gz 133 | .pnp.* 134 | 135 | ### Node Patch ### 136 | # Serverless Webpack directories 137 | .webpack/ 138 | 139 | # Optional stylelint cache 140 | 141 | # SvelteKit build / generate output 142 | .svelte-kit 143 | 144 | ### Python ### 145 | # Byte-compiled / optimized / DLL files 146 | __pycache__/ 147 | *.py[cod] 148 | *$py.class 149 | 150 | # C extensions 151 | *.so 152 | 153 | # Distribution / packaging 154 | .Python 155 | build/ 156 | develop-eggs/ 157 | dist/ 158 | downloads/ 159 | eggs/ 160 | .eggs/ 161 | lib/ 162 | lib64/ 163 | parts/ 164 | sdist/ 165 | var/ 166 | wheels/ 167 | share/python-wheels/ 168 | *.egg-info/ 169 | .installed.cfg 170 | *.egg 171 | MANIFEST 172 | 173 | # PyInstaller 174 | # Usually these files are written by a python 
script from a template 175 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 176 | *.manifest 177 | *.spec 178 | 179 | # Installer logs 180 | pip-log.txt 181 | pip-delete-this-directory.txt 182 | 183 | # Unit test / coverage reports 184 | htmlcov/ 185 | .tox/ 186 | .nox/ 187 | .coverage 188 | .coverage.* 189 | nosetests.xml 190 | coverage.xml 191 | *.cover 192 | *.py,cover 193 | .hypothesis/ 194 | .pytest_cache/ 195 | cover/ 196 | 197 | # Translations 198 | *.mo 199 | *.pot 200 | 201 | # Django stuff: 202 | local_settings.py 203 | db.sqlite3 204 | db.sqlite3-journal 205 | 206 | # Flask stuff: 207 | instance/ 208 | .webassets-cache 209 | 210 | # Scrapy stuff: 211 | .scrapy 212 | 213 | # Sphinx documentation 214 | docs/_build/ 215 | 216 | # PyBuilder 217 | .pybuilder/ 218 | target/ 219 | 220 | # Jupyter Notebook 221 | .ipynb_checkpoints 222 | 223 | # IPython 224 | profile_default/ 225 | ipython_config.py 226 | 227 | # pyenv 228 | # For a library or package, you might want to ignore these files since the code is 229 | # intended to run in multiple environments; otherwise, check them in: 230 | # .python-version 231 | 232 | # pipenv 233 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 234 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 235 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 236 | # install all needed dependencies. 237 | #Pipfile.lock 238 | 239 | # poetry 240 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 241 | # This is especially recommended for binary packages to ensure reproducibility, and is more 242 | # commonly ignored for libraries. 
243 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 244 | #poetry.lock 245 | 246 | # pdm 247 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 248 | #pdm.lock 249 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 250 | # in version control. 251 | # https://pdm.fming.dev/#use-with-ide 252 | .pdm.toml 253 | 254 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 255 | __pypackages__/ 256 | 257 | # Celery stuff 258 | celerybeat-schedule 259 | celerybeat.pid 260 | 261 | # SageMath parsed files 262 | *.sage.py 263 | 264 | # Environments 265 | .venv 266 | env/ 267 | venv/ 268 | ENV/ 269 | env.bak/ 270 | venv.bak/ 271 | 272 | # Spyder project settings 273 | .spyderproject 274 | .spyproject 275 | 276 | # Rope project settings 277 | .ropeproject 278 | 279 | # mkdocs documentation 280 | /site 281 | 282 | # mypy 283 | .mypy_cache/ 284 | .dmypy.json 285 | dmypy.json 286 | 287 | # Pyre type checker 288 | .pyre/ 289 | 290 | # pytype static type analyzer 291 | .pytype/ 292 | 293 | # Cython debug symbols 294 | cython_debug/ 295 | 296 | # PyCharm 297 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 298 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 299 | # and can be added to the global gitignore or merged into this file. For a more nuclear 300 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
301 | #.idea/ 302 | 303 | ### Serverless ### 304 | # Ignore build directory 305 | .serverless 306 | 307 | # End of https://www.toptal.com/developers/gitignore/api/python,serverless,node -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## AWS Serverless scheduler 2 | 3 | **This code is a simplified and redesigned version of @bahrmichael's [repo](https://github.com/bahrmichael/aws-scheduler).** 4 | 5 | Right now this repo is only capable of scheduling webhook-related tasks 6 | 7 | 8 | ## Deploy 9 | 10 | This section explains how you can deploy the service yourself. Once set up, use it as shown in the Usage section below. 11 | The following picture shows you the structure of the service. 12 | 13 | ![Detailed Overview](./architecture.png) 14 | 15 | ### Prerequisites 16 | You must have the following tools installed: 17 | - serverless framework 1.48.3 or later 18 | - node 19 | - npm 20 | - python3 21 | - pip 22 | 23 | ### Setup 24 | 25 | 1) Complete the variable names in `scheduler/settings.py` (based on the names that you have in `serverless.yml`) 26 | 2) Run `scripts/init_services.py` to create the complementary services (dynamo table, SQS and SNS) 27 | 3) Run `npm i serverless-python-requirements` for the python requirements installer 28 | 4) Deploy the CloudFormation stack with `sls deploy` 29 | 30 | ## Usage 31 | 32 | ### Input 33 | 34 | To schedule a trigger you have to publish an event which follows the structure below to the ARN of the input topic. 
You can find the ARN in the console logs after deploying 35 | 36 | ```json 37 | { 38 | "date": "utc timestamp following ISO 8601", 39 | "url": "the url that will receive the scheduled request", 40 | "method": "http method for calling the url", 41 | "payload": "any payload that your endpoint might need to receive", 42 | "headers": "any headers that your endpoint might need to receive", 43 | "cookies": "any cookies that your endpoint might need to receive" 44 | } 45 | ``` 46 | 47 | date, url and method fields are mandatory. 48 | 49 | ```python 50 | # Python example for scheduling a telegram message 10 minutes later 51 | import json 52 | import boto3 53 | from datetime import datetime, timedelta 54 | 55 | client = boto3.client('sns') 56 | token = "" 57 | chat_id="" 58 | message = "Your first scheduled message!" 59 | 60 | data = f'chat_id={chat_id}&text={message}' 61 | 62 | date = (datetime.utcnow() + timedelta(minutes=10)).isoformat() 63 | 64 | event = { 65 | "date": date, 66 | "url": f'https://api.telegram.org/bot{token}/sendMessage?{data}', 67 | "method": "post" 68 | } 69 | 70 | input_topic = "arn:aws:sns:{YOUR_AWS_REGION}:{YOUR_AWS_ACCOUNT_ID}:{YOUR_INPUT_TOPIC_NAME}" 71 | client.publish(TopicArn=input_topic, Message=json.dumps(event)) 72 | ``` 73 | 74 | ## Limitations 75 | 76 | - Events may arrive more than once 77 | - Check your AWS Lambda concurrent execution quota, if you send too many messages at the same time you might drown your lambdas 78 | - This approach costs more than using DynamoDB's TTL attribute. If delays of 30 minutes to 48 hours are acceptable for you, then check out [this article](https://medium.com/swlh/scheduling-irregular-aws-lambda-executions-through-dynamodb-ttl-attributes-acd397dfbad9). 79 | 80 | ## Contributions 81 | 82 | Contributions are welcome, both issues and code. Get in touch at twitter [@agusmdev](https://twitter.com/agusmdev) or create an issue. 
83 | 84 | ## TODOs 85 | - Add batch processing for SQS and SNS messages 86 | - Add an abstraction `Scheduler` to use this architecture without friction 87 | - secure the PoC with test -------------------------------------------------------------------------------- /architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/agusmdev/aws-serverless-scheduler/7e756bbbc8f3c03644d8c36c30f8e179682ffd54/architecture.png -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | pydantic 2 | requests -------------------------------------------------------------------------------- /scheduler/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/agusmdev/aws-serverless-scheduler/7e756bbbc8f3c03644d8c36c30f8e179682ffd54/scheduler/__init__.py -------------------------------------------------------------------------------- /scheduler/consumer.py: --------------------------------------------------------------------------------
from .date_utils import date_handler
from .settings import settings
from .sqs import SQSHandler
from .dynamo import DynamoDBEvents


class EventConsumer:
    """Routes incoming scheduled events to the right backend.

    Events that are due soon go straight to the SQS delivery queue
    (which supports a bounded delay); everything further in the future
    is stored in DynamoDB and picked up later by the event loader.
    """

    def __init__(self) -> None:
        self.sqs = SQSHandler()
        self.db = DynamoDBEvents()

    def save_event(self, event):
        # Persist the event in DynamoDB until its time segment is loaded.
        self.db.put_event(event)

    def publish_to_queue(self, event):
        # Hand the event to SQS with a delivery delay derived from its date.
        self.sqs.schedule_event(event)

    def process_events(self, events):
        """Dispatch each event by how far away its `date` is.

        Args:
            events: iterable of dicts, each with an ISO-8601 `date` field.
        """
        for event in events:
            date = date_handler.from_isoformat(event["date"])
            if date_handler.seconds_until_date(date) <= settings.PUBLISH_SECONDS_THRESHOLD:  # Publish event to queue
                self.publish_to_queue(event)
            else:
                self.save_event(event)
-------------------------------------------------------------------------------- /scheduler/date_utils.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timezone, timedelta 2 | 3 | 4 | class DateTime: 5 | @property 6 | def now(self): 7 | return datetime.now(tz=timezone.utc) 8 | 9 | def get_segment(self, date, extra_minutes): 10 | return int( 11 | (date + timedelta(minutes=extra_minutes)) 12 | .replace(second=0, microsecond=0) 13 | .timestamp() 14 | ) 15 | 16 | def get_current_segment(self, extra_minutes): 17 | return self.get_segment(self.now, extra_minutes) 18 | 19 | def seconds_until_date(self, date): 20 | return int((date - self.now).total_seconds()) 21 | 22 | def from_isoformat(self, date): 23 | return datetime.fromisoformat(date) 24 | 25 | def to_isoformat(self, date): 26 | return date.isoformat() 27 | 28 | 29 | date_handler = DateTime() -------------------------------------------------------------------------------- /scheduler/dynamo.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | from boto3.dynamodb.conditions import Key 3 | from datetime import timedelta 4 | from uuid import uuid4 5 | from .settings import settings 6 | from .date_utils import date_handler 7 | 8 | 9 | class DynamoDBEvents: 10 | def __init__(self) -> None: 11 | self.table = boto3.resource('dynamodb', region_name=settings.REGION_NAME).Table(settings.EVENTS_TABLE) 12 | 13 | def get_segment_events(self, segment): 14 | return self.table.meta.client.get_paginator("query").paginate( 15 | TableName=self.table.name, 16 | KeyConditionExpression=Key('segment').eq(segment) 17 | ) 18 | 19 | def put_event(self, event): 20 | date = date_handler.from_isoformat(event["date"]) 21 | segment = date_handler.get_segment(date, extra_minutes=0) 22 | event_id = f"{int(date.timestamp() * 1000)}_{uuid4().hex}" 23 | scheduled_event = { 24 | **event, 25 | "event_id": event_id, 26 | "segment": 
segment, 27 | "time_to_live": int((date + timedelta(days=1)).timestamp()) 28 | } 29 | print(f"Saving event! {event_id=}") 30 | self.table.put_item(Item=scheduled_event) -------------------------------------------------------------------------------- /scheduler/emitter.py: --------------------------------------------------------------------------------
import json
import requests
from pydantic import BaseModel

# class Methods(str, Enum):
#     ...


class Event(BaseModel):
    # Validated shape of a scheduled webhook request.
    # NOTE(review): `date: str = None` relies on pydantic v1 treating a None
    # default as implicitly Optional — confirm before upgrading to pydantic v2.
    # Mutable dict defaults are safe here: pydantic copies defaults per instance.
    date: str = None
    payload: dict = {}
    headers: dict = {}
    cookies: dict = {}
    method: str
    url: str


class EventEmitter:
    """Delivers a due event by performing its HTTP request."""

    @staticmethod
    def send_message(event):
        # Fire the webhook described by the event; the payload is sent as JSON.
        requests.request(
            url=event.url,
            method=event.method,
            headers=event.headers,
            cookies=event.cookies,
            json=event.payload,
        )

    def emit_event(self, event):
        """Parse an SQS record's JSON `body` into an Event and send it."""
        event = Event(**json.loads(event["body"]))
        self.send_message(event)
-------------------------------------------------------------------------------- /scheduler/event_loader.py: --------------------------------------------------------------------------------
from .dynamo import DynamoDBEvents
from .sqs import SQSHandler
from .date_utils import date_handler
from .settings import settings


class EventLoader:
    """Periodically moves soon-due events from DynamoDB onto the SQS queue."""

    def __init__(self) -> None:
        self.db = DynamoDBEvents()
        self.sqs = SQSHandler()

    def publish_event(self, event):
        # Queue a single stored event for delivery with its computed delay.
        self.sqs.schedule_event(event)

    def publish_events(self, events):
        """Queue every event in *events*."""
        for event in events:
            self.publish_event(event)

    def load_events(self):
        """Query the segment LOADER_MINUTES_THRESHOLD minutes ahead and
        queue every event stored in it (paginated query)."""
        current_segment = date_handler.get_current_segment(extra_minutes=settings.LOADER_MINUTES_THRESHOLD)
        for page in self.db.get_segment_events(current_segment):
            self.publish_events(page.get("Items", []))
-------------------------------------------------------------------------------- /scheduler/handler.py: 
--------------------------------------------------------------------------------
import json
from .consumer import EventConsumer
from .event_loader import EventLoader
from .emitter import EventEmitter

# Module-level singletons so warm Lambda containers reuse AWS clients
# across invocations instead of reconnecting each time.
event_consumer = EventConsumer()
event_emitter = EventEmitter()
event_loader = EventLoader()

def consumer_handler(messages, _):  # Messages from SNS
    """SNS-triggered entry point: decode each record's Message and route it."""
    events = [json.loads(ev["Sns"]["Message"]) for ev in messages["Records"]]
    event_consumer.process_events(events)

def emitter_handler(messages, _):  # Events from SQS
    """SQS-triggered entry point: deliver each due event via HTTP."""
    for event in messages["Records"]:
        event_emitter.emit_event(event)

def event_loader_handler(*_):
    """Scheduled (cron) entry point: move soon-due events from DynamoDB to SQS."""
    event_loader.load_events()
-------------------------------------------------------------------------------- /scheduler/settings.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseSettings 2 | 3 | 4 | class AWSSettings(BaseSettings): 5 | REGION_NAME: str = "us-east-2" 6 | 7 | class Services(BaseSettings): 8 | INPUT_TOPIC: str = "serverless-scheduler-input-topic" 9 | SQS_DELIVERY: str = "serverless-scheduler-delivery" 10 | EVENTS_TABLE: str = "serverless-scheduler-events" 11 | QUEUE_URL: str = None 12 | 13 | class CommonSettings(BaseSettings): 14 | LOADER_MINUTES_THRESHOLD: int = 10 # This should be between 3 and 10 15 | PUBLISH_SECONDS_THRESHOLD: int = (LOADER_MINUTES_THRESHOLD + 5) * 60 # This should be 5 minutes greater than the LOADER 16 | 17 | 18 | class Settings(Services, AWSSettings, CommonSettings): ... 
19 | 20 | settings = Settings() 21 | -------------------------------------------------------------------------------- /scheduler/sqs.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | from .settings import settings 3 | from .date_utils import date_handler 4 | from decimal import Decimal 5 | import json 6 | 7 | 8 | class DecimalEncoder(json.JSONEncoder): 9 | def default(self, obj): 10 | # if passed in object is instance of Decimal 11 | # convert it to a string 12 | if isinstance(obj, Decimal): 13 | return str(obj) 14 | # otherwise use the default behavior 15 | return json.JSONEncoder.default(self, obj) 16 | 17 | 18 | class SQSHandler: 19 | def __init__(self) -> None: 20 | self.client = boto3.client("sqs", region_name=settings.REGION_NAME) 21 | 22 | def schedule_event(self, event): 23 | """ 24 | event must have `date` 25 | Args: 26 | event (Dict): _description_ 27 | """ 28 | date_to_publish = date_handler.from_isoformat(event["date"]) 29 | seconds = date_handler.seconds_until_date(date_to_publish) 30 | 31 | sqs_message = { 32 | "MessageBody": json.dumps( 33 | {**event, "now": date_handler.to_isoformat(date_handler.now)}, 34 | cls=DecimalEncoder, 35 | ), 36 | "DelaySeconds": seconds if seconds > 0 else 0, 37 | } 38 | self.client.send_message(QueueUrl=settings.QUEUE_URL, **sqs_message) 39 | print("Published event to queue") # TODO change for logger 40 | -------------------------------------------------------------------------------- /scripts/init_services.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | from scheduler.settings import settings 3 | 4 | def init_db(): 5 | db_client = boto3.client('dynamodb', region_name=settings.REGION_NAME) 6 | name = settings.EVENTS_TABLE 7 | if name in db_client.list_tables()['TableNames']: 8 | print("Table already created...") 9 | return 10 | 11 | db_client.create_table( 12 | TableName=name, 13 | AttributeDefinitions=[ 14 | 
{ 15 | 'AttributeName': 'segment', 16 | 'AttributeType': 'N' 17 | }, 18 | { 19 | 'AttributeName': 'event_id', 20 | 'AttributeType': 'S' 21 | } 22 | ], 23 | KeySchema=[ 24 | { 25 | 'AttributeName': 'segment', 26 | 'KeyType': 'HASH' 27 | }, 28 | { 29 | 'AttributeName': 'event_id', 30 | 'KeyType': 'RANGE' 31 | } 32 | ], 33 | BillingMode='PAY_PER_REQUEST', 34 | ) 35 | print(f'Creating table ...') 36 | db_client.get_waiter('table_exists').wait(TableName=name) 37 | db_client.update_time_to_live( 38 | TableName=name, 39 | TimeToLiveSpecification={ 40 | 'Enabled': True, 41 | 'AttributeName': 'time_to_live' 42 | } 43 | ) 44 | print(f'New table {name} created!') 45 | 46 | 47 | def init_queue(): 48 | sqs_client = boto3.client('sqs', region_name=settings.REGION_NAME) 49 | name = settings.SQS_DELIVERY 50 | print(f'Creating queue {name}') 51 | create_response = sqs_client.create_queue( 52 | QueueName=name, 53 | ) 54 | url = create_response['QueueUrl'] 55 | print(f'Created topic {name} with arn {url}') 56 | 57 | def init_input_topic(): 58 | sns_client = boto3.client('sns', region_name=settings.REGION_NAME) 59 | name = settings.INPUT_TOPIC 60 | print(f'Creating topic {name}') 61 | create_response = sns_client.create_topic( 62 | Name=name, 63 | ) 64 | 65 | arn = create_response['TopicArn'] 66 | print(f'Created topic {name} with arn {arn}') 67 | 68 | 69 | if __name__ == '__main__': 70 | init_db() 71 | init_queue() 72 | init_input_topic() -------------------------------------------------------------------------------- /serverless.yml: -------------------------------------------------------------------------------- 1 | service: serverless-scheduler 2 | 3 | provider: 4 | name: aws 5 | runtime: python3.9 6 | stage: ${opt:stage, 'dev'} 7 | region: ${opt:region, 'us-east-2'} 8 | environment: 9 | STAGE: "${self:provider.stage}" 10 | REGION_NAME: "${self:provider.region}" 11 | QUEUE_URL: "${self:services.queue.url}" 12 | tags: 13 | project: serverless-scheduler 14 | # iam: 15 | # role: !Sub 
arn:aws:iam::${AWS::AccountId}:role/lambda_full_access # Existing role on my AWS account 16 | iamRoleStatements: 17 | - Effect: Allow 18 | Action: 19 | - dynamodb:DescribeTable 20 | - dynamodb:DeleteItem 21 | - dynamodb:PutItem 22 | - dynamodb:DescribeTable 23 | - dynamodb:Query 24 | - dynamodb:GetItem 25 | - dynamodb:GetShardIterator 26 | Resource: 27 | - "${self:services.table.arn}" 28 | - Effect: Allow 29 | Action: 30 | - lambda:InvokeFunction 31 | Resource: "*" 32 | - Effect: Allow 33 | Action: 34 | - SQS:SendMessage 35 | Resource: 36 | - "${self:services.queue.arn}" 37 | - Effect: Allow 38 | Action: 39 | - cloudwatch:PutMetricData 40 | Resource: "*" 41 | 42 | services: 43 | inbound: 44 | name: "serverless-scheduler-input-topic" 45 | arn: { "Fn::Join": [":", ["arn:aws:sns:${self:provider.region}", { "Ref": "AWS::AccountId" }, "${self:services.inbound.name}" ] ] } 46 | queue: 47 | name: "serverless-scheduler-delivery" 48 | arn: { "Fn::Join": [":", ["arn:aws:sqs:${self:provider.region}", { "Ref": "AWS::AccountId" }, "${self:services.queue.name}" ] ] } 49 | url: { "Fn::Join": ["/", ["https://sqs.${self:provider.region}.amazonaws.com", { "Ref": "AWS::AccountId" }, "${self:services.queue.name}" ] ] } 50 | table: 51 | arn: { "Fn::Join": [":", ["arn:aws:dynamodb:${self:provider.region}", { "Ref": "AWS::AccountId" }, "table/serverless-scheduler-events" ] ] } 52 | pythonRequirements: 53 | pythonBin: python3 54 | 55 | functions: 56 | consumer: 57 | handler: scheduler/handler.consumer_handler 58 | events: 59 | - sns: 60 | arn: 61 | Fn::Join: 62 | - ':' 63 | - - 'arn:aws:sns' 64 | - Ref: 'AWS::Region' 65 | - Ref: 'AWS::AccountId' 66 | - "${self:services.inbound.name}" 67 | topicName: "${self:services.inbound.name}" 68 | tags: 69 | resource: serverless-scheduler-consumer 70 | 71 | eventLoader: 72 | handler: scheduler/handler.event_loader_handler 73 | events: 74 | - schedule: rate(1 minute) 75 | # when we have to wait for DynamoDB autoscaling we may exceed the default of 6s 
76 | timeout: 30 77 | tags: 78 | resource: serverless-scheduler-event-loader 79 | 80 | emitter: 81 | handler: scheduler/handler.emitter_handler 82 | events: 83 | - sqs: 84 | arn: 85 | Fn::Join: 86 | - ':' 87 | - - arn 88 | - aws 89 | - sqs 90 | - Ref: AWS::Region 91 | - Ref: AWS::AccountId 92 | - "${self:services.queue.name}" 93 | timeout: 20 94 | tags: 95 | resource: serverless-scheduler-emitter 96 | 97 | plugins: 98 | - serverless-python-requirements 99 | package: 100 | exclude: 101 | - venv/** 102 | - node_modules/** --------------------------------------------------------------------------------