├── .gitignore
├── LICENSE
├── Makefile
├── README.md
├── apigw-lambda
│   ├── README.md
│   ├── lambda
│   │   └── lambda_function.py
│   ├── main.tf
│   ├── outputs.tf
│   └── providers.tf
├── docker-compose.yml
├── elasticsearch
│   ├── README.md
│   ├── main.tf
│   └── providers.tf
├── kinesis-lambda-dynamodb
│   ├── README.md
│   ├── lambda
│   │   └── order-processor
│   │       ├── deps
│   │       │   └── requirements.txt
│   │       ├── packages
│   │       │   └── .placeholder
│   │       └── src
│   │           └── lambda_function.py
│   ├── main.tf
│   ├── providers.tf
│   ├── put_record.py
│   └── zip.sh
├── s3-backend
│   ├── README.md
│   ├── example
│   │   └── main.tf
│   └── main.tf
├── s3-bucket
│   ├── README.md
│   └── main.tf
├── s3-sqs-lambda-dynamodb
│   ├── .gitignore
│   ├── README.md
│   ├── file.json
│   ├── lambda_function.py
│   ├── main.tf
│   └── outputs.tf
└── secrets-manager
    ├── README.md
    ├── main.tf
    └── providers.tf

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
apigw-lambda/.terraform*
apigw-lambda/terraform.tfstate*
elasticsearch/.terraform*
elasticsearch/terraform.tfstate*
s3-backend/.terraform*
s3-backend/terraform.tfstate*
s3-bucket/.terraform*
s3-bucket/terraform.tfstate*
secrets-manager/terraform.tfstate*
secrets-manager/.terraform*
kinesis-lambda-dynamodb/.terraform*
kinesis-lambda-dynamodb/terraform.tfstate*
kinesis-lambda-dynamodb/lambda/order-processor/deployment_package.zip
volume/*

--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2022 Ruan Bekker

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
# Thanks: https://gist.github.com/mpneuried/0594963ad38e68917ef189b4e6a269db
.PHONY: help

help: ## This help.
	@awk 'BEGIN {FS = ":.*?## "} /^[a-zA-Z_-]+:.*?## / {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST)

.DEFAULT_GOAL := help

# Determine whether to use "docker-compose" or "docker compose"
DOCKER_COMPOSE := $(shell which docker-compose 2>/dev/null)
ifeq ($(DOCKER_COMPOSE),)
DOCKER_COMPOSE := $(shell which docker 2>/dev/null)
PREFIX := compose
else
PREFIX :=
endif

# DOCKER TASKS
up: ## Runs the containers in detached mode
	$(DOCKER_COMPOSE) $(PREFIX) up -d

clean: ## Stops and removes all containers
	$(DOCKER_COMPOSE) $(PREFIX) down

logs: ## View the logs from the containers
	$(DOCKER_COMPOSE) $(PREFIX) logs -f
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# terraform-localstack-examples
Terraform on Localstack Examples

## Examples

- [API Gateway, Lambda, DynamoDB](./apigw-lambda)
- [Elasticsearch](./elasticsearch)
- [Kinesis, Lambda, DynamoDB](./kinesis-lambda-dynamodb)
- [S3 Backend](./s3-backend)
- [S3 Bucket](./s3-bucket)
- [S3, SQS, Lambda, DynamoDB](./s3-sqs-lambda-dynamodb)
- [Secrets Manager](./secrets-manager)

--------------------------------------------------------------------------------
/apigw-lambda/README.md:
--------------------------------------------------------------------------------
# API Gateway and Lambda

## Localstack

Boot localstack:

```bash
pushd ../
make up
popd
```

## Usage

Deploy the infrastructure on localstack with terraform:

```bash
terraform init
terraform apply -auto-approve
```

You should see something like this:

```
Apply complete! Resources: 23 added, 0 changed, 0 destroyed.

Outputs:

apigw_id = "vi0bygtqxi"
apigw_message_path = "/message"
message_invoke_url = "http://localhost:4566/restapis/vi0bygtqxi/dev/_user_request_/message"
```

We can verify with the AWS CLI that our DynamoDB table has been provisioned:

```bash
aws --endpoint-url=http://localhost:4566 dynamodb list-tables --region eu-west-1
```
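The same check can be done from Python. This is a minimal sketch assuming LocalStack's edge port on localhost:4566; the credentials are placeholders, since LocalStack accepts any values:

```python
import boto3

# DynamoDB client pointed at LocalStack instead of real AWS
dynamodb = boto3.client(
    'dynamodb',
    region_name='eu-west-1',
    aws_access_key_id='localstack',
    aws_secret_access_key='localstack',
    endpoint_url='http://localhost:4566',
)

# Should print ['items'] once the apply has completed
print(dynamodb.list_tables()['TableNames'])
```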
The basic lambda logic:

```python
def lambda_handler(event, context):
    if event['path'] == '/message':
        payload = json.loads(event['body'])
    else:
        payload = 'welcome'
    return {
        'statusCode': 200,
        'body': payload
    }
```

Make a GET request against API GW:

```bash
curl -H 'Content-Type: application/json' -XGET "http://localhost:4566/restapis/vi0bygtqxi/dev/_user_request_/message"
[]
```

Make a POST request against API GW:

```bash
curl -H 'Content-Type: application/json' -XPOST "http://localhost:4566/restapis/vi0bygtqxi/dev/_user_request_/message" -d '{"key": "some value"}'
{"item_id": "8e24a1b6-3bd9-4306-b22a-86dc26f86bb3", "message": "some value"}
```

Make a GET request to retrieve information about the item:

```bash
curl -H 'Content-Type: application/json' -XGET "http://localhost:4566/restapis/vi0bygtqxi/dev/_user_request_/message/8e24a1b6-3bd9-4306-b22a-86dc26f86bb3"
{"item_id": "8e24a1b6-3bd9-4306-b22a-86dc26f86bb3", "message": "some value"}
```

Make a PUT request to update the content in DynamoDB:

```bash
curl -H 'Content-Type: application/json' -XPUT "http://localhost:4566/restapis/vi0bygtqxi/dev/_user_request_/message/8e24a1b6-3bd9-4306-b22a-86dc26f86bb3" -d '{"key": "new value"}'
{"message": "Item with id 8e24a1b6-3bd9-4306-b22a-86dc26f86bb3 updated to new value"}
```

Make a DELETE request to remove the item from DynamoDB:

```bash
curl -H 'Content-Type: application/json' -XDELETE "http://localhost:4566/restapis/vi0bygtqxi/dev/_user_request_/message/8e24a1b6-3bd9-4306-b22a-86dc26f86bb3"
{"message": "Item with id 8e24a1b6-3bd9-4306-b22a-86dc26f86bb3 deleted"}
```
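The same CRUD round trip can be scripted with the Python standard library. A minimal sketch; substitute the REST API id (the "vi0bygtqxi" from the example run above) with the `apigw_id` from your own terraform output:

```python
import json
import urllib.request

BASE = "http://localhost:4566/restapis/vi0bygtqxi/dev/_user_request_/message"

def call(method, url, body=None):
    # Small helper around urllib for JSON requests
    data = json.dumps(body).encode() if body is not None else None
    req = urllib.request.Request(
        url, data=data, method=method,
        headers={"Content-Type": "application/json"},
    )
    with urllib.request.urlopen(req) as resp:
        return json.loads(resp.read())

created = call("POST", BASE, {"key": "some value"})
item_id = created["item_id"]
print(call("GET", f"{BASE}/{item_id}"))                        # read it back
print(call("PUT", f"{BASE}/{item_id}", {"key": "new value"}))  # update it
print(call("DELETE", f"{BASE}/{item_id}"))                     # remove it
```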
To destroy the infrastructure:

```bash
terraform destroy
```
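You can also smoke-test the handler (shown in full below) without going through API Gateway, by invoking it with a hand-built proxy event. A sketch, assuming the hostname `localstack` resolves to your LocalStack container (for example via an `/etc/hosts` entry), since the handler's DynamoDB client is hard-wired to `http://localstack:4566`:

```python
import json
import os

# The handler reads the table name and region/credentials from the environment
os.environ.setdefault('DYNAMODB_TABLE_NAME', 'items')
os.environ.setdefault('AWS_DEFAULT_REGION', 'eu-west-1')
os.environ.setdefault('AWS_ACCESS_KEY_ID', 'localstack')
os.environ.setdefault('AWS_SECRET_ACCESS_KEY', 'localstack')

from lambda_function import lambda_handler  # apigw-lambda/lambda/lambda_function.py

# Minimal API Gateway proxy event -- only the fields the handler inspects
event = {
    'httpMethod': 'POST',
    'path': '/message',
    'pathParameters': None,
    'body': json.dumps({'key': 'some value'}),
}

print(lambda_handler(event, None))
```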
--------------------------------------------------------------------------------
/apigw-lambda/lambda/lambda_function.py:
--------------------------------------------------------------------------------
import os
import uuid
import json
import boto3
import logging

# Configure logging
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)

dynamodb = boto3.client('dynamodb', endpoint_url='http://localstack:4566')

def lambda_handler(event, context):
    logger.debug("Event: %s", json.dumps(event))
    table_name = os.environ['DYNAMODB_TABLE_NAME']
    response = None

    try:
        if event['httpMethod'] == 'POST' and event['path'] == '/message':
            # Access payload
            payload = json.loads(event['body'])
            logger.debug("Payload: %s", payload)
            item_id = str(uuid.uuid4())
            message = payload.get('key', 'default_message')

            # Write item to table
            dynamodb.put_item(
                TableName=table_name,
                Item={
                    'ItemID': {'S': item_id},
                    'message': {'S': message}
                }
            )
            response = {
                'item_id': item_id,
                'message': message
            }
        elif event['httpMethod'] == 'GET' and event['path'] == '/message':
            result = dynamodb.scan(TableName=table_name)
            items = result.get('Items', [])
            response = [
                {
                    'item_id': item['ItemID']['S'],
                    'message': item['message']['S']
                } for item in items
            ]
        elif event['httpMethod'] == 'GET' and event['pathParameters'] and 'item_id' in event['pathParameters']:
            item_id = event['pathParameters']['item_id']

            # Get item from table
            result = dynamodb.get_item(
                TableName=table_name,
                Key={
                    'ItemID': {'S': item_id}
                }
            )
            if 'Item' in result:
                response = {
                    'item_id': item_id,
                    'message': result['Item']['message']['S']
                }
            else:
                response = {
                    'error': 'Item not found'
                }
        elif event['httpMethod'] == 'DELETE' and event['pathParameters'] and 'item_id' in event['pathParameters']:
            item_id = event['pathParameters']['item_id']

            # Delete item from table
            dynamodb.delete_item(
                TableName=table_name,
                Key={
                    'ItemID': {'S': item_id}
                }
            )
            response = {
                'message': f'Item with id {item_id} deleted'
            }
        elif event['httpMethod'] == 'PUT' and event['pathParameters'] and 'item_id' in event['pathParameters']:
            item_id = event['pathParameters']['item_id']
            payload = json.loads(event['body'])
            logger.debug("Payload: %s", payload)
            message = payload.get('key', 'default_message')

            # Update item in table
            dynamodb.update_item(
                TableName=table_name,
                Key={
                    'ItemID': {'S': item_id}
                },
                UpdateExpression="set message = :m",
                ExpressionAttributeValues={
                    ':m': {'S': message}
                },
                ReturnValues="UPDATED_NEW"
            )
            response = {
                'message': f'Item with id {item_id} updated to {message}'
            }
        else:
            response = {
                'message': 'Invalid request'
            }

        return {
            'isBase64Encoded': False,
            'statusCode': 200,
            'headers': {
                'Content-Type': 'application/json'
            },
            'body': json.dumps(response)
        }
    except Exception as e:
        logger.error("Error: %s", str(e))
        return {
            'isBase64Encoded': False,
            'statusCode': 500,
            'headers': {
                'Content-Type': 'application/json'
            },
            'body': json.dumps({
                'error': str(e)
            })
        }

--------------------------------------------------------------------------------
/apigw-lambda/main.tf:
--------------------------------------------------------------------------------
data "aws_caller_identity" "current" {}

data "archive_file" "zip" {
  type             = "zip"
  source_file      = "${path.module}/lambda/lambda_function.py"
  output_file_mode = "0666"
  output_path      = "/tmp/deployment_package.zip"
}

# API Gateway
resource "aws_api_gateway_rest_api" "api" {
  name        = "test-api"
  description = "This is my API for demonstration purposes"
}

# Message resource
resource "aws_api_gateway_resource" "message_resource" {
  path_part   = "message"
  parent_id   = aws_api_gateway_rest_api.api.root_resource_id
  rest_api_id = aws_api_gateway_rest_api.api.id
}

# GET /message
resource "aws_api_gateway_method" "get_message" {
  rest_api_id   = aws_api_gateway_rest_api.api.id
  resource_id   = aws_api_gateway_resource.message_resource.id
  http_method   = "GET"
  authorization = "NONE"
}

resource "aws_api_gateway_integration" "get_message_integration" {
  rest_api_id             = aws_api_gateway_rest_api.api.id
  resource_id             = aws_api_gateway_resource.message_resource.id
  http_method             = aws_api_gateway_method.get_message.http_method
  integration_http_method = "POST"
  type                    = "AWS_PROXY"
  uri                     = aws_lambda_function.lambda.invoke_arn
}

# GET /message/{item_id}
resource "aws_api_gateway_method" "get_message_item" {
  rest_api_id   = aws_api_gateway_rest_api.api.id
  resource_id   = aws_api_gateway_resource.message_item_id_resource.id
  http_method   = "GET"
  authorization = "NONE"
}

resource "aws_api_gateway_integration" "get_message_item_integration" {
  rest_api_id             = aws_api_gateway_rest_api.api.id
  resource_id             = aws_api_gateway_resource.message_item_id_resource.id
  http_method             = aws_api_gateway_method.get_message_item.http_method
  integration_http_method = "POST"
  type                    = "AWS_PROXY"
  uri                     = aws_lambda_function.lambda.invoke_arn
}

# POST /message
resource "aws_api_gateway_method" "post_message" {
  rest_api_id   = aws_api_gateway_rest_api.api.id
  resource_id   = aws_api_gateway_resource.message_resource.id
  http_method   = "POST"
  authorization = "NONE"
}

resource "aws_api_gateway_integration" "post_message_integration" {
  rest_api_id             = aws_api_gateway_rest_api.api.id
  resource_id             = aws_api_gateway_resource.message_resource.id
  http_method             = aws_api_gateway_method.post_message.http_method
  integration_http_method = "POST"
  type                    = "AWS_PROXY"
  uri                     = aws_lambda_function.lambda.invoke_arn
}
# DELETE /message/{item_id}
resource "aws_api_gateway_resource" "message_item_id_resource" {
  path_part   = "{item_id}"
  parent_id   = aws_api_gateway_resource.message_resource.id
  rest_api_id = aws_api_gateway_rest_api.api.id
}

resource "aws_api_gateway_method" "delete_message_item" {
  rest_api_id   = aws_api_gateway_rest_api.api.id
  resource_id   = aws_api_gateway_resource.message_item_id_resource.id
  http_method   = "DELETE"
  authorization = "NONE"
}

resource "aws_api_gateway_integration" "delete_message_item_integration" {
  rest_api_id             = aws_api_gateway_rest_api.api.id
  resource_id             = aws_api_gateway_resource.message_item_id_resource.id
  http_method             = aws_api_gateway_method.delete_message_item.http_method
  integration_http_method = "POST"
  type                    = "AWS_PROXY"
  uri                     = aws_lambda_function.lambda.invoke_arn
}

# PUT /message/{item_id}
resource "aws_api_gateway_method" "put_message_item" {
  rest_api_id   = aws_api_gateway_rest_api.api.id
  resource_id   = aws_api_gateway_resource.message_item_id_resource.id
  http_method   = "PUT"
  authorization = "NONE"
}

resource "aws_api_gateway_integration" "put_message_item_integration" {
  rest_api_id             = aws_api_gateway_rest_api.api.id
  resource_id             = aws_api_gateway_resource.message_item_id_resource.id
  http_method             = aws_api_gateway_method.put_message_item.http_method
  integration_http_method = "POST"
  type                    = "AWS_PROXY"
  uri                     = aws_lambda_function.lambda.invoke_arn
}

# Deployment
resource "aws_api_gateway_deployment" "deployment" {
  rest_api_id = aws_api_gateway_rest_api.api.id

  triggers = {
    redeployment = data.archive_file.zip.output_base64sha256
  }

  lifecycle {
    create_before_destroy = true
  }

  depends_on = [
    aws_api_gateway_rest_api.api,
    aws_api_gateway_method.get_message,
    aws_api_gateway_method.get_message_item,
    aws_api_gateway_method.post_message,
    aws_api_gateway_method.delete_message_item,
    aws_api_gateway_method.put_message_item,
    aws_api_gateway_integration.get_message_integration,
    aws_api_gateway_integration.get_message_item_integration,
    aws_api_gateway_integration.post_message_integration,
    aws_api_gateway_integration.delete_message_item_integration,
    aws_api_gateway_integration.put_message_item_integration
  ]
}

resource "aws_api_gateway_stage" "demo_stage" {
  deployment_id = aws_api_gateway_deployment.deployment.id
  rest_api_id   = aws_api_gateway_rest_api.api.id
  stage_name    = "dev"
}

# DynamoDB
resource "aws_dynamodb_table" "items" {
  name           = "items"
  read_capacity  = "2"
  write_capacity = "5"
  hash_key       = "ItemID"

  attribute {
    name = "ItemID"
    type = "S"
  }
}

# Lambda Permissions
resource "aws_lambda_permission" "apigw_lambda_get_message" {
  statement_id  = "AllowExecutionFromAPIGatewayGetMessage"
  action        = "lambda:InvokeFunction"
  function_name = aws_lambda_function.lambda.function_name
  principal     = "apigateway.amazonaws.com"
  source_arn    = "arn:aws:execute-api:eu-west-1:${data.aws_caller_identity.current.account_id}:${aws_api_gateway_rest_api.api.id}/*/${aws_api_gateway_method.get_message.http_method}${aws_api_gateway_resource.message_resource.path}"
}

resource "aws_lambda_permission" "apigw_lambda_get_message_item" {
  statement_id  = "AllowExecutionFromAPIGatewayGetMessageItem"
  action        = "lambda:InvokeFunction"
  function_name = aws_lambda_function.lambda.function_name
  principal     = "apigateway.amazonaws.com"
  source_arn    = "arn:aws:execute-api:eu-west-1:${data.aws_caller_identity.current.account_id}:${aws_api_gateway_rest_api.api.id}/*/${aws_api_gateway_method.get_message_item.http_method}${aws_api_gateway_resource.message_item_id_resource.path}"
}

resource "aws_lambda_permission" "apigw_lambda_post_message" {
  statement_id  = "AllowExecutionFromAPIGatewayPostMessage"
  action        = "lambda:InvokeFunction"
  function_name = aws_lambda_function.lambda.function_name
  principal     = "apigateway.amazonaws.com"
  source_arn    = "arn:aws:execute-api:eu-west-1:${data.aws_caller_identity.current.account_id}:${aws_api_gateway_rest_api.api.id}/*/${aws_api_gateway_method.post_message.http_method}${aws_api_gateway_resource.message_resource.path}"
}

resource "aws_lambda_permission" "apigw_lambda_delete_message_item" {
  statement_id  = "AllowExecutionFromAPIGatewayDeleteMessageItem"
  action        = "lambda:InvokeFunction"
  function_name = aws_lambda_function.lambda.function_name
  principal     = "apigateway.amazonaws.com"
  source_arn    = "arn:aws:execute-api:eu-west-1:${data.aws_caller_identity.current.account_id}:${aws_api_gateway_rest_api.api.id}/*/${aws_api_gateway_method.delete_message_item.http_method}${aws_api_gateway_resource.message_item_id_resource.path}"
}

resource "aws_lambda_permission" "apigw_lambda_put_message_item" {
  statement_id  = "AllowExecutionFromAPIGatewayPutMessageItem"
  action        = "lambda:InvokeFunction"
  function_name = aws_lambda_function.lambda.function_name
  principal     = "apigateway.amazonaws.com"
  source_arn    = "arn:aws:execute-api:eu-west-1:${data.aws_caller_identity.current.account_id}:${aws_api_gateway_rest_api.api.id}/*/${aws_api_gateway_method.put_message_item.http_method}${aws_api_gateway_resource.message_item_id_resource.path}"
}
"arn:aws:execute-api:eu-west-1:${data.aws_caller_identity.current.account_id}:${aws_api_gateway_rest_api.api.id}/*/${aws_api_gateway_method.put_message_item.http_method}${aws_api_gateway_resource.message_item_id_resource.path}" 198 | } 199 | 200 | # Lambda Function 201 | resource "aws_lambda_function" "lambda" { 202 | filename = data.archive_file.zip.output_path 203 | source_code_hash = data.archive_file.zip.output_base64sha256 204 | function_name = "test-lambda" 205 | role = aws_iam_role.role.arn 206 | handler = "lambda_function.lambda_handler" 207 | runtime = "python3.7" 208 | timeout = 10 209 | 210 | environment { 211 | variables = { 212 | DYNAMODB_TABLE_NAME = aws_dynamodb_table.items.name 213 | } 214 | } 215 | } 216 | 217 | # IAM Role 218 | resource "aws_iam_role" "role" { 219 | name = "myrole" 220 | 221 | assume_role_policy = < 8 | 9 | 1. AWS CLI to do a `PutRecord` with the data value "pizza" base64 encoded 10 | 2. The Kinesis Stream has a Event Trigger to Invoke the Lambda Function 11 | 3. The Lambda Function receives the data in the event body and writes to DynamoDB 12 | 4. AWS CLI to do a `Scan` on DynamoDB to preview the data in the table 13 | 14 | ## Requirements 15 | 16 | 1. AWS CLI 17 | 2. Python and Pip 18 | 3. Terraform 19 | 4. Docker Compose 20 | 21 | ## Usage 22 | 23 | Boot localstack: 24 | 25 | ```bash 26 | $ pushd ../ 27 | $ make up 28 | $ popd 29 | ``` 30 | 31 | Create the deployment package for Lambda: 32 | 33 | ```bash 34 | $ ./zip.sh 35 | ``` 36 | 37 | Provision Infrastructure: 38 | 39 | ```bash 40 | $ terraform init 41 | $ terraform plan 42 | $ terraform apply 43 | ``` 44 | 45 | To use the awscli we need to use `--endpoint-url http://localhost:4566`, but I will alias it to `awslocal` for simplicity: 46 | 47 | ```bash 48 | $ alias awslocal="aws --endpoint-url http://localhost:4566 --region eu-west-1" 49 | ``` 50 | 51 | Now we should be able to list our resources: 52 | 53 | ```bash 54 | $ awslocal dynamodb list-tables 55 | { 56 | "TableNames": [ 57 | "orders" 58 | ] 59 | } 60 | ``` 61 | 62 | Put a record to the Kinesis Stream: 63 | 64 | ```bash 65 | $ awslocal kinesis put-record --stream-name orders_processor --partition-key 123 --data $(echo -n "pizza" | base64) 66 | { 67 | "ShardId": "shardId-000000000000", 68 | "SequenceNumber": "49626853442679825006635798069828080735600763790688256002" 69 | } 70 | ``` 71 | 72 | View the logs from localstack: 73 | 74 | ```bash 75 | $ docker logs -f localstack 76 | 77 | > START RequestId: 29eceff2-c4c1-17d0-a874-27f0dd913a86 Version: $LATEST 78 | > EventID: shardId-000000000000:49626853442679825006635798069828080735600763790688256002, HashKey: 76379068825600, Data: pizza 79 | > DynamoDB RequestID: 974099a3-2f49-4f0f-b7e4-2c53b07db028 80 | > END RequestId: 29eceff2-c4c1-17d0-a874-27f0dd913a86 81 | > REPORT RequestId: 29eceff2-c4c1-17d0-a874-27f0dd913a86 Init Duration: 221.72 ms Duration: 34.28 ms Billed Duration: 100 ms Memory Size: 1536 MB Max Memory Used: 40 MB 82 | ``` 83 | 84 | Scan the DynamoDB Table: 85 | 86 | ```bash 87 | $ awslocal dynamodb scan --table-name orders 88 | { 89 | "Items": [ 90 | { 91 | "EventID": { 92 | "S": "shardId-000000000000:49626853442679825006635798069828080735600763790688256002" 93 | }, 94 | "OrderData": { 95 | "S": "pizza" 96 | }, 97 | "OrderID": { 98 | "S": "76379068825600" 99 | }, 100 | "Timestamp": { 101 | "S": "2022-02-17T16:29:36" 102 | } 103 | } 104 | ], 105 | "Count": 1, 106 | "ScannedCount": 1, 107 | "ConsumedCapacity": null 108 | } 109 | ``` 110 | 111 | GetItem using DynamoDB: 112 | 113 | 
Scan the DynamoDB Table:

```bash
$ awslocal dynamodb scan --table-name orders
{
    "Items": [
        {
            "EventID": {
                "S": "shardId-000000000000:49626853442679825006635798069828080735600763790688256002"
            },
            "OrderData": {
                "S": "pizza"
            },
            "OrderID": {
                "S": "76379068825600"
            },
            "Timestamp": {
                "S": "2022-02-17T16:29:36"
            }
        }
    ],
    "Count": 1,
    "ScannedCount": 1,
    "ConsumedCapacity": null
}
```

GetItem using DynamoDB:

```bash
$ awslocal dynamodb get-item --table-name orders --key '{"OrderID": {"S": "76379068825600"}}'
{
    "Item": {
        "EventID": {
            "S": "shardId-000000000000:49626853442679825006635798069828080735600763790688256002"
        },
        "OrderData": {
            "S": "pizza"
        },
        "OrderID": {
            "S": "76379068825600"
        },
        "Timestamp": {
            "S": "2022-02-17T16:29:36"
        }
    }
}
```
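To inspect the stream itself rather than the table, you can read the shard directly with boto3. A sketch assuming the single-shard stream from `main.tf` and LocalStack on localhost:

```python
import boto3

kinesis = boto3.client(
    'kinesis',
    region_name='eu-west-1',
    aws_access_key_id='localstack',
    aws_secret_access_key='localstack',
    endpoint_url='http://localhost:4566',
)

iterator = kinesis.get_shard_iterator(
    StreamName='orders_processor',
    ShardId='shardId-000000000000',  # the stream is provisioned with shard_count = 1
    ShardIteratorType='TRIM_HORIZON',
)['ShardIterator']

for record in kinesis.get_records(ShardIterator=iterator)['Records']:
    # boto3 hands back Data as raw bytes, already base64-decoded
    print(record['SequenceNumber'], record['Data'].decode())
```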
## Code Structure

```bash
.
├── README.md
├── lambda
│   └── order-processor            - Lambda Function Folder
│       ├── deployment_package.zip - Location where zip.sh will package the lambda and dependencies for Terraform
│       ├── deps                   - The Lambda uses the Python runtime and the packaging references requirements.txt
│       │   └── requirements.txt
│       ├── packages               - The required packages will be installed into this directory by zip.sh
│       │   └── .placeholder
│       └── src
│           └── lambda_function.py - Lambda Function Source Code
├── main.tf                        - AWS Infrastructure via Terraform
├── providers.tf                   - Terraform and AWS provider configuration for LocalStack
├── put_record.py                  - Python equivalent of doing a PutRecord to Kinesis
└── zip.sh                         - Script that loops through each function folder and zips the deployment package
```

(The `docker-compose.yml` that runs LocalStack lives in the repository root.)

--------------------------------------------------------------------------------
/kinesis-lambda-dynamodb/lambda/order-processor/deps/requirements.txt:
--------------------------------------------------------------------------------
boto3
requests

--------------------------------------------------------------------------------
/kinesis-lambda-dynamodb/lambda/order-processor/packages/.placeholder:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ruanbekker/terraform-localstack-examples/1a647db2e202cc787383dc1ca1955d15dbeb0570/kinesis-lambda-dynamodb/lambda/order-processor/packages/.placeholder

--------------------------------------------------------------------------------
/kinesis-lambda-dynamodb/lambda/order-processor/src/lambda_function.py:
--------------------------------------------------------------------------------
import boto3
from base64 import b64decode
from datetime import datetime as dt

ddb = boto3.Session(region_name='eu-west-1').client(
    'dynamodb',
    aws_access_key_id='localstack',
    aws_secret_access_key='localstack',
    endpoint_url='http://localstack:4566'
)

def decode_base64(string_to_decode):
    #if type(string_to_decode) is not bytes:
    #    string_to_decode = string_to_decode.encode('utf-8')
    response = b64decode(string_to_decode).decode('utf-8')
    return response

def write_to_dynamodb(hashkey, event_id, value):
    response = ddb.put_item(
        TableName='orders',
        Item={
            'OrderID': {'S': hashkey},
            'EventID': {'S': event_id},
            'OrderData': {'S': value},
            'Timestamp': {'S': dt.now().strftime("%Y-%m-%dT%H:%M:%S")}
        }
    )
    return response

def lambda_handler(event, request):
    for record in event['Records']:
        event_id = record['eventID']
        hashkey = event_id[-15:-1]
        value = decode_base64(record['kinesis']['data'])
        item = write_to_dynamodb(hashkey, event_id, value)
        print('EventID: {}, HashKey: {}, Data: {}'.format(event_id, hashkey, value))
        print('DynamoDB RequestID: {}'.format(item['ResponseMetadata']['RequestId']))
    #print(event)
    return event

--------------------------------------------------------------------------------
/kinesis-lambda-dynamodb/main.tf:
--------------------------------------------------------------------------------
data "archive_file" "order_processor_package" {
  type             = "zip"
  source_file      = "${path.module}/lambda/order-processor/src/lambda_function.py"
  output_file_mode = "0666"
  output_path      = "/tmp/deployment_package.zip"
}

resource "aws_dynamodb_table" "orders" {
  name           = "orders"
  read_capacity  = "2"
  write_capacity = "5"
  hash_key       = "OrderID"

  attribute {
    name = "OrderID"
    type = "S"
  }
}

resource "aws_kinesis_stream" "orders_processor" {
  name             = "orders_processor"
  shard_count      = 1
  retention_period = 30

  shard_level_metrics = [
    "IncomingBytes",
    "OutgoingBytes",
  ]
}

data "aws_iam_policy_document" "assume_role" {
  statement {
    effect = "Allow"

    principals {
      type        = "Service"
      identifiers = ["lambda.amazonaws.com"]
    }

    actions = ["sts:AssumeRole"]
  }
}

resource "aws_iam_role" "iam_for_lambda" {
  name               = "iam_for_lambda"
  assume_role_policy = data.aws_iam_policy_document.assume_role.json
}

resource "aws_lambda_function" "order_processor" {
  function_name    = "order_processor"
  filename         = "${path.module}/lambda/order-processor/deployment_package.zip"
  handler          = "lambda_function.lambda_handler"
  role             = aws_iam_role.iam_for_lambda.arn
  runtime          = "python3.7"
  timeout          = 60
  memory_size      = 128
  source_code_hash = data.archive_file.order_processor_package.output_base64sha256
}

resource "aws_lambda_event_source_mapping" "order_processor_trigger" {
  event_source_arn              = aws_kinesis_stream.orders_processor.arn
  function_name                 = "order_processor"
  batch_size                    = 1
  starting_position             = "LATEST"
  enabled                       = true
  maximum_record_age_in_seconds = 604800
}

--------------------------------------------------------------------------------
/kinesis-lambda-dynamodb/providers.tf:
--------------------------------------------------------------------------------
terraform {
  required_version = "~> 1.0"

  required_providers {
    aws = {
      source  = "hashicorp/aws"
      version = "~> 5.60"
    }
    archive = {
      source  = "hashicorp/archive"
      version = "~> 2.4"
    }
  }
}

provider "aws" {
  region                      = "eu-west-1"
  access_key                  = "localstack"
  secret_key                  = "localstack"
  skip_credentials_validation = true
  skip_metadata_api_check     = true
  skip_requesting_account_id  = true

  endpoints {
    dynamodb = "http://localhost:4566"
    lambda   = "http://localhost:4566"
    kinesis  = "http://localhost:4566"
    s3       = "http://localhost:4566"
    iam      = "http://localhost:4566"
  }
}

provider "archive" {}

--------------------------------------------------------------------------------
/kinesis-lambda-dynamodb/put_record.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3

import boto3

kinesis = boto3.Session(region_name='eu-west-1').client('kinesis', aws_access_key_id='localstack', aws_secret_access_key='localstack', endpoint_url='http://localhost:4566')
response = kinesis.put_record(StreamName='orders_processor', Data=b'chips', PartitionKey='1')
print(response)

--------------------------------------------------------------------------------
/kinesis-lambda-dynamodb/zip.sh:
--------------------------------------------------------------------------------
#!/bin/bash

for function in $(ls lambda/)
do
  pushd "lambda/$function"
  if [ -f "deployment_package.zip" ]; then rm -f deployment_package.zip; fi
  python3 -m pip install --target ./packages --requirement ./deps/requirements.txt
  pushd packages
  zip -r ../deployment_package.zip .
  popd
  pushd src/
  zip -g ../deployment_package.zip lambda_function.py
  popd
  rm -rf packages/*
  popd
done
--------------------------------------------------------------------------------
/s3-backend/README.md:
--------------------------------------------------------------------------------
# Terraform S3 Backend

S3 Backend for State and DynamoDB Table for State Locking.

## Deploy Infra

Run localstack:

```bash
$ pushd ../
$ make up
$ popd
```

Deploy the S3 Bucket and DynamoDB Table:

```
Apply complete! Resources: 3 added, 0 changed, 0 destroyed.

Outputs:

bucket_name = "terraform-state"
dynamodb_table_name = "terraform-state-lock"
```

## List Resources

Using the aws cli, list the s3 buckets:

```
$ aws --endpoint-url="http://localhost:4566" --region eu-west-1 s3 ls /

2022-02-19 17:58:47 terraform-state
```

Then list the DynamoDB Tables:

```
$ aws --endpoint-url="http://localhost:4566" --region eu-west-1 dynamodb list-tables
{
    "TableNames": [
        "terraform-state-lock"
    ]
}
```

## Chicken and the Egg Problem

You will notice that our state resides locally: we have not yet defined a state backend, so terraform defaults to local storage:

```
$ ls | grep state
terraform.tfstate
```

Since we now have infrastructure for our state, we can migrate the local state to remote state on S3. We do that by adding the backend to `main.tf` (a full example of the main.tf can be found in `example/main.tf`):

```
...

terraform {
  backend "s3" {
    bucket                      = "terraform-state"
    key                         = "terraform-state/terraform.tfstate"
    region                      = "eu-west-1"
    endpoint                    = "http://localhost:4566"
    skip_credentials_validation = true
    skip_metadata_api_check     = true
    force_path_style            = true
    dynamodb_table              = "terraform-state-lock"
    dynamodb_endpoint           = "http://localhost:4566"
    encrypt                     = true
  }
}

...
```

Then we need to reinitialize to migrate our local state to s3:

```
$ terraform init

Initializing the backend...
Do you want to copy existing state to the new backend?
  Pre-existing state was found while migrating the previous "local" backend to the
  newly configured "s3" backend. No existing state was found in the newly
  configured "s3" backend. Do you want to copy this state to the new "s3"
  backend? Enter "yes" to copy and "no" to start with an empty state.

  Enter a value: yes

Successfully configured the backend "s3"! Terraform will automatically
use this backend unless the backend configuration changes.

Initializing provider plugins...
- Reusing previous version of hashicorp/aws from the dependency lock file
- Using previously-installed hashicorp/aws v4.2.0

Terraform has been successfully initialized!

You may now begin working with Terraform. Try running "terraform plan" to see
any changes that are required for your infrastructure. All Terraform commands
should now work.

If you ever set or change modules or backend configuration for Terraform,
rerun this command to reinitialize your working directory. If you forget, other
commands will detect it and remind you to do so if necessary.
```

As we can see, our state has been migrated:

```
$ aws --endpoint-url="http://localhost:4566" --region eu-west-1 s3 ls s3://terraform-state/terraform-state/
2022-02-19 18:11:16       5635 terraform.tfstate
```
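If you want to look inside the migrated state, you can fetch the object with boto3. A sketch against LocalStack, assuming the bucket and key from the backend config above:

```python
import json
import boto3

s3 = boto3.client(
    's3',
    region_name='eu-west-1',
    aws_access_key_id='localstack',
    aws_secret_access_key='localstack',
    endpoint_url='http://localhost:4566',
)

obj = s3.get_object(Bucket='terraform-state',
                    Key='terraform-state/terraform.tfstate')
state = json.loads(obj['Body'].read())

# The state file is plain JSON: version, serial, and tracked resources
print(state['terraform_version'], state['serial'])
print([f"{r['type']}.{r['name']}" for r in state['resources']])
```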
We can then remove the local state:

```
$ rm -rf terraform.tfstate*
```

And verify by doing a plan:

```
$ terraform plan
aws_s3_bucket.state: Refreshing state... [id=terraform-state]
aws_dynamodb_table.state_lock: Refreshing state... [id=terraform-state-lock]
aws_s3_bucket_public_access_block.state: Refreshing state... [id=terraform-state]
aws_s3_bucket_server_side_encryption_configuration.sse: Refreshing state... [id=terraform-state]

No changes. Infrastructure is up-to-date.

This means that Terraform did not detect any differences between your configuration and the remote system(s). As a result, there are no actions to take.
```

## Using S3 Backend

To store any other stack's state on S3, you will need the following:

```
terraform {
  backend "s3" {
    bucket                      = "terraform-state"
    key                         = "terraform-state/terraform.tfstate"
    region                      = "eu-west-1"
    endpoint                    = "http://localhost:4566"
    skip_credentials_validation = true
    skip_metadata_api_check     = true
    force_path_style            = true
    dynamodb_table              = "terraform-state-lock"
    dynamodb_endpoint           = "http://localhost:4566"
    encrypt                     = true
  }
}
```

Remember to give each stack its own `key` on S3 so their state files stay separate; how you organize those keys is up to you.
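State locking is easy to observe as well: while an apply is running, Terraform writes a lock item into the DynamoDB table, and you can peek at it with boto3. A sketch; outside of an active run the table is typically empty:

```python
import boto3

ddb = boto3.client(
    'dynamodb',
    region_name='eu-west-1',
    aws_access_key_id='localstack',
    aws_secret_access_key='localstack',
    endpoint_url='http://localhost:4566',
)

# Run this while a `terraform apply` is in flight to catch the lock item
for item in ddb.scan(TableName='terraform-state-lock')['Items']:
    print(item['LockID']['S'], item.get('Info', {}).get('S', ''))
```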
--------------------------------------------------------------------------------
/s3-backend/example/main.tf:
--------------------------------------------------------------------------------
# PROVIDERS
provider "aws" {
  region                      = "eu-west-1"
  access_key                  = "localstack"
  secret_key                  = "localstack"
  skip_credentials_validation = true
  skip_metadata_api_check     = true
  skip_requesting_account_id  = true
  s3_use_path_style           = true

  endpoints {
    dynamodb = "http://localhost:4566"
    s3       = "http://localhost:4566"
    sts      = "http://localhost:4566"
    iam      = "http://localhost:4566"
  }
}

terraform {
  required_providers {
    aws = {
      source  = "hashicorp/aws"
      version = "5.60.0"
    }
  }

  backend "s3" {
    bucket                      = "terraform-state"
    key                         = "terraform-state/example/terraform.tfstate"
    region                      = "eu-west-1"
    access_key                  = "localstack"
    secret_key                  = "localstack"
    skip_credentials_validation = true
    skip_metadata_api_check     = true
    force_path_style            = true
    dynamodb_table              = "terraform-state-lock"
    encrypt                     = false

    endpoints = {
      s3       = "http://localhost:4566"
      sts      = "http://localhost:4566"
      dynamodb = "http://localhost:4566"
      iam      = "http://localhost:4566"
    }
  }
}

data "aws_caller_identity" "current" {}

resource "aws_s3_bucket" "this" {
  bucket = "my-bucket-${data.aws_caller_identity.current.account_id}"

  tags = {
    Name  = "my-bucket"
    Owner = "devops"
  }
}

resource "aws_s3_bucket_server_side_encryption_configuration" "sse" {
  bucket = aws_s3_bucket.this.bucket

  rule {
    apply_server_side_encryption_by_default {
      sse_algorithm = "AES256"
    }
  }
}

# OUTPUTS
output "bucket_arn" {
  value = aws_s3_bucket.this.arn
}

--------------------------------------------------------------------------------
/s3-backend/main.tf:
--------------------------------------------------------------------------------
terraform {
  required_providers {
    aws = {
      source  = "hashicorp/aws"
      version = "5.60.0"
    }
  }
}

# PROVIDERS
provider "aws" {
  region                      = "eu-west-1"
  access_key                  = "localstack"
  secret_key                  = "localstack"
  skip_credentials_validation = true
  skip_metadata_api_check     = true
  skip_requesting_account_id  = true
  s3_use_path_style           = true

  endpoints {
    dynamodb = "http://localhost:4566"
    s3       = "http://localhost:4566"
  }
}

# RESOURCES
resource "aws_s3_bucket" "state" {
  bucket = "terraform-state"

  lifecycle {
    prevent_destroy = true
  }
}

resource "aws_s3_bucket_server_side_encryption_configuration" "sse" {
  bucket = aws_s3_bucket.state.bucket

  rule {
    apply_server_side_encryption_by_default {
      sse_algorithm = "AES256"
    }
  }
}

resource "aws_s3_bucket_public_access_block" "state" {
  bucket = aws_s3_bucket.state.id

  block_public_acls       = true
  ignore_public_acls      = true
  block_public_policy     = true
  restrict_public_buckets = true
}

resource "aws_dynamodb_table" "state_lock" {
  name           = "terraform-state-lock"
  read_capacity  = 1
  write_capacity = 1
  hash_key       = "LockID"

  attribute {
    name = "LockID"
    type = "S"
  }
}

# OUTPUTS
output "bucket_name" {
  value = aws_s3_bucket.state.bucket
}

output "dynamodb_table_name" {
  value = aws_dynamodb_table.state_lock.id
}
--------------------------------------------------------------------------------
/s3-bucket/README.md:
--------------------------------------------------------------------------------
# S3 Bucket Localstack Terraform Example

Run localstack:

```
$ pushd ../
$ make up
$ popd
```

Deploy Infrastructure:

```
$ terraform init
$ terraform plan
$ terraform apply -auto-approve
```

List Buckets:

```
$ aws --endpoint-url http://localhost:4566 --region eu-west-1 s3 ls /
```
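Beyond listing the bucket, you can confirm the default encryption rule that `main.tf` below configures. A boto3 sketch; the bucket name assumes LocalStack's default account id of 000000000000:

```python
import boto3

s3 = boto3.client(
    's3',
    region_name='eu-west-1',
    aws_access_key_id='localstack',
    aws_secret_access_key='localstack',
    endpoint_url='http://localhost:4566',
)

enc = s3.get_bucket_encryption(Bucket='my-bucket-000000000000')
rule = enc['ServerSideEncryptionConfiguration']['Rules'][0]
print(rule['ApplyServerSideEncryptionByDefault']['SSEAlgorithm'])  # AES256
```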
--------------------------------------------------------------------------------
/s3-bucket/main.tf:
--------------------------------------------------------------------------------
# PROVIDERS
provider "aws" {
  region                      = "eu-west-1"
  access_key                  = "localstack"
  secret_key                  = "localstack"
  skip_credentials_validation = true
  skip_metadata_api_check     = true
  skip_requesting_account_id  = true
  s3_use_path_style           = true

  endpoints {
    s3  = "http://localhost:4566"
    sts = "http://localhost:4566"
  }
}

data "aws_caller_identity" "current" {}

resource "aws_s3_bucket" "this" {
  bucket = "my-bucket-${data.aws_caller_identity.current.account_id}"

  tags = {
    Name  = "my-bucket"
    Owner = "devops"
  }
}

resource "aws_s3_bucket_server_side_encryption_configuration" "sse" {
  bucket = aws_s3_bucket.this.bucket

  rule {
    apply_server_side_encryption_by_default {
      sse_algorithm = "AES256"
    }
  }
}

# OUTPUTS
output "bucket_arn" {
  value = aws_s3_bucket.this.arn
}

--------------------------------------------------------------------------------
/s3-sqs-lambda-dynamodb/.gitignore:
--------------------------------------------------------------------------------
.terraform*
terraform.tfstate*

--------------------------------------------------------------------------------
/s3-sqs-lambda-dynamodb/README.md:
--------------------------------------------------------------------------------
# s3-sqs-lambda-dynamodb

## About

Components:
- S3 Bucket
- SQS Queue
- Lambda Function
- DynamoDB Table

1. When a json object arrives on s3, a notification is triggered and sent to sqs
2. The lambda function is triggered when an item is added to SQS and consumes the data
3. The lambda function reads the data in the json object key and writes it to DynamoDB

## Walkthrough

Lambda Event from SQS:

```
localstack | > START RequestId: ead233f7-12f8-15f3-c5c4-061388ed806c Version: $LATEST
localstack | > {'Records': [{'body': '{"Records": [{"eventVersion": "2.1", "eventSource": "aws:s3", "awsRegion": "eu-west-1", "eventTime": "2022-03-01T22:02:31.030Z", "eventName": "ObjectCreated:Put", "userIdentity": {"principalId": "AIDAJDPLRKLG7UEXAMPLE"}, "requestParameters": {"sourceIPAddress": "127.0.0.1"}, "responseElements": {"x-amz-request-id": "19108f27", "x-amz-id-2": "eftixk72aD6Ap51TnqcoF8eFidJG9Z/2"}, "s3": {"s3SchemaVersion": "1.0", "configurationId": "testConfigRule", "bucket": {"name": "my-bucket-000000000000", "ownerIdentity": {"principalId": "A3NL1KOZZKExample"}, "arn": "arn:aws:s3:::my-bucket-000000000000"}, "object": {"key": "orders/2022/03/01/file.json", "size": 51, "eTag": "\\"d9010ae140b4fd2d75578c7210449f27\\"", "versionId": null, "sequencer": "0055AED6DCD90281E5"}}}]}', 'receiptHandle': 'vadmxpgzrigofwxnfskctmaivdwgqgnifywarqatzntnbjvkcsmenveddffplqyeayewuvdfkyvplzpudognwwztucpruexwqqwiddbjurgdffdneawpxqxswyitwbrghnuxrhhpqkbemggjelzlldupirdevifjmfbvkqhioefbkrtuztmqatqvm', 'md5OfBody': '6d064eca382e1deff9230c604baad820', 'eventSourceARN': 'arn:aws:sqs:eu-west-1:000000000000:orders-queue', 'eventSource': 'aws:sqs', 'awsRegion': 'eu-west-1', 'messageId': '6162988a-3a8b-e587-fdcd-68903f98a845', 'attributes': {}, 'messageAttributes': {}, 'md5OfMessageAttributes': None, 'sqs': True}]}
localstack | > END RequestId: ead233f7-12f8-15f3-c5c4-061388ed806c
localstack | > REPORT RequestId: ead233f7-12f8-15f3-c5c4-061388ed806c Init Duration: 107.05 ms Duration: 14.63 ms Billed Duration: 100 ms Memory Size: 1536 MB Max Memory Used: 24 MB
```

Deploy Infra:

```bash
$ pushd ../
$ make up
$ popd
$ terraform apply -auto-approve
aws_lambda_function.order_processor: Refreshing state... [id=order-processor]

Apply complete! Resources: 1 added, 1 changed, 1 destroyed.

Outputs:

dynamodb_table = "orders-table"
lambda_function = "order-processor"
s3_bucket = "my-bucket-000000000000"
sqs_queue = "orders-queue"
```

Create `file.json`:

```json
{"order_id": "20220301_001", "order_value": 12.30}
```

Put to S3:

```bash
$ aws --profile localstack --endpoint-url http://localhost:4566 s3 cp file.json s3://my-bucket-000000000000/orders/2022/03/01/file.json
upload: ./file.json to s3://my-bucket-000000000000/orders/2022/03/01/file.json
```
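The boto3 equivalent of that upload, if you prefer to trigger the pipeline from Python (a sketch against LocalStack; any key ending in `.json` fires the notification because of the `filter_suffix` in `main.tf`):

```python
import boto3

s3 = boto3.client(
    's3',
    region_name='eu-west-1',
    aws_access_key_id='localstack',
    aws_secret_access_key='localstack',
    endpoint_url='http://localhost:4566',
)

# Uploading file.json fires the s3:ObjectCreated:* notification to SQS
s3.upload_file('file.json', 'my-bucket-000000000000',
               'orders/2022/03/01/file.json')
```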
View logs:

```bash
$ docker-compose logs -f
...
localstack | 2022-03-01T22:56:06:DEBUG:localstack.services.awslambda.lambda_executors: Lambda arn:aws:lambda:eu-west-1:000000000000:function:order-processor result / log output:
localstack | {"statusCode":200,"body":{"ConsumedCapacity":{"TableName":"orders-table","CapacityUnits":1.0},"ResponseMetadata":{"RequestId":"ced062ff-8163-482f-ba03-0431c17e3522","HTTPStatusCode":200,"HTTPHeaders":{"content-type":"application/x-amz-json-1.0","content-length":"69","connection":"close","x-amz-crc32":"3899374354","x-amzn-requestid":"ced062ff-8163-482f-ba03-0431c17e3522","access-control-allow-origin":"*","access-control-allow-methods":"HEAD,GET,PUT,POST,DELETE,OPTIONS,PATCH","access-control-allow-headers":"authorization,content-type,content-length,content-md5,cache-control,x-amz-content-sha256,x-amz-date,x-amz-security-token,x-amz-user-agent,x-amz-target,x-amz-acl,x-amz-version-id,x-localstack-target,x-amz-tagging,amz-sdk-invocation-id,amz-sdk-request","access-control-expose-headers":"x-amz-version-id","date":"Tue, 01 Mar 2022 22:56:06 GMT","server":"hypercorn-h11"},"RetryAttempts":0}}}
localstack | > START RequestId: cbb481eb-963f-1e4b-9cac-4ecad11b853b Version: $LATEST
localstack | > END RequestId: cbb481eb-963f-1e4b-9cac-4ecad11b853b
localstack | > REPORT RequestId: cbb481eb-963f-1e4b-9cac-4ecad11b853b Init Duration: 428.72 ms Duration: 237.41 ms Billed Duration: 300 ms Memory Size: 1536 MB Max Memory Used: 40 MB
```

View DynamoDB Table:

```bash
$ aws --profile localstack --endpoint-url http://localhost:4566 dynamodb scan --table-name orders-table
{
    "Items": [
        {
            "OrderID": {
                "S": "20220301_001"
            },
            "Timestamp": {
                "S": "2022-03-01T22:56:17"
            },
            "OrderValue": {
                "S": "12.3"
            }
        }
    ],
    "Count": 1,
    "ScannedCount": 1,
    "ConsumedCapacity": null
}
```
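If the item never shows up, peeking at the queue can tell you whether the S3 notification arrived at all. A boto3 sketch; note that the Lambda event source mapping normally drains the queue, so an empty response here is expected on the happy path:

```python
import boto3

sqs = boto3.client(
    'sqs',
    region_name='eu-west-1',
    aws_access_key_id='localstack',
    aws_secret_access_key='localstack',
    endpoint_url='http://localhost:4566',
)

queue_url = sqs.get_queue_url(QueueName='orders-queue')['QueueUrl']

# Raw S3 event notifications, if any are still in flight
messages = sqs.receive_message(QueueUrl=queue_url, WaitTimeSeconds=2)
for message in messages.get('Messages', []):
    print(message['Body'])
```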
--------------------------------------------------------------------------------
/s3-sqs-lambda-dynamodb/file.json:
--------------------------------------------------------------------------------
{"order_id": "20220301_001", "order_value": 12.30}

--------------------------------------------------------------------------------
/s3-sqs-lambda-dynamodb/lambda_function.py:
--------------------------------------------------------------------------------
import json
import boto3
from datetime import datetime as dt
#from decimal import Decimal

def parse_event(payload):
    for record in payload['Records']:
        body = json.loads(record['body'])
        bucket = body['Records'][0]['s3']['bucket']['name']
        key = body['Records'][0]['s3']['object']['key']

        s3 = boto3.client(
            's3',
            endpoint_url='http://localstack:4566',
            aws_access_key_id='localstack',
            aws_secret_access_key='localstack'
        )
        response = s3.get_object(Bucket=bucket, Key=key)
        object_content = json.loads(response['Body'].read().decode('utf-8'))

    return object_content

def write_to_dynamodb(object_content):
    ddb = boto3.client(
        'dynamodb',
        endpoint_url='http://localstack:4566',
        aws_access_key_id='localstack',
        aws_secret_access_key='localstack'
    )
    response = ddb.put_item(
        TableName='orders-table',
        Item={
            'OrderID': {'S': object_content['order_id']},
            'OrderValue': {'S': str(object_content['order_value'])},
            'Timestamp': {'S': dt.now().strftime("%Y-%m-%dT%H:%M:%S")}
        }
    )
    return response

def lambda_handler(event, context):
    object_data = parse_event(event)
    response = write_to_dynamodb(object_data)
    return {
        'statusCode': 200,
        'body': response
    }

--------------------------------------------------------------------------------
/s3-sqs-lambda-dynamodb/main.tf:
--------------------------------------------------------------------------------
# PROVIDERS
provider "aws" {
  region                      = "eu-west-1"
  access_key                  = "localstack"
  secret_key                  = "localstack"
  skip_credentials_validation = true
  skip_metadata_api_check     = true
  skip_requesting_account_id  = true
  s3_use_path_style           = true

  endpoints {
    dynamodb = "http://localhost:4566"
    s3       = "http://localhost:4566"
    iam      = "http://localhost:4566"
    lambda   = "http://localhost:4566"
    sqs      = "http://localhost:4566"
    sts      = "http://localhost:4566"
  }
}

data "aws_caller_identity" "current" {}

resource "aws_s3_bucket" "bucket" {
  bucket = "my-bucket-${data.aws_caller_identity.current.account_id}"
}

resource "aws_s3_bucket_notification" "bucket_notification" {
  bucket = aws_s3_bucket.bucket.id

  queue {
    queue_arn     = aws_sqs_queue.queue.arn
    events        = ["s3:ObjectCreated:*"]
    filter_suffix = ".json"
  }
}

resource "aws_sqs_queue" "queue" {
  name                      = "orders-queue"
  delay_seconds             = 10
  max_message_size          = 2048
  message_retention_seconds = 86400
  receive_wait_time_seconds = 10

  policy = <