├── docs └── Architecture.png ├── CODE_OF_CONDUCT.md ├── versions.tf ├── outputs.tf ├── docker-compose.yml ├── .gitignore ├── .github └── workflows │ └── localstack-terraform-test.yml ├── vars.tf ├── LICENSE ├── lambda └── index.py ├── provider.tf ├── tests └── localstack.tftest.hcl ├── CONTRIBUTING.md ├── README.md └── main.tf /docs/Architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/localstack-terraform-test/HEAD/docs/Architecture.png -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | ## Code of Conduct 2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 4 | opensource-codeofconduct@amazon.com with any additional questions or comments. 5 | -------------------------------------------------------------------------------- /versions.tf: -------------------------------------------------------------------------------- 1 | terraform { 2 | 3 | required_version = ">= 1.7.0" 4 | 5 | required_providers { 6 | aws = { 7 | source = "hashicorp/aws" 8 | 9 | # LocalStack does not support validation AWS Step Functions definition introduced in v5.67.0 10 | # See https://github.com/localstack/localstack/issues/11553 and https://github.com/localstack/localstack/pull/11660 11 | version = "<= 5.66.0" 12 | } 13 | } 14 | } -------------------------------------------------------------------------------- /outputs.tf: -------------------------------------------------------------------------------- 1 | # Output the S3 bucket name 2 | output "s3_bucket_name" { 3 | value = aws_s3_bucket.my_bucket.id 4 | } 5 | 6 | output "state_machine_arn" { 7 | value = aws_sfn_state_machine.dynamodb_updater_workflow.arn 8 | } 9 | 10 | output "lambda_arn" { 11 | value = aws_lambda_function.upload_trigger_lambda.arn 12 | } 13 | 14 | output "file_name_check" { 15 | value = jsondecode(data.aws_dynamodb_table_item.test.item)["FileName"]["S"] 16 | } 17 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | localstack: 3 | container_name: localstack 4 | image: localstack/localstack:latest 5 | read_only: false # required to write to /tmp 6 | ports: 7 | - "4566:4566" 8 | - "4571:4571" 9 | environment: 10 | - DEBUG=1 11 | - DOCKER_HOST=unix:///var/run/docker.sock 12 | volumes: 13 | - "${LOCALSTACK_VOLUME_DIR:-./volume}:/var/lib/localstack" 14 | - "/var/run/docker.sock:/var/run/docker.sock" # required for some AWS services like AWS Lambda 15 | security_opt: 16 | - no-new-privileges:true 17 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | volume/ 2 | *.swp 3 | package-lock.json 4 | .pytest_cache 5 | *.egg-info 6 | *.zip 7 | 8 | # Byte-compiled / optimized / DLL files 9 | __pycache__/ 10 | *.py[cod] 11 | *$py.class 12 | 13 | # Environments 14 | .env 15 | .venv 16 | env/ 17 | venv/ 18 | ENV/ 19 | env.bak/ 20 | venv.bak/ 21 | 22 | # CDK Context & Staging files 23 | .cdk.staging/ 24 | cdk.out/ 25 | 26 | # Byte-compiled / optimized / DLL 27 | __pycache__/ 28 | *.py[cod] 29 | *$py.class 
30 | 31 | # C extensions 32 | *.so 33 | 34 | # Coverage.py 35 | .coverage 36 | coverage.xml 37 | htmlcov/ 38 | 39 | # Environment 40 | .venv/ 41 | node_modules/ 42 | 43 | # macOS 44 | .DS_Store 45 | 46 | # mypy 47 | .mypy_cache/ 48 | 49 | # PyCharm 50 | .idea/ 51 | 52 | # pyenv 53 | .python-version 54 | 55 | ### Terraform ### 56 | # Local .terraform directories 57 | **/.terraform/* 58 | .terraform.lock.hcl 59 | 60 | # .tfstate files 61 | *.tfstate 62 | *.tfstate.* -------------------------------------------------------------------------------- /.github/workflows/localstack-terraform-test.yml: -------------------------------------------------------------------------------- 1 | name: LocalStack Terraform Test 2 | 3 | on: 4 | push: 5 | branches: 6 | - '**' 7 | 8 | workflow_dispatch: {} 9 | 10 | jobs: 11 | localstack-terraform-test: 12 | runs-on: ubuntu-latest 13 | 14 | steps: 15 | - uses: actions/checkout@v4 16 | 17 | - name: Build and Start LocalStack Container 18 | run: | 19 | docker compose up -d 20 | 21 | - name: Setup Terraform 22 | uses: hashicorp/setup-terraform@v3 23 | with: 24 | terraform_version: latest 25 | 26 | - name: Run Terraform Init and Validation 27 | run: | 28 | terraform init 29 | terraform validate 30 | terraform fmt --recursive --check 31 | terraform plan 32 | terraform show 33 | 34 | - name: Run Terraform Test 35 | run: | 36 | terraform test 37 | 38 | - name: Stop and Delete LocalStack Container 39 | if: always() 40 | run: docker compose down -------------------------------------------------------------------------------- /vars.tf: -------------------------------------------------------------------------------- 1 | # Define variables 2 | variable "s3_bucket_name" { 3 | description = "The name of the S3 bucket" 4 | type = string 5 | default = "my-test-bucket" 6 | } 7 | 8 | variable "dynamodb_table_name" { 9 | description = "The name of the DynamoDB table" 10 | type = string 11 | default = "Files" 12 | } 13 | 14 | variable "dynamodb_hash_key" { 15 | description = "The hash key of the DynamoDB table" 16 | type = string 17 | default = "FileName" 18 | } 19 | 20 | variable "lambda_name" { 21 | description = "The name of the Lambda function" 22 | type = string 23 | default = "upload_trigger_lambda" 24 | } 25 | 26 | 27 | variable "s3_object_key" { 28 | description = "The key of the S3 object" 29 | type = string 30 | default = "README.md" 31 | } 32 | 33 | variable "sfn_name" { 34 | description = "The name of the Step Functions state machine" 35 | type = string 36 | default = "UploadStateMachine" 37 | } 38 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT No Attribution 2 | 3 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of 6 | this software and associated documentation files (the "Software"), to deal in 7 | the Software without restriction, including without limitation the rights to 8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software is furnished to do so. 10 | 11 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 12 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 13 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR 14 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 15 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 16 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | 18 | -------------------------------------------------------------------------------- /lambda/index.py: -------------------------------------------------------------------------------- 1 | import json 2 | import logging 3 | import os 4 | import time 5 | import uuid 6 | 7 | import boto3 8 | 9 | sfn_client = boto3.client( 10 | "stepfunctions", endpoint_url="http://localstack:4566", region_name="eu-central-1" 11 | ) 12 | 13 | logger = logging.getLogger() 14 | logger.setLevel(logging.INFO) 15 | 16 | 17 | def generate_execution_name(): 18 | unique_id = str(uuid.uuid4()) 19 | current_time = int(time.time()) 20 | execution_name = f"Execution-{current_time}-{unique_id}" 21 | return execution_name 22 | 23 | 24 | def lambda_handler(event, context): 25 | s3_bucket = event["Records"][0]["s3"]["bucket"]["name"] 26 | s3_object_key = event["Records"][0]["s3"]["object"]["key"] 27 | 28 | input_data = {"bucket": s3_bucket, "fileName": s3_object_key} 29 | 30 | logger.info(f"Input Data: {input_data}") 31 | logger.info(f"Event: {event}") 32 | 33 | # Define the Step Function's ARN 34 | state_machine_arn = os.environ.get("SM_ARN") 35 | 36 | # Start the Step Function execution 37 | response = sfn_client.start_execution( 38 | stateMachineArn=state_machine_arn, 39 | name=generate_execution_name(), 40 | input=json.dumps(input_data), 41 | ) 42 | 43 | # Log the response for debugging 44 | logger.info(f"Step Function Execution Response: {response}") 45 | 46 | return { 47 | "statusCode": 200, 48 | "body": json.dumps("Step Function execution started successfully."), 49 | } 50 | -------------------------------------------------------------------------------- /provider.tf: -------------------------------------------------------------------------------- 1 | provider "aws" { 2 | access_key = "test" 3 | secret_key = "test" 4 | region = "eu-central-1" 5 | 6 | s3_use_path_style = true 7 | skip_requesting_account_id = true 8 | skip_credentials_validation = true 9 | skip_metadata_api_check = true 10 | 11 | endpoints { 12 | apigateway = "http://localhost:4566" 13 | apigatewayv2 = "http://localhost:4566" 14 | cloudformation = "http://localhost:4566" 15 | cloudwatch = "http://localhost:4566" 16 | cloudwatchlogs = "http://localhost:4566" 17 | dynamodb = "http://localhost:4566" 18 | ec2 = "http://localhost:4566" 19 | es = "http://localhost:4566" 20 | elasticache = "http://localhost:4566" 21 | firehose = "http://localhost:4566" 22 | iam = "http://localhost:4566" 23 | kinesis = "http://localhost:4566" 24 | lambda = "http://localhost:4566" 25 | rds = "http://localhost:4566" 26 | redshift = "http://localhost:4566" 27 | route53 = "http://localhost:4566" 28 | s3 = "http://localhost:4566" 29 | secretsmanager = "http://localhost:4566" 30 | ses = "http://localhost:4566" 31 | sns = "http://localhost:4566" 32 | sqs = "http://localhost:4566" 33 | ssm = "http://localhost:4566" 34 | stepfunctions = "http://localhost:4566" 35 | sts = "http://localhost:4566" 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /tests/localstack.tftest.hcl: -------------------------------------------------------------------------------- 1 | provider "aws" { 2 | access_key = "test" 3 | secret_key = "test" 4 | region = "eu-central-1" 5 | 6 | 
s3_use_path_style = true 7 | skip_requesting_account_id = true 8 | skip_credentials_validation = true 9 | skip_metadata_api_check = true 10 | 11 | endpoints { 12 | apigateway = "http://localhost:4566" 13 | apigatewayv2 = "http://localhost:4566" 14 | cloudformation = "http://localhost:4566" 15 | cloudwatch = "http://localhost:4566" 16 | cloudwatchlogs = "http://localhost:4566" 17 | dynamodb = "http://localhost:4566" 18 | ec2 = "http://localhost:4566" 19 | es = "http://localhost:4566" 20 | elasticache = "http://localhost:4566" 21 | firehose = "http://localhost:4566" 22 | iam = "http://localhost:4566" 23 | kinesis = "http://localhost:4566" 24 | lambda = "http://localhost:4566" 25 | rds = "http://localhost:4566" 26 | redshift = "http://localhost:4566" 27 | route53 = "http://localhost:4566" 28 | s3 = "http://localhost:4566" 29 | secretsmanager = "http://localhost:4566" 30 | ses = "http://localhost:4566" 31 | sns = "http://localhost:4566" 32 | sqs = "http://localhost:4566" 33 | ssm = "http://localhost:4566" 34 | stepfunctions = "http://localhost:4566" 35 | sts = "http://localhost:4566" 36 | } 37 | } 38 | 39 | run "check_s3_bucket_name" { 40 | 41 | command = apply 42 | 43 | assert { 44 | condition = output.s3_bucket_name == var.s3_bucket_name 45 | error_message = "S3 bucket name does not match" 46 | } 47 | 48 | } 49 | 50 | run "check_lambda_function" { 51 | 52 | command = apply 53 | 54 | assert { 55 | condition = output.lambda_arn != null 56 | error_message = "Lambda function not created" 57 | } 58 | 59 | } 60 | 61 | run "check_name_of_filename_written_to_dynamodb" { 62 | 63 | command = apply 64 | 65 | assert { 66 | condition = output.file_name_check == var.s3_object_key 67 | error_message = "Write to DynamoDB failed" 68 | } 69 | 70 | } 71 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing Guidelines 2 | 3 | Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional 4 | documentation, we greatly value feedback and contributions from our community. 5 | 6 | Please read through this document before submitting any issues or pull requests to ensure we have all the necessary 7 | information to effectively respond to your bug report or contribution. 8 | 9 | 10 | ## Reporting Bugs/Feature Requests 11 | 12 | We welcome you to use the GitHub issue tracker to report bugs or suggest features. 13 | 14 | When filing an issue, please check existing open, or recently closed, issues to make sure somebody else hasn't already 15 | reported the issue. Please try to include as much information as you can. Details like these are incredibly useful: 16 | 17 | * A reproducible test case or series of steps 18 | * The version of our code being used 19 | * Any modifications you've made relevant to the bug 20 | * Anything unusual about your environment or deployment 21 | 22 | 23 | ## Contributing via Pull Requests 24 | Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that: 25 | 26 | 1. You are working against the latest source on the *main* branch. 27 | 2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already. 28 | 3. You open an issue to discuss any significant work - we would hate for your time to be wasted. 29 | 30 | To send us a pull request, please: 31 | 32 | 1. Fork the repository. 
33 | 2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change. 34 | 3. Ensure local tests pass. 35 | 4. Commit to your fork using clear commit messages. 36 | 5. Send us a pull request, answering any default questions in the pull request interface. 37 | 6. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation. 38 | 39 | GitHub provides additional document on [forking a repository](https://help.github.com/articles/fork-a-repo/) and 40 | [creating a pull request](https://help.github.com/articles/creating-a-pull-request/). 41 | 42 | 43 | ## Finding contributions to work on 44 | Looking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any 'help wanted' issues is a great place to start. 45 | 46 | 47 | ## Code of Conduct 48 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 49 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 50 | opensource-codeofconduct@amazon.com with any additional questions or comments. 51 | 52 | 53 | ## Security issue notifications 54 | If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public github issue. 55 | 56 | 57 | ## Licensing 58 | 59 | See the [LICENSE](LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution. 60 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Testing AWS infrastructure using LocalStack and Terraform Test 2 | 3 | This pattern provides a solution to test IaC in Terraform locally without the need to provision infrastructure in AWS. It uses the [Terraform Test framework](https://developer.hashicorp.com/terraform/language/tests) introduced with Terraform version 1.6 and we showcase how to integrate it with LocalStack for Cost Optimization, Speed and Efficiency, Consistency and Reproducibility, Isolation and Safety and Simplified Development Workflow. 4 | 5 | Running tests against LocalStack eliminates the need to use actual AWS services, thus avoiding costs associated with creating, modifying, and destroying resources in AWS. Testing locally is significantly faster than deploying resources in AWS. 6 | 7 | This rapid feedback loop accelerates development and debugging. Since LocalStack runs locally, you can develop and test your Terraform scripts without an internet connection. LocalStack provides a consistent environment for testing. This consistency ensures that tests yield the same results regardless of external AWS changes or network issues. 8 | 9 | Integration with a CI/CD pipeline allows for automated testing of Terraform scripts and modules. This ensures infrastructure code is thoroughly tested before deployment. Testing with LocalStack ensures that you don't accidentally affect live AWS resources or production environments. This isolation makes it safe to experiment and test various configurations. 
Developers can debug Terraform scripts locally with immediate feedback, streamlining the development process.
10 | 
11 | You can simulate different AWS regions, accounts, and service configurations to match your production environments more closely.
12 | 
13 | ## Prerequisites
14 | 
15 | - Docker installed and configured to enable the default Docker socket (/var/run/docker.sock).
16 | 
17 | - [Docker Installation Guide for Linux](https://docs.docker.com/engine/install/).
18 | 
19 | - [Docker Desktop for Windows](https://docs.docker.com/desktop/install/windows-install/).
20 | 
21 | - [Docker Desktop for Mac](https://docs.docker.com/desktop/install/mac-install/).
22 | 
23 | - Docker Compose [installed](https://docs.docker.com/compose/install/).
24 | 
25 | - AWS Command Line Interface (AWS CLI), [installed](https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html) and [configured](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-files.html).
26 | 
27 | - Terraform CLI, [installed](https://developer.hashicorp.com/terraform/cli) (Terraform documentation).
28 | 
29 | - Terraform AWS Provider, [configured](https://hashicorp.github.io/terraform-provider-aws/) (Terraform documentation).
30 | 
31 | ## Target architecture
32 | 
33 | The code in this repository helps you set up the following target architecture.
34 | 
35 | ![Architecture](docs/Architecture.png)
36 | 
37 | The diagram illustrates a CI/CD pipeline for a LocalStack Docker Container setup. Here's a breakdown of the components and their interactions:
38 | 
39 | **Source Code Repository**
40 | 
41 | 1. A user commits code changes to a Source Code Repository.
42 | 
43 | **CI/CD Pipeline**
44 | 
45 | 2. The code changes trigger a Build process.
46 | 
47 | 3. The Build process also triggers Tests to ensure the code changes are functional.
48 | 
49 | **LocalStack Docker Container**
50 | 
51 | The LocalStack Docker Container hosts the following AWS services locally:
52 | 
53 | 4. An Amazon S3 bucket for storing files.
54 | 
55 | 5. Amazon CloudWatch for monitoring and logging.
56 | 
57 | 6. An AWS Lambda Function for running serverless code.
58 | 
59 | 7. An AWS Step Functions state machine for orchestrating multi-step workflows.
60 | 
61 | 8. An Amazon DynamoDB table for storing NoSQL data.
62 | 
63 | **Workflow**
64 | 
65 | - The user commits code changes to the Source Code Repository (1).
66 | 
67 | - The CI/CD Pipeline detects the changes and triggers a Build process for static Terraform code analysis and for building the LocalStack Docker container (2), and then runs the Tests (3). The Test stage runs the tests for our infrastructure against LocalStack without deploying any resources in the AWS Cloud (Steps 3-8).
68 | 
69 | Within the LocalStack Docker Container, the test:
70 | 
71 | - uploads an object into an S3 bucket (Step 4),
72 | 
73 | - invokes an AWS Lambda function through an Amazon S3 event notification (Step 4), with logs stored in Amazon CloudWatch (Step 5),
74 | 
75 | - which in turn will start the execution of a state machine (Step 6),
76 | 
77 | - that will write the name of the S3 object into a DynamoDB table (Step 7).
78 | 
79 | - We then verify that the name of the uploaded object matches the entry in the DynamoDB table (Step 8).
80 | 
81 | The provided tests also include examples to verify that the S3 bucket is deployed with the given name and that the AWS Lambda function has been successfully deployed.
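For example, a similar `run` block could be appended to `tests/localstack.tftest.hcl` to assert that the Step Functions state machine was created, reusing the `state_machine_arn` output already defined in `outputs.tf` (a minimal sketch; the run label `check_state_machine` is illustrative and not part of the shipped tests):

```hcl
run "check_state_machine" {

  command = apply

  assert {
    condition     = output.state_machine_arn != null
    error_message = "Step Functions state machine not created"
  }

}
```

Like the existing checks, it applies the configuration against LocalStack and only inspects Terraform outputs, so no resources are created in the AWS Cloud.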
82 | 
83 | The LocalStack Docker Container provides a local development environment that emulates various AWS services, allowing developers to test and iterate on their applications without incurring costs on the actual AWS Cloud.
84 | 
85 | ## Terraform Test
86 | 
87 | ### Run LocalStack Container
88 | 
89 | In the cloned repository, start the LocalStack container in detached mode by entering the following command in a bash shell.
90 | 
91 | ```shell
92 | docker compose up -d
93 | ```
94 | 
95 | Wait until the LocalStack container is up and running.
96 | 
97 | ### Terraform Initialization
98 | 
99 | Enter the following command from the cloned repository to initialize Terraform.
100 | 
101 | ```shell
102 | terraform init
103 | ```
104 | 
105 | ### Run Terraform Test
106 | 
107 | Enter the following command to execute Terraform Test.
108 | 
109 | ```shell
110 | terraform test
111 | ```
112 | 
113 | Verify that all tests passed successfully.
114 | 
115 | The output should be similar to:
116 | ```shell
117 | tests/localstack.tftest.hcl... in progress
118 | run "check_s3_bucket_name"... pass
119 | run "check_lambda_function"... pass
120 | run "check_name_of_filename_written_to_dynamodb"... pass
121 | tests/localstack.tftest.hcl... tearing down
122 | tests/localstack.tftest.hcl... pass
123 | 
124 | Success! 3 passed, 0 failed.
125 | ```
126 | 
127 | ### Resource Cleanup
128 | 
129 | Enter the following command to stop and remove the LocalStack container.
130 | 
131 | ```shell
132 | docker compose down
133 | ```
134 | 
135 | 
136 | ## Debugging with AWS CLI
137 | 
138 | ### Run LocalStack Container
139 | 
140 | In the cloned repository, start the LocalStack container in detached mode by entering the following command in a bash shell.
141 | 
142 | ```shell
143 | docker compose up -d
144 | ```
145 | 
146 | Wait until the LocalStack container is up and running.
147 | 
148 | 
149 | ### Authentication
150 | 
151 | Export the following environment variables to be able to run AWS CLI commands against the locally running container that emulates the AWS Cloud.
152 | 
153 | ```shell
154 | export AWS_ACCESS_KEY_ID=test
155 | export AWS_SECRET_ACCESS_KEY=test
156 | export AWS_SESSION_TOKEN=test
157 | export AWS_REGION=eu-central-1
158 | ```
159 | 
160 | ### Create Resources Locally
161 | 
162 | Create the resources in the locally running container.
163 | 
164 | ```shell
165 | terraform init
166 | terraform plan
167 | terraform apply -auto-approve
168 | ```
169 | 
170 | You can then execute AWS CLI commands against the deployed resources, for example to check that a state machine has been created.
171 | ```shell
172 | aws --endpoint-url http://localhost:4566 stepfunctions list-state-machines
173 | ```
174 | 
175 | ### Destroy the resources
176 | 
177 | ```shell
178 | terraform destroy -auto-approve
179 | ```
180 | 
181 | Enter the following command to stop and remove the LocalStack container.
182 | ```shell
183 | docker compose down
184 | ```
185 | 
186 | ## GitHub Actions
187 | 
188 | We provide an example of how to integrate LocalStack and Terraform Test in a CI/CD pipeline with [GitHub Actions](.github/workflows/localstack-terraform-test.yml).
189 | 
190 | ## Authors
191 | 
192 | Pattern created by Ivan Girardi (AWS) and Ioannis Kalyvas (AWS).
193 | 
194 | ## Security
195 | 
196 | See [CONTRIBUTING](CONTRIBUTING.md#security-issue-notifications) for more information.
197 | 
198 | ## License
199 | 
200 | This library is licensed under the MIT-0 License. See the LICENSE file.
201 | -------------------------------------------------------------------------------- /main.tf: -------------------------------------------------------------------------------- 1 | 2 | # Create an S3 Bucket 3 | resource "aws_s3_bucket" "my_bucket" { 4 | # checkov:skip=CKV2_AWS_62: "Ensure S3 buckets should have event notifications enabled" 5 | # checkov:skip=CKV_AWS_18: "Ensure the S3 bucket has access logging enabled" 6 | # checkov:skip=CKV_AWS_144: "Ensure that S3 bucket has cross-region replication enabled" 7 | # checkov:skip=CKV_AWS_145: "Ensure that S3 buckets are encrypted with KMS by default" 8 | bucket = var.s3_bucket_name 9 | } 10 | 11 | resource "aws_s3_bucket_versioning" "my_bucket_versioning" { 12 | bucket = aws_s3_bucket.my_bucket.id 13 | versioning_configuration { 14 | status = "Enabled" 15 | } 16 | } 17 | 18 | resource "aws_s3_bucket_public_access_block" "my_bucket_policy" { 19 | bucket = aws_s3_bucket.my_bucket.id 20 | block_public_acls = true 21 | block_public_policy = true 22 | ignore_public_acls = true 23 | restrict_public_buckets = true 24 | } 25 | 26 | resource "aws_s3_bucket_server_side_encryption_configuration" "my_bucket_encryption" { 27 | bucket = aws_s3_bucket.my_bucket.bucket 28 | 29 | rule { 30 | apply_server_side_encryption_by_default { 31 | sse_algorithm = "AES256" 32 | } 33 | } 34 | } 35 | 36 | resource "aws_s3_bucket_lifecycle_configuration" "this" { 37 | bucket = aws_s3_bucket.my_bucket.id 38 | 39 | rule { 40 | id = "retention-policy" 41 | 42 | expiration { 43 | days = 7 44 | } 45 | abort_incomplete_multipart_upload { 46 | days_after_initiation = 7 47 | } 48 | 49 | status = "Enabled" 50 | } 51 | } 52 | 53 | # Create a DynamoDB Table 54 | resource "aws_dynamodb_table" "files" { 55 | # checkov:skip=CKV_AWS_119: "Test DynamoDB table does not need to be encrypted using a KMS Customer Managed CMK" 56 | name = var.dynamodb_table_name 57 | billing_mode = "PAY_PER_REQUEST" 58 | hash_key = var.dynamodb_hash_key 59 | attribute { 60 | name = var.dynamodb_hash_key 61 | type = "S" 62 | } 63 | point_in_time_recovery { 64 | enabled = true 65 | } 66 | } 67 | 68 | data "aws_iam_policy_document" "assume_role" { 69 | statement { 70 | effect = "Allow" 71 | 72 | principals { 73 | type = "Service" 74 | identifiers = ["lambda.amazonaws.com"] 75 | } 76 | 77 | actions = ["sts:AssumeRole"] 78 | } 79 | } 80 | 81 | # Create an IAM Role for Lambda 82 | resource "aws_iam_role" "lambda_execution_role" { 83 | name = "lambda_execution_role" 84 | assume_role_policy = data.aws_iam_policy_document.assume_role.json 85 | } 86 | 87 | data "aws_iam_policy_document" "lambda_policy" { 88 | statement { 89 | effect = "Allow" 90 | 91 | actions = [ 92 | "states:StartExecution", 93 | ] 94 | 95 | resources = [ 96 | aws_sfn_state_machine.dynamodb_updater_workflow.arn, 97 | ] 98 | } 99 | 100 | statement { 101 | effect = "Allow" 102 | 103 | actions = [ 104 | "logs:CreateLogStream", 105 | "logs:PutLogEvents", 106 | "logs:DescribeLogGroups" 107 | ] 108 | 109 | resources = ["${aws_cloudwatch_log_group.MyLambdaLogGroup.arn}:*"] 110 | } 111 | 112 | statement { 113 | effect = "Allow" 114 | actions = [ 115 | "cloudwatch:PutMetricData", 116 | "logs:CreateLogDelivery", 117 | "logs:GetLogDelivery", 118 | "logs:UpdateLogDelivery", 119 | "logs:DeleteLogDelivery", 120 | "logs:ListLogDeliveries", 121 | "logs:PutResourcePolicy", 122 | "logs:DescribeResourcePolicies", 123 | ] 124 | resources = ["*"] 125 | } 126 | } 127 | 128 | resource "aws_iam_policy" "lambda_policy" { 129 | policy = 
data.aws_iam_policy_document.lambda_policy.json 130 | name = "lambda_dynamodb_policy" 131 | description = "Policy to allow Lambda to start a Step Function" 132 | } 133 | 134 | # Attach the Lambda policy to the Lambda execution role 135 | resource "aws_iam_role_policy_attachment" "lambda_dynamodb_attachment" { 136 | policy_arn = aws_iam_policy.lambda_policy.arn 137 | role = aws_iam_role.lambda_execution_role.name 138 | } 139 | 140 | # tflint-ignore: terraform_required_providers 141 | data "archive_file" "python_zip" { 142 | type = "zip" 143 | source_dir = "${path.module}/lambda/" 144 | output_path = "${path.module}/lambda/lambda-trigger-sm.zip" 145 | } 146 | 147 | # Create a Lambda Function 148 | resource "aws_lambda_function" "upload_trigger_lambda" { 149 | # checkov:skip=CKV_AWS_117: "Test Lambda function does not need to be inside a VPC" 150 | # checkov:skip=CKV_AWS_116: "Test Lambda function does not need a Dead Letter Queue(DLQ)" 151 | # checkov:skip=CKV_AWS_173: "Test Lambda function does not need encryption for environmental variables" 152 | # checkov:skip=CKV_AWS_272: "Test Lambda function does not need code-signing" 153 | # checkov:skip=CKV_AWS_115: "Test Lambda function does not need function-level concurrent execution limit" 154 | function_name = var.lambda_name 155 | handler = "index.lambda_handler" 156 | runtime = "python3.8" 157 | role = aws_iam_role.lambda_execution_role.arn 158 | 159 | filename = "${path.module}/lambda/lambda-trigger-sm.zip" 160 | source_code_hash = data.archive_file.python_zip.output_base64sha256 161 | timeout = 120 162 | 163 | tracing_config { 164 | mode = "Active" 165 | } 166 | 167 | environment { 168 | variables = { 169 | SM_ARN = aws_sfn_state_machine.dynamodb_updater_workflow.arn 170 | } 171 | } 172 | } 173 | 174 | resource "aws_cloudwatch_log_group" "MyLambdaLogGroup" { 175 | # checkov:skip=CKV_AWS_338: "Test logs do not require retention for 1 year" 176 | # checkov:skip=CKV_AWS_158: "Test logs do not require encrypted by KMS" 177 | retention_in_days = 1 178 | name = "/aws/lambda/${aws_lambda_function.upload_trigger_lambda.function_name}" 179 | } 180 | 181 | resource "aws_cloudwatch_log_group" "MySFNLogGroup" { 182 | # checkov:skip=CKV_AWS_338: "Test logs do not require retention for 1 year" 183 | # checkov:skip=CKV_AWS_158: "Test logs do not require encrypted by KMS" 184 | name_prefix = "/aws/vendedlogs/states/${var.sfn_name}-" 185 | retention_in_days = 1 186 | } 187 | 188 | data "aws_iam_policy_document" "sf_policy" { 189 | statement { 190 | effect = "Allow" 191 | 192 | actions = [ 193 | "dynamodb:PutItem", 194 | ] 195 | 196 | resources = [ 197 | aws_dynamodb_table.files.arn, 198 | ] 199 | } 200 | 201 | statement { 202 | effect = "Allow" 203 | actions = [ 204 | "logs:CreateLogDelivery", 205 | "logs:GetLogDelivery", 206 | "logs:UpdateLogDelivery", 207 | "logs:DeleteLogDelivery", 208 | "logs:ListLogDeliveries", 209 | "logs:PutResourcePolicy", 210 | "logs:DescribeResourcePolicies", 211 | "logs:DescribeLogGroups" 212 | ] 213 | resources = ["*"] 214 | } 215 | 216 | } 217 | 218 | # Attach a policy to the IAM role that allows PutItem in DynamoDB and CloudWatch Logs 219 | resource "aws_iam_policy" "state_machine_policy" { 220 | name = "state_machine_policy" 221 | description = "Policy to allow PutItem in DynamoDB and permissions for CloudWatch Logs" 222 | policy = data.aws_iam_policy_document.sf_policy.json 223 | 224 | } 225 | 226 | data "aws_iam_policy_document" "assume_role_sf" { 227 | statement { 228 | effect = "Allow" 229 | 230 | principals { 231 | type 
= "Service" 232 | identifiers = ["states.amazonaws.com"] 233 | } 234 | 235 | actions = ["sts:AssumeRole"] 236 | } 237 | } 238 | 239 | # Create an IAM role for the Step Function 240 | resource "aws_iam_role" "step_function_role" { 241 | name = "step_function_role" 242 | assume_role_policy = data.aws_iam_policy_document.assume_role_sf.json 243 | 244 | } 245 | 246 | resource "aws_iam_role_policy_attachment" "attach_state_machine_policy" { 247 | policy_arn = aws_iam_policy.state_machine_policy.arn 248 | role = aws_iam_role.step_function_role.name 249 | } 250 | 251 | resource "aws_sfn_state_machine" "dynamodb_updater_workflow" { 252 | name = var.sfn_name 253 | tracing_configuration { 254 | enabled = true 255 | } 256 | definition = jsonencode({ 257 | Comment = "A Step Function that writes to DynamoDB", 258 | StartAt = "Upload", 259 | States = { 260 | Upload = { 261 | Type = "Task", 262 | Resource = "arn:aws:states:::dynamodb:putItem", 263 | Parameters = { 264 | "TableName" : aws_dynamodb_table.files.name, 265 | "Item" : { 266 | "FileName" : { "S.$" : "$.fileName" }, 267 | } 268 | }, 269 | End = true, 270 | } 271 | } 272 | }) 273 | role_arn = aws_iam_role.step_function_role.arn 274 | logging_configuration { 275 | level = "ALL" 276 | include_execution_data = true 277 | log_destination = "${aws_cloudwatch_log_group.MySFNLogGroup.arn}:*" 278 | } 279 | timeouts { 280 | create = "1m" 281 | } 282 | } 283 | 284 | resource "aws_lambda_permission" "allow_bucket" { 285 | statement_id = "AllowExecutionFromS3Bucket" 286 | action = "lambda:InvokeFunction" 287 | function_name = aws_lambda_function.upload_trigger_lambda.function_name 288 | principal = "s3.amazonaws.com" 289 | source_arn = aws_s3_bucket.my_bucket.arn 290 | } 291 | 292 | resource "aws_s3_bucket_notification" "bucket_notification" { 293 | bucket = aws_s3_bucket.my_bucket.id 294 | 295 | lambda_function { 296 | lambda_function_arn = aws_lambda_function.upload_trigger_lambda.arn 297 | events = ["s3:ObjectCreated:*"] 298 | } 299 | 300 | depends_on = [aws_lambda_permission.allow_bucket] 301 | } 302 | 303 | # tflint-ignore: terraform_required_providers 304 | resource "time_sleep" "wait" { 305 | create_duration = "15s" 306 | triggers = { 307 | s3_object = local.key_json 308 | } 309 | 310 | } 311 | 312 | data "aws_dynamodb_table_item" "test" { 313 | 314 | table_name = var.dynamodb_table_name 315 | key = time_sleep.wait.triggers.s3_object 316 | } 317 | locals { 318 | key_json = jsonencode({ 319 | "FileName" = { 320 | "S" = aws_s3_object.object.key 321 | } 322 | }) 323 | # tflint-ignore: terraform_unused_declarations 324 | first_decode = jsondecode(data.aws_dynamodb_table_item.test.item) 325 | } 326 | 327 | resource "aws_s3_object" "object" { 328 | bucket = var.s3_bucket_name 329 | key = var.s3_object_key 330 | source = "${path.root}/${var.s3_object_key}" 331 | depends_on = [aws_s3_bucket.my_bucket, aws_s3_bucket_notification.bucket_notification] 332 | } 333 | --------------------------------------------------------------------------------