├── .envrc ├── .github └── workflows │ └── README.yml ├── .gitignore ├── .tool-versions ├── CHANGELOG.md ├── LICENSE ├── Makefile ├── README.md ├── codebuild_builder ├── cfn.yaml.tmpl └── lambda.py ├── lambda_builders ├── cfn.yaml.tmpl ├── nodejs.js └── python.py ├── main.tf ├── outputs.tf ├── tests ├── changes │ ├── Makefile │ ├── lambda.tf.py │ ├── main.tf │ ├── src │ │ ├── build.sh │ │ └── lambda.py │ ├── terraform.tf.py │ └── test_changes.py ├── filename │ ├── Makefile │ ├── main.tf │ ├── src │ │ └── lambda.py │ ├── terraform.tf.py │ └── test_filename.py ├── golang │ ├── Makefile │ ├── main.tf │ ├── src │ │ ├── buildspec.yml │ │ ├── main.go │ │ └── main_test.go │ ├── terraform.tf.py │ └── test_golang.py ├── nodejs │ ├── Makefile │ ├── main.tf │ ├── src │ │ ├── build.sh │ │ ├── index.js │ │ └── package.json │ ├── terraform.tf.py │ └── test_nodejs.py ├── python │ ├── Makefile │ ├── main.tf │ ├── src │ │ ├── build.sh │ │ ├── lambda.py │ │ └── requirements.txt │ ├── terraform.tf.py │ └── test_python.py └── s3 │ ├── Makefile │ ├── main.tf │ ├── src │ └── lambda.py │ ├── terraform.tf.py │ └── test_s3.py ├── validate.py ├── variables.tf ├── versions.tf └── zip_files ├── .gitignore └── README.md /.envrc: -------------------------------------------------------------------------------- 1 | layout python3 2 | 3 | pip install black flake8 isort 'pretf[aws]==0.7.3' pytest pytest-xdist 4 | -------------------------------------------------------------------------------- /.github/workflows/README.yml: -------------------------------------------------------------------------------- 1 | name: README.md 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | 8 | jobs: 9 | build: 10 | runs-on: ubuntu-18.04 11 | steps: 12 | - uses: actions/checkout@v2 13 | - run: docker run --rm -v $PWD:/data claranet/terraform-docs:0.8.1 terraform-docs-replace-012 --sort-inputs-by-required --with-aggregate-type-defaults --no-providers md README.md 14 | - uses: claranet/git-auto-commit-action@v3.0.0 
15 | with: 16 | file_pattern: README.md 17 | commit_message: Update README.md using terraform-docs 18 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | .direnv 3 | .terraform 4 | *.tf.json 5 | *.zip 6 | tests/changes/src/*.json 7 | tests/filename/src/*.json 8 | -------------------------------------------------------------------------------- /.tool-versions: -------------------------------------------------------------------------------- 1 | terraform 0.12.26 2 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # terraform-aws-lambda-builder changes 2 | 3 | ## v1.0.1 4 | 5 | ### Fixed 6 | 7 | * Fixed dependency in S3 mode 8 | 9 | ## v1.0.0 10 | 11 | ### Added 12 | 13 | * New `build_mode` value `CODEBUILD` with example using the `go1.x` runtime. 14 | 15 | ### Breaking changes 16 | 17 | * `builder_memory_size` renamed to `lambda_builder_memory_size`. 18 | * `builder_timeout` renamed to `lambda_builder_timeout`. 
19 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Raymond Butcher 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: all 2 | all: 3 | isort --recursive *.py codebuild_builder lambda_builders tests 4 | black *.py codebuild_builder lambda_builders tests 5 | flake8 --ignore E501 *.py codebuild_builder lambda_builders tests 6 | terraform fmt -recursive 7 | 8 | .PHONY: clean 9 | clean: 10 | find tests -maxdepth 3 -name '*.json' -delete 11 | find zip_files -name '*.zip' -delete 12 | 13 | .PHONY: test tests 14 | test tests: 15 | pytest -v -n auto --dist=loadfile tests 16 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # terraform-aws-lambda-builder 2 | 3 | This Terraform module packages and deploys an AWS Lambda function. It optionally runs a build script *inside Lambda or CodeBuild* to build the Lambda package. This is great for building and deploying Go, Node.js, Python and other Lambda functions without needing any of their toolchains installed. 4 | 5 | ## Features 6 | 7 | * Supports `source_dir` to automatically create Lambda packages. 8 | * Handles source code changes automatically and correctly. 9 | * No unexpected changes in Terraform plans. 10 | * Supports `LAMBDA build_mode` to run a build script inside Lambda. 11 | * Runs inside Lambda using the same runtime environment as the target Lambda function. 12 | * Define your own build script with shell commands like `pip install`, `npm install`, etc. 13 | * No reliance on `pip`, `virtualenv`, `npm`, etc. being on the machine running Terraform. 14 | * Smaller zip files to upload because `pip install`, etc. doesn't run locally. 15 | * Supports `CODEBUILD build_mode` to run inside CodeBuild. 16 | * Define your own build steps in `buildspec.yml` with shell commands like `go build`, etc.
17 | * No reliance on `go`, etc being on the machine running Terraform. 18 | * Smaller zip files to upload because `go get`, `go build`, etc. doesn't run locally. 19 | * Supports `S3/FILENAME build_mode` to just get the zip functionality. 20 | * For when there are no build steps but you still want the `source_dir` zip functionality. 21 | * Helps you to avoid: 22 | * Extra setup requirements on the machine running Terraform. 23 | * Separate build steps to create packages before running Terraform. 24 | * Committing built package zip files to version control. 25 | 26 | ## Requirements 27 | 28 | * Python 29 | 30 | Python is used to create deterministic zip files. Terraform's `archive_file` data source is not used because it sometimes [produces different results](https://github.com/terraform-providers/terraform-provider-archive/issues/34) which lead to spurious resource changes when working in teams. 31 | 32 | ## Example 33 | 34 | ```terraform 35 | module "lambda_function" { 36 | source = "github.com/raymondbutcher/terraform-aws-lambda-builder" 37 | 38 | # Standard aws_lambda_function attributes. 39 | function_name = "example" 40 | handler = "lambda.handler" 41 | runtime = "python3.6" 42 | s3_bucket = aws_s3_bucket.packages.id 43 | timeout = 30 44 | 45 | # Enable build functionality. 46 | build_mode = "LAMBDA" 47 | source_dir = "${path.module}/src" 48 | 49 | # Create and use a role with CloudWatch Logs permissions. 50 | role_cloudwatch_logs = true 51 | } 52 | ``` 53 | 54 | See the [tests](tests) directory for more working examples. 55 | 56 | ## Build modes 57 | 58 | The `build_mode` input variable can be set to one of: 59 | 60 | * `CODEBUILD` 61 | * Zips `source_dir`, uploads it to `s3_bucket` and runs CodeBuild to build the final package. 62 | * `LAMBDA` 63 | * Zips `source_dir`, uploads it to `s3_bucket` and runs `build.sh` inside Lambda to build the final package. 64 | * `S3` 65 | * Zips `source_dir` and uploads to `s3_bucket` at `s3_key`. 
66 | * `FILENAME` 67 | * Zips `source_dir` and uploads it directly to the Lambda service. 68 | * `DISABLED` (default) 69 | * Disables build functionality. 70 | 71 | ### CodeBuild build mode 72 | 73 | If running in `CODEBUILD` build mode, then this module will use CodeBuild and your `buildspec.yml` file to create a new package for the Lambda function to use. 74 | 75 | The `CODEBUILD` build mode works as follows. 76 | 77 | * Terraform runs [zip.py](https://github.com/raymondbutcher/terraform-archive-stable) which: 78 | * Creates a zip file from the source directory. 79 | * Timestamps and permissions are normalised so the resulting file hash is consistent and only affected by meaningful changes. 80 | * Terraform uploads the zip file to the S3 bucket. 81 | * Terraform creates a CloudFormation stack which: 82 | * Creates a CodeBuild project which: 83 | * Uses the S3 bucket zip file as the source. 84 | * Uses the `buildspec.yml` file from the zipped source directory. 85 | * Should build and output artifacts to include in the new zip file. 86 | * Creates a custom resource Lambda function which: 87 | * Starts the CodeBuild project. 88 | * Gets invoked again when CodeBuild finishes. 89 | * Verifies that CodeBuild has uploaded the new zip file. 90 | * Outputs the location of the new zip file for Terraform to use. 91 | * Terraform creates a Lambda function using the new zip file. 92 | 93 | ### Lambda build mode 94 | 95 | If running in `LAMBDA` build mode, then this module will run `build.sh` from `source_dir` inside the target Lambda runtime environment, and then create a new package for the final Lambda function to use. 96 | 97 | The `LAMBDA` build mode works as follows. 98 | 99 | * Terraform runs [zip.py](https://github.com/raymondbutcher/terraform-archive-stable) which: 100 | * Creates a zip file from the source directory. 101 | * Timestamps and permissions are normalised so the resulting file hash is consistent and only affected by meaningful changes. 
102 | * Terraform uploads the zip file to the S3 bucket. 103 | * Terraform creates a CloudFormation stack which: 104 | * Creates a custom resource Lambda function which: 105 | * Downloads the zip file from the S3 bucket. 106 | * Extracts the zip file. 107 | * Runs the build script. 108 | * Creates a new zip file. 109 | * Uploads it to the S3 bucket in another location. 110 | * Outputs the location of the new zip file for Terraform to use. 111 | * Terraform creates a Lambda function using the new zip file. 112 | 113 | Different runtimes have different tools installed. Here are some notes about what is available to use in `build.sh`. 114 | 115 | | Runtime | Notes | 116 | |------------|---------------------| 117 | | nodejs10.x | `npm install` works | 118 | | nodejs12.x | `npm install` works | 119 | | nodejs14.x | waiting on [this](https://github.com/aws-cloudformation/aws-cloudformation-coverage-roadmap/issues/80), try CodeBuild instead | 120 | | python2.7 | `pip` not included | 121 | | python3.6 | `pip install` works | 122 | | python3.7 | `pip install` works | 123 | | python3.8 | `pip install` works | 124 | 125 | Runtimes not listed above have not been tested. 126 | 127 | ### S3 build mode 128 | 129 | The `S3` build mode zips `source_dir` and uploads it to S3 using `s3_bucket` and `s3_key`. It automatically sets `source_code_hash` to ensure changes to the source code get deployed. 130 | 131 | ### Filename build mode 132 | 133 | The `FILENAME` build mode zips `source_dir` and writes it to `filename`. The package is uploaded directly to the Lambda service. It automatically sets `source_code_hash` to ensure changes to the source code get deployed. 134 | 135 | ### Disabled build mode 136 | 137 | The `DISABLED` build mode disables build functionality, making this module do nothing except create a Lambda function resource and optionally its IAM role. 138 | 139 | ## Automatic role creation 140 | 141 | If a `role` is not provided then one will be created automatically. 
There are various input variables which add policies to this role. If `dead_letter_config` or `vpc_config` are set, then the required policies are automatically attached to this role. 142 | 143 | 147 | 148 | ## Inputs 149 | 150 | | Name | Description | Type | Default | Required | 151 | |------|-------------|------|---------|:-----:| 152 | | create\_role | Create an IAM role for the function. Only required when `role` is a computed/unknown value. | `bool` | n/a | yes | 153 | | dead\_letter\_config | Nested block to configure the function's dead letter queue. See details below. |
object({| n/a | yes | 154 | | description | Description of what your Lambda Function does. | `string` | n/a | yes | 155 | | environment | The Lambda environment's configuration settings. |
target_arn = string
})
object({| n/a | yes | 156 | | filename | The path to the function's deployment package within the local filesystem. If defined, The s3\_-prefixed options cannot be used. | `string` | n/a | yes | 157 | | function\_name | A unique name for your Lambda Function. | `string` | n/a | yes | 158 | | handler | The function entrypoint in your code. | `string` | n/a | yes | 159 | | kms\_key\_arn | Amazon Resource Name (ARN) of the AWS Key Management Service (KMS) key that is used to encrypt environment variables. | `string` | n/a | yes | 160 | | layers | List of Lambda Layer Version ARNs (maximum of 5) to attach to your Lambda Function. | `list(string)` | n/a | yes | 161 | | memory\_size | Amount of memory in MB your Lambda Function can use at runtime. | `number` | n/a | yes | 162 | | publish | Whether to publish creation/change as new Lambda Function Version. | `bool` | n/a | yes | 163 | | reserved\_concurrent\_executions | The amount of reserved concurrent executions for this lambda function. A value of 0 disables lambda from being triggered and -1 removes any concurrency limitations. | `number` | n/a | yes | 164 | | role | IAM role attached to the Lambda Function. This governs both who / what can invoke your Lambda Function, as well as what resources our Lambda Function has access to. | `string` | n/a | yes | 165 | | role\_custom\_policies\_count | The number of `role_custom_policies` to attach. Only required when `role_custom_policies` is a computed/unknown value. | `number` | n/a | yes | 166 | | role\_policy\_arns\_count | The number of `role_policy_arns` to attach. Only required when `role_policy_arns` is a computed/unknown value. | `number` | n/a | yes | 167 | | runtime | The identifier of the function's runtime. | `string` | n/a | yes | 168 | | s3\_bucket | The S3 bucket location containing the function's deployment package. Conflicts with filename. This bucket must reside in the same AWS region where you are creating the Lambda function. 
| `string` | n/a | yes | 169 | | s3\_key | The S3 key of an object containing the function's deployment package. Conflicts with filename. | `string` | n/a | yes | 170 | | s3\_object\_version | The object version containing the function's deployment package. Conflicts with filename. | `string` | n/a | yes | 171 | | source\_code\_hash | Used to trigger updates. Must be set to a base64-encoded SHA256 hash of the package file specified with either filename or s3\_key. | `string` | n/a | yes | 172 | | tags | A mapping of tags to assign to the object. | `map(string)` | n/a | yes | 173 | | timeout | The amount of time your Lambda Function has to run in seconds. | `number` | n/a | yes | 174 | | tracing\_config | Provide this to configure tracing. |
variables = map(string)
})
object({| n/a | yes | 175 | | vpc\_config | Provide this to allow your function to access your VPC. |
mode = string
})
object({| n/a | yes | 176 | | build\_mode | The build mode to use, one of `CODEBUILD`, `DISABLED`, `FILENAME`, `LAMBDA`, `S3`. | `string` | `"DISABLED"` | no | 177 | | codebuild\_environment\_compute\_type | Compute type for CodeBuild. See https://docs.aws.amazon.com/codebuild/latest/userguide/build-env-ref-compute-types.html | `string` | `"BUILD_GENERAL1_SMALL"` | no | 178 | | codebuild\_environment\_image | Image for CodeBuild. See https://docs.aws.amazon.com/codebuild/latest/userguide/build-env-ref-available.html | `string` | `"aws/codebuild/amazonlinux2-x86_64-standard:3.0"` | no | 179 | | codebuild\_environment\_type | The type of CodeBuild build environment to use. See https://docs.aws.amazon.com/codebuild/latest/userguide/build-env-ref-compute-types.html | `string` | `"LINUX_CONTAINER"` | no | 180 | | codebuild\_queued\_timeout\_in\_minutes | The number of minutes CodeBuild is allowed to be queued before it times out. | `number` | `15` | no | 181 | | codebuild\_timeout\_in\_minutes | The number of minutes CodeBuild is allowed to run before it times out. | `number` | `60` | no | 182 | | empty\_dirs | Include empty directories in the Lambda package. | `bool` | `false` | no | 183 | | enabled | Create resources. | `bool` | `true` | no | 184 | | lambda\_builder\_memory\_size | Memory size for the builder Lambda function. | `number` | `512` | no | 185 | | lambda\_builder\_timeout | Timeout for the builder Lambda function. | `number` | `900` | no | 186 | | role\_cloudwatch\_logs | If `role` is not provided, one will be created with a policy that enables CloudWatch Logs. | `bool` | `false` | no | 187 | | role\_custom\_policies | If `role` is not provided, one will be created with these JSON policies attached. | `list(string)` | `[]` | no | 188 | | role\_policy\_arns | If `role` is not provided, one will be created with these policy ARNs attached. | `list(string)` | `[]` | no | 189 | | source\_dir | Local source directory for the Lambda package. 
This will be zipped and uploaded to the S3 bucket. Requires `s3_bucket`. Conflicts with `s3_key`, `s3_object_version` and `filename`. | `string` | `""` | no | 190 | 191 | ## Outputs 192 | 193 | | Name | Description | 194 | |------|-------------| 195 | | arn | The Amazon Resource Name (ARN) identifying your Lambda Function. | 196 | | dead\_letter\_config | The function's dead letter queue configuration. | 197 | | description | Description of what your Lambda Function does. | 198 | | environment | The Lambda environment's configuration settings. | 199 | | function\_name | The unique name for your Lambda Function. | 200 | | handler | The function entrypoint in your code. | 201 | | invoke\_arn | The ARN to be used for invoking Lambda Function from API Gateway. | 202 | | kms\_key\_arn | The ARN for the KMS encryption key. | 203 | | last\_modified | The date this resource was last modified. | 204 | | layers | List of Lambda Layer Version ARNs attached to your Lambda Function. | 205 | | log\_group\_name | The log group name for your Lambda Function. | 206 | | log\_group\_name\_edge | The log group name for your Lambda@Edge Function. | 207 | | memory\_size | Amount of memory in MB your Lambda Function can use at runtime. | 208 | | publish | Whether creation/changes will publish a new Lambda Function Version. | 209 | | qualified\_arn | The Amazon Resource Name (ARN) identifying your Lambda Function Version (if versioning is enabled via publish = true). | 210 | | reserved\_concurrent\_executions | The amount of reserved concurrent executions for this lambda function. | 211 | | role | IAM role attached to the Lambda Function. | 212 | | role\_name | The name of the IAM role attached to the Lambda Function. | 213 | | runtime | The identifier of the function's runtime. | 214 | | s3\_bucket | The S3 bucket location containing the function's deployment package. | 215 | | s3\_key | The S3 key of an object containing the function's deployment package. 
| 216 | | s3\_object\_version | The object version containing the function's deployment package. | 217 | | source\_code\_hash | Base64-encoded representation of raw SHA-256 sum of the zip file. | 218 | | source\_code\_size | The size in bytes of the function .zip file. | 219 | | tags | A mapping of tags assigned to the object. | 220 | | timeout | The amount of time your Lambda Function has to run in seconds. | 221 | | tracing\_config | The tracing configuration. | 222 | | version | Latest published version of your Lambda Function. | 223 | | vpc\_config | The VPC configuration. | 224 | 225 | 226 | -------------------------------------------------------------------------------- /codebuild_builder/cfn.yaml.tmpl: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: "2010-09-09" 2 | Description: Builds a Lambda function 3 | Parameters: 4 | Bucket: 5 | Description: S3 bucket containing Lambda packages 6 | Type: String 7 | KeyPrefix: 8 | Description: S3 key prefix for Lambda files 9 | Type: String 10 | KeySource: 11 | Description: S3 key for this Lambda function's source zip 12 | Type: String 13 | KeyTarget: 14 | Description: S3 key for the built package zip 15 | Type: String 16 | KeyTargetPath: 17 | Description: S3 key path for the built package zip 18 | Type: String 19 | KeyTargetName: 20 | Description: S3 key name for the built package zip 21 | Type: String 22 | Resources: 23 | CodeBuildRole: 24 | Type: AWS::IAM::Role 25 | Properties: 26 | AssumeRolePolicyDocument: 27 | Version: "2012-10-17" 28 | Statement: 29 | - Effect: Allow 30 | Principal: 31 | Service: 32 | - codebuild.amazonaws.com 33 | Action: 34 | - sts:AssumeRole 35 | Policies: 36 | - PolicyName: Logs 37 | PolicyDocument: 38 | Version: '2012-10-17' 39 | Statement: 40 | - Effect: Allow 41 | Action: 42 | - 'logs:CreateLogStream' 43 | - 'logs:PutLogEvents' 44 | Resource: !Sub "arn:aws:logs:$${AWS::Region}:$${AWS::AccountId}:log-group:/aws/codebuild/*:*" 45 
| - Sid: AllowLambdaToCreateLogGroups 46 | Effect: Allow 47 | Action: 48 | - 'logs:CreateLogGroup' 49 | Resource: !Sub "arn:aws:logs:$${AWS::Region}:$${AWS::AccountId}:*" 50 | - PolicyName: S3 51 | PolicyDocument: 52 | Version: '2012-10-17' 53 | Statement: 54 | - Effect: Allow 55 | Action: 56 | - s3:GetObject 57 | - s3:PutObject 58 | Resource: !Sub "arn:aws:s3:::$${Bucket}/$${KeyPrefix}/*" 59 | CodeBuild: 60 | Type: AWS::CodeBuild::Project 61 | Properties: 62 | Artifacts: 63 | Type: S3 64 | Location: !Ref Bucket 65 | Path: !Ref KeyTargetPath 66 | Name: !Ref KeyTargetName 67 | Packaging: ZIP 68 | Description: !Sub "Builds s3://$${Bucket}/$${KeyTarget}" 69 | Environment: 70 | ComputeType: ${jsonencode(codebuild_environment_compute_type)} 71 | Image: ${jsonencode(codebuild_environment_image)} 72 | Type: ${jsonencode(codebuild_environment_type)} 73 | QueuedTimeoutInMinutes: ${jsonencode(codebuild_queued_timeout_in_minutes)} 74 | ServiceRole: !GetAtt CodeBuildRole.Arn 75 | Source: 76 | Type: S3 77 | Location: !Sub "$${Bucket}/$${KeySource}" 78 | TimeoutInMinutes: ${jsonencode(codebuild_timeout_in_minutes)} 79 | CodeBuildEvent: 80 | Type: AWS::Events::Rule 81 | Properties: 82 | EventPattern: { "source": [ "aws.codebuild" ], "detail-type": [ "CodeBuild Build State Change" ], "detail": { "build-status": [ "FAILED", "STOPPED", "SUCCEEDED" ], "project-name": [ !Ref CodeBuild ] } } 83 | Targets: 84 | - Arn: !GetAtt LambdaFunction.Arn 85 | Id: !Ref LambdaFunction 86 | CodeBuildEventPermission: 87 | Type: AWS::Lambda::Permission 88 | Properties: 89 | Action: lambda:InvokeFunction 90 | FunctionName: !Ref LambdaFunction 91 | Principal: events.amazonaws.com 92 | SourceArn: !GetAtt CodeBuildEvent.Arn 93 | LambdaRole: 94 | Type: AWS::IAM::Role 95 | Properties: 96 | AssumeRolePolicyDocument: 97 | Version: "2012-10-17" 98 | Statement: 99 | - Effect: Allow 100 | Principal: 101 | Service: 102 | - lambda.amazonaws.com 103 | Action: 104 | - sts:AssumeRole 105 | Policies: 106 | - 
PolicyName: CodeBuild 107 | PolicyDocument: 108 | Version: '2012-10-17' 109 | Statement: 110 | - Effect: Allow 111 | Action: 112 | - codebuild:StartBuild 113 | Resource: !GetAtt CodeBuild.Arn 114 | - PolicyName: Logs 115 | PolicyDocument: 116 | Version: '2012-10-17' 117 | Statement: 118 | - Effect: Allow 119 | Action: 120 | - 'logs:CreateLogStream' 121 | - 'logs:PutLogEvents' 122 | Resource: !Sub "arn:aws:logs:$${AWS::Region}:$${AWS::AccountId}:log-group:/aws/lambda/*:*" 123 | - Sid: AllowLambdaToCreateLogGroups 124 | Effect: Allow 125 | Action: 126 | - 'logs:CreateLogGroup' 127 | Resource: !Sub "arn:aws:logs:$${AWS::Region}:$${AWS::AccountId}:*" 128 | - PolicyName: S3 129 | PolicyDocument: 130 | Version: '2012-10-17' 131 | Statement: 132 | - Effect: Allow 133 | Action: 134 | - s3:GetObject 135 | - s3:DeleteObject 136 | Resource: !Sub "arn:aws:s3:::$${Bucket}/$${KeyPrefix}/*" 137 | LambdaFunction: 138 | Type: AWS::Lambda::Function 139 | Properties: 140 | Description: !Sub "Builds s3://$${Bucket}/$${KeyTarget}" 141 | Handler: ${jsonencode(lambda_builder_handler)} 142 | MemorySize: ${jsonencode(lambda_builder_memory_size)} 143 | Runtime: ${jsonencode(lambda_builder_runtime)} 144 | Timeout: ${jsonencode(lambda_builder_timeout)} 145 | Code: 146 | ZipFile: ${jsonencode(lambda_builder_code)} 147 | Role: !GetAtt LambdaRole.Arn 148 | LambdaPackage: 149 | Type: Custom::LambdaPackage 150 | Properties: 151 | ServiceToken: !GetAtt LambdaFunction.Arn 152 | CodeBuildProjectName: !Ref CodeBuild 153 | CodeBuildEvent: !Ref CodeBuildEvent # ensures the event rule exists before lambda and codebuild 154 | Bucket: !Ref Bucket 155 | KeyTarget: !Ref KeyTarget 156 | Outputs: 157 | Bucket: 158 | Description: S3 bucket containing the built Lambda package 159 | Value: !Ref Bucket 160 | Key: 161 | Description: S3 key for the built Lambda package 162 | Value: !Ref KeyTarget 163 | -------------------------------------------------------------------------------- /codebuild_builder/lambda.py: 
-------------------------------------------------------------------------------- 1 | import json 2 | 3 | import boto3 4 | import cfnresponse 5 | 6 | codebuild_client = boto3.client("codebuild") 7 | s3_client = boto3.client("s3") 8 | 9 | 10 | def handler(event, context): 11 | physical_resource_id = None 12 | codebuild_running = False 13 | try: 14 | 15 | if event.get("RequestType") in ("Create", "Update"): 16 | 17 | bucket = event["ResourceProperties"]["Bucket"] 18 | key_target = event["ResourceProperties"]["KeyTarget"] 19 | physical_resource_id = f"arn:aws:s3:::{bucket}/{key_target}" 20 | 21 | start_build(event) 22 | codebuild_running = True 23 | 24 | elif event.get("source") == "aws.codebuild": 25 | 26 | codebuild_status = event["detail"]["build-status"] 27 | 28 | # Replace event with original event, will be used by cfnresponse. 29 | env = event["detail"]["additional-information"]["environment"][ 30 | "environment-variables" 31 | ] 32 | for var in env: 33 | if var["name"] == "CFN_EVENT": 34 | event = json.loads(var["value"]) 35 | break 36 | else: 37 | raise ValueError(env) 38 | 39 | bucket = event["ResourceProperties"]["Bucket"] 40 | key_target = event["ResourceProperties"]["KeyTarget"] 41 | physical_resource_id = f"arn:aws:s3:::{bucket}/{key_target}" 42 | 43 | if codebuild_status != "SUCCEEDED": 44 | raise ValueError(codebuild_status) 45 | 46 | # Ensure zip was uploaded (must be in buildspec). 47 | s3_client.head_object(Bucket=bucket, Key=key_target) 48 | 49 | # Delete previous zip after updates. 
50 | if event["RequestType"] == "Update": 51 | old_physical_resource_id = event["PhysicalResourceId"] 52 | if old_physical_resource_id != physical_resource_id: 53 | delete(old_physical_resource_id) 54 | 55 | elif event.get("RequestType") == "Delete": 56 | 57 | physical_resource_id = event["PhysicalResourceId"] 58 | delete(physical_resource_id) 59 | 60 | bucket = event["ResourceProperties"]["Bucket"] 61 | key_target = event["ResourceProperties"]["KeyTarget"] 62 | delete(f"arn:aws:s3:::{bucket}/{key_target}") 63 | 64 | else: 65 | 66 | raise ValueError(event) 67 | 68 | status = cfnresponse.SUCCESS 69 | 70 | except Exception: 71 | 72 | status = cfnresponse.FAILED 73 | 74 | print(event) 75 | 76 | raise 77 | 78 | finally: 79 | 80 | if not codebuild_running: 81 | response_data = {} 82 | cfnresponse.send( 83 | event, context, status, response_data, physical_resource_id 84 | ) 85 | 86 | 87 | def start_build(event): 88 | 89 | env = {} 90 | 91 | bucket = event["ResourceProperties"]["Bucket"] 92 | key_target = event["ResourceProperties"]["KeyTarget"] 93 | 94 | env["TARGET_BUCKET"] = bucket 95 | env["TARGET_KEY"] = key_target 96 | env["TARGET_URL"] = f"s3://{bucket}/{key_target}" 97 | 98 | env["CFN_EVENT"] = json.dumps(event) 99 | 100 | response = codebuild_client.start_build( 101 | projectName=event["ResourceProperties"]["CodeBuildProjectName"], 102 | environmentVariablesOverride=[ 103 | {"name": key, "value": value} for (key, value) in env.items() 104 | ], 105 | ) 106 | print(response) 107 | 108 | 109 | def delete(physical_resource_id): 110 | if physical_resource_id.startswith("arn:aws:s3:::"): 111 | bucket_and_key = physical_resource_id.split(":")[-1] 112 | bucket, key = bucket_and_key.split("/", 1) 113 | s3_client.delete_object(Bucket=bucket, Key=key) 114 | -------------------------------------------------------------------------------- /lambda_builders/cfn.yaml.tmpl: -------------------------------------------------------------------------------- 1 | 
AWSTemplateFormatVersion: "2010-09-09" 2 | Description: Builds a Lambda function 3 | Parameters: 4 | Bucket: 5 | Description: S3 bucket containing Lambda packages 6 | Type: String 7 | KeyPrefix: 8 | Description: S3 key prefix for Lambda files 9 | Type: String 10 | KeySource: 11 | Description: S3 key for this Lambda function's source zip 12 | Type: String 13 | KeyTarget: 14 | Description: S3 key for the built package zip 15 | Type: String 16 | KeyTargetPath: 17 | Description: S3 key path for the built package zip 18 | Type: String 19 | KeyTargetName: 20 | Description: S3 key name for the built package zip 21 | Type: String 22 | Resources: 23 | LambdaRole: 24 | Type: AWS::IAM::Role 25 | Properties: 26 | AssumeRolePolicyDocument: 27 | Version: "2012-10-17" 28 | Statement: 29 | - 30 | Effect: Allow 31 | Principal: 32 | Service: 33 | - lambda.amazonaws.com 34 | Action: 35 | - sts:AssumeRole 36 | Policies: 37 | - PolicyName: CFN 38 | PolicyDocument: 39 | Version: '2012-10-17' 40 | Statement: 41 | - Effect: Allow 42 | Action: 43 | - cloudformation:DescribeStacks 44 | Resource: !Sub "arn:aws:cloudformation:$${AWS::Region}:$${AWS::AccountId}:*" 45 | - PolicyName: Logs 46 | PolicyDocument: 47 | Version: '2012-10-17' 48 | Statement: 49 | - Effect: Allow 50 | Action: 51 | - 'logs:CreateLogStream' 52 | - 'logs:PutLogEvents' 53 | Resource: !Sub "arn:aws:logs:$${AWS::Region}:$${AWS::AccountId}:log-group:/aws/lambda/*:*" 54 | - Sid: AllowLambdaToCreateLogGroups 55 | Effect: Allow 56 | Action: 57 | - 'logs:CreateLogGroup' 58 | Resource: !Sub "arn:aws:logs:$${AWS::Region}:$${AWS::AccountId}:*" 59 | - PolicyName: S3 60 | PolicyDocument: 61 | Version: '2012-10-17' 62 | Statement: 63 | - Effect: Allow 64 | Action: 65 | - s3:DeleteObject 66 | - s3:GetObject 67 | - s3:PutObject 68 | Resource: !Sub "arn:aws:s3:::$${Bucket}/$${KeyPrefix}/*" 69 | LambdaFunction: 70 | Type: AWS::Lambda::Function 71 | Properties: 72 | Description: !Sub "Builds s3://$${Bucket}/$${KeyTarget}" 73 | Handler: 
${jsonencode(lambda_builder_handler)} 74 | MemorySize: ${jsonencode(lambda_builder_memory_size)} 75 | Runtime: ${jsonencode(lambda_runtime)} 76 | Timeout: ${jsonencode(lambda_builder_timeout)} 77 | Code: 78 | ZipFile: ${jsonencode(lambda_builder_code)} 79 | Role: !GetAtt LambdaRole.Arn 80 | LambdaPackage: 81 | Type: Custom::LambdaPackage 82 | Properties: 83 | ServiceToken: !GetAtt LambdaFunction.Arn 84 | Bucket: !Ref Bucket 85 | KeySource: !Ref KeySource 86 | KeyTarget: !Ref KeyTarget 87 | Outputs: 88 | Bucket: 89 | Description: S3 bucket containing the built Lambda package 90 | Value: !Ref Bucket 91 | Key: 92 | Description: S3 key for the built Lambda package 93 | Value: !Ref KeyTarget 94 | -------------------------------------------------------------------------------- /lambda_builders/nodejs.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const aws = require('aws-sdk'), 4 | cfnresponse = require('cfn-response'), 5 | fs = require('fs'), 6 | path = require('path'), 7 | cp = require('child_process'), 8 | s3 = new aws.S3(); 9 | 10 | exports.handler = async (event, context) => { 11 | let physicalResourceId = null, 12 | status = null; 13 | try { 14 | 15 | console.log(JSON.stringify(event)); 16 | 17 | const bucket = event.ResourceProperties.Bucket, 18 | keyTarget = event.ResourceProperties.KeyTarget; 19 | physicalResourceId = `arn:aws:s3:::${bucket}/${keyTarget}`; 20 | 21 | if (event.RequestType == "Create") { 22 | 23 | await createZip(event); 24 | 25 | } else if (event.RequestType == "Update") { 26 | 27 | await createZip(event); 28 | 29 | const oldPhysicalResourceId = event.PhysicalResourceId; 30 | 31 | if (physicalResourceId != oldPhysicalResourceId) { 32 | await deleteZip(oldPhysicalResourceId); 33 | } 34 | 35 | } else if (event.RequestType == "Delete") { 36 | 37 | const physicalResourceId = event.PhysicalResourceId; 38 | 39 | await deleteZip(physicalResourceId); 40 | 41 | } else { 42 | 43 | throw new 
/**
 * Builds the Lambda package for a Create/Update event.
 *
 * Downloads the source zip named by the custom resource properties,
 * extracts it into /tmp/build, runs the bundled build.sh script, re-zips
 * the directory with yazl, and uploads the result to KeyTarget.
 *
 * yazl and unzipper are npm-installed into /tmp at runtime because the
 * builder function is deployed as a single inline source file.
 *
 * @param {object} event CloudFormation custom resource event; reads
 *   event.ResourceProperties.{Bucket,KeySource,KeyTarget}.
 */
async function createZip(event) {
    const p = event.ResourceProperties,
        bucket = p.Bucket,
        keySource = p.KeySource,
        keyTarget = p.KeyTarget,
        env = Object.assign({}, process.env);

    env.HOME = '/tmp'; // npm writes to home dir which is readonly in Lambda

    // Run a shell command synchronously, streaming its output to the logs.
    const exec = (cmd, cwd) => cp.execSync(cmd, { cwd, env, stdio: 'inherit' });

    console.log('Installing yazl');
    exec('npm install yazl unzipper', '/tmp');
    const yazl = require('/tmp/node_modules/yazl'),
        unzipper = require('/tmp/node_modules/unzipper');

    const downloadPath = "/tmp/source.zip";
    console.log(`Downloading s3://${bucket}/${keySource} to ${downloadPath}`);
    const obj = await s3.getObject({ Bucket: bucket, Key: keySource }).promise();
    await new Promise(resolve => {
        const s = fs.createWriteStream(downloadPath);
        s.on('finish', resolve);
        s.write(obj.Body);
        s.end();
    });

    const buildPath = "/tmp/build";
    console.log(`Preparing build path ${buildPath}`);
    // Clear leftovers from any previous invocation of this (reused) container.
    exec(`rm -rf ${buildPath}`);
    exec(`mkdir ${buildPath}`);
    process.chdir(buildPath);

    console.log(`Extracting ${downloadPath} to ${buildPath}`);
    await new Promise(resolve => {
        fs.createReadStream(downloadPath).pipe(unzipper.Extract({ path: buildPath }).on("close", resolve));
    });
    fs.unlinkSync(downloadPath);

    // build.sh comes from the source zip; execSync throws on a non-zero
    // exit status, which propagates to the handler and fails the resource.
    console.log("Running build script");
    fs.chmodSync("./build.sh", "755");
    exec("ls -alh");
    exec("./build.sh");

    const builtPath = "/tmp/built.zip";
    console.log(`Creating ${builtPath} from ${buildPath}`);
    await new Promise(resolve => {
        const zipfile = new yazl.ZipFile();
        zipfile.outputStream.pipe(fs.createWriteStream(builtPath)).on("close", resolve);
        for (const absPath of walkSync(buildPath)) {
            const relPath = path.relative(buildPath, absPath);
            zipfile.addFile(absPath, relPath);
        }
        zipfile.end();
    });

    console.log(`Uploading zip to s3://${bucket}/${keyTarget}`);
    await s3.putObject({ Bucket: bucket, Key: keyTarget, Body: fs.createReadStream(builtPath) }).promise();
}

/**
 * Deletes the built package zip.
 *
 * @param {string} resourceId the custom resource's physical id, an S3
 *   object ARN (arn:aws:s3:::bucket/key); bucket and key are parsed
 *   from its final colon-separated segment.
 */
async function deleteZip(resourceId) {
    const arnParts = resourceId.split(":"),
        bucketAndKey = arnParts[arnParts.length - 1],
        bucket = bucketAndKey.substring(0, bucketAndKey.indexOf('/')),
        key = bucketAndKey.substring(bucketAndKey.indexOf('/') + 1);

    console.log(`Deleting s3://${bucket}/${key}`);
    await s3.deleteObject({ Bucket: bucket, Key: key }).promise();
}

/**
 * Recursively yields the absolute path of every regular file under dir
 * (directories themselves are not yielded).
 */
function* walkSync(dir) {
    const files = fs.readdirSync(dir);
    for (const file of files) {
        const fpath = path.join(dir, file),
            isDir = fs.statSync(fpath).isDirectory();
        if (isDir) {
            yield* walkSync(fpath);
        } else {
            yield fpath;
        }
    }
}
def build(event):
    """
    Build the Lambda deployment package for a Create/Update event.

    Downloads the source zip named by the custom resource properties,
    extracts it into /tmp/build, runs the bundled build.sh script, zips
    the resulting directory in memory, and uploads it to the target key.

    Args:
        event: CloudFormation custom resource event; reads
            event["ResourceProperties"]["Bucket"/"KeySource"/"KeyTarget"].

    Raises:
        RuntimeError: if build.sh exits with a non-zero status.
    """
    # Local import: keeps this single-file builder self-contained without
    # touching the module-level import block.
    import subprocess

    bucket = event["ResourceProperties"]["Bucket"]
    key_source = event["ResourceProperties"]["KeySource"]
    key_target = event["ResourceProperties"]["KeyTarget"]

    # Download the source zip.
    download_path = "/tmp/source.zip"
    print("Downloading s3://{}/{} to {}".format(bucket, key_source, download_path))
    s3_client.download_file(bucket, key_source, download_path)

    # Prepare an empty build directory, removing leftovers from any
    # previous invocation of this (reused) Lambda container.
    build_path = "/tmp/build"
    print("Preparing build path {}".format(build_path))
    if os.path.exists(build_path):
        shutil.rmtree(build_path)
    os.mkdir(build_path)
    os.chdir(build_path)

    # Extract the source zip into the build directory (the cwd).
    print("Extracting {} to {}".format(download_path, build_path))
    with zipfile.ZipFile(download_path, "r") as archive:
        archive.extractall()
    os.remove(download_path)

    # Run the build script from the zip. subprocess.call returns the
    # actual exit code (the original os.system call returned the raw
    # wait status, e.g. 256 for exit code 1, making failures confusing).
    print("Running build script")
    os.chmod("./build.sh", 0o755)
    exit_status = subprocess.call("./build.sh")
    if exit_status != 0:
        raise RuntimeError("build.sh failed with exit status {}".format(exit_status))

    # Zip up the build directory in memory and then upload it.
    with io.BytesIO() as zip_buffer:

        print("Zipping {}".format(build_path))
        # "w" creates a fresh archive (the original opened in "a" mode);
        # ZIP_DEFLATED compresses entries instead of storing them raw.
        with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zip_file:
            for root, sub_dirs, files in os.walk(build_path):
                for file_name in files:
                    absolute_path = os.path.join(root, file_name)
                    relative_path = os.path.relpath(absolute_path, build_path)
                    zip_file.write(absolute_path, relative_path)
        # Rewind only after the ZipFile is closed so the central
        # directory has been written.
        zip_buffer.seek(0)

        print("Uploading zip to s3://{}/{}".format(bucket, key_target))
        s3_client.put_object(Bucket=bucket, Key=key_target, Body=zip_buffer)


def delete(physical_resource_id):
    """
    Delete the built package for a Delete event.

    Args:
        physical_resource_id: the custom resource id, an S3 object ARN
            (arn:aws:s3:::bucket/key); bucket and key are parsed from
            its final colon-separated segment.
    """
    arn = physical_resource_id
    bucket_and_key = arn.split(":")[-1]
    bucket, key = bucket_and_key.split("/", 1)

    # Delete the S3 object.
    print("Deleting s3://{}/{}".format(bucket, key))
    s3_client.delete_object(Bucket=bucket, Key=key)
###############################################
# Create a zip file from the source directory #
# (if build mode is not DISABLED)             #
###############################################

# Partition, region and account id are only used to construct a
# globally unique local filename for the zip (see output_path below),
# so they are only looked up when a zip will actually be built.
data "aws_caller_identity" "current" {
  count = var.enabled && var.build_mode != "DISABLED" ? 1 : 0
}

data "aws_partition" "current" {
  count = var.enabled && var.build_mode != "DISABLED" ? 1 : 0
}

data "aws_region" "current" {
  count = var.enabled && var.build_mode != "DISABLED" ? 1 : 0
}

# Zips var.source_dir with stable (deterministic) archive metadata.
# In FILENAME mode the zip goes to var.filename; otherwise it goes into
# this module's zip_files directory under a partition/region/account/
# function-scoped name so parallel deployments do not collide.
module "source_zip_file" {
  source = "github.com/raymondbutcher/terraform-archive-stable?ref=v0.0.4"

  enabled = var.enabled && var.build_mode != "DISABLED"

  empty_dirs  = var.empty_dirs
  output_path = var.enabled && var.build_mode == "FILENAME" ? var.filename : var.enabled && var.build_mode != "DISABLED" ? "${path.module}/zip_files/${data.aws_partition.current[0].partition}-${data.aws_region.current[0].name}-${data.aws_caller_identity.current[0].account_id}-${var.function_name}.zip" : ""
  source_dir  = var.source_dir
}

############################################
# Upload the zip file to S3                #
# (if build mode is CODEBUILD, LAMBDA, S3) #
############################################

# In CODEBUILD/LAMBDA modes the key embeds the zip's hash so changed
# source produces a new object (and triggers a rebuild); in S3 mode the
# caller-supplied key is used as-is.
resource "aws_s3_bucket_object" "source_zip_file" {
  count = var.enabled && contains(["CODEBUILD", "LAMBDA", "S3"], var.build_mode) ? 1 : 0

  bucket = var.s3_bucket
  key    = contains(["CODEBUILD", "LAMBDA"], var.build_mode) ? "${var.function_name}/${module.source_zip_file.output_sha}/source.zip" : var.s3_key
  source = module.source_zip_file.output_path

  lifecycle {
    create_before_destroy = true
  }
}

locals {
  # S3 key of the uploaded source zip (null when nothing was uploaded).
  source_zip_file_s3_key = var.enabled && contains(["CODEBUILD", "LAMBDA", "S3"], var.build_mode) ? aws_s3_bucket_object.source_zip_file[0].key : null
}
# Create a random build_id that changes when the source zip
# or CloudFormation details changes.

resource "random_string" "build_id" {
  count = var.enabled && contains(["CODEBUILD", "LAMBDA"], var.build_mode) ? 1 : 0

  length  = 16
  upper   = false
  special = false

  # Regenerating either keeper forces a new build_id, which renames and
  # therefore recreates the builder stack below.
  keepers = {
    cloudformation_parameters    = sha1(jsonencode(local.cloudformation_parameters))
    cloudformation_template_body = sha1(local.cloudformation_template_body)
  }
}

# Create a CloudFormation stack that builds a Lambda package and
# then outputs the location of the built package. Use the above
# build_id as part of the stack name. Stack name changes force
# the stack to be recreated. The result is a new build whenever
# there are changes to the source_dir or changes to this module.

resource "aws_cloudformation_stack" "builder" {
  count = var.enabled && contains(["CODEBUILD", "LAMBDA"], var.build_mode) ? 1 : 0

  name         = "${var.s3_bucket}-${random_string.build_id[0].result}"
  capabilities = ["CAPABILITY_IAM"]
  on_failure   = "DELETE"

  # KeyTarget embeds the source hash and build_id so every build lands
  # on a fresh S3 key; KeyTargetPath/KeyTargetName are the same key
  # split for templates that need them separately.
  parameters = merge(local.cloudformation_parameters, {
    KeyTarget     = "${var.function_name}/${module.source_zip_file.output_sha}/${random_string.build_id[0].result}.zip"
    KeyTargetName = "${random_string.build_id[0].result}.zip"
    KeyTargetPath = "${var.function_name}/${module.source_zip_file.output_sha}"
  })

  template_body = local.cloudformation_template_body

  lifecycle {
    create_before_destroy = true
  }
}

locals {
  # S3 key of the built package, taken from the stack's outputs
  # (null when no build stack exists).
  built_s3_key = var.enabled && contains(["CODEBUILD", "LAMBDA"], var.build_mode) ? aws_cloudformation_stack.builder[0].outputs.Key : null
}

#######################################
# Create an IAM role for the function #
# (if role is not supplied)           #
#######################################

module "role" {
  source = "git::https://gitlab.com/claranet-pcp/terraform/aws/terraform-aws-lambda-role.git?ref=v0.1.0"

  # Default: create a role only when the caller did not pass one in;
  # var.create_role overrides that default when set.
  enabled = var.enabled && coalesce(var.create_role, var.role == null)

  function_name         = var.function_name
  cloudwatch_logs       = var.role_cloudwatch_logs
  custom_policies       = var.role_custom_policies
  custom_policies_count = var.role_custom_policies_count
  dead_letter_config    = var.dead_letter_config
  policy_arns           = var.role_policy_arns
  policy_arns_count     = var.role_policy_arns_count
  tags                  = var.tags
  vpc_config            = var.vpc_config
}

##############################
# Create the Lambda function #
##############################

resource "aws_lambda_function" "built" {
  count = var.enabled ? 1 : 0

  description                    = var.description
  filename                       = var.filename
  function_name                  = var.function_name
  handler                        = var.handler
  kms_key_arn                    = var.kms_key_arn
  layers                         = var.layers
  memory_size                    = var.memory_size
  publish                        = var.publish
  reserved_concurrent_executions = var.reserved_concurrent_executions
  role                           = var.role != null ? var.role : module.role.arn
  runtime                        = var.runtime
  # Built package when one exists, otherwise the uploaded source zip,
  # otherwise the caller-supplied key.
  s3_key                         = contains(["CODEBUILD", "LAMBDA", "S3"], var.build_mode) ? coalesce(local.built_s3_key, local.source_zip_file_s3_key) : var.s3_key
  s3_bucket                      = var.s3_bucket
  s3_object_version              = var.s3_object_version
  # FILENAME/S3 modes deploy the source zip directly, so its hash
  # drives updates; other modes defer to the caller's hash (if any).
  source_code_hash               = contains(["FILENAME", "S3"], var.build_mode) ? module.source_zip_file.output_base64sha256 : var.source_code_hash
  tags                           = var.tags
  timeout                        = var.timeout

  dynamic "dead_letter_config" {
    for_each = var.dead_letter_config == null ? [] : [var.dead_letter_config]
    content {
      target_arn = dead_letter_config.value.target_arn
    }
  }

  dynamic "environment" {
    for_each = var.environment == null ? [] : [var.environment]
    content {
      variables = environment.value.variables
    }
  }

  dynamic "tracing_config" {
    for_each = var.tracing_config == null ? [] : [var.tracing_config]
    content {
      mode = tracing_config.value.mode
    }
  }

  dynamic "vpc_config" {
    for_each = var.vpc_config == null ? [] : [var.vpc_config]
    content {
      security_group_ids = vpc_config.value.security_group_ids
      subnet_ids         = vpc_config.value.subnet_ids
    }
  }
}
# All outputs pass through attributes of the (conditionally created)
# function, returning null when the module is disabled.
output "dead_letter_config" {
  description = "The function's dead letter queue configuration."
  value       = var.enabled ? aws_lambda_function.built[0].dead_letter_config : null
}

output "description" {
  description = "Description of what your Lambda Function does."
  value       = var.enabled ? aws_lambda_function.built[0].description : null
}

output "environment" {
  description = "The Lambda environment's configuration settings."
  value       = var.enabled ? aws_lambda_function.built[0].environment : null
}

output "function_name" {
  description = "The unique name for your Lambda Function."
  value       = var.enabled ? aws_lambda_function.built[0].function_name : null
}

output "handler" {
  description = "The function entrypoint in your code."
  value       = var.enabled ? aws_lambda_function.built[0].handler : null
}

output "kms_key_arn" {
  description = "The ARN for the KMS encryption key."
  value       = var.enabled ? aws_lambda_function.built[0].kms_key_arn : null
}

output "layers" {
  description = "List of Lambda Layer Version ARNs attached to your Lambda Function."
  value       = var.enabled ? aws_lambda_function.built[0].layers : null
}

output "last_modified" {
  description = "The date this resource was last modified."
  value       = var.enabled ? aws_lambda_function.built[0].last_modified : null
}

output "log_group_name" {
  description = "The log group name for your Lambda Function."
  value       = var.enabled ? "/aws/lambda/${aws_lambda_function.built[0].function_name}" : null
}

output "log_group_name_edge" {
  description = "The log group name for your Lambda@Edge Function."
  value       = var.enabled ? "/aws/lambda/us-east-1.${aws_lambda_function.built[0].function_name}" : null
}

output "memory_size" {
  description = "Amount of memory in MB your Lambda Function can use at runtime."
  value       = var.enabled ? aws_lambda_function.built[0].memory_size : null
}

output "qualified_arn" {
  description = "The Amazon Resource Name (ARN) identifying your Lambda Function Version (if versioning is enabled via publish = true)."
  value       = var.enabled ? aws_lambda_function.built[0].qualified_arn : null
}

output "invoke_arn" {
  description = "The ARN to be used for invoking Lambda Function from API Gateway."
  value       = var.enabled ? aws_lambda_function.built[0].invoke_arn : null
}

output "publish" {
  description = "Whether creation/changes will publish a new Lambda Function Version."
  value       = var.enabled ? aws_lambda_function.built[0].publish : null
}

output "reserved_concurrent_executions" {
  description = "The amount of reserved concurrent executions for this lambda function."
  value       = var.enabled ? aws_lambda_function.built[0].reserved_concurrent_executions : null
}

output "role" {
  description = "IAM role attached to the Lambda Function."
  value       = var.enabled ? aws_lambda_function.built[0].role : null
}

output "role_name" {
  description = "The name of the IAM role attached to the Lambda Function."
  # The role attribute is an ARN (…:role/name); take the part after "/".
  value       = var.enabled ? element(split("/", aws_lambda_function.built[0].role), 1) : null
}

output "runtime" {
  description = "The identifier of the function's runtime."
  value       = var.enabled ? aws_lambda_function.built[0].runtime : null
}

output "s3_bucket" {
  description = "The S3 bucket location containing the function's deployment package."
  value       = var.enabled ? aws_lambda_function.built[0].s3_bucket : null
}

output "s3_key" {
  description = "The S3 key of an object containing the function's deployment package."
  value       = var.enabled ? aws_lambda_function.built[0].s3_key : null
}

output "s3_object_version" {
  description = "The object version containing the function's deployment package."
  value       = var.enabled ? aws_lambda_function.built[0].s3_object_version : null
}

output "source_code_hash" {
  description = "Base64-encoded representation of raw SHA-256 sum of the zip file."
  value       = var.enabled ? aws_lambda_function.built[0].source_code_hash : null
}

output "source_code_size" {
  description = "The size in bytes of the function .zip file."
  value       = var.enabled ? aws_lambda_function.built[0].source_code_size : null
}

output "tags" {
  description = "A mapping of tags assigned to the object."
  value       = var.enabled ? aws_lambda_function.built[0].tags : null
}

output "timeout" {
  description = "The amount of time your Lambda Function has to run in seconds."
  value       = var.enabled ? aws_lambda_function.built[0].timeout : null
}

output "tracing_config" {
  description = "The tracing configuration."
  value       = var.enabled ? aws_lambda_function.built[0].tracing_config : null
}

output "version" {
  description = "Latest published version of your Lambda Function."
  value       = var.enabled ? aws_lambda_function.built[0].version : null
}
from pretf.api import block
from pretf.collections import collect


def pretf_blocks(path):
    """
    Generates one LAMBDA-mode Lambda function per Python runtime
    version under test, plus an output listing the function names.
    """
    function_names = []
    for version in ("3.6", "3.7"):
        python_lambda = yield python_lambda_resources(version=version)
        function_names.append(python_lambda.function_name)
    yield block("output", "function_names", {"value": function_names})


@collect
def python_lambda_resources(var):
    """
    Collection yielding the resources for one Python Lambda function:
    a random function name, an IAM role, and the lambda-builder module
    itself (LAMBDA build mode). Exposes the function name as an output.
    """

    yield block("variable", "version", {})

    # e.g. version "3.6" -> label "python_36", runtime "python3.6".
    label = f"python_{var.version.replace('.', '')}"
    runtime = f"python{var.version}"

    random = yield block(
        "resource",
        "random_id",
        label,
        {"prefix": f"terraform-aws-lambda-builder-tests-{label}-", "byte_length": 8},
    )

    role = yield block(
        "module",
        f"{label}_role",
        {
            "source": "git::https://gitlab.com/claranet-pcp/terraform/aws/terraform-aws-lambda-role.git?ref=v0.0.1",
            "function_name": random.hex,
            "cloudwatch_logs": True,
        },
    )

    func = yield block(
        "module",
        f"{label}_lambda",
        {
            "source": "../../",
            "build_mode": "LAMBDA",
            "create_role": False,
            "function_name": random.hex,
            "handler": "lambda.handler",
            "role": role.arn,
            "runtime": runtime,
            # References the packages bucket defined in main.tf.
            "s3_bucket": block("aws_s3_bucket", "packages", {}).id,
            "source_dir": "./src",
            "timeout": 30,
        },
    )

    yield block("output", "function_name", {"value": func.function_name})
import glob
import json


def handler(event, context):
    """
    Return the contents of result.json merged with a sorted listing of
    the files in the package, so the test suite can verify both the
    deployed version and what the build step packaged.
    """
    with open("result.json") as fp:
        payload = json.loads(fp.read())
    payload["files"] = sorted(glob.glob("*"))
    return payload
import json

from pretf import test
from pretf.aws import get_session

# Shared AWS clients for invoking the deployed functions.
session = get_session(profile_name="rbutcher", region_name="eu-west-1")
lambda_client = session.client("lambda")
s3_client = session.client("s3")


# TODO: check s3 bucket to ensure change of files and cleanup of old ones, and empty after destroy


class TestChanges(test.SimpleTest):
    # End-to-end test: changing source content must trigger a rebuild
    # and redeploy of the Lambda functions defined in lambda.tf.py.

    def test_init_terraform(self):
        """
        Configure and initialize the backend.

        """

        self.pretf.init()

    def test_deploy_lambda_function(self):
        """
        Write "1" into a file in the source directory,
        run "terraform apply" to deploy the function,
        then invoke the function and check it returned "1".

        """

        self.set_source_version(1)
        self.apply_terraform_and_check_version(1)

    def test_update_lambda_function(self):
        """
        Write "2" into a file in the source directory,
        run "terraform apply" to update the function,
        then invoke the function and check it returned "2".

        """

        self.set_source_version(2)
        self.apply_terraform_and_check_version(2)

    def apply_terraform_and_check_version(self, version):
        """
        Apply Terraform, then invoke every deployed function and assert
        its payload matches the given source version and file listing.
        """
        # Run terraform apply and get the functions from the outputs.
        outputs = self.pretf.apply()
        function_names = outputs["function_names"]
        assert len(function_names) == 2

        # Check each function.
        for function_name in function_names:

            # Invoke the function and parse the result.
            response = lambda_client.invoke(FunctionName=function_name)
            payload = json.load(response["Payload"])

            # Check that the version matches what was written to version.json.
            assert payload["version"] == version

            # Check that the list of files in the package was altered by build.sh
            # which renames version.json to result.json.
            assert payload["files"] == ["build.sh", "lambda.py", "result.json"]

    def set_source_version(self, version):
        # Writing into src/ changes the source zip's hash, which is what
        # makes the next apply rebuild and update the functions.
        with open("src/version.json", "w") as open_file:
            json.dump({"version": version}, open_file)

    @test.always
    def test_destroy(self):
        """
        Clean up after the test.

        """

        self.pretf.destroy()
import json
import zipfile

from pretf import test, workflow
from pretf.aws import get_session

session = get_session(profile_name="rbutcher", region_name="eu-west-1")
lambda_client = session.client("lambda")


class TestFilename(test.SimpleTest):
    # Exercises FILENAME build mode (module zips src/) and plain
    # filename deployment (pre-made zip), as defined in main.tf.

    def test_init_terraform(self):
        """
        Configure and initialize the backend.

        """
        # Remove artifacts from previous runs so results are deterministic.
        workflow.delete_files("**/*.json", "*.zip")
        self.pretf.init()

    def test_deploy_lambda_function(self):
        """
        Deploy the Lambda function.

        """

        # Create a zip file to use.
        with zipfile.ZipFile("test2.zip", "w") as zip_file:
            zip_file.write("src/lambda.py", "lambda.py")

        self.pretf.apply()

    def test_invoke_lambda_functions(self):
        """
        Invoke the Lambda functions.

        """
        # filename1 was built from src/ by the module (no hello.json yet).
        response = lambda_client.invoke(
            FunctionName="terraform-aws-lambda-builder-filename1"
        )
        payload = json.load(response["Payload"])
        assert payload == {"success": True}
        assert "hello" not in payload

        # filename2 was deployed from the hand-made test2.zip.
        response = lambda_client.invoke(
            FunctionName="terraform-aws-lambda-builder-filename2"
        )
        payload = json.load(response["Payload"])
        assert payload == {"success": True}
        assert "hello" not in payload

    def test_change(self):
        """
        Change the contents of source_dir so the module
        updates the function. Deploy the updated function
        and invoke it to check for the changed payload.

        """

        with open("src/hello.json", "w") as open_file:
            json.dump({"hello": True}, open_file)

        self.pretf.apply()

        response = lambda_client.invoke(
            FunctionName="terraform-aws-lambda-builder-filename1"
        )
        payload = json.load(response["Payload"])
        assert payload == {"success": True, "hello": True}

    @test.always
    def test_destroy(self):
        """
        Clean up after the test.

        """

        self.pretf.destroy()
15 | .PHONY: local 16 | local: 17 | docker run -v $(PWD)/src:/tmp/src golang:1.14 sh -c " \ 18 | cd /tmp/src && \ 19 | go fmt && \ 20 | cp -r . /go/src/lambda && \ 21 | cd /go/src/lambda && \ 22 | go get -d -v ./... && \ 23 | go test && \ 24 | go build -o main main.go \ 25 | " 26 | -------------------------------------------------------------------------------- /tests/golang/main.tf: -------------------------------------------------------------------------------- 1 | provider "aws" { 2 | profile = "rbutcher" 3 | region = "eu-west-1" 4 | } 5 | 6 | resource "random_id" "bucket_name" { 7 | prefix = "terraform-aws-lambda-builder-tests-" 8 | byte_length = 8 9 | } 10 | 11 | resource "aws_s3_bucket" "packages" { 12 | bucket = random_id.bucket_name.hex 13 | acl = "private" 14 | } 15 | 16 | module "lambda_function" { 17 | source = "../../" 18 | 19 | build_mode = "CODEBUILD" 20 | function_name = "terraform-aws-lambda-builder-golang" 21 | handler = "main" 22 | role_cloudwatch_logs = true 23 | runtime = "go1.x" 24 | s3_bucket = aws_s3_bucket.packages.id 25 | source_dir = "${path.module}/src" 26 | timeout = 30 27 | } 28 | 29 | output "function_name" { 30 | value = module.lambda_function.function_name 31 | } 32 | -------------------------------------------------------------------------------- /tests/golang/src/buildspec.yml: -------------------------------------------------------------------------------- 1 | version: 0.2 2 | 3 | env: 4 | variables: 5 | GOARCH: amd64 6 | GOOS: linux 7 | 8 | phases: 9 | pre_build: 10 | commands: 11 | - ln -s "${CODEBUILD_SRC_DIR}" /go/src/lambda 12 | - cd /go/src/lambda 13 | - go get -d -v ./... 
14 | build: 15 | commands: 16 | - go test 17 | - go build -o main main.go 18 | 19 | artifacts: 20 | files: 21 | - main 22 | -------------------------------------------------------------------------------- /tests/golang/src/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "fmt" 5 | "github.com/aws/aws-lambda-go/lambda" 6 | ) 7 | 8 | type Event struct { 9 | Name string `json:"name"` 10 | } 11 | 12 | func HandleRequest(event Event) (string, error) { 13 | return fmt.Sprintf("Hello %s!", event.Name), nil 14 | } 15 | 16 | func main() { 17 | lambda.Start(HandleRequest) 18 | } 19 | -------------------------------------------------------------------------------- /tests/golang/src/main_test.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import "testing" 4 | 5 | func TestHandler(t *testing.T) { 6 | 7 | event := Event{Name: "gotest"} 8 | 9 | response, err := HandleRequest(event) 10 | 11 | if err != nil { 12 | t.Errorf("unexpected err, got: %v, wanted: %v", err, nil) 13 | } 14 | 15 | expected := "Hello gotest!" 
16 | if response != expected { 17 | t.Errorf("unexpected response, got: %v, wanted %v", response, expected) 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /tests/golang/terraform.tf.py: -------------------------------------------------------------------------------- 1 | from pretf.aws import terraform_backend_s3 2 | 3 | 4 | def pretf_blocks(): 5 | yield terraform_backend_s3( 6 | bucket="terraform-aws-lambda-builder", 7 | dynamodb_table="terraform-aws-lambda-builder", 8 | key="golang.tfstate", 9 | profile="rbutcher", 10 | region="eu-west-1", 11 | ) 12 | -------------------------------------------------------------------------------- /tests/golang/test_golang.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | from pretf import test 4 | from pretf.aws import get_session 5 | 6 | session = get_session(profile_name="rbutcher", region_name="eu-west-1") 7 | lambda_client = session.client("lambda") 8 | 9 | 10 | FUNCTION_NAME = "terraform-aws-lambda-builder-golang" 11 | 12 | 13 | class TestGolang(test.SimpleTest): 14 | def test_init_terraform(self): 15 | """ 16 | Configure and initialize the backend. 17 | 18 | """ 19 | 20 | self.pretf.init() 21 | 22 | def test_deploy_lambda_function(self): 23 | """ 24 | Deploy the Lambda function. 25 | 26 | """ 27 | 28 | outputs = self.pretf.apply() 29 | function_name = outputs["function_name"] 30 | assert function_name == FUNCTION_NAME 31 | 32 | def test_invoke_lambda_function(self): 33 | """ 34 | Invoke the Lambda function to ensure the Go function works. 35 | 36 | """ 37 | 38 | response = lambda_client.invoke( 39 | FunctionName=FUNCTION_NAME, 40 | Payload=json.dumps({"name": "Pytest"}), 41 | ) 42 | payload = json.load(response["Payload"]) 43 | 44 | assert payload == "Hello Pytest!" 45 | 46 | @test.always 47 | def test_destroy(self): 48 | """ 49 | Clean up after the test. 
50 | 51 | """ 52 | 53 | self.pretf.destroy() 54 | -------------------------------------------------------------------------------- /tests/nodejs/Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: test 2 | test: 3 | pytest -v 4 | -------------------------------------------------------------------------------- /tests/nodejs/main.tf: -------------------------------------------------------------------------------- 1 | provider "aws" { 2 | profile = "rbutcher" 3 | region = "eu-west-1" 4 | } 5 | 6 | resource "random_id" "bucket_name" { 7 | prefix = "terraform-aws-lambda-builder-tests-" 8 | byte_length = 8 9 | } 10 | 11 | resource "aws_s3_bucket" "packages" { 12 | bucket = random_id.bucket_name.hex 13 | acl = "private" 14 | } 15 | 16 | module "lambda_function_10" { 17 | source = "../../" 18 | 19 | build_mode = "LAMBDA" 20 | function_name = "terraform-aws-lambda-builder-nodejs-10" 21 | handler = "index.handler" 22 | role_cloudwatch_logs = true 23 | runtime = "nodejs10.x" 24 | s3_bucket = aws_s3_bucket.packages.id 25 | source_dir = "${path.module}/src" 26 | timeout = 30 27 | } 28 | 29 | module "lambda_function_12" { 30 | source = "../../" 31 | 32 | build_mode = "LAMBDA" 33 | function_name = "terraform-aws-lambda-builder-nodejs-12" 34 | handler = "index.handler" 35 | role_cloudwatch_logs = true 36 | runtime = "nodejs12.x" 37 | s3_bucket = aws_s3_bucket.packages.id 38 | source_dir = "${path.module}/src" 39 | timeout = 30 40 | } 41 | 42 | output "function_names" { 43 | value = [ 44 | module.lambda_function_10.function_name, 45 | module.lambda_function_12.function_name, 46 | ] 47 | } 48 | -------------------------------------------------------------------------------- /tests/nodejs/src/build.sh: -------------------------------------------------------------------------------- 1 | npm install 2 | -------------------------------------------------------------------------------- /tests/nodejs/src/index.js: 
-------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const jwt = require('jsonwebtoken'); 4 | 5 | exports.handler = function (event, context, callback) { 6 | const token = jwt.sign({ foo: 'bar' }, 'shhhhh'); 7 | callback(null, { success: true, token: token }); 8 | }; 9 | -------------------------------------------------------------------------------- /tests/nodejs/src/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "dependencies": { 3 | "jsonwebtoken": "^7.4.3" 4 | } 5 | } -------------------------------------------------------------------------------- /tests/nodejs/terraform.tf.py: -------------------------------------------------------------------------------- 1 | from pretf.aws import terraform_backend_s3 2 | 3 | 4 | def pretf_blocks(): 5 | yield terraform_backend_s3( 6 | bucket="terraform-aws-lambda-builder", 7 | dynamodb_table="terraform-aws-lambda-builder", 8 | key="nodejs/terraform.tfstate", 9 | profile="rbutcher", 10 | region="eu-west-1", 11 | ) 12 | -------------------------------------------------------------------------------- /tests/nodejs/test_nodejs.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | import pytest 4 | from pretf import test 5 | from pretf.aws import get_session 6 | 7 | session = get_session(profile_name="rbutcher", region_name="eu-west-1") 8 | lambda_client = session.client("lambda") 9 | 10 | 11 | FUNCTION_NAMES = [ 12 | "terraform-aws-lambda-builder-nodejs-10", 13 | "terraform-aws-lambda-builder-nodejs-12", 14 | ] 15 | 16 | 17 | class TestNodejs(test.SimpleTest): 18 | def test_init_terraform(self): 19 | """ 20 | Configure and initialize the backend. 21 | 22 | """ 23 | 24 | self.pretf.init() 25 | 26 | def test_deploy_lambda_functions(self): 27 | """ 28 | Deploy the Lambda functions. 
29 | 30 | """ 31 | 32 | outputs = self.pretf.apply() 33 | function_names = outputs["function_names"] 34 | assert function_names == FUNCTION_NAMES 35 | 36 | @pytest.mark.parametrize("function_name", FUNCTION_NAMES) 37 | def test_invoke_lambda_function(self, function_name): 38 | """ 39 | Invoke the Lambda function to ensure jsonwebtoken works. 40 | (jsonwebtoken was installed by npm in the build script) 41 | 42 | """ 43 | 44 | response = lambda_client.invoke(FunctionName=function_name) 45 | payload = json.load(response["Payload"]) 46 | 47 | assert payload["success"] 48 | assert "token" in payload 49 | 50 | @test.always 51 | def test_destroy(self): 52 | """ 53 | Clean up after the test. 54 | 55 | """ 56 | 57 | self.pretf.destroy() 58 | -------------------------------------------------------------------------------- /tests/python/Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: test 2 | test: 3 | pytest -v 4 | -------------------------------------------------------------------------------- /tests/python/main.tf: -------------------------------------------------------------------------------- 1 | provider "aws" { 2 | profile = "rbutcher" 3 | region = "eu-west-1" 4 | } 5 | 6 | resource "random_id" "bucket_name" { 7 | prefix = "terraform-aws-lambda-builder-tests-" 8 | byte_length = 8 9 | } 10 | 11 | resource "aws_s3_bucket" "packages" { 12 | bucket = random_id.bucket_name.hex 13 | acl = "private" 14 | } 15 | 16 | module "lambda_function_36" { 17 | source = "../../" 18 | 19 | build_mode = "LAMBDA" 20 | function_name = "terraform-aws-lambda-builder-python-36" 21 | handler = "lambda.handler" 22 | role_cloudwatch_logs = true 23 | runtime = "python3.6" 24 | s3_bucket = aws_s3_bucket.packages.id 25 | source_dir = "${path.module}/src" 26 | timeout = 30 27 | } 28 | 29 | module "lambda_function_37" { 30 | source = "../../" 31 | 32 | build_mode = "LAMBDA" 33 | function_name = "terraform-aws-lambda-builder-python-37" 34 | handler 
= "lambda.handler" 35 | role_cloudwatch_logs = true 36 | runtime = "python3.7" 37 | s3_bucket = aws_s3_bucket.packages.id 38 | source_dir = "${path.module}/src" 39 | timeout = 30 40 | } 41 | 42 | module "lambda_function_38" { 43 | source = "../../" 44 | 45 | build_mode = "LAMBDA" 46 | function_name = "terraform-aws-lambda-builder-python-38" 47 | handler = "lambda.handler" 48 | role_cloudwatch_logs = true 49 | runtime = "python3.8" 50 | s3_bucket = aws_s3_bucket.packages.id 51 | source_dir = "${path.module}/src" 52 | timeout = 30 53 | } 54 | 55 | output "function_names" { 56 | value = [ 57 | module.lambda_function_36.function_name, 58 | module.lambda_function_37.function_name, 59 | module.lambda_function_38.function_name, 60 | ] 61 | } 62 | -------------------------------------------------------------------------------- /tests/python/src/build.sh: -------------------------------------------------------------------------------- 1 | pip install -r requirements.txt -t . 2 | -------------------------------------------------------------------------------- /tests/python/src/lambda.py: -------------------------------------------------------------------------------- 1 | def handler(event, context): 2 | import timeprint 3 | 4 | with timeprint: 5 | import numpy as np 6 | 7 | assert np.array_equal(np.array([1, 2]) + 3, np.array([4, 5])) 8 | 9 | return {"success": True} 10 | -------------------------------------------------------------------------------- /tests/python/src/requirements.txt: -------------------------------------------------------------------------------- 1 | numpy 2 | timeprint 3 | -------------------------------------------------------------------------------- /tests/python/terraform.tf.py: -------------------------------------------------------------------------------- 1 | from pretf.aws import terraform_backend_s3 2 | 3 | 4 | def pretf_blocks(): 5 | yield terraform_backend_s3( 6 | bucket="terraform-aws-lambda-builder", 7 | 
dynamodb_table="terraform-aws-lambda-builder", 8 | key="python.tfstate", 9 | profile="rbutcher", 10 | region="eu-west-1", 11 | ) 12 | -------------------------------------------------------------------------------- /tests/python/test_python.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | import pytest 4 | from pretf import test 5 | from pretf.aws import get_session 6 | 7 | session = get_session(profile_name="rbutcher", region_name="eu-west-1") 8 | lambda_client = session.client("lambda") 9 | 10 | 11 | FUNCTION_NAMES = [ 12 | "terraform-aws-lambda-builder-python-36", 13 | "terraform-aws-lambda-builder-python-37", 14 | "terraform-aws-lambda-builder-python-38", 15 | ] 16 | 17 | 18 | class TestPython(test.SimpleTest): 19 | def test_init_terraform(self): 20 | """ 21 | Configure and initialize the backend. 22 | 23 | """ 24 | 25 | self.pretf.init() 26 | 27 | def test_deploy_lambda_functions(self): 28 | """ 29 | Deploy the Lambda function. 30 | 31 | """ 32 | 33 | outputs = self.pretf.apply() 34 | function_names = outputs["function_names"] 35 | assert function_names == FUNCTION_NAMES 36 | 37 | @pytest.mark.parametrize("function_name", FUNCTION_NAMES) 38 | def test_invoke_lambda_function(self, function_name): 39 | """ 40 | Invoke the Lambda function to ensure it works. 41 | The function uses numpy which should have been 42 | installed by build script. 43 | 44 | """ 45 | 46 | response = lambda_client.invoke(FunctionName=function_name) 47 | payload = json.load(response["Payload"]) 48 | 49 | assert payload["success"] 50 | 51 | @test.always 52 | def test_destroy(self): 53 | """ 54 | Clean up after the test. 
55 | 56 | """ 57 | 58 | self.pretf.destroy() 59 | -------------------------------------------------------------------------------- /tests/s3/Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: test 2 | test: 3 | pytest -v 4 | -------------------------------------------------------------------------------- /tests/s3/main.tf: -------------------------------------------------------------------------------- 1 | provider "aws" { 2 | profile = "rbutcher" 3 | region = "eu-west-1" 4 | } 5 | 6 | resource "random_id" "bucket_name" { 7 | prefix = "terraform-aws-lambda-builder-tests-" 8 | byte_length = 8 9 | } 10 | 11 | resource "aws_s3_bucket" "packages" { 12 | bucket = random_id.bucket_name.hex 13 | acl = "private" 14 | } 15 | 16 | module "zip_and_upload_without_build" { 17 | source = "../../" 18 | 19 | build_mode = "S3" 20 | function_name = "terraform-aws-lambda-builder-s3" 21 | handler = "lambda.handler" 22 | runtime = "python3.6" 23 | s3_bucket = aws_s3_bucket.packages.id 24 | s3_key = "direct-s3-test.zip" 25 | source_dir = "${path.module}/src" 26 | } 27 | -------------------------------------------------------------------------------- /tests/s3/src/lambda.py: -------------------------------------------------------------------------------- 1 | def handler(event, context): 2 | return {"success": True} 3 | -------------------------------------------------------------------------------- /tests/s3/terraform.tf.py: -------------------------------------------------------------------------------- 1 | from pretf.aws import terraform_backend_s3 2 | 3 | 4 | def pretf_blocks(): 5 | yield terraform_backend_s3( 6 | bucket="terraform-aws-lambda-builder", 7 | dynamodb_table="terraform-aws-lambda-builder", 8 | key="s3.tfstate", 9 | profile="rbutcher", 10 | region="eu-west-1", 11 | ) 12 | -------------------------------------------------------------------------------- /tests/s3/test_s3.py: 
-------------------------------------------------------------------------------- 1 | import json 2 | 3 | from pretf import test 4 | from pretf.aws import get_session 5 | 6 | session = get_session(profile_name="rbutcher", region_name="eu-west-1") 7 | lambda_client = session.client("lambda") 8 | 9 | 10 | class TestS3(test.SimpleTest): 11 | def test_init_terraform(self): 12 | """ 13 | Configure and initialize the backend. 14 | 15 | """ 16 | 17 | self.pretf.init() 18 | 19 | def test_deploy_lambda_function(self): 20 | """ 21 | Deploy the Lambda function. 22 | 23 | """ 24 | 25 | self.pretf.apply() 26 | 27 | def test_invoke_lambda_function(self): 28 | """ 29 | Invoke the Lambda function. 30 | 31 | """ 32 | 33 | response = lambda_client.invoke(FunctionName="terraform-aws-lambda-builder-s3") 34 | payload = json.load(response["Payload"]) 35 | assert payload == {"success": True} 36 | 37 | @test.always 38 | def test_destroy(self): 39 | """ 40 | Clean up after the test. 41 | 42 | """ 43 | 44 | self.pretf.destroy() 45 | -------------------------------------------------------------------------------- /validate.py: -------------------------------------------------------------------------------- 1 | import errno 2 | import glob 3 | import json 4 | import os 5 | import sys 6 | import time 7 | 8 | CODEBUILD = "CODEBUILD" 9 | DISABLED = "DISABLED" 10 | FILENAME = "FILENAME" 11 | LAMBDA = "LAMBDA" 12 | S3 = "S3" 13 | 14 | query = json.load(sys.stdin) 15 | 16 | 17 | def conflict(*names): 18 | for name in names: 19 | if query[name]: 20 | build_mode = query["build_mode"] 21 | sys.stderr.write( 22 | "build mode {} does not support var.{}".format(build_mode, name) 23 | ) 24 | sys.exit(1) 25 | 26 | 27 | def require(*names): 28 | for name in names: 29 | if not query[name]: 30 | build_mode = query["build_mode"] 31 | sys.stderr.write("build mode {} requires var.{}".format(build_mode, name)) 32 | sys.exit(1) 33 | 34 | 35 | if query["build_mode"] in (CODEBUILD, LAMBDA): 36 | 37 | 
require("s3_bucket", "source_dir") 38 | conflict("filename", "s3_key", "s3_object_version", "source_code_hash") 39 | 40 | elif query["build_mode"] == DISABLED: 41 | 42 | conflict("source_dir") 43 | 44 | elif query["build_mode"] == FILENAME: 45 | 46 | require("filename", "source_dir") 47 | conflict("s3_bucket", "s3_key", "s3_object_version", "source_code_hash") 48 | 49 | elif query["build_mode"] == S3: 50 | 51 | require("s3_bucket", "s3_key", "source_dir") 52 | conflict("filename", "s3_object_version", "source_code_hash") 53 | 54 | else: 55 | 56 | sys.stderr.write("invalid build mode {}".format(query["build_mode"])) 57 | sys.exit(1) 58 | 59 | # Delete zips more than a day old. 60 | zip_files = glob.glob(os.path.join(query["zip_files_dir"], "*.zip")) 61 | for path in zip_files: 62 | try: 63 | file_time = os.path.getmtime(path) 64 | file_age = time.time() - file_time 65 | if file_age > 60 * 60 * 24: 66 | os.remove(path) 67 | except OSError as error: 68 | if error.errno != errno.ENOENT: 69 | raise 70 | 71 | json.dump({}, sys.stdout) 72 | -------------------------------------------------------------------------------- /variables.tf: -------------------------------------------------------------------------------- 1 | # Builder arguments. 2 | 3 | variable "build_mode" { 4 | description = "The build mode to use, one of `CODEBUILD`, `DISABLED`, `FILENAME`, `LAMBDA`, `S3`." 5 | type = string 6 | default = "DISABLED" 7 | } 8 | 9 | variable "codebuild_environment_compute_type" { 10 | description = "Compute type for CodeBuild. See https://docs.aws.amazon.com/codebuild/latest/userguide/build-env-ref-compute-types.html" 11 | type = string 12 | default = "BUILD_GENERAL1_SMALL" 13 | } 14 | 15 | variable "codebuild_environment_image" { 16 | description = "Image for CodeBuild. 
See https://docs.aws.amazon.com/codebuild/latest/userguide/build-env-ref-available.html" 17 | type = string 18 | default = "aws/codebuild/amazonlinux2-x86_64-standard:3.0" 19 | } 20 | 21 | variable "codebuild_environment_type" { 22 | description = "The type of CodeBuild build environment to use. See https://docs.aws.amazon.com/codebuild/latest/userguide/build-env-ref-compute-types.html" 23 | default = "LINUX_CONTAINER" 24 | } 25 | 26 | variable "codebuild_queued_timeout_in_minutes" { 27 | description = "The number of minutes CodeBuild is allowed to be queued before it times out." 28 | type = number 29 | default = 15 30 | } 31 | 32 | variable "codebuild_timeout_in_minutes" { 33 | description = "The number of minutes CodeBuild is allowed to run before it times out." 34 | type = number 35 | default = 60 36 | } 37 | 38 | variable "create_role" { 39 | description = "Create an IAM role for the function. Only required when `role` is a computed/unknown value." 40 | type = bool 41 | default = null 42 | } 43 | 44 | variable "empty_dirs" { 45 | description = "Include empty directories in the Lambda package." 46 | type = bool 47 | default = false 48 | } 49 | 50 | variable "enabled" { 51 | description = "Create resources." 52 | type = bool 53 | default = true 54 | } 55 | 56 | variable "lambda_builder_memory_size" { 57 | description = "Memory size for the builder Lambda function." 58 | type = number 59 | default = 512 60 | } 61 | 62 | variable "lambda_builder_timeout" { 63 | description = "Timeout for the builder Lambda function." 64 | type = number 65 | default = 900 66 | } 67 | 68 | variable "role_cloudwatch_logs" { 69 | description = "If `role` is not provided, one will be created with a policy that enables CloudWatch Logs." 70 | type = bool 71 | default = false 72 | } 73 | 74 | variable "role_custom_policies" { 75 | description = "If `role` is not provided, one will be created with these JSON policies attached." 
76 | type = list(string) 77 | default = [] 78 | } 79 | 80 | variable "role_custom_policies_count" { 81 | description = "The number of `role_custom_policies` to attach. Only required when `role_custom_policies` is a computed/unknown value." 82 | type = number 83 | default = null 84 | } 85 | 86 | variable "role_policy_arns" { 87 | description = "If `role` is not provided, one will be created with these policy ARNs attached." 88 | type = list(string) 89 | default = [] 90 | } 91 | 92 | variable "role_policy_arns_count" { 93 | description = "The number of `role_policy_arns` to attach. Only required when `role_policy_arns` is a computed/unknown value." 94 | type = number 95 | default = null 96 | } 97 | 98 | variable "source_dir" { 99 | description = "Local source directory for the Lambda package. This will be zipped and uploaded to the S3 bucket. Requires `s3_bucket`. Conflicts with `s3_key`, `s3_object_version` and `filename`." 100 | type = string 101 | default = "" 102 | } 103 | 104 | # Standard Lambda resource arguments. 105 | 106 | variable "dead_letter_config" { 107 | description = "Nested block to configure the function's dead letter queue. See details below." 108 | type = object({ 109 | target_arn = string 110 | }) 111 | default = null 112 | } 113 | 114 | variable "description" { 115 | description = "Description of what your Lambda Function does." 116 | type = string 117 | default = null 118 | } 119 | 120 | variable "environment" { 121 | description = "The Lambda environment's configuration settings." 122 | type = object({ 123 | variables = map(string) 124 | }) 125 | default = null 126 | } 127 | 128 | variable "filename" { 129 | description = "The path to the function's deployment package within the local filesystem. If defined, The s3_-prefixed options cannot be used." 130 | type = string 131 | default = null 132 | } 133 | 134 | variable "function_name" { 135 | description = "A unique name for your Lambda Function." 
136 | type = string 137 | } 138 | 139 | variable "handler" { 140 | description = "The function entrypoint in your code." 141 | type = string 142 | } 143 | 144 | variable "kms_key_arn" { 145 | description = "Amazon Resource Name (ARN) of the AWS Key Management Service (KMS) key that is used to encrypt environment variables." 146 | type = string 147 | default = null 148 | } 149 | 150 | variable "layers" { 151 | description = "List of Lambda Layer Version ARNs (maximum of 5) to attach to your Lambda Function." 152 | type = list(string) 153 | default = null 154 | } 155 | 156 | variable "memory_size" { 157 | description = "Amount of memory in MB your Lambda Function can use at runtime." 158 | type = number 159 | default = null 160 | } 161 | 162 | variable "publish" { 163 | description = "Whether to publish creation/change as new Lambda Function Version." 164 | type = bool 165 | default = null 166 | } 167 | 168 | variable "reserved_concurrent_executions" { 169 | description = "The amount of reserved concurrent executions for this lambda function. A value of 0 disables lambda from being triggered and -1 removes any concurrency limitations." 170 | type = number 171 | default = null 172 | } 173 | 174 | variable "role" { 175 | description = "IAM role attached to the Lambda Function. This governs both who / what can invoke your Lambda Function, as well as what resources our Lambda Function has access to." 176 | type = string 177 | default = null 178 | } 179 | 180 | variable "runtime" { 181 | description = "The identifier of the function's runtime." 182 | type = string 183 | } 184 | 185 | variable "s3_bucket" { 186 | description = "The S3 bucket location containing the function's deployment package. Conflicts with filename. This bucket must reside in the same AWS region where you are creating the Lambda function." 
187 | type = string 188 | default = null 189 | } 190 | 191 | variable "s3_key" { 192 | description = "The S3 key of an object containing the function's deployment package. Conflicts with filename." 193 | type = string 194 | default = null 195 | } 196 | 197 | variable "s3_object_version" { 198 | description = "The object version containing the function's deployment package. Conflicts with filename." 199 | type = string 200 | default = null 201 | } 202 | 203 | variable "source_code_hash" { 204 | description = "Used to trigger updates. Must be set to a base64-encoded SHA256 hash of the package file specified with either filename or s3_key." 205 | type = string 206 | default = null 207 | } 208 | 209 | variable "tags" { 210 | description = "A mapping of tags to assign to the object." 211 | type = map(string) 212 | default = null 213 | } 214 | 215 | variable "timeout" { 216 | description = "The amount of time your Lambda Function has to run in seconds." 217 | type = number 218 | default = null 219 | } 220 | 221 | variable "tracing_config" { 222 | description = "Provide this to configure tracing." 223 | type = object({ 224 | mode = string 225 | }) 226 | default = null 227 | } 228 | 229 | variable "vpc_config" { 230 | description = "Provide this to allow your function to access your VPC." 
231 | type = object({ 232 | security_group_ids = list(string) 233 | subnet_ids = list(string) 234 | }) 235 | default = null 236 | } 237 | -------------------------------------------------------------------------------- /versions.tf: -------------------------------------------------------------------------------- 1 | terraform { 2 | required_version = ">= 0.12.0" 3 | } 4 | -------------------------------------------------------------------------------- /zip_files/.gitignore: -------------------------------------------------------------------------------- 1 | *.zip 2 | -------------------------------------------------------------------------------- /zip_files/README.md: -------------------------------------------------------------------------------- 1 | this is where zips go 2 | --------------------------------------------------------------------------------
security_group_ids = list(string)
subnet_ids = list(string)
})