├── lombok.config ├── sam-tests ├── delete.json ├── read.json ├── create.json ├── update.json ├── s3-step2.tf └── s3-step1.tf ├── .gitignore ├── .rpdk-config ├── src ├── test │ └── java │ │ └── io │ │ └── cloudsoft │ │ └── terraform │ │ └── infrastructure │ │ ├── ReadHandlerTest.java │ │ ├── UpdateHandlerTest.java │ │ ├── DeleteHandlerTest.java │ │ ├── CreateHandlerTest.java │ │ ├── ListHandlerTest.java │ │ ├── HandlerTestFixture.java │ │ ├── TerraformBaseHandlerTest.java │ │ └── TerraformParametersTest.java └── main │ └── java │ └── io │ └── cloudsoft │ └── terraform │ └── infrastructure │ ├── ListHandler.java │ ├── TerraformBaseHandler.java │ ├── CallbackContext.java │ ├── Configuration.java │ ├── UpdateHandler.java │ ├── ReadHandler.java │ ├── commands │ ├── RemoteTerraformOutputsProcess.java │ ├── RemoteDetachedTerraformProcess.java │ ├── RemoteDetachedTerraformProcessNohup.java │ ├── RemoteDetachedTerraformProcessSystemd.java │ ├── RemoteTerraformProcess.java │ └── SshToolbox.java │ ├── ConnectorHandlerFailures.java │ ├── BucketUtils.java │ ├── DeleteHandler.java │ ├── TerraformParameters.java │ ├── CreateHandler.java │ └── TerraformBaseWorker.java ├── template.yml ├── terraform-example.tf ├── resource-role.yaml ├── README.md ├── .github └── workflows │ └── cicd.yml ├── terraform-example.cfn.yaml ├── cloudsoft-terraform-infrastructure.json ├── doc ├── user-guide.md ├── installation-guide.md └── developer-guide.md ├── setup.yaml └── pom.xml /lombok.config: -------------------------------------------------------------------------------- 1 | lombok.addLombokGeneratedAnnotation = true 2 | -------------------------------------------------------------------------------- /sam-tests/delete.json: -------------------------------------------------------------------------------- 1 | { 2 | "desiredResourceState": {}, 3 | "logicalResourceIdentifier": "MyResource" 4 | } 5 | -------------------------------------------------------------------------------- /sam-tests/read.json: -------------------------------------------------------------------------------- 1 | { 2 | "desiredResourceState": { 3 | "Name": "example-s3" 4 | }, 5 | "logicalResourceIdentifier": "MyResource" 6 | } 7 | -------------------------------------------------------------------------------- /sam-tests/create.json: -------------------------------------------------------------------------------- 1 | { 2 | "desiredResourceState": { 3 | "ConfigurationUrl": "https://raw.githubusercontent.com/cloudsoft/aws-cfn-connector-for-terraform/master/sam-tests/s3-step1.tf" 4 | }, 5 | "logicalResourceIdentifier": "MyResource" 6 | } 7 | -------------------------------------------------------------------------------- /sam-tests/update.json: -------------------------------------------------------------------------------- 1 | { 2 | "desiredResourceState": { 3 | "ConfigurationUrl": "https://raw.githubusercontent.com/cloudsoft/aws-cfn-connector-for-terraform/master/sam-tests/s3-step2.tf" 4 | }, 5 | "logicalResourceIdentifier": "MyResource" 6 | } 7 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # macOS 2 | .DS_Store 3 | ._* 4 | 5 | # Maven outputs 6 | .classpath 7 | 8 | # IntelliJ 9 | *.iml 10 | .idea 11 | out.java 12 | out/ 13 | .settings 14 | .project 15 | 16 | # auto-generated files 17 | target/ 18 | cloudsoft-terraform-infrastructure.zip 19 | 20 | # our logs 21 | rpdk.log 22 | submit.log 23 | 24 | # local config files 25 | 
*-local.yaml 26 | 27 | docs/ 28 | -------------------------------------------------------------------------------- /.rpdk-config: -------------------------------------------------------------------------------- 1 | { 2 | "typeName": "Cloudsoft::Terraform::Infrastructure", 3 | "language": "java", 4 | "runtime": "java8", 5 | "entrypoint": "io.cloudsoft.terraform.infrastructure.HandlerWrapper::handleRequest", 6 | "testEntrypoint": "io.cloudsoft.terraform.infrastructure.HandlerWrapper::testEntrypoint", 7 | "settings": { 8 | "namespace": [ 9 | "io", 10 | "cloudsoft", 11 | "terraform", 12 | "infrastructure" 13 | ], 14 | "protocolVersion": "2.0.0" 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /src/test/java/io/cloudsoft/terraform/infrastructure/ReadHandlerTest.java: -------------------------------------------------------------------------------- 1 | package io.cloudsoft.terraform.infrastructure; 2 | 3 | import org.junit.jupiter.api.Test; 4 | import org.junit.jupiter.api.extension.ExtendWith; 5 | import org.mockito.junit.jupiter.MockitoExtension; 6 | 7 | import java.io.IOException; 8 | 9 | @ExtendWith(MockitoExtension.class) 10 | public class ReadHandlerTest extends HandlerTestFixture { 11 | 12 | @Test 13 | public void handleRequestCallWorkerRun() throws IOException { 14 | doWorkerRun(() -> new ReadHandler()); 15 | } 16 | 17 | } -------------------------------------------------------------------------------- /src/test/java/io/cloudsoft/terraform/infrastructure/UpdateHandlerTest.java: -------------------------------------------------------------------------------- 1 | package io.cloudsoft.terraform.infrastructure; 2 | 3 | import org.junit.jupiter.api.Test; 4 | import org.junit.jupiter.api.extension.ExtendWith; 5 | import org.mockito.junit.jupiter.MockitoExtension; 6 | 7 | import java.io.IOException; 8 | 9 | @ExtendWith(MockitoExtension.class) 10 | public class UpdateHandlerTest extends HandlerTestFixture { 11 | 12 | @Test 13 | public void handleRequestCallWorkerRun() throws IOException { 14 | doWorkerRun(() -> new UpdateHandler()); 15 | } 16 | 17 | } -------------------------------------------------------------------------------- /src/test/java/io/cloudsoft/terraform/infrastructure/DeleteHandlerTest.java: -------------------------------------------------------------------------------- 1 | package io.cloudsoft.terraform.infrastructure; 2 | 3 | import org.junit.jupiter.api.Test; 4 | import org.junit.jupiter.api.extension.ExtendWith; 5 | import org.mockito.junit.jupiter.MockitoExtension; 6 | 7 | import java.io.IOException; 8 | 9 | @ExtendWith(MockitoExtension.class) 10 | public class DeleteHandlerTest extends HandlerTestFixture { 11 | 12 | @Test 13 | public void handleRequestCallWorkerRun() throws IOException { 14 | doWorkerRun(() -> new DeleteHandler()); 15 | } 16 | 17 | } 18 | -------------------------------------------------------------------------------- /template.yml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: "2010-09-09" 2 | Transform: AWS::Serverless-2016-10-31 3 | Description: AWS SAM template for the Cloudsoft::Terraform::Infrastructure resource type 4 | 5 | Globals: 6 | Function: 7 | Timeout: 600 # docker start-up times can be long for SAM CLI 8 | 9 | Resources: 10 | TypeFunction: 11 | Type: AWS::Serverless::Function 12 | Properties: 13 | Handler: io.cloudsoft.terraform.infrastructure.HandlerWrapper::handleRequest 14 | Runtime: java8 15 | CodeUri: 
./target/cloudsoft-terraform-infrastructure-handler-1.0-SNAPSHOT.jar 16 | 17 | TestEntrypoint: 18 | Type: AWS::Serverless::Function 19 | Properties: 20 | Handler: io.cloudsoft.terraform.infrastructure.HandlerWrapper::testEntrypoint 21 | Runtime: java8 22 | CodeUri: ./target/cloudsoft-terraform-infrastructure-handler-1.0-SNAPSHOT.jar 23 | -------------------------------------------------------------------------------- /terraform-example.tf: -------------------------------------------------------------------------------- 1 | # this file is read from GitHub by terraform-example.cfn.yaml 2 | # local changes will have no effect unless you change that file! 3 | # (but see the docs for more examples) 4 | 5 | provider "aws" { 6 | region = "eu-central-1" 7 | } 8 | 9 | resource "aws_s3_bucket" "bucket1" { 10 | bucket = "cfn-terraform-connector-example-bucket1" 11 | acl = "private" 12 | } 13 | 14 | resource "aws_s3_bucket" "bucket2" { 15 | bucket = "cfn-terraform-connector-example-bucket2" 16 | acl = "private" 17 | } 18 | 19 | output "bucket1-id" { 20 | value = aws_s3_bucket.bucket1.id 21 | } 22 | 23 | output "bucket1-arn" { 24 | value = aws_s3_bucket.bucket1.arn 25 | } 26 | 27 | output "bucket1-region" { 28 | value = aws_s3_bucket.bucket1.region 29 | } 30 | 31 | output "bucket2-id" { 32 | value = aws_s3_bucket.bucket2.id 33 | } 34 | 35 | output "bucket2-arn" { 36 | value = aws_s3_bucket.bucket2.arn 37 | } 38 | 39 | output "bucket2-region" { 40 | value = aws_s3_bucket.bucket2.region 41 | } 42 | -------------------------------------------------------------------------------- /src/test/java/io/cloudsoft/terraform/infrastructure/CreateHandlerTest.java: -------------------------------------------------------------------------------- 1 | package io.cloudsoft.terraform.infrastructure; 2 | 3 | import org.junit.jupiter.api.Test; 4 | import org.junit.jupiter.api.extension.ExtendWith; 5 | import org.mockito.junit.jupiter.MockitoExtension; 6 | import software.amazon.awssdk.services.s3.S3Client; 7 | import software.amazon.awssdk.services.ssm.SsmClient; 8 | 9 | import java.io.IOException; 10 | 11 | import static org.mockito.Mockito.mock; 12 | 13 | @ExtendWith(MockitoExtension.class) 14 | public class CreateHandlerTest extends HandlerTestFixture { 15 | 16 | @Test 17 | public void handleRequestCallWorkerRun() throws IOException { 18 | doWorkerRun(() -> new CreateHandler()); 19 | } 20 | 21 | public static void main(String[] args) throws IOException { 22 | CreateHandlerTest t = new CreateHandlerTest(); 23 | t.s3Client = mock(S3Client.class); 24 | t.ssmClient = mock(SsmClient.class); 25 | 26 | t.setup(); 27 | t.handleRequestCallWorkerRun(); 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /sam-tests/s3-step2.tf: -------------------------------------------------------------------------------- 1 | # this file is read from GitHub to perform the SAM tests. 2 | # if you wish to use a different example for SAM tests, 3 | # create your own TF files e.g. in a branch or gist or S3, 4 | # and change the JSON files in the folder to point to them. 5 | # local changes will have no effect on tests! 
6 | 7 | provider "aws" { 8 | region = "eu-central-1" 9 | } 10 | 11 | resource "aws_s3_bucket" "bucket1" { 12 | bucket = "denis-example7-bucket1" 13 | acl = "private" 14 | } 15 | 16 | resource "aws_s3_bucket" "bucket3" { 17 | bucket = "cfn-terraform-connector-example-bucket3" 18 | acl = "private" 19 | } 20 | 21 | output "bucket1-id" { 22 | value = aws_s3_bucket.bucket1.id 23 | } 24 | 25 | output "bucket1-arn" { 26 | value = aws_s3_bucket.bucket1.arn 27 | } 28 | 29 | output "bucket1-region" { 30 | value = aws_s3_bucket.bucket1.region 31 | } 32 | 33 | output "bucket3-id" { 34 | value = aws_s3_bucket.bucket3.id 35 | } 36 | 37 | output "bucket3-arn" { 38 | value = aws_s3_bucket.bucket3.arn 39 | } 40 | 41 | output "bucket3-region" { 42 | value = aws_s3_bucket.bucket3.region 43 | } -------------------------------------------------------------------------------- /sam-tests/s3-step1.tf: -------------------------------------------------------------------------------- 1 | # this file is read from GitHub to perform the SAM tests. 2 | # if you wish to use a different example for SAM tests, 3 | # create your own TF files e.g. in a branch or gist or S3, 4 | # and change the JSON files in the folder to point to them. 5 | # local changes will have no effect on tests! 6 | 7 | provider "aws" { 8 | region = "eu-central-1" 9 | } 10 | 11 | resource "aws_s3_bucket" "bucket1" { 12 | bucket = "cfn-terraform-connector-example-bucket1" 13 | acl = "private" 14 | } 15 | 16 | resource "aws_s3_bucket" "bucket2" { 17 | bucket = "cfn-terraform-connector-example-bucket2" 18 | acl = "private" 19 | } 20 | 21 | output "bucket1-id" { 22 | value = aws_s3_bucket.bucket1.id 23 | } 24 | 25 | output "bucket1-arn" { 26 | value = aws_s3_bucket.bucket1.arn 27 | } 28 | 29 | output "bucket1-region" { 30 | value = aws_s3_bucket.bucket1.region 31 | } 32 | 33 | output "bucket2-id" { 34 | value = aws_s3_bucket.bucket2.id 35 | } 36 | 37 | output "bucket2-arn" { 38 | value = aws_s3_bucket.bucket2.arn 39 | } 40 | 41 | output "bucket2-region" { 42 | value = aws_s3_bucket.bucket2.region 43 | } 44 | -------------------------------------------------------------------------------- /resource-role.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: "2010-09-09" 2 | Description: > 3 | This CloudFormation template creates a role assumed by CloudFormation 4 | during CRUDL operations to mutate resources on behalf of the customer. 
5 | 6 | Resources: 7 | ExecutionRole: 8 | Type: AWS::IAM::Role 9 | Properties: 10 | MaxSessionDuration: 8400 11 | AssumeRolePolicyDocument: 12 | Version: '2012-10-17' 13 | Statement: 14 | - Effect: Allow 15 | Principal: 16 | Service: resources.cloudformation.amazonaws.com 17 | Action: sts:AssumeRole 18 | Path: "/" 19 | Policies: 20 | - PolicyName: ResourceTypePolicy 21 | PolicyDocument: 22 | Version: '2012-10-17' 23 | Statement: 24 | - Effect: Allow 25 | Action: 26 | - "s3:CreateBucket" 27 | - "s3:DeleteBucket" 28 | - "s3:GetObject" 29 | - "s3:GetObjectVersion" 30 | - "s3:PutObject" 31 | - "ssm:GetParameter" 32 | Resource: "*" 33 | Outputs: 34 | ExecutionRoleArn: 35 | Value: 36 | Fn::GetAtt: ExecutionRole.Arn 37 | -------------------------------------------------------------------------------- /src/main/java/io/cloudsoft/terraform/infrastructure/ListHandler.java: -------------------------------------------------------------------------------- 1 | package io.cloudsoft.terraform.infrastructure; 2 | 3 | import java.util.ArrayList; 4 | import java.util.List; 5 | 6 | import software.amazon.cloudformation.proxy.OperationStatus; 7 | import software.amazon.cloudformation.proxy.ProgressEvent; 8 | 9 | public class ListHandler extends TerraformBaseHandler { 10 | 11 | private enum Steps { 12 | // none 13 | } 14 | 15 | @Override 16 | protected TerraformBaseWorker newWorker() { 17 | return new Worker(); 18 | } 19 | 20 | protected static class Worker extends TerraformBaseWorker { 21 | 22 | public Worker() { super("List", Steps.class); } 23 | 24 | protected boolean userLogsEnabled() { 25 | return false; 26 | } 27 | 28 | @Override 29 | protected ProgressEvent runStep() { 30 | final List models = new ArrayList<>(); 31 | 32 | // handler not used yet, always return empty 33 | 34 | return ProgressEvent.builder() 35 | .resourceModels(models) 36 | .status(OperationStatus.SUCCESS) 37 | .build(); 38 | } 39 | 40 | } 41 | 42 | } -------------------------------------------------------------------------------- /src/main/java/io/cloudsoft/terraform/infrastructure/TerraformBaseHandler.java: -------------------------------------------------------------------------------- 1 | package io.cloudsoft.terraform.infrastructure; 2 | 3 | import software.amazon.cloudformation.proxy.*; 4 | 5 | public abstract class TerraformBaseHandler extends BaseHandler { 6 | 7 | @Override 8 | public ProgressEvent handleRequest( 9 | final AmazonWebServicesClientProxy proxy, 10 | final ResourceHandlerRequest request, 11 | final CallbackContext callbackContext, 12 | final Logger logger) { 13 | 14 | if (logger==null) { 15 | System.err.println("No logger set."); 16 | throw new NullPointerException("logger"); 17 | } 18 | 19 | try { 20 | TerraformBaseWorker worker = newWorker(); 21 | worker.init(proxy, request, callbackContext, logger); 22 | return worker.runHandlingError(); 23 | 24 | } catch (Exception e) { 25 | e.printStackTrace(); 26 | logger.log("Failed to create worker: "+e); 27 | return ProgressEvent.builder() 28 | .status(OperationStatus.FAILED) 29 | .message(e.toString()) 30 | .build(); 31 | } 32 | } 33 | 34 | protected abstract TerraformBaseWorker newWorker(); 35 | } 36 | -------------------------------------------------------------------------------- /src/main/java/io/cloudsoft/terraform/infrastructure/CallbackContext.java: -------------------------------------------------------------------------------- 1 | package io.cloudsoft.terraform.infrastructure; 2 | 3 | import lombok.Data; 4 | import lombok.NoArgsConstructor; 5 | import 
software.amazon.cloudformation.proxy.ResourceHandlerRequest; 6 | 7 | @Data 8 | @NoArgsConstructor 9 | public class CallbackContext { 10 | 11 | /** This ID is new for each sequence of steps requested by a client, eg a CREATE, an UPDATE, another UPDATE 12 | * will all have different IDs (but it will be the same in each step of that ID). 13 | * It looks like {@link ResourceHandlerRequest#getClientRequestToken()} would serve the same purpose but not 100% sure. 14 | *

15 | * This is in contrast to {@link ResourceModel#getIdentifier()} which is the same for all steps against the same element in a stack. 16 | *

17 | * We do have access to the element name in the TF stack {@link ResourceHandlerRequest#getLogicalResourceIdentifier()}, 18 | * but unfortunately it seems we aren't able to know to the stack UID or stack name (hence the choice to include 19 | * the date in the model identifier for easier identification). 20 | */ 21 | public String commandRequestId; 22 | public String stepId; 23 | public int lastDelaySeconds; 24 | public String logBucketName; 25 | 26 | // cache this for the duration of a command 27 | public String processManager; 28 | 29 | // needed for creation only 30 | public String createdModelIdentifier; 31 | 32 | } 33 | -------------------------------------------------------------------------------- /src/main/java/io/cloudsoft/terraform/infrastructure/Configuration.java: -------------------------------------------------------------------------------- 1 | package io.cloudsoft.terraform.infrastructure; 2 | 3 | import org.apache.commons.lang3.RandomStringUtils; 4 | import org.json.JSONObject; 5 | import org.json.JSONTokener; 6 | 7 | import java.text.SimpleDateFormat; 8 | import java.util.Date; 9 | import java.util.Map; 10 | import java.util.TimeZone; 11 | 12 | class Configuration extends BaseConfiguration { 13 | 14 | public Configuration() { 15 | super("cloudsoft-terraform-infrastructure.json"); 16 | } 17 | 18 | public JSONObject resourceSchemaJSONObject() { 19 | return new JSONObject(new JSONTokener(this.getClass().getClassLoader().getResourceAsStream(schemaFilename))); 20 | } 21 | 22 | /** 23 | * Providers should implement this method if their resource has a 'Tags' property to define resource-level tags * @return 24 | */ 25 | public Map resourceDefinedTags(final ResourceModel resourceModel) { 26 | return null; 27 | } 28 | 29 | public static String getDateTimeString() { 30 | SimpleDateFormat df = new SimpleDateFormat("yyyyMMdd-HHmmss"); 31 | df.setTimeZone(TimeZone.getTimeZone("UTC")); 32 | return df.format(new Date()); 33 | } 34 | 35 | public static String getIdentifier(boolean includeDateTime, int randomLength) { 36 | return 37 | (includeDateTime ? getDateTimeString()+"-" : "") 38 | + RandomStringUtils.randomAlphanumeric(randomLength); 39 | } 40 | 41 | } 42 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Cloudsoft::Terraform::Infrastructure 2 | 3 | ## Quick start 4 | 5 | First, you need to install the custom type into CloudFomation. See the [installation guide](./doc/installation-guide.md) documentation. 6 | 7 | Once done, you can use Terraform in CloudFormation templates by specifying the type `Cloudsoft::Terraform::Infrastructure`. 8 | For example: 9 | 10 | ``` 11 | AWSTemplateFormatVersion: 2010-09-09 12 | Description: Terraform in CloudFormation example, using the Terraform Connector for CloudFormation 13 | Resources: 14 | TerraformEc2Example: 15 | Type: Cloudsoft::Terraform::Infrastructure 16 | Properties: 17 | ConfigurationContent: | 18 | resource "aws_instance" "my-test-instance" { 19 | ami = "XXXXXXX" 20 | instance_type = "t2.micro" 21 | } 22 | ``` 23 | 24 | The Terraform configuration does not need to be in-lined; you can instead use 25 | `ConfigurationUrl` or `ConfigurationS3Path` to point at a configuration. 
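For example, a variant of the snippet above that points at a remotely hosted configuration might look like the following sketch; the URL is a placeholder for wherever your `.tf` file or ZIP archive is published:

```
AWSTemplateFormatVersion: 2010-09-09
Description: Terraform in CloudFormation example, using a remotely hosted configuration
Resources:
  TerraformRemoteExample:
    Type: Cloudsoft::Terraform::Infrastructure
    Properties:
      ConfigurationUrl: https://example.com/path/to/my-configuration.tf
```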
26 | 27 | You can then: 28 | 29 | * Use Terraform in AWS Service Catalog 30 | * Mix and match Terraform with CloudFormation in IaC templatees 31 | 32 | Features: 33 | 34 | * View Terraform outputs as CloudFormation outputs 35 | * Drive Terraform updates through CloudFormation 36 | * Read and delete 37 | 38 | For more information on how to use the custom type, see the [user guide](./doc/user-guide.md) documentation. 39 | 40 | ## Development 41 | 42 | To setup your local environment, please see the [developer guide](./doc/developer-guide.md) documentation. 43 | -------------------------------------------------------------------------------- /src/main/java/io/cloudsoft/terraform/infrastructure/UpdateHandler.java: -------------------------------------------------------------------------------- 1 | package io.cloudsoft.terraform.infrastructure; 2 | 3 | import java.io.IOException; 4 | 5 | import software.amazon.cloudformation.proxy.ProgressEvent; 6 | 7 | public class UpdateHandler extends TerraformBaseHandler { 8 | 9 | private enum Steps { 10 | UPDATE_SYNC_FILE, 11 | UPDATE_RUN_TF_APPLY, 12 | UPDATE_WAIT_ON_APPLY_THEN_RETURN 13 | } 14 | 15 | @Override 16 | protected TerraformBaseWorker newWorker() { 17 | return new Worker(); 18 | } 19 | 20 | protected static class Worker extends TerraformBaseWorker { 21 | 22 | public Worker() { super("Update", Steps.class); } 23 | 24 | @Override 25 | protected ProgressEvent runStep() throws IOException { 26 | switch (currentStep) { 27 | case UPDATE_SYNC_FILE: 28 | getAndUploadConfiguration(false); 29 | advanceTo(Steps.UPDATE_RUN_TF_APPLY); 30 | return statusInProgress(); 31 | 32 | case UPDATE_RUN_TF_APPLY: 33 | advanceTo(Steps.UPDATE_WAIT_ON_APPLY_THEN_RETURN); 34 | tfApply().start(); 35 | return statusInProgress(); 36 | 37 | case UPDATE_WAIT_ON_APPLY_THEN_RETURN: 38 | if (checkStillRunningOrError(tfApply())) { 39 | return statusInProgress(); 40 | } 41 | 42 | return statusSuccess(); 43 | 44 | default: 45 | throw new IllegalStateException("Invalid step: " + callbackContext.stepId); 46 | } 47 | } 48 | 49 | } 50 | 51 | } -------------------------------------------------------------------------------- /src/main/java/io/cloudsoft/terraform/infrastructure/ReadHandler.java: -------------------------------------------------------------------------------- 1 | package io.cloudsoft.terraform.infrastructure; 2 | 3 | import java.io.IOException; 4 | 5 | import io.cloudsoft.terraform.infrastructure.commands.RemoteTerraformOutputsProcess; 6 | import software.amazon.cloudformation.proxy.ProgressEvent; 7 | 8 | public class ReadHandler extends TerraformBaseHandler { 9 | 10 | private enum Steps { 11 | // nonoe 12 | } 13 | 14 | @Override 15 | protected TerraformBaseWorker newWorker() { 16 | return new Worker(); 17 | } 18 | 19 | protected static class Worker extends TerraformBaseWorker { 20 | 21 | public Worker() { super("Read", Steps.class); } 22 | 23 | protected boolean userLogsEnabled() { 24 | return false; 25 | } 26 | 27 | @Override 28 | protected ProgressEvent runStep() throws IOException { 29 | 30 | // this is set by the framework 31 | // model.setIdentifier 32 | 33 | // these are set by the call to loadMetadata 34 | // model.setLogBucketName 35 | // model.setLogBucketUrl 36 | 37 | // these are left null by the current implementation 38 | // (we could cache them as part of metadata) 39 | // model.setConfigurationContent(prevModel.getConfigurationContent()); 40 | // model.setConfigurationS3Path(prevModel.getConfigurationS3Path()); 41 | // 
model.setConfigurationUrl(prevModel.getConfigurationUrl()); 42 | // model.setVariables(prevModel.getVariables()); 43 | 44 | // and the two "real" ones we look up each time to make sure they are current 45 | RemoteTerraformOutputsProcess outputCmd = RemoteTerraformOutputsProcess.of(this); 46 | outputCmd.run(); 47 | model.setOutputsStringified(outputCmd.getOutputAsJsonStringized()); 48 | model.setOutputs(outputCmd.getOutputAsMap()); 49 | 50 | return statusSuccess(); 51 | } 52 | 53 | } 54 | 55 | } 56 | -------------------------------------------------------------------------------- /.github/workflows/cicd.yml: -------------------------------------------------------------------------------- 1 | name: Cloudsoft::Terraform::Infrastructure CI/CD 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | schedule: 8 | - cron: '0 0 */3 * *' # Run every 3 days 9 | pull_request: 10 | types: 11 | - opened 12 | - synchronize 13 | - reopened 14 | 15 | jobs: 16 | build: 17 | runs-on: ubuntu-latest 18 | steps: 19 | - name: Checkout project 20 | uses: actions/checkout@v1 21 | - name: Set up JDK 1.8 22 | uses: actions/setup-java@v1 23 | with: 24 | java-version: 1.8 25 | - name: Set up Python environement 26 | uses: actions/setup-python@v1 27 | with: 28 | python-version: '3.x' 29 | architecture: 'x64' 30 | - name: Set up AWS CLI 31 | uses: chrislennon/action-aws-cli@1.1 32 | - name: Set up CFN CLI 33 | run: pip3 install cloudformation-cli cloudformation-cli-java-plugin 34 | - name: Set up nightly version 35 | if: github.event_name == 'schedule' && startsWith(github.repository, 'cloudsoft') 36 | run: | 37 | CURRENT_VERSION=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout) 38 | NEW_VERSION=$(echo $CURRENT_VERSION | grep -E -o "[^-SNAPSHOT]+")-$(date '+%Y%m%d-%H%M%S') 39 | mvn versions:set -DnewVersion=$NEW_VERSION 40 | echo "::set-env name=TAG::v$NEW_VERSION" 41 | - name: Build & unit tests 42 | run: | 43 | mvn -B package --file pom.xml 44 | env: 45 | AWS_REGION: eu-west-1 46 | # TODO: Add SAM tests when building nightly 47 | - name: Package nightly 48 | if: github.event_name == 'schedule' && startsWith(github.repository, 'cloudsoft') 49 | run: cfn submit --dry-run 50 | - name: Publish nightly 51 | if: github.event_name == 'schedule' && startsWith(github.repository, 'cloudsoft') 52 | uses: ncipollo/release-action@v1 53 | with: 54 | body: Automatic nightly release, made by GitHub actions 55 | name: ${{ env.TAG }} 56 | tag: ${{ env.TAG }} 57 | token: ${{ secrets.GITHUB_TOKEN }} 58 | artifacts: cloudsoft-terraform-infrastructure.zip -------------------------------------------------------------------------------- /src/main/java/io/cloudsoft/terraform/infrastructure/commands/RemoteTerraformOutputsProcess.java: -------------------------------------------------------------------------------- 1 | package io.cloudsoft.terraform.infrastructure.commands; 2 | 3 | import com.fasterxml.jackson.core.type.TypeReference; 4 | import com.fasterxml.jackson.databind.ObjectMapper; 5 | import io.cloudsoft.terraform.infrastructure.TerraformBaseWorker; 6 | import io.cloudsoft.terraform.infrastructure.TerraformParameters; 7 | import io.cloudsoft.terraform.infrastructure.commands.SshToolbox.PostRunBehaviour; 8 | import software.amazon.cloudformation.proxy.Logger; 9 | 10 | import java.io.IOException; 11 | import java.util.Map; 12 | 13 | public class RemoteTerraformOutputsProcess extends RemoteTerraformProcess { 14 | 15 | private final ObjectMapper objectMapper; 16 | private String outputJsonStringized = null; 17 | 18 | public 
static RemoteTerraformOutputsProcess of(TerraformBaseWorker w) { 19 | return new RemoteTerraformOutputsProcess(w.getParameters(), w.getLogger(), w.getModel().getIdentifier(), w.getCallbackContext().getCommandRequestId()); 20 | } 21 | 22 | protected RemoteTerraformOutputsProcess(TerraformParameters params, Logger logger, String modelIdentifier, String commandIdentifier) { 23 | super(params, logger, modelIdentifier, commandIdentifier); 24 | this.objectMapper = new ObjectMapper(); 25 | } 26 | 27 | public void run() throws IOException { 28 | ssh.runSSHCommand(String.format("cd %s && terraform output -json", getWorkDir()), PostRunBehaviour.FAIL, PostRunBehaviour.FAIL); 29 | outputJsonStringized = ssh.lastStdout; 30 | logger.log("Outputs from TF: '" + outputJsonStringized + "'"); 31 | if (outputJsonStringized == null || outputJsonStringized.isEmpty()) { 32 | outputJsonStringized = "{}"; 33 | } 34 | } 35 | 36 | public Map getOutputAsMap() throws IOException { 37 | return objectMapper.readValue(outputJsonStringized, new TypeReference>() { 38 | }); 39 | } 40 | 41 | public String getOutputAsJsonStringized() { 42 | return outputJsonStringized; 43 | } 44 | 45 | } 46 | -------------------------------------------------------------------------------- /src/main/java/io/cloudsoft/terraform/infrastructure/ConnectorHandlerFailures.java: -------------------------------------------------------------------------------- 1 | package io.cloudsoft.terraform.infrastructure; 2 | 3 | import com.google.common.base.Strings; 4 | 5 | /** Unchecked exceptions indicating a handler request should fail. 6 | * These are used primarily to ensure an appropriate level of logging to the consumer. 7 | */ 8 | public class ConnectorHandlerFailures { 9 | 10 | public static RuntimeException handled(String message) { 11 | return new Handled(message); 12 | } 13 | public static RuntimeException unhandled(String message) { 14 | return new Unhandled(message); 15 | } 16 | public static RuntimeException unhandled(String message, Throwable cause) { 17 | return new Unhandled(message, cause); 18 | } 19 | 20 | /** An exception which has been handled and logged, with a nice message supplied here. 21 | * The catcher should not do any further logging, but can include the message as a reason 22 | * why the failure occurred. 23 | */ 24 | public static class Handled extends RuntimeException { 25 | private static final long serialVersionUID = -6582312522891789442L; 26 | protected Handled(String message) { 27 | super(message); 28 | } 29 | } 30 | 31 | /** An exception which is expected, and has a nice message, but which has not 32 | * been handled or logged. The catcher should log, and if a cause is supplied should 33 | * log its trace, and can include the message as a reason why the failure occurred. 34 | */ 35 | public static class Unhandled extends RuntimeException { 36 | private static final long serialVersionUID = -4661749698259957836L; 37 | public Unhandled(String message) { 38 | super(message); 39 | } 40 | protected Unhandled(String message, Throwable cause) { 41 | super(message, cause); 42 | } 43 | } 44 | 45 | /** Return a simple message, viz. 
the Exception.getMessage if non-blank (dropping the exception class), otherwise a toString */ 46 | public static String simpleMessage(Exception e) { 47 | if (e==null) return "No details"; 48 | if (Strings.isNullOrEmpty(e.getMessage())) return e.toString(); 49 | return e.getMessage(); 50 | } 51 | 52 | 53 | } 54 | -------------------------------------------------------------------------------- /src/test/java/io/cloudsoft/terraform/infrastructure/ListHandlerTest.java: -------------------------------------------------------------------------------- 1 | package io.cloudsoft.terraform.infrastructure; 2 | 3 | import org.junit.jupiter.api.BeforeEach; 4 | import org.junit.jupiter.api.Test; 5 | import org.junit.jupiter.api.extension.ExtendWith; 6 | import org.mockito.Mock; 7 | import org.mockito.junit.jupiter.MockitoExtension; 8 | import software.amazon.cloudformation.proxy.*; 9 | 10 | import static org.assertj.core.api.Assertions.assertThat; 11 | import static org.mockito.Mockito.mock; 12 | 13 | @ExtendWith(MockitoExtension.class) 14 | public class ListHandlerTest { 15 | 16 | @Mock 17 | private AmazonWebServicesClientProxy proxy; 18 | 19 | @Mock 20 | private Logger logger; 21 | 22 | @BeforeEach 23 | public void setup() { 24 | proxy = mock(AmazonWebServicesClientProxy.class); 25 | logger = mock(Logger.class); 26 | } 27 | 28 | @Test 29 | public void handleRequest_SimpleSuccess() { 30 | final ListHandler handler = new ListHandler() { 31 | @Override 32 | protected TerraformBaseWorker newWorker() { 33 | TerraformBaseWorker result = super.newWorker(); 34 | result.storeMetadataOnServer = false; 35 | result.setParameters(HandlerTestFixture.newTerraformParametersForTests(logger, proxy, null, null)); 36 | return result; 37 | } 38 | }; 39 | 40 | final ResourceModel model = ResourceModel.builder().build(); 41 | 42 | final ResourceHandlerRequest request = ResourceHandlerRequest.builder() 43 | .desiredResourceState(model) 44 | .build(); 45 | 46 | final ProgressEvent response = 47 | handler.handleRequest(proxy, request, null, logger); 48 | 49 | assertThat(response).isNotNull(); 50 | assertThat(response.getStatus()).isEqualTo(OperationStatus.SUCCESS); 51 | assertThat(response.getCallbackContext()).isNull(); 52 | assertThat(response.getCallbackDelaySeconds()).isEqualTo(0); 53 | assertThat(response.getResourceModel()).isNull(); 54 | assertThat(response.getResourceModels()).isNotNull(); 55 | assertThat(response.getMessage()).isNull(); 56 | assertThat(response.getErrorCode()).isNull(); 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /terraform-example.cfn.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: 2010-09-09 2 | Description: Terraform in CloudFormation example, using the Terraform Connector for CloudFormation 3 | Parameters: 4 | CFNBucket1Name: 5 | Type: String 6 | Default: terraform-rp-example-bucket1-default-from-cfn-2 7 | CFNBucket2Name: 8 | Type: String 9 | Default: terraform-rp-example-bucket2-default-from-cfn-2 10 | Resources: 11 | TerraformExample: 12 | Type: Cloudsoft::Terraform::Infrastructure 13 | Properties: 14 | LogBucketName: cloudsoft-terraform-infrastructure-my-user-logs 15 | ConfigurationContent: | 16 | variable "AWSRegion" { 17 | type = string 18 | default = "eu-central-1" 19 | } 20 | 21 | variable "TFBucket1Name" { 22 | type = string 23 | default = "terraform-rp-example-bucket1-default-from-tf-overridden-by-cfn-so-not-used-or-needed" 24 | } 25 | 26 | variable "TFBucket2Name" { 
27 | type = string 28 | } 29 | 30 | provider "aws" { 31 | region = var.AWSRegion 32 | } 33 | 34 | resource "aws_s3_bucket" "bucket1" { 35 | bucket = var.TFBucket1Name 36 | acl = "private" 37 | } 38 | 39 | resource "aws_s3_bucket" "bucket2" { 40 | bucket = var.TFBucket2Name 41 | acl = "private" 42 | } 43 | 44 | output "bucket1-id" { 45 | value = aws_s3_bucket.bucket1.id 46 | } 47 | 48 | output "bucket1-arn" { 49 | value = aws_s3_bucket.bucket1.arn 50 | } 51 | 52 | output "bucket1-region" { 53 | value = aws_s3_bucket.bucket1.region 54 | } 55 | 56 | output "bucket2-id" { 57 | value = aws_s3_bucket.bucket2.id 58 | } 59 | 60 | output "bucket2-arn" { 61 | value = aws_s3_bucket.bucket2.arn 62 | } 63 | 64 | output "bucket2-region" { 65 | value = aws_s3_bucket.bucket2.region 66 | } 67 | Variables: 68 | AWSRegion: !Ref "AWS::Region" 69 | TFBucket1Name: !Ref CFNBucket1Name 70 | TFBucket2Name: !Ref CFNBucket2Name 71 | 72 | Outputs: 73 | TerraformOutputs: 74 | Description: Outputs from Terraform as a stringified JSON map 75 | Value: !GetAtt TerraformExample.OutputsStringified 76 | LogBucketUrl: 77 | Value: !GetAtt TerraformExample.LogBucketUrl 78 | 79 | -------------------------------------------------------------------------------- /src/main/java/io/cloudsoft/terraform/infrastructure/commands/RemoteDetachedTerraformProcess.java: -------------------------------------------------------------------------------- 1 | package io.cloudsoft.terraform.infrastructure.commands; 2 | 3 | import java.io.IOException; 4 | 5 | import io.cloudsoft.terraform.infrastructure.TerraformParameters; 6 | import software.amazon.cloudformation.proxy.Logger; 7 | 8 | abstract public class RemoteDetachedTerraformProcess extends RemoteTerraformProcess { 9 | 10 | protected String stdoutLogFileName, stderrLogFileName; 11 | final protected TerraformCommand tfCommand; 12 | 13 | public enum TerraformCommand { 14 | TF_INIT, 15 | TF_APPLY, 16 | TF_DESTROY, 17 | } 18 | 19 | protected RemoteDetachedTerraformProcess(TerraformParameters params, Logger logger, TerraformCommand tc, String modelIdentifier, String commandIdentifier) { 20 | super(params, logger, modelIdentifier, commandIdentifier); 21 | tfCommand = tc; 22 | } 23 | 24 | public String getCommandName() { 25 | return tfCommand.toString(); 26 | } 27 | 28 | public String getFullStdout() throws IOException { 29 | return ssh.catFileIfExists(stdoutLogFileName); 30 | } 31 | 32 | public String getFullStderr() throws IOException { 33 | return ssh.catFileIfExists(stderrLogFileName); 34 | } 35 | 36 | public String getIncrementalStdout() throws IOException { 37 | return ssh.catIncrementalFileIfExists(stdoutLogFileName); 38 | } 39 | 40 | public String getIncrementalStderr() throws IOException { 41 | return ssh.catIncrementalFileIfExists(stderrLogFileName); 42 | } 43 | 44 | protected String getTerraformCommand() { 45 | switch (tfCommand) { 46 | case TF_INIT: 47 | return "terraform init -lock=true -no-color -input=false"; 48 | case TF_APPLY: 49 | return "terraform apply -lock=true -no-color -input=false -auto-approve"; 50 | case TF_DESTROY: 51 | return "terraform destroy -lock=true -no-color -auto-approve"; 52 | default: 53 | throw new IllegalArgumentException("Unknown command " + tfCommand.toString()); 54 | } 55 | } 56 | 57 | abstract public void start() throws IOException; 58 | abstract public boolean isRunning() throws IOException; 59 | abstract public boolean wasFailure() throws IOException; 60 | abstract public String getErrorString() throws IOException; 61 | abstract public void cleanup() 
throws IOException; 62 | 63 | } 64 | -------------------------------------------------------------------------------- /src/main/java/io/cloudsoft/terraform/infrastructure/BucketUtils.java: -------------------------------------------------------------------------------- 1 | package io.cloudsoft.terraform.infrastructure; 2 | 3 | import org.bouncycastle.util.io.Streams; 4 | 5 | import software.amazon.awssdk.core.sync.RequestBody; 6 | import software.amazon.awssdk.services.s3.S3Client; 7 | import software.amazon.awssdk.services.s3.model.CreateBucketRequest; 8 | import software.amazon.awssdk.services.s3.model.DeleteBucketRequest; 9 | import software.amazon.awssdk.services.s3.model.GetObjectRequest; 10 | import software.amazon.awssdk.services.s3.model.PutObjectRequest; 11 | import software.amazon.cloudformation.proxy.AmazonWebServicesClientProxy; 12 | 13 | public class BucketUtils { 14 | 15 | private AmazonWebServicesClientProxy proxy; 16 | private S3Client s3Client; 17 | 18 | public BucketUtils(AmazonWebServicesClientProxy proxy) { 19 | this(proxy, S3Client.create()); 20 | } 21 | public BucketUtils(AmazonWebServicesClientProxy proxy, S3Client s3Client) { 22 | this.proxy = proxy; 23 | this.s3Client = s3Client; 24 | } 25 | 26 | public void createBucket(String bucketName) { 27 | CreateBucketRequest createBucketRequest = CreateBucketRequest.builder() 28 | .bucket(bucketName) 29 | .build(); 30 | proxy.injectCredentialsAndInvokeV2(createBucketRequest, request -> s3Client.createBucket(request)); 31 | } 32 | 33 | public void deleteBucket(String bucketName) { 34 | DeleteBucketRequest deleteBucketRequest = DeleteBucketRequest.builder() 35 | .bucket(bucketName) 36 | .build(); 37 | proxy.injectCredentialsAndInvokeV2(deleteBucketRequest, request -> s3Client.deleteBucket(request)); 38 | } 39 | 40 | public byte[] download(final String bucket, final String key) { 41 | GetObjectRequest getObjectRequest = GetObjectRequest.builder() 42 | .bucket(bucket) 43 | .key(key) 44 | .build(); 45 | byte result[][] = { new byte[0] }; 46 | proxy.injectCredentialsAndInvokeV2(getObjectRequest, request -> s3Client.getObject(request, 47 | (response, stream) -> { 48 | result[0] = Streams.readAll(stream); 49 | return response; 50 | })); 51 | return result[0]; 52 | } 53 | 54 | public void upload(String bucketName, String objectKey, RequestBody contents, String mimeType) { 55 | final PutObjectRequest putReq = PutObjectRequest.builder() 56 | .bucket(bucketName) 57 | .key(objectKey) 58 | .contentType(mimeType) 59 | .build(); 60 | proxy.injectCredentialsAndInvokeV2(putReq, request -> s3Client.putObject(request, contents)); 61 | } 62 | 63 | } 64 | -------------------------------------------------------------------------------- /src/main/java/io/cloudsoft/terraform/infrastructure/DeleteHandler.java: -------------------------------------------------------------------------------- 1 | package io.cloudsoft.terraform.infrastructure; 2 | 3 | import java.io.IOException; 4 | 5 | import io.cloudsoft.terraform.infrastructure.commands.RemoteTerraformProcess; 6 | import software.amazon.cloudformation.proxy.ProgressEvent; 7 | 8 | public class DeleteHandler extends TerraformBaseHandler { 9 | 10 | /** TODO it might be nice if we detect failure and can delete if it isn't a failure, 11 | * and then possibly to expose this is a parameter "always", "never", "on_success" */ 12 | private static boolean DELETE_LOGS = false; 13 | 14 | private enum Steps { 15 | DELETE_RUN_TF_DESTROY, 16 | DELETE_WAIT_ON_DESTROY_THEN_RMDIR_AND_RETURN 17 | } 18 | 19 | @Override 
20 | protected TerraformBaseWorker newWorker() { 21 | return new Worker(); 22 | } 23 | 24 | protected static class Worker extends TerraformBaseWorker { 25 | 26 | public Worker() { super("Delete", Steps.class); } 27 | 28 | @Override 29 | protected ProgressEvent runStep() throws IOException { 30 | switch (currentStep) { 31 | case DELETE_RUN_TF_DESTROY: 32 | advanceTo(Steps.DELETE_WAIT_ON_DESTROY_THEN_RMDIR_AND_RETURN); 33 | tfDestroy().start(); 34 | return statusInProgress(); 35 | 36 | case DELETE_WAIT_ON_DESTROY_THEN_RMDIR_AND_RETURN: 37 | if (checkStillRunningOrError(tfDestroy())) { 38 | return statusInProgress(); 39 | } 40 | 41 | RemoteTerraformProcess.of(this).rmWorkDir(); 42 | 43 | if (DELETE_LOGS) { 44 | if (callbackContext.logBucketName != null) { 45 | try { 46 | new BucketUtils(proxy).deleteBucket(callbackContext.logBucketName); 47 | log(String.format("Deleted bucket for logs at s3://%s/", callbackContext.logBucketName)); 48 | callbackContext.logBucketName = null; 49 | setModelLogBucketUrlFromCallbackContextName(); 50 | 51 | } catch (Exception e) { 52 | String message = String.format("Failed to delete log bucket %s: %s (%s)", callbackContext.logBucketName, e.getClass().getName(), e.getMessage()); 53 | log(message); 54 | throw ConnectorHandlerFailures.handled(message+". "+ 55 | "The terraform-deployed infrastructure has been destroyed, " 56 | + "but the log bucket will need manual removal."); 57 | } 58 | } 59 | } 60 | 61 | return statusSuccess(); 62 | default: 63 | throw new IllegalStateException("invalid step " + callbackContext.stepId); 64 | } 65 | } 66 | 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /src/test/java/io/cloudsoft/terraform/infrastructure/HandlerTestFixture.java: -------------------------------------------------------------------------------- 1 | package io.cloudsoft.terraform.infrastructure; 2 | 3 | import static org.mockito.Mockito.doReturn; 4 | import static org.mockito.Mockito.mock; 5 | import static org.mockito.Mockito.spy; 6 | import static org.mockito.Mockito.verify; 7 | 8 | import java.io.IOException; 9 | import java.util.function.Supplier; 10 | 11 | import org.junit.jupiter.api.BeforeEach; 12 | import org.junit.jupiter.api.extension.ExtendWith; 13 | import org.mockito.Mock; 14 | import org.mockito.junit.jupiter.MockitoExtension; 15 | 16 | import junit.framework.Assert; 17 | import software.amazon.awssdk.services.s3.S3Client; 18 | import software.amazon.awssdk.services.ssm.SsmClient; 19 | import software.amazon.cloudformation.proxy.AmazonWebServicesClientProxy; 20 | import software.amazon.cloudformation.proxy.Logger; 21 | import software.amazon.cloudformation.proxy.OperationStatus; 22 | import software.amazon.cloudformation.proxy.ProgressEvent; 23 | import software.amazon.cloudformation.proxy.ResourceHandlerRequest; 24 | 25 | @ExtendWith(MockitoExtension.class) 26 | public class HandlerTestFixture { 27 | 28 | @Mock 29 | protected AmazonWebServicesClientProxy proxy; 30 | 31 | @Mock 32 | protected S3Client s3Client; 33 | 34 | @Mock 35 | protected SsmClient ssmClient; 36 | 37 | protected Logger logger; 38 | 39 | @BeforeEach 40 | public void setup() { 41 | proxy = mock(AmazonWebServicesClientProxy.class); 42 | logger = new Logger() { 43 | @Override 44 | public void log(String arg) { 45 | System.out.println("LOG: " + arg); 46 | } 47 | }; 48 | } 49 | 50 | public void doWorkerRun(Supplier handlerFactory) throws IOException { 51 | final ResourceModel model = ResourceModel.builder().build(); 52 | final 
ResourceHandlerRequest request = ResourceHandlerRequest.builder() 53 | .desiredResourceState(model) 54 | .build(); 55 | final CallbackContext callbackContext = new CallbackContext(); 56 | final ProgressEvent progressEvent = ProgressEvent.builder() 57 | .resourceModel(model) 58 | .status(OperationStatus.SUCCESS) 59 | .build(); 60 | 61 | // null value for all parameters 62 | final TerraformBaseWorker worker = handlerFactory.get().newWorker(); 63 | worker.storeMetadataOnServer = false; 64 | worker.setParameters(newTerraformParametersForTests(logger, proxy, ssmClient, s3Client)); 65 | TerraformBaseWorker spyWorker = spy(worker); 66 | 67 | final TerraformBaseHandler handler = handlerFactory.get(); 68 | TerraformBaseHandler spyHandler = spy(handler); 69 | 70 | doReturn(progressEvent).when(spyWorker).runStep(); 71 | doReturn(spyWorker).when(spyHandler).newWorker(); 72 | 73 | ProgressEvent result = spyHandler.handleRequest(proxy, request, callbackContext, logger); 74 | 75 | Assert.assertEquals(result, progressEvent); 76 | verify(spyWorker).runHandlingError(); 77 | } 78 | 79 | static TerraformParameters newTerraformParametersForTests(Logger logger, AmazonWebServicesClientProxy proxy, SsmClient ssmClient, S3Client s3Client) { 80 | return new TerraformParameters(logger, proxy, ssmClient, s3Client) { 81 | // this one is read during init 82 | @Override 83 | public String getLogsS3BucketName() { 84 | return null; 85 | } 86 | }; 87 | } 88 | 89 | } 90 | -------------------------------------------------------------------------------- /cloudsoft-terraform-infrastructure.json: -------------------------------------------------------------------------------- 1 | { 2 | "typeName": "Cloudsoft::Terraform::Infrastructure", 3 | "description": "The Cloudsoft::Terraform::Infrastructure resource creates and manages a terraform template, through an existing terraform server", 4 | "sourceUrl": "https://github.com/cloudsoft/aws-cfn-connector-for-terraform", 5 | "definitions": { 6 | "ConfigurationContent": { 7 | "description": "Inlined Terraform Configuration, passed to the Terraform server", 8 | "type": "string" 9 | }, 10 | "ConfigurationUrl": { 11 | "description": "Public HTTP URL of a Terraform Configuration, to be downloaded by the connector and passed to the Terraform server", 12 | "type": "string" 13 | }, 14 | "ConfigurationS3Path": { 15 | "description": "S3 path object representing a Terraform Configuration, to be downloaded by the connector and passed to the Terraform server (the current account must have access to this resource)", 16 | "type": "string" 17 | }, 18 | "Variables": { 19 | "description": "Variables to set as part of the Terraform Configuration", 20 | "type": "object" 21 | } 22 | }, 23 | "properties": { 24 | "Identifier": { 25 | "description": "Identifier for this deployment", 26 | "type": "string" 27 | }, 28 | "Outputs": { 29 | "description": "Outputs produced by the Terraform configuration, as a map", 30 | "type": "object" 31 | }, 32 | "OutputsStringified": { 33 | "description": "Outputs produced by the Terraform configuration, as a JSON-stringified representation of the Outputs map", 34 | "type": "string" 35 | }, 36 | "LogBucketName": { 37 | "description": "Optional name of S3 bucket where logs will be transferred; if the bucket exists, it must be writeable by this resource provider's execution role; if it does not exist, it will be created (and never deleted) by this resource provider", 38 | "type": "string" 39 | }, 40 | "LogBucketUrl": { 41 | "description": "URL to access the bucket where logs are 
written, if configured", 42 | "type": "string" 43 | }, 44 | "ConfigurationContent": { 45 | "$ref": "#/definitions/ConfigurationContent" 46 | }, 47 | "ConfigurationUrl": { 48 | "$ref": "#/definitions/ConfigurationUrl" 49 | }, 50 | "ConfigurationS3Path": { 51 | "$ref": "#/definitions/ConfigurationS3Path" 52 | }, 53 | "Variables": { 54 | "$ref": "#/definitions/Variables" 55 | } 56 | }, 57 | "additionalProperties": false, 58 | "primaryIdentifier": [ 59 | "/properties/Identifier" 60 | ], 61 | "additionalIdentifiers": [ 62 | [ "/properties/LogBucketUrl" ] 63 | ], 64 | "readOnlyProperties": [ 65 | "/properties/Identifier", 66 | "/properties/Outputs", 67 | "/properties/OutputsStringified", 68 | "/properties/LogBucketUrl" 69 | ], 70 | "handlers": { 71 | "create": { 72 | "permissions": [ 73 | "s3:GetObject", 74 | "s3:GetObjectVersion", 75 | "s3:PutObject", 76 | "s3:CreateBucket", 77 | "ssm:GetParameter" 78 | ] 79 | }, 80 | "read": { 81 | "permissions": [ 82 | "ssm:GetParameter" 83 | ] 84 | }, 85 | "update": { 86 | "permissions": [ 87 | "s3:GetObject", 88 | "s3:GetObjectVersion", 89 | "s3:PutObject", 90 | "ssm:GetParameter" 91 | ] 92 | }, 93 | "delete": { 94 | "permissions": [ 95 | "s3:GetObject", 96 | "s3:GetObjectVersion", 97 | "s3:PutObject", 98 | "s3:DeleteBucket", 99 | "ssm:GetParameter" 100 | ] 101 | }, 102 | "list": { 103 | "permissions": [ 104 | "ssm:GetParameter" 105 | ] 106 | } 107 | } 108 | } 109 | -------------------------------------------------------------------------------- /src/main/java/io/cloudsoft/terraform/infrastructure/commands/RemoteDetachedTerraformProcessNohup.java: -------------------------------------------------------------------------------- 1 | package io.cloudsoft.terraform.infrastructure.commands; 2 | 3 | import java.io.IOException; 4 | 5 | import io.cloudsoft.terraform.infrastructure.TerraformBaseWorker; 6 | import io.cloudsoft.terraform.infrastructure.TerraformParameters; 7 | import io.cloudsoft.terraform.infrastructure.commands.SshToolbox.PostRunBehaviour; 8 | import software.amazon.cloudformation.proxy.Logger; 9 | 10 | public class RemoteDetachedTerraformProcessNohup extends RemoteDetachedTerraformProcess { 11 | private final String pidFileName; 12 | protected final String exitstatusFileName; 13 | 14 | public static RemoteDetachedTerraformProcessNohup of(TerraformBaseWorker w, TerraformCommand command) { 15 | return new RemoteDetachedTerraformProcessNohup(w.getParameters(), w.getLogger(), command, w.getModel().getIdentifier(), w.getCallbackContext().getCommandRequestId()); 16 | } 17 | 18 | public RemoteDetachedTerraformProcessNohup(TerraformParameters parameters, Logger logger, TerraformCommand tc, String modelIdentifier, String commandIdentifier) { 19 | super(parameters, logger, tc, modelIdentifier, commandIdentifier); 20 | stdoutLogFileName = getFileName(true, "stdout.log"); 21 | stderrLogFileName = getFileName(true, "stderr.log"); 22 | exitstatusFileName = getFileName(true, "exitstatus.log"); 23 | pidFileName = getFileName(true, "pid.txt"); 24 | } 25 | 26 | private String getFileName(boolean isAbsolute, String trailer) { 27 | return (isAbsolute ? 
getWorkDir()+"/" : "") + 28 | String.format("terraform-%s-%s-", commandIdentifier, getCommandName().toLowerCase()) + 29 | trailer; 30 | } 31 | 32 | public boolean wasFailure() { 33 | String err = getErrorString(); 34 | if (err==null || err.trim().length()==0) { 35 | // still running 36 | return false; 37 | } 38 | try { 39 | return Integer.parseInt(err.trim())!=0; 40 | } catch (Exception e) { 41 | throw new IllegalStateException("Unparseable error status: '"+err.trim()+"'"); 42 | } 43 | } 44 | 45 | public String getErrorString() { 46 | try { 47 | return ssh.catFileIfExists(exitstatusFileName); 48 | } catch (IOException e) { 49 | throw new RuntimeException(e); 50 | } 51 | } 52 | 53 | public boolean isRunning() throws IOException { 54 | ssh.runSSHCommand(String.format("if ! cat %s >/dev/null; then echo 'failed to cat the pidfile'; elif [ -f /proc/`cat %s`/environ ]; then echo true; else echo false; fi", pidFileName, pidFileName), PostRunBehaviour.IGNORE, PostRunBehaviour.IGNORE); 55 | String out = ssh.lastStdout.trim(); 56 | if (out.equals("true")) { 57 | return true; 58 | } else if (out.equals("false")) { 59 | return false; 60 | } else { 61 | throw new IllegalStateException("Unexpected output from isRunning: '"+out+"'"); 62 | } 63 | } 64 | 65 | public void start() throws IOException { 66 | String scriptName = "./"+getFileName(false, "script.sh"); 67 | String fullCmd = String.join("\n", 68 | "cd "+getWorkDir(), 69 | ssh.setupIncrementalFileCommand(stdoutLogFileName), 70 | ssh.setupIncrementalFileCommand(stderrLogFileName), 71 | "cat > "+scriptName+" << EOF", 72 | getTerraformCommand(), 73 | "echo \\$? > "+exitstatusFileName, 74 | "EOF", 75 | "chmod +x "+scriptName, 76 | String.format("nohup %s %s 2>%s & echo $! >%s", scriptName, stdoutLogFileName, stderrLogFileName, pidFileName) 77 | ); 78 | ssh.runSSHCommand(fullCmd, PostRunBehaviour.FAIL, PostRunBehaviour.IGNORE); 79 | } 80 | 81 | @Override 82 | public void cleanup() { 83 | } 84 | 85 | } 86 | -------------------------------------------------------------------------------- /doc/user-guide.md: -------------------------------------------------------------------------------- 1 | # User guide 2 | 3 | Once the CloudFormation connector for Terraform [is installed](installation-guide.md), you can begin to use the new custom type 4 | `Cloudsoft::Terraform::Infrastructure` to deploy Terraform configuration. This can be added along side any other CloudFormation 5 | resources. 6 | 7 | ## Syntax 8 | 9 | To declare this type in your CloudFormation template, use the following syntax, 10 | with exactly _one_ `Configuration` property specified. 11 | The URL and S3 variants can point at a `TF` text file or a `ZIP` archive. 
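For instance, a declaration that points at a configuration packaged as a ZIP archive might look like the sketch below; the URL is a placeholder, and the generic JSON and YAML syntax follows.

```yaml
Type: Cloudsoft::Terraform::Infrastructure
Properties:
  ConfigurationUrl: https://example.com/my-terraform-configuration.zip
```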
12 | 13 | ### JSON 14 | ```json 15 | { 16 | "Type" : "Cloudsoft::Terraform::Infrastructure", 17 | "Properties" : { 18 | "Variables": { 19 | "variable1": "string1", 20 | "variable2": "string2", 21 | "variable3": "string3" 22 | }, 23 | "ConfigurationContent": "String", 24 | "ConfigurationUrl": "String", 25 | "ConfigurationS3Path": "String" 26 | } 27 | } 28 | ``` 29 | 30 | ### YAML 31 | ```yaml 32 | Type: Cloudsoft::Terraform::Infrastructure 33 | Properties: 34 | Variables: 35 | variable1: string1 36 | variable2: string2 37 | variable3: string3 38 | ConfigurationContent: String 39 | ConfigurationUrl: String 40 | ConfigurationS3Path: String 41 | ``` 42 | 43 | ## Properties 44 | 45 | | Key | Description | Required | 46 | |-----|-------------|----------| 47 | | `ConfigurationContent` | Inlined Terraform configuration text to be uploaded to the Terraform server. | Conditional.

Exactly one of `ConfigurationContent`, `ConfigurationUrl` or `ConfigurationS3Path` must be specified. | 48 | | `ConfigurationUrl` | Public HTTP URL of a Terraform configuration. This will be downloaded from within CloudFormation and uploaded to the Terraform server. | (as above) | 49 | | `ConfigurationS3Path` | S3 path object representing a Terraform configuration. The current account must have access to this resource. This will be downloaded from within CloudFormation and uploaded to the Terraform server. | (as above) | 50 | | `Variables` | Variables to make available to the Terraform configuration by means of an `.auto.tfvars.json` file. | Optional in the CloudFormation template, although may be required by the Terraform configuration. | 51 | | `LogBucketName` | The name of an S3 bucket to create (if not present) and write log files. This property value can contain a `*` character which will be replaced by the model's identifier, ensuring creation of a new bucket for each stack which the user will have access to. If the value does not contain `*` the bucket should either be intended for a single user or else manually configured with appropriate permissions for all users to see and for this resource provider to write to, otherwise the bucket may be unusable or inaccessible to some. Restrictions on bucket names apply (between 3 and 63 characters long, no capital letters, etc). | Optional; useful if the Terraform is not behaving as expected | 52 | 53 | ## Return Values 54 | 55 | The resource provider will set the following outputs on the resource. 56 | 57 | | Key | Type | Description | 58 | |-----|------|-------------| 59 | | `Outputs` | Object | All output coming from the Terraform configuration, as a map. | 60 | | `OutputsStringified` | String | All output coming from the Terraform configuration, as a JSON string of the map. | 61 | | `LogBucketUrl` | String | A URL where logs can be found if S3 logs are configured. Note that this is only set if a log bucket is explicitly requested, either with the `LogBucketName` property in CFN or a `/cfn/terraform/logs-s3-bucket-name` parameter in SSM. | 62 | 63 | You can use the `Fn::GetAtt` intrinsic function to access these values, 64 | e.g. in the `Outputs` section of your CloudFormation to set an output on the stack and see it. 65 | (At present there is no other way to see these values without inspecting the logs.) 66 | 67 | It can also sometimes be useful to send the `OutputsStringified` to another Lambda to parse the JSON 68 | and retrieve selected fields for use elsewhere in your stack. 69 | (At present it is not possible using `Fn::GetAtt` to access a specific field within the `Outputs`.) 
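As a concrete sketch of the above, an `Outputs` section that surfaces both return values on the stack might look like this, assuming the resource's logical name is `TerraformExample` (any logical name works):

```yaml
Outputs:
  TerraformOutputs:
    Description: Outputs from Terraform as a stringified JSON map
    Value: !GetAtt TerraformExample.OutputsStringified
  LogBucketUrl:
    Value: !GetAtt TerraformExample.LogBucketUrl
```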
70 | 71 | -------------------------------------------------------------------------------- /src/test/java/io/cloudsoft/terraform/infrastructure/TerraformBaseHandlerTest.java: -------------------------------------------------------------------------------- 1 | package io.cloudsoft.terraform.infrastructure; 2 | 3 | import junit.framework.Assert; 4 | import org.junit.jupiter.api.Test; 5 | import org.junit.jupiter.api.extension.ExtendWith; 6 | import org.mockito.Mock; 7 | import org.mockito.junit.jupiter.MockitoExtension; 8 | import software.amazon.cloudformation.proxy.*; 9 | 10 | import java.io.ByteArrayOutputStream; 11 | import java.io.IOException; 12 | import java.io.PrintStream; 13 | 14 | import static junit.framework.Assert.assertEquals; 15 | import static junit.framework.Assert.assertTrue; 16 | import static org.junit.jupiter.api.Assertions.assertThrows; 17 | import static org.mockito.Mockito.verify; 18 | 19 | @ExtendWith(MockitoExtension.class) 20 | public class TerraformBaseHandlerTest { 21 | 22 | private enum NoSteps {} 23 | 24 | public static class EmptyHandler extends TerraformBaseHandler { 25 | EmptyWorker w; 26 | @Override 27 | protected TerraformBaseWorker newWorker() { 28 | return w = new EmptyWorker(); 29 | } 30 | } 31 | 32 | public static class EmptyWorker extends TerraformBaseWorker { 33 | public EmptyWorker() { super("Empty", NoSteps.class); } 34 | 35 | @Override 36 | public ProgressEvent runStep() { 37 | return statusSuccess(); 38 | } 39 | 40 | // visible for testing 41 | @Override 42 | public void init(AmazonWebServicesClientProxy proxy, ResourceHandlerRequest request, CallbackContext callbackContext, Logger logger) { 43 | super.init(proxy, request, callbackContext, logger); 44 | } 45 | 46 | @Override 47 | public void log(String message) { 48 | super.log(message); 49 | } 50 | } 51 | 52 | 53 | @Mock 54 | Logger logger; 55 | 56 | private EmptyHandler lastHandler; 57 | 58 | private ProgressEvent runEmptyHandler(AmazonWebServicesClientProxy proxy, ResourceHandlerRequest request, CallbackContext callbackContext, Logger logger) { 59 | EmptyHandler h = new EmptyHandler(); 60 | this.lastHandler = h; 61 | return h.handleRequest(proxy, request, callbackContext, logger); 62 | } 63 | 64 | private ProgressEvent runEmptyHandlerWithDefaults() { 65 | final ResourceModel model = ResourceModel.builder().build(); 66 | final ResourceHandlerRequest request = ResourceHandlerRequest.builder() 67 | .desiredResourceState(model) 68 | .build(); 69 | final CallbackContext callbackContext = new CallbackContext(); 70 | 71 | return runEmptyHandler(null, request, callbackContext, logger); 72 | } 73 | 74 | @Test 75 | public void failsIfRequestIsNull() { 76 | System.err.println("Expecting logged NPE to follow; ignore it"); 77 | ProgressEvent result = runEmptyHandler(null, null, null, logger); 78 | Assert.assertNotNull(result); 79 | Assert.assertEquals(result.getStatus(), OperationStatus.FAILED); 80 | String msg = result.getMessage(); 81 | Assert.assertTrue("'"+msg+"' did not contain 'request'", msg.contains("request")); 82 | Assert.assertTrue("'"+msg+"' did not contain NPE", msg.contains(NullPointerException.class.getSimpleName())); 83 | } 84 | 85 | @Test 86 | public void initSuccessfully() { 87 | ProgressEvent result = runEmptyHandlerWithDefaults(); 88 | assertEquals(ResourceModel.builder().build(), result.getResourceModel()); 89 | } 90 | 91 | @Test 92 | public void cantInitTwice() { 93 | runEmptyHandlerWithDefaults(); 94 | assertThrows(IllegalStateException.class, () -> lastHandler.w.init(null, null, 
null, logger)); 95 | } 96 | 97 | @Test 98 | public void logPrintsOutMessages() throws IOException { 99 | runEmptyHandlerWithDefaults(); 100 | 101 | String message = "This is a message"; 102 | ByteArrayOutputStream bo = new ByteArrayOutputStream(); 103 | System.setOut(new PrintStream(bo)); 104 | 105 | lastHandler.w.log(message); 106 | 107 | bo.flush(); 108 | String allWrittenLines = new String(bo.toByteArray()); 109 | 110 | assertTrue(allWrittenLines.contains(message)); 111 | assertTrue(allWrittenLines.contains(TerraformBaseWorker.LOG_MESSAGE_SEPARATOR)); 112 | verify(logger).log(message); 113 | } 114 | 115 | } 116 | -------------------------------------------------------------------------------- /src/main/java/io/cloudsoft/terraform/infrastructure/commands/RemoteDetachedTerraformProcessSystemd.java: -------------------------------------------------------------------------------- 1 | package io.cloudsoft.terraform.infrastructure.commands; 2 | 3 | import java.io.IOException; 4 | import java.util.Arrays; 5 | import java.util.List; 6 | 7 | import io.cloudsoft.terraform.infrastructure.TerraformBaseWorker; 8 | import io.cloudsoft.terraform.infrastructure.TerraformParameters; 9 | import io.cloudsoft.terraform.infrastructure.commands.SshToolbox.PostRunBehaviour; 10 | import software.amazon.cloudformation.proxy.Logger; 11 | 12 | public class RemoteDetachedTerraformProcessSystemd extends RemoteDetachedTerraformProcess { 13 | 14 | public static RemoteDetachedTerraformProcessSystemd of(TerraformBaseWorker w, TerraformCommand tc) { 15 | return new RemoteDetachedTerraformProcessSystemd(w.getParameters(), w.getLogger(), tc, w.getModel().getIdentifier(), w.getCallbackContext().getCommandRequestId()); 16 | } 17 | 18 | protected RemoteDetachedTerraformProcessSystemd(TerraformParameters params, Logger logger, TerraformCommand tc, String modelIdentifier, String commandIdentifier) { 19 | super(params, logger, tc, modelIdentifier, commandIdentifier); 20 | stdoutLogFileName = String.format("%s/%s-stdout.log", getLogDir(), getUnitPrefix()); 21 | stderrLogFileName = String.format("%s/%s-stderr.log", getLogDir(), getUnitPrefix()); 22 | } 23 | 24 | protected String getLogDir() { 25 | return getWorkDir(); 26 | } 27 | 28 | private String getUnitPrefix() { 29 | return "terraform-"+modelIdentifier+"-"+commandIdentifier+"-"+getCommandName().toLowerCase(); 30 | } 31 | 32 | private String getUnitFullName() { 33 | return getUnitPrefix()+".service"; 34 | } 35 | 36 | private String getRemotePropertyValue(String propName) throws IOException { 37 | ssh.runSSHCommand(String.format("systemctl --user show --property %s %s | cut -d= -f2", 38 | propName, getUnitFullName()), PostRunBehaviour.IGNORE, PostRunBehaviour.IGNORE); 39 | return ssh.lastStdout.replaceAll("\n", ""); 40 | } 41 | 42 | public void start() throws IOException { 43 | final List commands = Arrays.asList( 44 | ssh.setupIncrementalFileCommand(stdoutLogFileName), 45 | ssh.setupIncrementalFileCommand(stderrLogFileName), 46 | "loginctl enable-linger", 47 | String.format("systemd-run" 48 | + " --unit="+getUnitPrefix() 49 | + " --user" 50 | + " --remain-after-exit" // this is required, otherwise we don't get exit code 51 | + " -p WorkingDirectory="+getWorkDir() 52 | + " -p StandardOutput=file:"+stdoutLogFileName 53 | + " -p StandardError=file:"+stderrLogFileName 54 | // indirection through `env` solves potential issue where cmd wants absolue path 55 | + " /usr/bin/env " + getTerraformCommand() 56 | 57 | // note: could use -t and redirects, but better if we don't need them, 
and -p seems to work! 58 | // + " -t" 59 | // + " < /dev/null > "+stdoutLogFileName+" 2> "+stderrLogFileName 60 | 61 | ) 62 | ); 63 | ssh.runSSHCommand(String.join("; ", commands), PostRunBehaviour.FAIL, 64 | PostRunBehaviour.IGNORE /* prints the unit prefix */ ); 65 | } 66 | 67 | private String getSubState() throws IOException { 68 | return getRemotePropertyValue("SubState"); 69 | } 70 | 71 | public boolean isRunning() throws IOException { 72 | // this doesn't work for transients where we say "remain-after-exit" 73 | // return "active".equals(getActiveState()); 74 | 75 | return "running".equals(getSubState()); 76 | } 77 | 78 | private String getResult() throws IOException { 79 | return getRemotePropertyValue("Result"); 80 | } 81 | 82 | public boolean wasFailure() throws IOException { 83 | return !"success".equals(getResult()); 84 | } 85 | 86 | private String getMainExitCode() throws IOException { 87 | return getRemotePropertyValue("ExecMainCode"); 88 | } 89 | 90 | public String getErrorString() throws IOException { 91 | return String.format("result %s (%s)", getResult(), getMainExitCode()); 92 | } 93 | 94 | @Override 95 | public void cleanup() throws IOException { 96 | // stop every run from polluting the user systemctl history 97 | ssh.runSSHCommand("systemctl --user stop "+getUnitFullName(), 98 | PostRunBehaviour.WARN, PostRunBehaviour.IGNORE); 99 | } 100 | 101 | } 102 | -------------------------------------------------------------------------------- /setup.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: "2010-09-09" 2 | Description: > 3 | This CloudFormation template creates the necessary resources for the Cloudsoft::Terraform::Infrastructure resource type to work 4 | as expected. Specifically, this creates: 5 | - a role assumed by CloudFormation during CRUDL operations to ship logs to CloudWatch. 6 | - a log group where the logs will be shipped to.
7 | - parameters for the SSH connection to the Terraform server 8 | 9 | Resources: 10 | LoggingRole: 11 | Type: AWS::IAM::Role 12 | Properties: 13 | AssumeRolePolicyDocument: 14 | Version: '2012-10-17' 15 | Statement: 16 | - Effect: Allow 17 | Principal: 18 | Service: cloudformation.amazonaws.com 19 | Action: sts:AssumeRole 20 | Path: "/" 21 | Policies: 22 | - PolicyName: LogAndMetricsDeliveryRolePolicy 23 | PolicyDocument: 24 | Version: '2012-10-17' 25 | Statement: 26 | - Effect: Allow 27 | Action: 28 | - logs:CreateLogGroup 29 | - logs:CreateLogStream 30 | - logs:DescribeLogGroups 31 | - logs:DescribeLogStreams 32 | - logs:PutLogEvents 33 | - cloudwatch:ListMetrics 34 | - cloudwatch:PutMetricData 35 | Resource: "*" 36 | LogGroup: 37 | Type: AWS::Logs::LogGroup 38 | Properties: 39 | LogGroupName: cloudsoft-terraform-infrastructure-logs 40 | RetentionInDays: 7 41 | 42 | TerraformSSHHostParameter: 43 | Type: AWS::SSM::Parameter 44 | Properties: 45 | Description: Terraform server hostname or IP address to connect to for SSH connections 46 | Name: /cfn/terraform/ssh-host 47 | Type: String 48 | Value: FIXME 49 | TerraformSSHPortParameter: 50 | Type: AWS::SSM::Parameter 51 | Properties: 52 | Description: Terraform server port to connect to for SSH connections 53 | Name: /cfn/terraform/ssh-port 54 | Type: String 55 | Value: '22' 56 | TerraformSSHUsernameParameter: 57 | Type: AWS::SSM::Parameter 58 | Properties: 59 | Description: Username of the Terraform server host for SSH connections 60 | Name: /cfn/terraform/ssh-username 61 | Type: String 62 | Value: FIXME 63 | TerraformSSHKeyParameter: 64 | Type: AWS::SSM::Parameter 65 | Properties: 66 | Description: Private SSH key of the CloudFormation client for SSH connections 67 | Name: /cfn/terraform/ssh-key 68 | Type: String 69 | Value: | 70 | -----BEGIN OPENSSH PRIVATE KEY----- 71 | FIXME - this is required to connect to the server 72 | -----END OPENSSH PRIVATE KEY----- 73 | TerraformSSHFingerprintParameter: 74 | Type: AWS::SSM::Parameter 75 | Properties: 76 | Description: | 77 | Public SSH key fingerprint of the Terraform server to verify SSH connections. 78 | Any value other than "default" will enforce a fingerprint check during SSH connections. 79 | This can be set to "default" to not require fingerprint checks, 80 | for instance if you trust the environment where this is running. 81 | Name: /cfn/terraform/ssh-fingerprint 82 | Type: String 83 | Value: 'default' 84 | TerraformProcessManagerParameter: 85 | Type: AWS::SSM::Parameter 86 | Properties: 87 | Description: Process manager to use on the server to interact with a detached Terraform process ("systemd" or "nohup") 88 | Name: /cfn/terraform/process-manager 89 | Type: String 90 | Value: 'nohup' 91 | TerraformLogsS3BucketName: 92 | Type: AWS::SSM::Parameter 93 | Properties: 94 | Description: | 95 | Optional bucket where all Terraform logs will be shipped. 96 | Any value other than "default" will result in a bucket being created as necessary and logs copied to it, 97 | so that the user can easily inspect the logs (without needing special CloudWatch access). 98 | This parameter value can contain a * character which will be replaced by the model's identifier, 99 | ensuring creation of a new bucket for each run which the user will have access to but other users will not. 100 | If the value does not contain * the bucket should be created ahead of time with appropriate permissions for 101 | users to see and for this resource provider to write to, otherwise the bucket may be unusable or inaccessible to some users.
102 | Note this can be overridden by the user with a property on the resource (see the user guide). 103 | Name: /cfn/terraform/logs-s3-bucket-name 104 | Type: String 105 | Value: 'default' 106 | 107 | Outputs: 108 | LoggingRoleArn: 109 | Value: 110 | Fn::GetAtt: LoggingRole.Arn 111 | LogGroup: 112 | Value: 113 | Ref: LogGroup 114 | TerraformSSHHostParameter: 115 | Value: 116 | Ref: TerraformSSHHostParameter 117 | TerraformSSHPortParameter: 118 | Value: 119 | Ref: TerraformSSHPortParameter 120 | TerraformSSHUsernameParameter: 121 | Value: 122 | Ref: TerraformSSHUsernameParameter 123 | TerraformSSHKeyParameter: 124 | Value: 125 | Ref: TerraformSSHKeyParameter 126 | TerraformSSHFingerprintParameter: 127 | Value: 128 | Ref: TerraformSSHFingerprintParameter 129 | TerraformProcessManagerParameter: 130 | Value: 131 | Ref: TerraformProcessManagerParameter 132 | TerraformLogsS3BucketName: 133 | Value: 134 | Ref: TerraformLogsS3BucketName 135 | -------------------------------------------------------------------------------- /doc/installation-guide.md: -------------------------------------------------------------------------------- 1 | # Installation guide 2 | 3 | The Terraform resource provider for CloudFormation adds a new CloudFormation resource type, `Cloudsoft::Terraform::Infrastructure`, which allows you to deploy a Terraform infrastructure as a part of a CloudFormation stack using a Terraform configuration that is a part of a CloudFormation template. 4 | 5 | This page will guide you on how to install the Terraform resource provider for CloudFormation. 6 | 7 | ## Prerequisites 8 | 9 | ### Terraform server 10 | 11 | The connector requires a running Terraform (version 0.12 or later) server that: 12 | - runs Linux 13 | - can accept SSH connections from AWS Lambda 14 | - is configured with the correct credentials for the target clouds 15 | (for example, if the Terraform server needs to manage resources through its AWS provider, 16 | the configured Linux user needs to have a valid `~/.aws/credentials` file, even though 17 | Terraform does not use AWS CLI) 18 | - has the command-line tools to extract archived Terraform configurations (right now this 19 | is ZIP, which requires `unzip`, which, for example, can be installed on Ubuntu Linux 20 | with `apt-get install unzip`) 21 | 22 | ### AWS CLI 23 | 24 | You will need to have the AWS CLI installed and configured on your local machine. Please [see the documentation](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-install.html) how to achieve this. 25 | 26 | ## Installation 27 | 28 | 1. Download the [`resource-role.yaml`](https://raw.githubusercontent.com/cloudsoft/aws-cfn-connector-for-terraform/master/resource-role.yaml) template and create a stack using the command below. 29 | Note the ARN of the created execution role for use later. 30 | ```sh 31 | aws cloudformation create-stack \ 32 | --template-body "file://resource-role.yaml" \ 33 | --stack-name CloudsoftTerraformInfrastructureExecutionRole \ 34 | --capabilities CAPABILITY_IAM 35 | ``` 36 | 37 | 1. Download the [`setup.yaml`](https://raw.githubusercontent.com/cloudsoft/aws-cfn-connector-for-terraform/master/setup.yaml) template. 38 | Edit the parameters as needed. More detail on parameters is below. Note that the following ones (marked `FIXME` in the file) are required. 39 | 40 | - `/cfn/terraform/ssh-host` 41 | - `/cfn/terraform/ssh-username` 42 | - `/cfn/terraform/ssh-key` 43 | 44 | 1. Create the `setup` stack using the command below. 
Note the ARN of the created logging role and the log group for use later. 45 | ```sh 46 | aws cloudformation create-stack \ 47 | --template-body "file://setup.yaml" \ 48 | --stack-name CloudsoftTerraformInfrastructureSetup \ 49 | --capabilities CAPABILITY_IAM 50 | ``` 51 | 52 | 1. Register the `Cloudsoft::Terraform::Infrastructure` CloudFormation type, using the command below, with the values returned above. 53 | ```sh 54 | EXECUTION_ROLE_ARN=... 55 | LOGGING_ROLE_ARN=... 56 | LOG_GROUP_NAME=... 57 | 58 | aws cloudformation register-type \ 59 | --type RESOURCE \ 60 | --type-name Cloudsoft::Terraform::Infrastructure \ 61 | --schema-handler-package https://github.com/cloudsoft/aws-cfn-connector-for-terraform/releases/download/latest/cloudsoft-terraform-infrastructure.zip \ 62 | --execution-role-arn $EXECUTION_ROLE_ARN \ 63 | --logging-config "{\"LogRoleArn\":\"$LOGGING_ROLE_ARN\",\"LogGroupName\": \"$LOG_GROUP_NAME\"}" 64 | ``` 65 | 66 | If you are updating the connector, note the version number and use the following command to set the default version: 67 | ```sh 68 | aws cloudformation set-type-default-version --type RESOURCE --type-name Cloudsoft::Terraform::Infrastructure --version-id 0000000N 69 | ``` 70 | 71 | ## Configuration Parameters 72 | 73 | This resource provider (RP) uses the following parameters: 74 | 75 | - `/cfn/terraform/ssh-host` (required): the hostname or the IP address of the Terraform server 76 | 77 | - `/cfn/terraform/ssh-username` (required): the username the RP should use when connecting over SSH 78 | 79 | - `/cfn/terraform/ssh-key` (required): the private SSH key the RP should use to authenticate 80 | 81 | - `/cfn/terraform/ssh-port` (defaults to 22): the port the RP should connect to over SSH 82 | 83 | - `/cfn/terraform/ssh-fingerprint` (optional): the fingerprint of the Terraform server, for security. 84 | The value must be in one of the 85 | [fingerprint formats supported in SSHJ](https://github.com/hierynomus/sshj/blob/master/src/main/java/net/schmizz/sshj/transport/verification/FingerprintVerifier.java#L33). 86 | For example, a SHA-256 fingerprint of the Ed25519 SSH host key of the current host 87 | can be computed with `ssh-keygen -E sha256 -lf /etc/ssh/ssh_host_ed25519_key.pub | cut -d' ' -f2`. 88 | 89 | - `/cfn/terraform/process-manager` (optional): the server-side remote persistent execution mechanism to use, 90 | either `nohup` (default) or `systemd`. In the latter case the server 91 | must run a Linux distribution that uses systemd with support for user mode and linger 92 | (typically CentOS 8, Fedora 28+, Ubuntu 18.04+, but not Amazon Linux 2). 93 | 94 | - `/cfn/terraform/logs-s3-bucket-name` (optional): if set, all Terraform logs are shipped to an S3 95 | bucket, and a link to them is returned in outputs and in error messages. 96 | This value is as per the `LogBucketName` property on the resource; 97 | see the documentation on that property in the [user guide](user-guide.md). 98 | If that property is set it will override any value set here. 99 | 100 | Where a parameter is optional, it can be omitted or the special value `default` can be set to tell the RP 101 | to use the default value. Note that omitting it causes warnings in the CloudWatch logs 102 | (which we cannot disable, though in this case they are benign), and it is not permitted to leave it blank: 103 | this is why the special value `default` is recognized by this RP, as used in `setup.yaml`.
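As an aside (not part of the installation steps above), these parameters can also be created or updated directly with the AWS CLI. This is a minimal sketch with placeholder values, shown only to illustrate the expected parameter names and types:

```sh
aws ssm put-parameter --name /cfn/terraform/ssh-host --type String --value terraform.example.com --overwrite
aws ssm put-parameter --name /cfn/terraform/process-manager --type String --value systemd --overwrite
```

If the parameters are managed by the `setup.yaml` stack, prefer updating that stack so the stored values stay in sync with the stack definition.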
104 | 105 | -------------------------------------------------------------------------------- /src/main/java/io/cloudsoft/terraform/infrastructure/commands/RemoteTerraformProcess.java: -------------------------------------------------------------------------------- 1 | package io.cloudsoft.terraform.infrastructure.commands; 2 | 3 | import java.io.IOException; 4 | import java.util.Map; 5 | 6 | import org.apache.commons.lang3.RandomStringUtils; 7 | 8 | import com.fasterxml.jackson.databind.ObjectMapper; 9 | 10 | import io.cloudsoft.terraform.infrastructure.TerraformBaseWorker; 11 | import io.cloudsoft.terraform.infrastructure.TerraformParameters; 12 | import io.cloudsoft.terraform.infrastructure.commands.SshToolbox.PostRunBehaviour; 13 | import software.amazon.cloudformation.proxy.Logger; 14 | 15 | public class RemoteTerraformProcess { 16 | 17 | // Convert these constants to parameters later if necessary (more likely to be 18 | // useful after parameters can be specified separately for each server). 19 | 20 | // TF_DATADIR must match the contents of the files in server-side-systemd/ 21 | // (at least as far as realpath(1) is concerned). 22 | // sshj does not expand tilde to the remote user's home directory on the server 23 | // (OpenSSH scp does that). Also neither any directory components nor the 24 | // file name can be quoted (as in "/some/'work dir'/otherdir") because sshj 25 | // fails to escape the quotes properly (again, works in OpenSSH). 26 | 27 | private static final String 28 | TF_DATADIR = "~/tfdata", 29 | TF_SCPDIR = "/tmp", 30 | TF_CFN_METADATA_JSON = "cfn-metadata.json", 31 | TF_CONFFILENAME = "configuration.tf"; 32 | 33 | /** This should be the same for all runs against a particular TF deployment managed by CFN, 34 | * ie across all commands. */ 35 | protected final String modelIdentifier; 36 | 37 | /** This is the same within a run, ie across steps, 38 | * but it is helpful if it is different between different commands. 
*/ 39 | protected final String commandIdentifier; 40 | 41 | protected final SshToolbox ssh; 42 | protected final Logger logger; 43 | 44 | public static RemoteTerraformProcess of(TerraformBaseWorker w) { 45 | return new RemoteTerraformProcess(w.getParameters(), w.getLogger(), w.getModel().getIdentifier(), w.getCallbackContext().getCommandRequestId()); 46 | } 47 | 48 | protected RemoteTerraformProcess(TerraformParameters params, Logger logger, String modelIdentifier, String commandIdentifier) { 49 | this.logger = logger; 50 | ssh = new SshToolbox(params, logger); 51 | this.modelIdentifier = modelIdentifier; 52 | this.commandIdentifier = commandIdentifier; 53 | } 54 | 55 | /** parent directory where all TF runs, for any stack/user/etc live */ 56 | protected String getBaseDir() { 57 | return TF_DATADIR; 58 | } 59 | 60 | protected String getWorkDir() { 61 | return getBaseDir() + "/" + modelIdentifier; 62 | } 63 | 64 | public void mkWorkDir() throws IOException { 65 | ssh.mkdir(getWorkDir()); 66 | } 67 | 68 | public void rmWorkDir() throws IOException { 69 | ssh.rmdir(getWorkDir()); 70 | } 71 | 72 | private String getScpTmpDir() { 73 | return TF_SCPDIR + "/" + modelIdentifier; 74 | } 75 | 76 | public void uploadConfiguration(byte[] contents, Map vars_map, boolean firstTime) throws IOException, IllegalArgumentException { 77 | ssh.mkdir(getScpTmpDir()); 78 | 79 | String tmpFileBasename = "terraform-upload-"+commandIdentifier+"-"+RandomStringUtils.randomAlphanumeric(4)+".file"; 80 | ssh.uploadFile(getScpTmpDir(), tmpFileBasename, contents); 81 | final String tmpFilename = getScpTmpDir() + "/" + tmpFileBasename; 82 | ssh.runSSHCommand("file --brief --mime-type " + tmpFilename, PostRunBehaviour.FAIL, PostRunBehaviour.IGNORE); 83 | final String mimeType = ssh.lastStdout.trim(); 84 | switch (mimeType) { 85 | case "text/plain": 86 | ssh.runSSHCommand(String.format("mv %s %s/%s", tmpFilename, getWorkDir(), TF_CONFFILENAME), PostRunBehaviour.FAIL, PostRunBehaviour.IGNORE); 87 | break; 88 | case "application/zip": 89 | ssh.runSSHCommand(String.format("unzip %s -d %s", tmpFilename, getWorkDir()), PostRunBehaviour.FAIL, PostRunBehaviour.IGNORE); 90 | break; 91 | default: 92 | ssh.rmdir(getScpTmpDir()); 93 | throw new IllegalArgumentException("Unknown MIME type " + mimeType); 94 | } 95 | 96 | final String vars_filename = "cfn-" + modelIdentifier + ".auto.tfvars.json"; 97 | if (vars_map != null && !vars_map.isEmpty()) { 98 | final byte[] vars_json = new ObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsBytes(vars_map); 99 | // Work around the tilde [non-]expansion as explained above. 100 | ssh.uploadFile(getScpTmpDir(), vars_filename, vars_json); 101 | ssh.runSSHCommand(String.format("mv %s/%s %s/%s", getScpTmpDir(), vars_filename, getWorkDir(), vars_filename), PostRunBehaviour.FAIL, PostRunBehaviour.IGNORE); 102 | } else if (!firstTime) { 103 | // delete an old vars file if updating with no vars, in case there were vars there previously 104 | ssh.runSSHCommand(String.format("rm -f %s/%s", getWorkDir(), vars_filename), PostRunBehaviour.FAIL, PostRunBehaviour.IGNORE); 105 | } 106 | 107 | ssh.rmdir(getScpTmpDir()); 108 | } 109 | 110 | // provide a way to store metadata on the server 111 | public void saveMetadata(Map metadata) throws IOException { 112 | ssh.mkdir(getScpTmpDir()); 113 | String tmpFileBasename = "terraform-upload-metadata-"+RandomStringUtils.randomAlphanumeric(4)+".file"; 114 | // Work around the tilde [non-]expansion as explained above. 
115 | final byte[] vars_json = new ObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsBytes(metadata); 116 | ssh.uploadFile(getScpTmpDir(), tmpFileBasename, vars_json); 117 | ssh.mkdir(getWorkDir()); 118 | ssh.runSSHCommand(String.format("mv %s/%s %s/%s", getScpTmpDir(), tmpFileBasename, 119 | getWorkDir(), TF_CFN_METADATA_JSON), PostRunBehaviour.FAIL, PostRunBehaviour.IGNORE); 120 | ssh.rmdir(getScpTmpDir()); 121 | } 122 | 123 | // provide a way to store metadata on the server 124 | public Map loadMetadata() throws IOException { 125 | ssh.runSSHCommand("cat "+getWorkDir()+"/"+TF_CFN_METADATA_JSON, PostRunBehaviour.FAIL, PostRunBehaviour.FAIL); 126 | return new ObjectMapper().readValue(ssh.lastStdout.getBytes(), Map.class); 127 | } 128 | 129 | } 130 | -------------------------------------------------------------------------------- /doc/developer-guide.md: -------------------------------------------------------------------------------- 1 | # Developer guide 2 | 3 | ## Prerequisites 4 | 5 | To build the project, you will need the following: 6 | 7 | * [CFN CLI](https://docs.aws.amazon.com/cloudformation-cli/latest/userguide/resource-type-setup.html) 8 | 9 | Note that you will need these prerequisites, as described on that page: 10 | 11 | * Python 3.6 or later 12 | * Java and Maven 13 | * [AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-install.html) 14 | * [CFN CLI](https://github.com/aws-cloudformation/cloudformation-cli) including the `cloudformation-cli-java-plugin` 15 | 16 | These resources are not needed to build, but are useful for developing and testing custom resource providers: 17 | 18 | * [Lombok](https://projectlombok.org/) support for your IDE 19 | (if you want your IDE to understand the Lombok Java annotations) 20 | * [AWS SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html) 21 | and Docker (if you want to run serverless unit tests) 22 | 23 | 24 | ## Build and Run 25 | 26 | To build and run this project, clone it to your machine and then: 27 | 28 | 1. Build with: 29 | ```sh 30 | mvn clean package 31 | ``` 32 | 1. Register in CloudFormation with: 33 | ```sh 34 | cfn submit --set-default -v 35 | ``` 36 | 1. Set the parameters in parameter store. We suggest copying the file `setup.yaml` 37 | to `setup-local.yaml` (which is `.gitignore`d) and editing the values to connect 38 | to your Terraform server as described in the [installation guide](installation-guide.md), 39 | then creating the stack: 40 | 41 | ```sh 42 | aws cloudformation create-stack \ 43 | --template-body "file://setup-local.yaml" \ 44 | --stack-name CloudsoftTerraformInfrastructureSetup \ 45 | --capabilities CAPABILITY_IAM 46 | ``` 47 | 48 | 49 | ## Testing 50 | 51 | ### Unit tests 52 | 53 | Unit tests are run as part of the Maven build. To run only the tests, execute: 54 | ```sh 55 | mvn clean test 56 | ``` 57 | 58 | ### Integration tests 59 | 60 | Integration tests use SAM Local to simulate CloudFormation's execution of the connector's handlers. They are triggered by 61 | passing an event, in the form of a JSON payload, to the `TestEntrypoint`, which tells the connector which handler to use. 62 | 63 | The JSON payload must contain the `Cloudsoft::Terraform::Infrastructure` properties within `desiredResourceState`. The provided events 64 | (i.e. `create.json`, `update.json` and `delete.json`) all use `ConfigurationUrl`, but you can use any property defined 65 | in the [user guide](./user-guide.md#syntax). 66 | 67 | To run the tests: 68 | 1.
In one terminal, start the SAM local Lambda endpoint: `sam local start-lambda` 69 | 2. In another terminal, run: `cfn invoke --max-reinvoke 10 {CREATE,READ,UPDATE,DELETE,LIST} path/to/event.json` 70 | 71 | For instance, to do a full cycle of the tests for this project, execute each of the following commands: 72 | ```sh 73 | cfn invoke --max-reinvoke 10 CREATE ./sam-tests/create.json 74 | cfn invoke --max-reinvoke 10 READ ./sam-tests/read.json 75 | cfn invoke --max-reinvoke 10 UPDATE ./sam-tests/update.json 76 | cfn invoke --max-reinvoke 10 READ ./sam-tests/read.json 77 | cfn invoke --max-reinvoke 10 DELETE ./sam-tests/delete.json 78 | ``` 79 | Log output will be shown in the _first_ terminal, whereas the second will show the 80 | input and output of the connector lambdas. Each command should conclude with a `SUCCESS` status. 81 | 82 | _Note that `cfn` does not yet support profiles, so you will need to have the `default` profile set up for your `aws` CLI. 83 | However, you can specify `--region` to run the tests in a specific region._ 84 | 85 | _These tests require the Terraform server to be up and running, as well as the parameters set in parameter store. 86 | See [prerequisites](./installation-guide.md#prerequisites) and [step 3 of the installation guide](./installation-guide.md#installation)._ 87 | 88 | ### End-to-end tests 89 | 90 | Once the connector is built and submitted to AWS: 91 | 92 | 1. Deploy some `Cloudsoft::Terraform::Infrastructure` resource, e.g. the file `terraform-example.cfn.yaml`: 93 | ```sh 94 | aws cloudformation create-stack --template-body file://terraform-example.cfn.yaml --stack-name terraform-example 95 | ``` 96 | 2. Delete it when you're done: 97 | `aws cloudformation delete-stack --stack-name terraform-example` 98 | 99 | _Note that these tests require a Terraform server to be up and running, as well as the parameters to be set in parameter store. 100 | See [prerequisites](./installation-guide.md#prerequisites) and [step 3 of the installation guide](./installation-guide.md#installation)._ 101 | 102 | 103 | ## Troubleshooting 104 | 105 | The `aws` and `cfn` tools and the Maven repository seem at present (Summer 2020) to be updated quite frequently 106 | in ways that break backwards compatibility with respect to building or sometimes using this project. 107 | If errors are encountered, it is recommended that you update to the latest versions, 108 | following the instructions [here](https://docs.aws.amazon.com/cloudformation-cli/latest/userguide/what-is-cloudformation-cli.html), 109 | summarized below for convenience: 110 | 111 | brew install python awscli 112 | pip3 install --upgrade cloudformation-cli-java-plugin 113 | # then update the RPDK version in the pom.xml here and fix any compilation breakages 114 | 115 | 116 | ## Open Features aka Limitations 117 | 118 | Some features we'd like to support include: 119 | 120 | * Implement the "List" operation 121 | 122 | * Make "Read" more efficient by being able to cache outputs from the last execution 123 | (rather than needing to SSH to ask about outputs on each run) 124 | 125 | * Getting individual outputs, e.g. using `!GetAtt TerraformExample.Outputs.custom_output1.value`. 126 | This project currently returns the map in `Outputs`, but CFN does not support accessing it. 127 | 128 | * More download options: providing credentials, supporting Git, supporting downloads by the Terraform server. 129 | Let us know what you'd like to see!
130 | 131 | * Spinning up a Terraform worker server as needed and maintaining state in S3, 132 | so the connector does not require a pre-existing TF server. 133 | (But the current implementation is more flexible, as it allows you to configure TF the way you wish.) 134 | 135 | * Supporting multiple connector instances in the same account, pointing at different TF servers. 136 | 137 | * Being more forgiving of transient network errors where possible (eg when checking status of a long-running command execution) 138 | 139 | And technical debt: 140 | 141 | * The build insists on overwriting the `docs/` folder, even if the `README.md` exists, ever since upgrading `cfn`. 142 | Seems it is `cfn generate` which does this, with no option to disable it. 143 | For now the docs are in `doc/` instead, with `docs/` ignored in `.gitignore`. 144 | 145 | Contributions are welcome! 146 | 147 | -------------------------------------------------------------------------------- /src/main/java/io/cloudsoft/terraform/infrastructure/TerraformParameters.java: -------------------------------------------------------------------------------- 1 | package io.cloudsoft.terraform.infrastructure; 2 | 3 | import java.io.IOException; 4 | import java.net.URL; 5 | import java.nio.charset.StandardCharsets; 6 | import java.util.Arrays; 7 | import java.util.LinkedHashSet; 8 | import java.util.Set; 9 | import java.util.regex.Matcher; 10 | import java.util.regex.Pattern; 11 | 12 | import org.apache.commons.io.IOUtils; 13 | 14 | import software.amazon.awssdk.services.s3.S3Client; 15 | import software.amazon.awssdk.services.ssm.SsmClient; 16 | import software.amazon.awssdk.services.ssm.model.GetParameterRequest; 17 | import software.amazon.awssdk.services.ssm.model.GetParameterResponse; 18 | import software.amazon.awssdk.services.ssm.model.ParameterNotFoundException; 19 | import software.amazon.cloudformation.proxy.AmazonWebServicesClientProxy; 20 | import software.amazon.cloudformation.proxy.Logger; 21 | 22 | public class TerraformParameters { 23 | 24 | private static final String PREFIX = "/cfn/terraform"; 25 | private static final int DEFAULT_SSH_PORT = 22; 26 | private static final String DEFAULT_PROCESS_MANAGER = "nohup"; 27 | // allow this so that parameters can be set, as they don't allow blanks or null 28 | private static final Set DEFAULT_KEYWORDS = new LinkedHashSet(Arrays.asList("default", "disabled", "off")); 29 | private Logger logger; 30 | private final AmazonWebServicesClientProxy proxy; 31 | private final SsmClient ssmClient; 32 | private final S3Client s3Client; 33 | 34 | public TerraformParameters(Logger logger, AmazonWebServicesClientProxy proxy, SsmClient ssmClient, S3Client s3Client) { 35 | this.logger = logger; 36 | this.proxy = proxy; 37 | this.ssmClient = ssmClient; 38 | this.s3Client = s3Client; 39 | } 40 | 41 | public TerraformParameters(Logger logger, AmazonWebServicesClientProxy proxy) { 42 | this(logger, proxy, SsmClient.create(), S3Client.create()); 43 | } 44 | 45 | protected boolean isDefault(Object x) { 46 | return x==null || DEFAULT_KEYWORDS.contains(x.toString().toLowerCase()); 47 | } 48 | 49 | public String getHost() { 50 | return getParameterValue("ssh-host", true); 51 | } 52 | 53 | public int getPort() { 54 | final String port = getParameterValue("ssh-port", false); 55 | if (isDefault(port)) { 56 | return DEFAULT_SSH_PORT; 57 | } 58 | try { 59 | return Integer.parseInt(port.trim()); 60 | 61 | } catch (Exception e) { 62 | throw ConnectorHandlerFailures.unhandled("Parameter 'ssh-port' is invalid: 
'"+port+"'"); 63 | 64 | } 65 | } 66 | 67 | public String getProcessManager() { 68 | String pm = getParameterValue("process-manager", false); 69 | if (isDefault(pm)) { 70 | pm = DEFAULT_PROCESS_MANAGER; 71 | } 72 | if (pm.equals("systemd") || pm.equals("nohup")) { 73 | return pm; 74 | } 75 | throw ConnectorHandlerFailures.unhandled("Parameter 'process-manager' is invalid: '" + pm + "'"); 76 | } 77 | 78 | public String getUsername() { 79 | return getParameterValue("ssh-username", true); 80 | } 81 | 82 | public String getSSHKey() { 83 | return getParameterValue("ssh-key", true); 84 | } 85 | 86 | public String getFingerprint() { 87 | String fp = getParameterValue("ssh-fingerprint", false); 88 | if (isDefault(fp)) { 89 | return null; 90 | } 91 | return fp; 92 | } 93 | 94 | public String getLogsS3BucketName() { 95 | String bp = getParameterValue("logs-s3-bucket-name", false); 96 | if (isDefault(bp)) { 97 | return null; 98 | } 99 | return bp; 100 | } 101 | 102 | private String getParameterValue(String id, boolean required) { 103 | GetParameterRequest getParameterRequest = GetParameterRequest.builder() 104 | .name(PREFIX + "/" + id) 105 | .withDecryption(true) 106 | .build(); 107 | 108 | try { 109 | GetParameterResponse getParameterResponse = proxy.injectCredentialsAndInvokeV2(getParameterRequest, 110 | ssmClient::getParameter); 111 | return getParameterResponse.parameter().value(); 112 | 113 | } catch (ParameterNotFoundException e) { 114 | if (required) { 115 | throw ConnectorHandlerFailures.unhandled("Parameter '"+id+"' must be set in parameter store.", e); 116 | } else { 117 | // annoyingly we get failure messages in the log if the parameter doesn't exist; explain that so people don't panic 118 | if (logger!=null) { 119 | logger.log("Parameter '"+id+"' not in parameter store; using default. If there is an SSM failure message above, it is likely due to this and is benign. 
Set the default explicitly to suppress these messages."); 120 | } 121 | return null; 122 | } 123 | } catch (RuntimeException e) { 124 | throw ConnectorHandlerFailures.unhandled("Parameter '"+id+"' could not be retrieved; " 125 | + "check roles/permissions are set for this type connector: "+e, e); 126 | } 127 | } 128 | 129 | public byte[] getConfiguration(ResourceModel model) { 130 | if (model.getConfigurationContent() != null) { 131 | return model.getConfigurationContent().getBytes(StandardCharsets.UTF_8); 132 | } 133 | 134 | if (model.getConfigurationUrl() != null) { 135 | try { 136 | return IOUtils.toByteArray(new URL(model.getConfigurationUrl())); 137 | } catch (IOException e) { 138 | throw ConnectorHandlerFailures.unhandled("Failed to download file at " + model.getConfigurationUrl(), e); 139 | } 140 | } 141 | 142 | if (model.getConfigurationS3Path() != null) { 143 | final Pattern s3Pattern = Pattern.compile("^s3://([^/]*)/(.*)$"); 144 | final Matcher matcher = s3Pattern.matcher(model.getConfigurationS3Path()); 145 | if (!matcher.find()) { 146 | throw ConnectorHandlerFailures.unhandled("Invalid S3 path " + model.getConfigurationS3Path()); 147 | } 148 | 149 | final String bucket = matcher.group(1); 150 | final String key = matcher.group(2); 151 | 152 | try { 153 | byte[] result = new BucketUtils(proxy, s3Client).download(bucket, key); 154 | if (result.length==0) { 155 | throw ConnectorHandlerFailures.unhandled(String.format("S3 file at %s is empty", model.getConfigurationS3Path())); 156 | } 157 | return result; 158 | } catch (Exception e) { 159 | throw ConnectorHandlerFailures.unhandled(String.format("Failed to get S3 Terraform configuration file at %s: check it exists and roles/permissions set for this type connector", model.getConfigurationS3Path()), e); 160 | } 161 | } 162 | 163 | throw ConnectorHandlerFailures.unhandled("No Configuration properties are set."); 164 | } 165 | 166 | } 167 | -------------------------------------------------------------------------------- /src/main/java/io/cloudsoft/terraform/infrastructure/CreateHandler.java: -------------------------------------------------------------------------------- 1 | package io.cloudsoft.terraform.infrastructure; 2 | 3 | import java.io.IOException; 4 | 5 | import io.cloudsoft.terraform.infrastructure.commands.RemoteTerraformOutputsProcess; 6 | import io.cloudsoft.terraform.infrastructure.commands.RemoteTerraformProcess; 7 | import software.amazon.cloudformation.proxy.ProgressEvent; 8 | 9 | public class CreateHandler extends TerraformBaseHandler { 10 | 11 | private enum Steps { 12 | CREATE_LOG_TARGET, 13 | CREATE_INIT_AND_UPLOAD, 14 | CREATE_RUN_TF_INIT, 15 | CREATE_WAIT_ON_INIT_THEN_RUN_TF_APPLY, 16 | CREATE_WAIT_ON_APPLY_THEN_GET_OUTPUTS_AND_RETURN 17 | } 18 | 19 | @Override 20 | protected TerraformBaseWorker newWorker() { 21 | return new Worker(); 22 | } 23 | 24 | protected static class Worker extends TerraformBaseWorker { 25 | 26 | public Worker() { super("Create", Steps.class); } 27 | 28 | @Override 29 | protected void initLogBucket() { 30 | // do nothing during pre-run. we _create_ the bucket in the first step of the actual run. 
31 | } 32 | 33 | @Override 34 | protected void preRunStep() { 35 | if (model.getIdentifier()==null) { 36 | if (callbackContext.createdModelIdentifier == null) { 37 | // creating this stack, the very first call for the stack 38 | callbackContext.createdModelIdentifier = Configuration.getIdentifier(true, 8); 39 | model.setIdentifier(callbackContext.createdModelIdentifier); 40 | 41 | log("Stack resource model identifier set as: "+callbackContext.createdModelIdentifier); 42 | 43 | // create the metadata file 44 | saveMetadata(); 45 | 46 | } else { 47 | // model doesn't seem to remember the identifier until the end 48 | model.setIdentifier(callbackContext.createdModelIdentifier); 49 | } 50 | } 51 | 52 | super.preRunStep(); 53 | } 54 | 55 | @SuppressWarnings("fallthrough") 56 | @Override 57 | protected ProgressEvent runStep() throws IOException { 58 | switch (currentStep) { 59 | case CREATE_LOG_TARGET: 60 | String logBucketName = model.getLogBucketName(); 61 | if (logBucketName==null) { 62 | logBucketName = getParameters().getLogsS3BucketName(); 63 | } 64 | boolean triedCreatingLogBucket = false; 65 | if (logBucketName!=null) { 66 | logBucketName = logBucketName.replace("*", model.getIdentifier().toLowerCase()); 67 | callbackContext.logBucketName = logBucketName; 68 | setModelLogBucketUrlFromCallbackContextName(); 69 | 70 | // try writing, in case it exists 71 | if (!initLogBucketFirstMessage()) { 72 | // try creating it -- but first restore the name (as failed write will have reset it) 73 | callbackContext.logBucketName = logBucketName; 74 | setModelLogBucketUrlFromCallbackContextName(); 75 | 76 | log("Log bucket "+logBucketName+" does not exist or is not accessible (there may be related failure messages above); will try to create it"); 77 | try { 78 | triedCreatingLogBucket = true; 79 | new BucketUtils(proxy).createBucket(logBucketName); 80 | if (!initLogBucketFirstMessage()) { 81 | throw new IllegalStateException("Bucket created but we cannot write to it. Check permissions. 
Log bucket will be disabled."); 82 | } 83 | log(String.format("Created bucket for logs at s3://%s/", logBucketName)); 84 | setModelLogBucketUrlFromCallbackContextName(); 85 | } catch (Exception e) { 86 | log(String.format("Failed to createlog bucket %s: %s (%s)", logBucketName, e.getClass().getName(), e.getMessage())); 87 | callbackContext.logBucketName = null; 88 | throw ConnectorHandlerFailures.handled("Unable to initialize log bucket; either the bucket is not creatable or not writeable: "+ConnectorHandlerFailures.simpleMessage(e)); 89 | } 90 | } 91 | saveMetadata(); 92 | } 93 | 94 | advanceTo(Steps.CREATE_INIT_AND_UPLOAD); 95 | if (triedCreatingLogBucket) { 96 | /* NOTE: here, and in several other places, we could always proceed to the next 97 | * step, but returning often increases transparency and maximises the time 98 | * available for each step (avoiding errors due to timeout), so do that if 99 | * we've done things on a step that might have taken a bit of time 100 | */ 101 | return statusInProgress(); 102 | } 103 | 104 | case CREATE_INIT_AND_UPLOAD: 105 | RemoteTerraformProcess.of(this).mkWorkDir(); 106 | getAndUploadConfiguration(true); 107 | 108 | advanceTo(Steps.CREATE_RUN_TF_INIT); 109 | return statusInProgress(); 110 | 111 | case CREATE_RUN_TF_INIT: 112 | tfInit().start(); 113 | advanceTo(Steps.CREATE_WAIT_ON_INIT_THEN_RUN_TF_APPLY); 114 | return statusInProgress(); 115 | 116 | case CREATE_WAIT_ON_INIT_THEN_RUN_TF_APPLY: 117 | if (checkStillRunningOrError(tfInit())) { 118 | return statusInProgress(); 119 | } 120 | 121 | tfApply().start(); 122 | advanceTo(Steps.CREATE_WAIT_ON_APPLY_THEN_GET_OUTPUTS_AND_RETURN); 123 | return statusInProgress(); 124 | 125 | case CREATE_WAIT_ON_APPLY_THEN_GET_OUTPUTS_AND_RETURN: 126 | if (checkStillRunningOrError(tfApply())) { 127 | return statusInProgress(); 128 | } 129 | 130 | RemoteTerraformOutputsProcess outputCmd = RemoteTerraformOutputsProcess.of(this); 131 | outputCmd.run(); 132 | model.setOutputsStringified(outputCmd.getOutputAsJsonStringized()); 133 | model.setOutputs(outputCmd.getOutputAsMap()); 134 | 135 | return statusSuccess(); 136 | 137 | default: 138 | throw new IllegalStateException("Invalid step: " + callbackContext.stepId); 139 | } 140 | } 141 | 142 | } 143 | 144 | } 145 | -------------------------------------------------------------------------------- /src/main/java/io/cloudsoft/terraform/infrastructure/commands/SshToolbox.java: -------------------------------------------------------------------------------- 1 | package io.cloudsoft.terraform.infrastructure.commands; 2 | 3 | import java.io.ByteArrayInputStream; 4 | import java.io.IOException; 5 | import java.io.InputStream; 6 | import java.util.concurrent.TimeUnit; 7 | 8 | import io.cloudsoft.terraform.infrastructure.TerraformParameters; 9 | import net.schmizz.sshj.SSHClient; 10 | import net.schmizz.sshj.common.IOUtils; 11 | import net.schmizz.sshj.connection.channel.direct.Session; 12 | import net.schmizz.sshj.xfer.InMemorySourceFile; 13 | import software.amazon.cloudformation.proxy.Logger; 14 | 15 | public class SshToolbox { 16 | 17 | protected final Logger logger; 18 | protected final String serverHostname, sshUsername, sshServerKeyFP, 19 | sshClientSecretKeyContents; 20 | protected final int sshPort; 21 | protected String lastStdout, lastStderr; 22 | protected Integer lastExitStatusOrNull; 23 | 24 | protected SshToolbox(TerraformParameters params, Logger logger) { 25 | // TODO this should take the arguments below as parameters rather than depend on TerraformParameters 26 | 
this.logger = logger; 27 | this.serverHostname = params.getHost(); 28 | this.sshPort = params.getPort(); 29 | this.sshServerKeyFP = params.getFingerprint(); 30 | this.sshUsername = params.getUsername(); 31 | this.sshClientSecretKeyContents = params.getSSHKey(); 32 | } 33 | 34 | protected void mkdir(String dir) throws IOException { 35 | runSSHCommand("mkdir -p " + dir, PostRunBehaviour.FAIL, PostRunBehaviour.IGNORE); 36 | } 37 | 38 | protected void rmdir(String dir) throws IOException { 39 | runSSHCommand("rm -rf " + dir, PostRunBehaviour.FAIL, PostRunBehaviour.IGNORE); 40 | } 41 | 42 | protected void debug(String message) { 43 | // generates a lot of output, but can be useful 44 | // sysout makes it appear in SAM tests but not cloudwatch, 45 | // which is a good compromise in most cases 46 | System.out.println(message); 47 | // logger.log(message); 48 | } 49 | 50 | public enum PostRunBehaviour { IGNORE, WARN, FAIL } 51 | 52 | protected void runSSHCommand(String command, PostRunBehaviour onNonZeroExitCode, PostRunBehaviour onNonEmptyStdErr) throws IOException { 53 | debug("DEBUG: @" + serverHostname + "> " + command); 54 | 55 | final SSHClient ssh = new SSHClient(); 56 | 57 | addHostKeyVerifier(ssh); 58 | ssh.connect(serverHostname, sshPort); 59 | Session session = null; 60 | try { 61 | ssh.authPublickey(sshUsername, ssh.loadKeys(sshClientSecretKeyContents, null, null)); 62 | session = ssh.startSession(); 63 | final Session.Command cmd = session.exec(command); 64 | cmd.join(30, TimeUnit.SECONDS); 65 | lastExitStatusOrNull = cmd.getExitStatus(); 66 | lastStdout = IOUtils.readFully(cmd.getInputStream()).toString(); 67 | lastStderr = IOUtils.readFully(cmd.getErrorStream()).toString(); 68 | debug("stdout: " + lastStdout); 69 | debug("stderr: " + lastStderr); 70 | debug("exit status: " + lastExitStatusOrNull); 71 | if (!((Integer) 0).equals(lastExitStatusOrNull)) { 72 | if (onNonZeroExitCode == PostRunBehaviour.IGNORE) { 73 | // nothing 74 | } else { 75 | logger.log("WARN: unexpected exit code from command '" + command + "': " + lastExitStatusOrNull + "\n" 76 | + " stderr: " + lastStderr + "\n" 77 | + " stdout: " + lastStdout); 78 | if (onNonZeroExitCode == PostRunBehaviour.FAIL) { 79 | throw new IllegalStateException("Non-zero exit code ("+lastExitStatusOrNull+")"); 80 | } 81 | } 82 | } 83 | if (!lastStderr.isEmpty()) { 84 | if (onNonEmptyStdErr == PostRunBehaviour.IGNORE) { 85 | // nothing 86 | } else { 87 | logger.log("WARN: unexpected stderr from command '" + command + "'; exit code 0 but:\n" 88 | + " stderr: " + lastStderr + "\n" 89 | + " stdout: " + lastStdout); 90 | if (onNonEmptyStdErr == PostRunBehaviour.FAIL) { 91 | throw new IllegalStateException("Non-empty stderr"); 92 | } 93 | } 94 | } 95 | } finally { 96 | try { 97 | if (session != null) { 98 | session.close(); 99 | } 100 | } catch (IOException e) { 101 | // do nothing 102 | } 103 | try { 104 | ssh.disconnect(); 105 | ssh.close(); 106 | } catch (IOException e) { 107 | // do nothing 108 | } 109 | } 110 | } 111 | 112 | protected void uploadFile(String dirName, String fileName, byte[] contents) throws IOException { 113 | final BytesSourceFile src = new BytesSourceFile(fileName, contents); 114 | final SSHClient ssh = new SSHClient(); 115 | addHostKeyVerifier(ssh); 116 | ssh.connect(serverHostname, sshPort); 117 | try { 118 | ssh.authPublickey(sshUsername, ssh.loadKeys(sshClientSecretKeyContents, null, null)); 119 | ssh.newSCPFileTransfer().upload(src, dirName); 120 | } finally { 121 | try { 122 | ssh.disconnect(); 123 | ssh.close(); 
124 | } catch (Exception ee) { 125 | // ignore 126 | } 127 | } 128 | } 129 | 130 | protected String catFileIfExists(String remotePath) throws IOException { 131 | runSSHCommand(String.format("[ -f %s ] && cat %s || :", remotePath, remotePath), PostRunBehaviour.IGNORE, PostRunBehaviour.IGNORE); 132 | return lastStdout; 133 | } 134 | 135 | private String getSnapshotFileName(String fn) { 136 | return fn + ".snapshot"; 137 | } 138 | 139 | private String getOffsetFileName(String fn) { 140 | return fn + ".offset"; 141 | } 142 | 143 | protected String setupIncrementalFileCommand(String fn) { 144 | return String.format( 145 | "truncate --size=0 %s; " 146 | + "echo 0 > %s", getSnapshotFileName(fn), getOffsetFileName(fn)); 147 | } 148 | 149 | protected String catIncrementalFileIfExists(String fn) throws IOException { 150 | final String sfn = getSnapshotFileName(fn), ofn = getOffsetFileName(fn); 151 | runSSHCommand(String.format( 152 | "cp %s %s; " 153 | + "dd status=none if=%s bs=1 skip=`cat %s`; wc -c <%s >%s", 154 | fn, sfn, sfn, ofn, sfn, ofn), PostRunBehaviour.IGNORE, PostRunBehaviour.IGNORE); 155 | return lastStdout; 156 | } 157 | 158 | private void addHostKeyVerifier(SSHClient ssh) { 159 | if (sshServerKeyFP!=null && sshServerKeyFP.length()>0) { 160 | ssh.addHostKeyVerifier(sshServerKeyFP); 161 | } else { 162 | ssh.addHostKeyVerifier((host, port, key) -> true); 163 | } 164 | } 165 | 166 | private static class BytesSourceFile extends InMemorySourceFile { 167 | final private String name; 168 | final private byte[] contents; 169 | 170 | BytesSourceFile(String name, byte[] contents) { 171 | this.name = name; 172 | this.contents = contents; 173 | } 174 | 175 | public String getName() { 176 | return name; 177 | } 178 | 179 | public long getLength() { 180 | return contents.length; 181 | } 182 | 183 | public InputStream getInputStream() { 184 | return new ByteArrayInputStream(contents); 185 | } 186 | } 187 | 188 | } 189 | -------------------------------------------------------------------------------- /pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 6 | 4.0.0 7 | 8 | io.cloudsoft.terraform.infrastructure 9 | cloudsoft-terraform-infrastructure-handler 10 | cloudsoft-terraform-infrastructure-handler 11 | 1.0-SNAPSHOT 12 | jar 13 | 14 | 15 | 1.8 16 | 1.8 17 | UTF-8 18 | UTF-8 19 | 20 | 2.13.71 21 | 22 | 23 | 24 | 25 | central 26 | https://repo1.maven.org/maven2 27 | 28 | 29 | jitpack.io 30 | https://jitpack.io 31 | 32 | 33 | 34 | 35 | 36 | software.amazon.awssdk 37 | s3 38 | ${awssdk.version} 39 | 40 | 41 | software.amazon.awssdk 42 | ssm 43 | ${awssdk.version} 44 | 45 | 46 | com.hierynomus 47 | sshj 48 | 0.27.0 49 | 50 | 51 | 52 | 53 | software.amazon.cloudformation 54 | aws-cloudformation-rpdk-java-plugin 55 | 2.0.2 56 | 57 | 58 | 59 | org.projectlombok 60 | lombok 61 | 1.18.4 62 | provided 63 | 64 | 65 | 66 | 67 | org.assertj 68 | assertj-core 69 | 3.12.2 70 | test 71 | 72 | 73 | 74 | org.junit.jupiter 75 | junit-jupiter 76 | 5.5.2 77 | test 78 | 79 | 80 | 81 | org.mockito 82 | mockito-core 83 | 2.26.0 84 | test 85 | 86 | 87 | 88 | org.mockito 89 | mockito-junit-jupiter 90 | 2.26.0 91 | test 92 | 93 | 94 | 95 | 96 | 97 | 98 | org.apache.maven.plugins 99 | maven-compiler-plugin 100 | 3.8.1 101 | 102 | true 103 | 104 | -Xlint:all,-options,-processing 105 | -Werror 106 | 107 | 108 | 109 | 110 | org.apache.maven.plugins 111 | maven-shade-plugin 112 | 2.3 113 | 114 | false 115 | 116 | 117 | 118 | package 119 | 120 | shade 121 | 122 | 123 | 124 | 125 | 126 | 
org.codehaus.mojo 127 | exec-maven-plugin 128 | 1.6.0 129 | 130 | 131 | generate 132 | generate-sources 133 | 134 | exec 135 | 136 | 137 | cfn 138 | generate 139 | ${project.basedir} 140 | 141 | 142 | 143 | 144 | 145 | org.codehaus.mojo 146 | build-helper-maven-plugin 147 | 3.0.0 148 | 149 | 150 | add-source 151 | generate-sources 152 | 153 | add-source 154 | 155 | 156 | 157 | ${project.basedir}/target/generated-sources/rpdk 158 | 159 | 160 | 161 | 162 | 163 | 164 | org.apache.maven.plugins 165 | maven-resources-plugin 166 | 2.4 167 | 168 | 169 | maven-surefire-plugin 170 | 3.0.0-M3 171 | 172 | 173 | org.jacoco 174 | jacoco-maven-plugin 175 | 0.8.4 176 | 177 | 178 | **/BaseConfiguration* 179 | **/BaseHandler* 180 | **/HandlerWrapper* 181 | **/ResourceModel* 182 | 183 | 184 | 185 | 186 | 187 | prepare-agent 188 | 189 | 190 | 191 | report 192 | test 193 | 194 | report 195 | 196 | 197 | 198 | 199 | 200 | 201 | 202 | 203 | 204 | 205 | 206 | 207 | 208 | 209 | 210 | 211 | 212 | 213 | 214 | 215 | 216 | 217 | 218 | 219 | 220 | 221 | 222 | 223 | 224 | 225 | 226 | 227 | 228 | 229 | ${project.basedir} 230 | 231 | cloudsoft-terraform-infrastructure.json 232 | 233 | 234 | 235 | 236 | 237 | 238 | 239 | org.eclipse.m2e 240 | lifecycle-mapping 241 | 1.0.0 242 | 243 | 244 | 245 | 246 | 247 | org.codehaus.mojo 248 | exec-maven-plugin 249 | [1.6.0,) 250 | 251 | exec 252 | 253 | 254 | 255 | 256 | 257 | 258 | 259 | 260 | 261 | 262 | 263 | 264 | 265 | 266 | -------------------------------------------------------------------------------- /src/test/java/io/cloudsoft/terraform/infrastructure/TerraformParametersTest.java: -------------------------------------------------------------------------------- 1 | package io.cloudsoft.terraform.infrastructure; 2 | 3 | import static junit.framework.Assert.assertEquals; 4 | import static org.junit.jupiter.api.Assertions.assertArrayEquals; 5 | import static org.junit.jupiter.api.Assertions.assertThrows; 6 | import static org.mockito.ArgumentMatchers.any; 7 | import static org.mockito.ArgumentMatchers.eq; 8 | import static org.mockito.Mockito.mock; 9 | import static org.mockito.Mockito.times; 10 | import static org.mockito.Mockito.verify; 11 | import static org.mockito.Mockito.when; 12 | 13 | import java.io.ByteArrayInputStream; 14 | import java.nio.charset.StandardCharsets; 15 | import java.nio.file.Path; 16 | import java.util.function.Function; 17 | 18 | import org.junit.jupiter.api.BeforeEach; 19 | import org.junit.jupiter.api.Test; 20 | import org.mockito.ArgumentCaptor; 21 | import org.mockito.Mock; 22 | import org.mockito.invocation.InvocationOnMock; 23 | import org.mockito.stubbing.Answer; 24 | 25 | import software.amazon.awssdk.core.sync.ResponseTransformer; 26 | import software.amazon.awssdk.http.AbortableInputStream; 27 | import software.amazon.awssdk.services.s3.S3Client; 28 | import software.amazon.awssdk.services.s3.model.GetObjectRequest; 29 | import software.amazon.awssdk.services.ssm.SsmClient; 30 | import software.amazon.awssdk.services.ssm.model.GetParameterRequest; 31 | import software.amazon.awssdk.services.ssm.model.GetParameterResponse; 32 | import software.amazon.awssdk.services.ssm.model.Parameter; 33 | import software.amazon.awssdk.services.ssm.model.ParameterNotFoundException; 34 | import software.amazon.cloudformation.proxy.AmazonWebServicesClientProxy; 35 | 36 | public class TerraformParametersTest { 37 | 38 | @Mock 39 | private AmazonWebServicesClientProxy proxy; 40 | 41 | @Mock 42 | private SsmClient ssmClient; 43 | 44 | @Mock 45 | private S3Client 
s3Client; 46 | 47 | private TerraformParameters parameters; 48 | 49 | @BeforeEach 50 | public void setup() { 51 | proxy = mock(AmazonWebServicesClientProxy.class); 52 | ssmClient = mock(SsmClient.class); 53 | s3Client = mock(S3Client.class); 54 | parameters = new TerraformParameters(null, proxy, ssmClient, s3Client); 55 | } 56 | 57 | @Test 58 | public void getHostReturnsParameterFromParameterStore() { 59 | final GetParameterRequest expectedGetParameterRequest = GetParameterRequest.builder() 60 | .name("/cfn/terraform/ssh-host") 61 | .withDecryption(true) 62 | .build(); 63 | final String expected = "acme.com"; 64 | 65 | when(proxy.injectCredentialsAndInvokeV2(any(GetParameterRequest.class), any())).thenReturn(GetParameterResponse.builder() 66 | .parameter(Parameter.builder() 67 | .value(expected) 68 | .build()) 69 | .build()); 70 | 71 | String host = parameters.getHost(); 72 | assertEquals(expected, host); 73 | verify(proxy, times(1)).injectCredentialsAndInvokeV2(eq(expectedGetParameterRequest), any()); 74 | } 75 | 76 | @Test 77 | public void getFingerprintReturnsParameterFromParameterStore() { 78 | final GetParameterRequest expectedGetParameterRequest = GetParameterRequest.builder() 79 | .name("/cfn/terraform/ssh-fingerprint") 80 | .withDecryption(true) 81 | .build(); 82 | final String expected = "12:f8:7e:78:61:b4:bf:e2:de:24:15:96:4e:d4:72:53"; 83 | 84 | when(proxy.injectCredentialsAndInvokeV2(any(GetParameterRequest.class), any())).thenReturn( 85 | GetParameterResponse.builder() 86 | .parameter(Parameter.builder() 87 | .value(expected) 88 | .build()) 89 | .build()); 90 | 91 | String fingerprint = parameters.getFingerprint(); 92 | assertEquals(expected, fingerprint); 93 | verify(proxy, times(1)).injectCredentialsAndInvokeV2(eq(expectedGetParameterRequest), any()); 94 | } 95 | 96 | @Test 97 | public void getSSHKeyReturnsParameterFromParameterStore() { 98 | final GetParameterRequest expectedGetParameterRequest = GetParameterRequest.builder() 99 | .name("/cfn/terraform/ssh-key") 100 | .withDecryption(true) 101 | .build(); 102 | final String expected = "=== RSA KEY === ............."; 103 | 104 | when(proxy.injectCredentialsAndInvokeV2(any(GetParameterRequest.class), any())).thenReturn( 105 | GetParameterResponse.builder().parameter(Parameter.builder().value(expected).build()).build()); 106 | 107 | String sshKey = parameters.getSSHKey(); 108 | assertEquals(expected, sshKey); 109 | verify(proxy, times(1)).injectCredentialsAndInvokeV2(eq(expectedGetParameterRequest), any()); 110 | } 111 | 112 | @Test 113 | public void getUsernameReturnsParameterFromParameterStore() { 114 | final GetParameterRequest expectedGetParameterRequest = GetParameterRequest.builder() 115 | .name("/cfn/terraform/ssh-username") 116 | .withDecryption(true) 117 | .build(); 118 | final String expected = "root"; 119 | 120 | when(proxy.injectCredentialsAndInvokeV2(any(GetParameterRequest.class), any())).thenReturn( 121 | GetParameterResponse.builder().parameter(Parameter.builder().value(expected).build()).build()); 122 | 123 | assertEquals(expected, parameters.getUsername()); 124 | verify(proxy, times(1)).injectCredentialsAndInvokeV2(eq(expectedGetParameterRequest), any()); 125 | } 126 | 127 | @Test 128 | public void getPortReturnsParameterFromParameterStore() { 129 | final GetParameterRequest expectedGetParameterRequest = GetParameterRequest.builder() 130 | .name("/cfn/terraform/ssh-port") 131 | .withDecryption(true) 132 | .build(); 133 | final String expected = "1234"; 134 | 135 | 
when(proxy.injectCredentialsAndInvokeV2(any(GetParameterRequest.class), any())).thenReturn( 136 | GetParameterResponse.builder().parameter(Parameter.builder().value(expected).build()).build()); 137 | 138 | assertEquals(Integer.parseInt(expected), parameters.getPort()); 139 | verify(proxy, times(1)).injectCredentialsAndInvokeV2(eq(expectedGetParameterRequest), any()); 140 | } 141 | 142 | @Test 143 | public void getPortReturns22IfNotFound() { 144 | final GetParameterRequest expectedGetParameterRequest = GetParameterRequest.builder() 145 | .name("/cfn/terraform/ssh-port") 146 | .withDecryption(true) 147 | .build(); 148 | whenProxyGetParameterCallSsmGetParameter(); 149 | when(ssmClient.getParameter(any(GetParameterRequest.class))).thenThrow(ParameterNotFoundException.builder().build()); 150 | 151 | assertEquals(22, parameters.getPort()); 152 | verify(proxy, times(1)).injectCredentialsAndInvokeV2(eq(expectedGetParameterRequest), any()); 153 | } 154 | 155 | @Test 156 | public void getPortThrowsIfGetParameterThrowsOtherError() { 157 | final GetParameterRequest expectedGetParameterRequest = GetParameterRequest.builder() 158 | .name("/cfn/terraform/ssh-port") 159 | .withDecryption(true) 160 | .build(); 161 | whenProxyGetParameterCallSsmGetParameter(); 162 | when(ssmClient.getParameter(any(GetParameterRequest.class))).thenThrow(new RuntimeException()); 163 | 164 | assertThrows(ConnectorHandlerFailures.Unhandled.class, () -> { 165 | parameters.getPort(); 166 | }); 167 | verify(proxy, times(1)).injectCredentialsAndInvokeV2(eq(expectedGetParameterRequest), any()); 168 | verify(ssmClient, times(1)).getParameter(any(GetParameterRequest.class)); 169 | } 170 | 171 | @Test 172 | public void getHostThrowsIfGetParameterThrows() { 173 | final GetParameterRequest expectedGetParameterRequest = GetParameterRequest.builder() 174 | .name("/cfn/terraform/ssh-host") 175 | .withDecryption(true) 176 | .build(); 177 | whenProxyGetParameterCallSsmGetParameter(); 178 | when(ssmClient.getParameter(any(GetParameterRequest.class))).thenThrow(ParameterNotFoundException.builder().build()); 179 | 180 | assertThrows(ConnectorHandlerFailures.Unhandled.class, () -> { 181 | parameters.getHost(); 182 | }); 183 | verify(proxy, times(1)).injectCredentialsAndInvokeV2(eq(expectedGetParameterRequest), any()); 184 | verify(ssmClient, times(1)).getParameter(any(GetParameterRequest.class)); 185 | } 186 | 187 | @Test 188 | public void getFingerprintReturnsNullIfGetParameterThrowsNotFound() { 189 | final GetParameterRequest expectedGetParameterRequest = GetParameterRequest.builder() 190 | .name("/cfn/terraform/ssh-fingerprint") 191 | .withDecryption(true) 192 | .build(); 193 | whenProxyGetParameterCallSsmGetParameter(); 194 | when(ssmClient.getParameter(any(GetParameterRequest.class))).thenThrow(ParameterNotFoundException.builder().build()); 195 | 196 | assertEquals(null, parameters.getFingerprint()); 197 | verify(proxy, times(1)).injectCredentialsAndInvokeV2(eq(expectedGetParameterRequest), any()); 198 | verify(ssmClient, times(1)).getParameter(any(GetParameterRequest.class)); 199 | } 200 | 201 | @Test 202 | public void getFingerprintThrowsIfGetParameterThrowsOtherError() { 203 | final GetParameterRequest expectedGetParameterRequest = GetParameterRequest.builder() 204 | .name("/cfn/terraform/ssh-fingerprint") 205 | .withDecryption(true) 206 | .build(); 207 | whenProxyGetParameterCallSsmGetParameter(); 208 | when(ssmClient.getParameter(any(GetParameterRequest.class))).thenThrow(new RuntimeException()); 209 | 210 | 
assertThrows(ConnectorHandlerFailures.Unhandled.class, () -> { 211 | parameters.getFingerprint(); 212 | }); 213 | verify(proxy, times(1)).injectCredentialsAndInvokeV2(eq(expectedGetParameterRequest), any()); 214 | verify(ssmClient, times(1)).getParameter(any(GetParameterRequest.class)); 215 | } 216 | 217 | @Test 218 | public void getSSHKeyThrowsIfGetParameterThrows() { 219 | final GetParameterRequest expectedGetParameterRequest = GetParameterRequest.builder() 220 | .name("/cfn/terraform/ssh-key") 221 | .withDecryption(true) 222 | .build(); 223 | whenProxyGetParameterCallSsmGetParameter(); 224 | when(ssmClient.getParameter(any(GetParameterRequest.class))).thenThrow(ParameterNotFoundException.builder().build()); 225 | 226 | assertThrows(ConnectorHandlerFailures.Unhandled.class, () -> { 227 | parameters.getSSHKey(); 228 | }); 229 | verify(proxy, times(1)).injectCredentialsAndInvokeV2(eq(expectedGetParameterRequest), any()); 230 | verify(ssmClient, times(1)).getParameter(any(GetParameterRequest.class)); 231 | } 232 | 233 | @Test 234 | public void getUsernameThrowsIfGetParameterThrows() { 235 | proxy = mock(AmazonWebServicesClientProxy.class); 236 | ssmClient = mock(SsmClient.class); 237 | s3Client = mock(S3Client.class); 238 | parameters = new TerraformParameters(null, proxy, ssmClient, s3Client); 239 | 240 | final GetParameterRequest expectedGetParameterRequest = GetParameterRequest.builder() 241 | .name("/cfn/terraform/ssh-username") 242 | .withDecryption(true) 243 | .build(); 244 | whenProxyGetParameterCallSsmGetParameter(); 245 | when(ssmClient.getParameter(any(GetParameterRequest.class))).thenThrow(ParameterNotFoundException.builder().build()); 246 | 247 | assertThrows(ConnectorHandlerFailures.Unhandled.class, () -> { 248 | parameters.getUsername(); 249 | }); 250 | verify(proxy, times(1)).injectCredentialsAndInvokeV2(eq(expectedGetParameterRequest), any()); 251 | verify(ssmClient, times(1)).getParameter(any(GetParameterRequest.class)); 252 | } 253 | 254 | protected void whenProxyGetParameterCallSsmGetParameter() { 255 | // if we just do InvocationOnMock::callRealMethod we get NPE due to loggerProxy 256 | when(proxy.injectCredentialsAndInvokeV2(any(GetParameterRequest.class), any())).then( 257 | ctx -> ssmClient.getParameter((GetParameterRequest)ctx.getArgument(0))); 258 | } 259 | 260 | @Test 261 | public void getConfigurationReturnConfigurationContentProperty() { 262 | final String configurationContent = "Hello world"; 263 | final ResourceModel model = ResourceModel.builder().configurationContent(configurationContent).build(); 264 | 265 | byte[] result = parameters.getConfiguration(model); 266 | assertArrayEquals(configurationContent.getBytes(StandardCharsets.UTF_8), result); 267 | } 268 | 269 | @Test 270 | public void getConfigurationReturnsDownloadedConfigurationFromUrlProperty() { 271 | final String configurationUrl = "http://www.mocky.io/v2/5dc19cab33000051e91a5437"; 272 | final ResourceModel model = ResourceModel.builder().configurationUrl(configurationUrl).build(); 273 | 274 | String expected = "Hello world"; 275 | byte[] result = parameters.getConfiguration(model); 276 | 277 | assertArrayEquals(expected.getBytes(StandardCharsets.UTF_8), result); 278 | } 279 | 280 | @Test 281 | public void getConfigurationThrowsIfUrlDoesNotExistProperty() { 282 | final String configurationUrl = "http://acme.com/does/not/exist"; 283 | final ResourceModel model = ResourceModel.builder().configurationUrl(configurationUrl).build(); 284 | 285 | assertThrows(ConnectorHandlerFailures.Unhandled.class, () -> 
{ 286 | parameters.getConfiguration(model); 287 | }); 288 | } 289 | 290 | @SuppressWarnings("unchecked") 291 | @Test 292 | public void getConfigurationReturnsDownloadedConfigurationFromS3PathProperty() { 293 | final String expectedBucket = "my-bucket"; 294 | final String expectedKey = "hello-world.txt"; 295 | final String expectedContent = "Hello world"; 296 | final String configurationS3Path = String.format("s3://%s/%s", expectedBucket, expectedKey); 297 | final ResourceModel model = ResourceModel.builder().configurationS3Path(configurationS3Path).build(); 298 | 299 | when(proxy.injectCredentialsAndInvokeV2(any(), any())).thenAnswer(new Answer() { 300 | @Override 301 | public Object answer(InvocationOnMock invocation) throws Throwable { 302 | return ((Function)invocation.getArgument(1)).apply(invocation.getArgument(0)); 303 | } 304 | }); 305 | when(s3Client.getObject(any(GetObjectRequest.class), any(ResponseTransformer.class))).then(invocationOnMock -> { 306 | ResponseTransformer transformer = invocationOnMock.getArgument(1); 307 | transformer.transform(null, AbortableInputStream.create(new ByteArrayInputStream(expectedContent.getBytes()))); 308 | return null; 309 | }); 310 | 311 | byte[] result = parameters.getConfiguration(model); 312 | verify(proxy, times(1)).injectCredentialsAndInvokeV2(any(GetObjectRequest.class), any()); 313 | ArgumentCaptor argument = ArgumentCaptor.forClass(GetObjectRequest.class); 314 | verify(s3Client, times(1)).getObject(argument.capture(), any(ResponseTransformer.class)); 315 | 316 | assertArrayEquals(expectedContent.getBytes(StandardCharsets.UTF_8), result); 317 | assertEquals(expectedBucket, argument.getValue().bucket()); 318 | assertEquals(expectedKey, argument.getValue().key()); 319 | } 320 | 321 | @Test 322 | public void getConfigurationThrowsIfS3PathIsInvalid() { 323 | final String configurationS3Path = "http://acme.com/does/not/exist"; 324 | final ResourceModel model = ResourceModel.builder().configurationS3Path(configurationS3Path).build(); 325 | 326 | when(proxy.injectCredentialsAndInvokeV2(any(), any())).then(InvocationOnMock::callRealMethod); 327 | 328 | assertThrows(ConnectorHandlerFailures.Unhandled.class, () -> { 329 | parameters.getConfiguration(model); 330 | }); 331 | } 332 | 333 | @Test 334 | public void getConfigurationThrowsIfS3PathDoesNotExistProperty() { 335 | final String configurationS3Path = "s3://bucket/does/not/exist"; 336 | final ResourceModel model = ResourceModel.builder().configurationS3Path(configurationS3Path).build(); 337 | 338 | when(proxy.injectCredentialsAndInvokeV2(any(), any())).then(InvocationOnMock::callRealMethod); 339 | when(s3Client.getObject((GetObjectRequest)any(), (Path)any())).thenThrow(IllegalArgumentException.class); 340 | 341 | assertThrows(ConnectorHandlerFailures.Unhandled.class, () -> { 342 | parameters.getConfiguration(model); 343 | }); 344 | } 345 | 346 | @Test 347 | public void getConfigurationThrowsIfNoProperties() { 348 | final ResourceModel model = ResourceModel.builder().build(); 349 | 350 | when(proxy.injectCredentialsAndInvokeV2(any(), any())).then(InvocationOnMock::callRealMethod); 351 | 352 | assertThrows(ConnectorHandlerFailures.Unhandled.class, () -> { 353 | parameters.getConfiguration(model); 354 | }); 355 | } 356 | } 357 | -------------------------------------------------------------------------------- /src/main/java/io/cloudsoft/terraform/infrastructure/TerraformBaseWorker.java: -------------------------------------------------------------------------------- 1 | package 
io.cloudsoft.terraform.infrastructure;
2 | 
3 | import java.io.IOException;
4 | import java.io.PrintWriter;
5 | import java.io.StringWriter;
6 | import java.util.LinkedHashMap;
7 | import java.util.Map;
8 | import java.util.Optional;
9 | 
10 | import javax.annotation.Nullable;
11 | 
12 | import com.google.common.annotations.VisibleForTesting;
13 | import com.google.common.base.Preconditions;
14 | 
15 | import io.cloudsoft.terraform.infrastructure.commands.RemoteDetachedTerraformProcess;
16 | import io.cloudsoft.terraform.infrastructure.commands.RemoteDetachedTerraformProcess.TerraformCommand;
17 | import io.cloudsoft.terraform.infrastructure.commands.RemoteDetachedTerraformProcessNohup;
18 | import io.cloudsoft.terraform.infrastructure.commands.RemoteDetachedTerraformProcessSystemd;
19 | import io.cloudsoft.terraform.infrastructure.commands.RemoteTerraformProcess;
20 | import lombok.Getter;
21 | import software.amazon.awssdk.core.sync.RequestBody;
22 | import software.amazon.cloudformation.proxy.AmazonWebServicesClientProxy;
23 | import software.amazon.cloudformation.proxy.Logger;
24 | import software.amazon.cloudformation.proxy.OperationStatus;
25 | import software.amazon.cloudformation.proxy.ProgressEvent;
26 | import software.amazon.cloudformation.proxy.ResourceHandlerRequest;
27 | 
28 | public abstract class TerraformBaseWorker<Steps extends Enum<Steps>> {
29 | 
30 |     private static final String HTTPS_S3_BUCKET_PREFIX = "https://s3.console.aws.amazon.com/s3/buckets/";
31 |     // Mirror Terraform, which maxes its state checks at 10 seconds when working on long jobs
32 |     private static final int MAX_CHECK_INTERVAL_SECONDS = 10;
33 |     // Use YAML doc separator to separate logged messages
34 |     public static final CharSequence LOG_MESSAGE_SEPARATOR = "---";
35 |     public static final String MAIN_LOG_BUCKET_FILE = "cfn-log.txt";
36 | 
37 |     @Getter
38 |     protected AmazonWebServicesClientProxy proxy;
39 |     @Getter
40 |     protected ResourceHandlerRequest<ResourceModel> request;
41 |     @Getter
42 |     protected ResourceModel model;
43 |     @Getter
44 |     protected CallbackContext callbackContext;
45 |     @Getter
46 |     private Logger logger;
47 |     @Getter
48 |     private final Class<Steps> stepsEnumClass;
49 | 
50 |     private TerraformParameters parameters;
51 | 
52 |     @Getter
53 |     private String commandSummary;
54 |     @Getter
55 |     protected Steps currentStep;
56 | 
57 |     @VisibleForTesting
58 |     boolean storeMetadataOnServer = true;
59 | 
60 |     // === init and accessors ========================
61 | 
62 |     public TerraformBaseWorker(String commandSummary, Class<Steps> stepsEnumClass) {
63 |         this.commandSummary = commandSummary;
64 |         this.stepsEnumClass = stepsEnumClass;
65 |     }
66 | 
67 |     protected void init(
68 |             @Nullable AmazonWebServicesClientProxy proxy,
69 |             ResourceHandlerRequest<ResourceModel> request,
70 |             @Nullable CallbackContext callbackContext,
71 |             Logger logger) {
72 |         if (this.request!=null) {
73 |             throw new IllegalStateException("Handler can only be set up and used once, and request has already been initialized when attempting to re-initialize it");
74 |         }
75 |         this.proxy = proxy;
76 |         this.request = Preconditions.checkNotNull(request, "request");
77 |         this.model = request.getDesiredResourceState();
78 |         this.callbackContext = callbackContext!=null ?
callbackContext : new CallbackContext();
79 |         this.logger = Preconditions.checkNotNull(logger, "logger");
80 |     }
81 | 
82 |     public synchronized TerraformParameters getParameters() {
83 |         if (parameters==null) {
84 |             if (proxy==null) {
85 |                 throw new IllegalStateException("Parameters cannot be accessed before proxy set during init");
86 |             }
87 |             parameters = new TerraformParameters(logger, proxy);
88 |         }
89 |         return parameters;
90 |     }
91 | 
92 |     // for testing
93 |     public synchronized void setParameters(TerraformParameters parameters) {
94 |         if (this.parameters!=null) {
95 |             throw new IllegalStateException("Handler can only be set up and used once, and parameters have already been initialized when attempting to re-initialize them");
96 |         }
97 |         this.parameters = Preconditions.checkNotNull(parameters, "parameters");
98 |     }
99 | 
100 |     // === lifecycle ========================
101 | 
102 |     public ProgressEvent<ResourceModel, CallbackContext> runHandlingError() {
103 |         try {
104 |             log(getClass().getName() + " lambda starting, model: "+model+", callback: "+callbackContext);
105 |             preRunStep();
106 |             ProgressEvent<ResourceModel, CallbackContext> result = runStep();
107 |             log(getClass().getName() + " lambda exiting, status: "+result.getStatus()+", message: "+result.getMessage()+", callback: "+result.getCallbackContext()+", model: "+result.getResourceModel());
108 |             if (OperationStatus.SUCCESS==result.getStatus()) {
109 |                 logUserLogOnly("SUCCESS: "+model);
110 |             }
111 |             return result;
112 | 
113 |         } catch (ConnectorHandlerFailures.Handled e) {
114 |             log(getClass().getName() + " lambda exiting with error");
115 |             String message = "FAILED: "+e.getMessage();
116 |             logUserLogOnly(message);
117 |             return statusFailed(message);
118 | 
119 |         } catch (ConnectorHandlerFailures.Unhandled e) {
120 |             if (e.getCause()!=null) {
121 |                 logExceptionIncludingUserLog("FAILED: "+e.getMessage(), e.getCause());
122 |             } else {
123 |                 logIncludingUserLog("FAILED: "+e.getMessage());
124 |             }
125 |             log(getClass().getName() + " lambda exiting with error");
126 |             return statusFailed(e.getMessage());
127 | 
128 |         } catch (Exception e) {
129 |             logExceptionIncludingUserLog("FAILED: "+e, e);
130 |             log(getClass().getName() + " lambda exiting with error");
131 |             return statusFailed((currentStep!=null ? currentStep+": " : "")+e);
132 |         }
133 |     }
134 | 
135 |     protected void preRunStep() {
136 |         if (callbackContext.logBucketName==null && model.getLogBucketUrl()!=null) {
137 |             // we don't think this state occurs -- model (and prevmodel) are wiped apart from identifiers between model runs.
138 |             // we will populate the log bucket name below in `initLogBucket`
139 |             setCallbackLogBucketNameFromModelUrl();
140 | 
141 |         } else if (callbackContext.logBucketName!=null && model.getLogBucketUrl()==null) {
142 |             // this sometimes wasn't remembered; problem may be fixed now
143 |             setModelLogBucketUrlFromCallbackContextName();
144 |         }
145 | 
146 |         if (getCallbackContext().stepId == null) {
147 |             // very first run
148 | 
149 |             // model data that we need to remember needs to be cached somewhere; we use the server for this
150 |             loadMetadata();
151 | 
152 |             getCallbackContext().commandRequestId = Configuration.getIdentifier(true, 6);
153 |             log("Using "+getCallbackContext().commandRequestId+" to uniquely identify this command across all steps (stack element "+model.getIdentifier()+", request "+request.getClientRequestToken()+")");
154 | 
155 |             initLogBucket();
156 | 
157 |             // init steps
158 |             if (stepsEnumClass.getEnumConstants().length==0) {
159 |                 // leave it null
160 |             } else {
161 |                 currentStep = stepsEnumClass.getEnumConstants()[0];
162 |             }
163 | 
164 |         } else {
165 |             // continuing a step
166 |             currentStep = Enum.valueOf(stepsEnumClass, callbackContext.stepId);
167 |         }
168 |     }
169 | 
170 |     // not the cleanest way to store config between commands, or most elegant code,
171 |     // but it gets the job done.
172 | 
173 |     protected void saveMetadata() {
174 |         if (storeMetadataOnServer) {
175 |             Map<String, Object> md = new LinkedHashMap<>();
176 |             if (model.getLogBucketName()!=null) md.put("LogBucketName", model.getLogBucketName());
177 |             if (model.getLogBucketUrl()!=null) md.put("LogBucketUrl", model.getLogBucketUrl());
178 |             try {
179 |                 RemoteTerraformProcess.of(this).saveMetadata(md);
180 |             } catch (Exception e) {
181 |                 throw ConnectorHandlerFailures.unhandled("Unable to save model metadata: "+e, e);
182 |             }
183 |         }
184 |     }
185 | 
186 |     protected void loadMetadata() {
187 |         if (storeMetadataOnServer) {
188 |             Map<String, Object> md;
189 |             try {
190 |                 md = RemoteTerraformProcess.of(this).loadMetadata();
191 |             } catch (Exception e) {
192 |                 throw ConnectorHandlerFailures.unhandled("Unable to load model metadata: "+e, e);
193 |             }
194 | 
195 |             if (md.get("LogBucketName")!=null) model.setLogBucketName((String)md.get("LogBucketName"));
196 |             if (md.get("LogBucketUrl")!=null) model.setLogBucketUrl((String)md.get("LogBucketUrl"));
197 |             setCallbackLogBucketNameFromModelUrl();
198 |         }
199 |     }
200 | 
201 |     /** on the pre-run of the first step in a command, check whether the log bucket exists and
202 |      * populate the field in the callback for use in subsequent steps. the model's bucket URL is used
203 |      * to compute the bucket name, and that bucket URL is written to the server and read before this is called.
204 | */ 205 | protected void initLogBucket() { 206 | setCallbackLogBucketNameFromModelUrl(); 207 | initLogBucketFirstMessage(); 208 | } 209 | 210 | protected boolean initLogBucketFirstMessage() { 211 | if (userLogsEnabled()) { 212 | String msg = Configuration.getDateTimeString()+" "+ 213 | commandSummary+" command requested "+" on "+model.getIdentifier()+", command "+getCallbackContext().commandRequestId+"\n"; 214 | if (callbackContext.getLogBucketName()!=null) { 215 | //for debugging: 216 | // log("Initializing log bucket "+callbackContext.logBucketName+": "+msg); 217 | return uploadCompleteLog(MAIN_LOG_BUCKET_FILE, msg); 218 | } 219 | } 220 | return false; 221 | } 222 | 223 | protected abstract ProgressEvent runStep() throws IOException; 224 | 225 | // === utils ======================== 226 | 227 | protected void setCallbackLogBucketNameFromModelUrl() { 228 | if (model.getLogBucketUrl()!=null) { 229 | String name = model.getLogBucketUrl(); 230 | 231 | // must have the prefix, remove it 232 | if (!name.startsWith(HTTPS_S3_BUCKET_PREFIX)) { 233 | throw new IllegalStateException("Malformed bucket URL: "+name); 234 | } 235 | name = name.substring(HTTPS_S3_BUCKET_PREFIX.length()); 236 | 237 | // might have a /model suffix - if so remove that too 238 | int end = name.indexOf('/'); 239 | if (end>=0) { 240 | name = name.substring(0, end); 241 | } 242 | callbackContext.logBucketName = name; 243 | } 244 | } 245 | 246 | protected void setModelLogBucketUrlFromCallbackContextName() { 247 | String url = null; 248 | if (callbackContext.logBucketName!=null) { 249 | url = HTTPS_S3_BUCKET_PREFIX+callbackContext.logBucketName; 250 | if (!callbackContext.logBucketName.contains(model.getIdentifier().toLowerCase())) { 251 | // note, NOT lower case here, needs trailing slash 252 | url += "/"+model.getIdentifier()+"/"; 253 | } 254 | } 255 | model.setLogBucketUrl(url); 256 | } 257 | 258 | protected void log(String message) { 259 | System.out.println(message); 260 | System.out.println(LOG_MESSAGE_SEPARATOR); 261 | if (logger!=null) { 262 | logger.log(message); 263 | } 264 | } 265 | 266 | protected void logIncludingUserLog(String message) { 267 | log(message); 268 | logUserLogOnly(message); 269 | } 270 | 271 | protected boolean userLogsEnabled() { 272 | return true; 273 | } 274 | 275 | private void logUserLogOnly(String message) { 276 | if (userLogsEnabled()) { 277 | uploadCompleteLog(MAIN_LOG_BUCKET_FILE, downloadLog(MAIN_LOG_BUCKET_FILE).orElse("")+ 278 | Configuration.getDateTimeString()+" "+message+"\n"); 279 | } 280 | } 281 | 282 | protected final void logException(String message, Throwable e) { 283 | log(message + "\n" + getStackTraceAsString(e)); 284 | } 285 | 286 | protected final void logExceptionIncludingUserLog(String message, Throwable e) { 287 | logIncludingUserLog(message + "\n" + getStackTraceAsString(e)); 288 | } 289 | 290 | protected String getStackTraceAsString(Throwable e) { 291 | final StringWriter sw = new StringWriter(); 292 | final PrintWriter pw = new PrintWriter(sw); 293 | e.printStackTrace(pw); 294 | return sw.toString(); 295 | } 296 | 297 | protected String appendMessages(String message, Object ...appendices) { 298 | String result = ""; 299 | for (Object app: appendices) { 300 | if (app!=null) { 301 | result += app; 302 | if (result.equals(result.trim())) { 303 | result += " "; 304 | } 305 | } 306 | } 307 | if (result.isEmpty()) { 308 | return message; 309 | } 310 | 311 | result = result.trim(); 312 | if (message==null || message.isEmpty()) { 313 | return result; 314 | } else { 315 | 
return message += " ("+result+")"; 316 | } 317 | } 318 | 319 | protected ProgressEvent statusFailed(String message) { 320 | message = appendMessages(message, 321 | model.getLogBucketUrl()==null ? null : 322 | "Logs are available at "+model.getLogBucketUrl()+" and may be kept after stack rollback."); 323 | return ProgressEvent.builder() 324 | .resourceModel(model) 325 | .status(OperationStatus.FAILED) 326 | .message(message) 327 | .build(); 328 | } 329 | 330 | protected ProgressEvent statusSuccess() { 331 | String message = appendMessages(null, 332 | getCommandSummary()+" succeeded.", 333 | model.getOutputs()==null || model.getOutputs().isEmpty() ? null : 334 | "Outputs: "+model.getOutputs()+". \n", 335 | model.getLogBucketUrl()==null ? null : 336 | "Logs are available at "+model.getLogBucketUrl()+" "+ 337 | (!"Delete".equals(commandSummary) ? "." : "and may be kept after stack deletion.")); 338 | return ProgressEvent.builder() 339 | .resourceModel(model) 340 | .status(OperationStatus.SUCCESS) 341 | // would be nice if this message appeared in UI, but it doesn't 342 | .message(message) 343 | // callback data from this is thrown away 344 | .build(); 345 | } 346 | 347 | protected ProgressEvent statusInProgress() { 348 | String message = appendMessages(null, 349 | currentStep == null ? "In progress..." : "Step: " + currentStep, 350 | model.getOutputs()==null || model.getOutputs().isEmpty() ? null : 351 | "Outputs: "+model.getOutputs()+". \n", 352 | model.getLogBucketUrl()==null ? null : 353 | "Logs are available at "+model.getLogBucketUrl()+" ."); 354 | return ProgressEvent.builder() 355 | .resourceModel(model) 356 | .callbackContext(callbackContext) 357 | .callbackDelaySeconds(nextDelay(callbackContext)) 358 | .status(OperationStatus.IN_PROGRESS) 359 | .message(message) 360 | .build(); 361 | } 362 | 363 | int nextDelay(CallbackContext callbackContext) { 364 | if (callbackContext.lastDelaySeconds < 0) { 365 | callbackContext.lastDelaySeconds = 0; 366 | } else if (callbackContext.lastDelaySeconds == 0) { 367 | callbackContext.lastDelaySeconds = 1; 368 | } else if (callbackContext.lastDelaySeconds < MAX_CHECK_INTERVAL_SECONDS) { 369 | // exponential backoff 370 | callbackContext.lastDelaySeconds = 371 | Math.min(MAX_CHECK_INTERVAL_SECONDS, 2 * callbackContext.lastDelaySeconds); 372 | } 373 | return callbackContext.lastDelaySeconds; 374 | } 375 | 376 | protected final void advanceTo(Steps nextStep) { 377 | log("Entering step "+nextStep); 378 | callbackContext.stepId = nextStep.toString(); 379 | callbackContext.lastDelaySeconds = -1; 380 | } 381 | 382 | protected final RemoteDetachedTerraformProcess remoteProcessForCommand(TerraformCommand command) { 383 | String processManager = callbackContext.processManager; 384 | if (processManager==null) { 385 | processManager = getParameters().getProcessManager(); 386 | } 387 | 388 | // ensure it doesn't change in the middle of a run, even if parameters are changed 389 | callbackContext.processManager = processManager; 390 | if ("systemd".equals(processManager)) { 391 | return RemoteDetachedTerraformProcessSystemd.of(this, command); 392 | 393 | } else if ("nohup".equals(processManager)) { 394 | return RemoteDetachedTerraformProcessNohup.of(this, command); 395 | 396 | } else { 397 | throw new IllegalStateException("Unsupported process manager type"); 398 | } 399 | } 400 | 401 | protected final RemoteTerraformProcess remoteTerraformProcess() { 402 | return RemoteTerraformProcess.of(this); 403 | } 404 | 405 | protected RemoteDetachedTerraformProcess tfInit() { 
406 | return remoteProcessForCommand(RemoteDetachedTerraformProcess.TerraformCommand.TF_INIT); 407 | } 408 | 409 | protected RemoteDetachedTerraformProcess tfApply() { 410 | return remoteProcessForCommand(RemoteDetachedTerraformProcess.TerraformCommand.TF_APPLY); 411 | } 412 | 413 | protected RemoteDetachedTerraformProcess tfDestroy() { 414 | return remoteProcessForCommand(RemoteDetachedTerraformProcess.TerraformCommand.TF_DESTROY); 415 | } 416 | 417 | private void drainPendingRemoteLogs(RemoteDetachedTerraformProcess process) throws IOException { 418 | String str; 419 | str = process.getIncrementalStdout(); 420 | if (!str.isEmpty()) 421 | log("New standard output data:\n" + str); 422 | str = process.getIncrementalStderr(); 423 | if (!str.isEmpty()) 424 | log("New standard error data:\n" + str); 425 | } 426 | 427 | protected boolean checkStillRunningOrError(RemoteDetachedTerraformProcess process) throws IOException { 428 | // Always drain pending log messages regardless of any other activity/conditions. 429 | // That said, do not drain _before_ establishing whether the remote process is still 430 | // running as that would be a race against short-lived processes and would require a 431 | // second drain in case the process has finished and would result in a short Terraform 432 | // log split across two CloudWatch messages for no obvious reason. 433 | final boolean isRunning = process.isRunning(); 434 | drainPendingRemoteLogs(process); 435 | if (isRunning) { 436 | return true; 437 | } 438 | 439 | final String stdout = process.getFullStdout(); 440 | final String stderr = process.getFullStderr(); 441 | 442 | final String s3BucketName = callbackContext.getLogBucketName(); 443 | if (s3BucketName != null) { 444 | uploadCompleteLog(process.getCommandName()+"-"+"stdout.txt", stdout); 445 | uploadCompleteLog(process.getCommandName()+"-"+"stderr.txt", stderr); 446 | } 447 | 448 | // FIXME: instead of retrieving the full log files it would be faster to accumulate the 449 | // incremental fragments already retrieved above. 450 | try { 451 | if (!process.wasFailure()) { 452 | if (!stderr.isEmpty()) { 453 | // Any stderr output is not the wanted result because usually it is a side 454 | // effect of the remote process' failure, but combined with a non-raised fault 455 | // flag it may mean a bug (a failure to fail) in Terraform or in the resource 456 | // provider code, hence report this separately to make it easier to relate. 457 | log("Spurious remote stderr:\n" + stderr); 458 | } 459 | } else { 460 | final String message = String.format("Error in %s: %s", process.getCommandName(), process.getErrorString()); 461 | log(message); 462 | log(stderr.isEmpty() ? "(Remote stderr is empty.)" : "Remote stderr:\n" + stderr); 463 | log(stdout.isEmpty() ? "(Remote stdout is empty.)" : "Remote stdout:\n" + stdout); 464 | throw ConnectorHandlerFailures.handled(message+"; see logs for more detail."); 465 | } 466 | return false; 467 | } finally { 468 | process.cleanup(); 469 | } 470 | } 471 | 472 | // This call actually consists of two network transfers, hence for large files is more 473 | // likely to time out. However, splitting it into two FSM states would require some place 474 | // to keep the downloaded file. The callback context isn't intended for that, neither is 475 | // the lambda's runtime filesystem. 476 | // There would be one more transfer if the CloudFormation template defines any Terraform 477 | // variables, so the above note would apply even more. 
478 | protected final void getAndUploadConfiguration(boolean firstTime) throws IOException { 479 | remoteTerraformProcess().uploadConfiguration(getParameters().getConfiguration(model), model.getVariables(), firstTime); 480 | } 481 | 482 | private Optional downloadLog(String objectSuffix) { 483 | String bucketName = callbackContext.getLogBucketName(); 484 | if (bucketName!=null) { 485 | BucketUtils bucketUtils = new BucketUtils(proxy); 486 | final String objectKey = getLogFileObjectKey(objectSuffix); 487 | try { 488 | return Optional.of(new String(bucketUtils.download(bucketName, objectKey))); 489 | } catch (Exception e) { 490 | log(String.format("Failed to retrieve log file %s from S3 bucket %s: %s (%s)", objectKey, bucketName, e.getClass().getName(), e.getMessage())); 491 | return Optional.empty(); 492 | } 493 | } else { 494 | return Optional.empty(); 495 | } 496 | } 497 | 498 | private String getLogFileObjectKey(String objectSuffix) { 499 | String folder; 500 | if (callbackContext.getLogBucketName().contains(model.getIdentifier().toLowerCase())) { 501 | folder = ""; 502 | } else { 503 | // put in a virtual subfolder unless the bucket name already contains the model identifier 504 | folder = model.getIdentifier()+"/"; 505 | } 506 | return folder + callbackContext.getCommandRequestId()+"-"+getCommandSummary()+"/"+objectSuffix; 507 | } 508 | protected boolean uploadCompleteLog(String objectSuffix, String text) { 509 | String bucketName = callbackContext.getLogBucketName(); 510 | if (bucketName!=null) { 511 | BucketUtils bucketUtils = new BucketUtils(proxy); 512 | final String objectKey = getLogFileObjectKey(objectSuffix); 513 | try { 514 | bucketUtils.upload(bucketName, objectKey, RequestBody.fromString(text), "text/plain"); 515 | return true; 516 | 517 | } catch (Exception e) { 518 | log(String.format("Failed to put log file %s into S3 bucket %s: %s (%s); disabling logs", objectKey, bucketName, e.getClass().getName(), e.getMessage())); 519 | 520 | callbackContext.logBucketName = null; 521 | setModelLogBucketUrlFromCallbackContextName(); 522 | return false; 523 | } 524 | } 525 | return false; 526 | } 527 | } 528 | --------------------------------------------------------------------------------
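The polling cadence used by TerraformBaseWorker.nextDelay (reset to -1 by advanceTo for each new step) ramps the CloudFormation callback delay from an immediate re-invocation up to the MAX_CHECK_INTERVAL_SECONDS cap. The standalone sketch below is illustrative only and not part of the repository; the class name BackoffSketch is hypothetical, and it simply replays the same doubling-with-cap rule to show the sequence of delays a long-running step would request.

// Illustrative sketch (not part of the repository): replays the doubling rule of
// TerraformBaseWorker.nextDelay, assuming lastDelaySeconds starts at -1 as advanceTo sets it.
public class BackoffSketch {
    static final int MAX_CHECK_INTERVAL_SECONDS = 10; // same cap as TerraformBaseWorker

    public static void main(String[] args) {
        int lastDelaySeconds = -1;              // a new step starts with the delay unset (-1)
        StringBuilder sequence = new StringBuilder();
        for (int poll = 0; poll < 8; poll++) {
            if (lastDelaySeconds < 0) {
                lastDelaySeconds = 0;           // first poll: re-invoke immediately
            } else if (lastDelaySeconds == 0) {
                lastDelaySeconds = 1;           // then wait one second
            } else if (lastDelaySeconds < MAX_CHECK_INTERVAL_SECONDS) {
                lastDelaySeconds = Math.min(MAX_CHECK_INTERVAL_SECONDS, 2 * lastDelaySeconds); // double, capped at 10
            }
            sequence.append(lastDelaySeconds).append(' ');
        }
        System.out.println(sequence.toString().trim()); // prints: 0 1 2 4 8 10 10 10
    }
}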