├── Analysis-Query-Generation-Action-Group ├── OpenAPI-schema.json └── lamda_function.py ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── IaC-Generation-Deployment-Action-Group ├── OpenAPI-schema.json └── lamda_function.py ├── Knowledge-Base └── terraform-modules.json ├── LICENSE └── README.md /Analysis-Query-Generation-Action-Group/OpenAPI-schema.json: -------------------------------------------------------------------------------- 1 | { 2 | "openapi": "3.0.0", 3 | "info": { 4 | "title": "Bedrock Agent Lambda API", 5 | "version": "1.0.0" 6 | }, 7 | "paths": { 8 | "/process-diagram": { 9 | "post": { 10 | "summary": "Architecture diagram analysis", 11 | "description": "Architecture diagram analysis to create IaC", 12 | "operationId": "DiagramAnalysis", 13 | "tags": [ 14 | "Diagram Analysis" 15 | ], 16 | "requestBody": { 17 | "required": true, 18 | "content": { 19 | "application/json": { 20 | "schema": { 21 | "type": "object", 22 | "properties": { 23 | "diagramS3Bucket": { 24 | "type": "string" 25 | }, 26 | "diagramS3Key": { 27 | "type": "string" 28 | } 29 | }, 30 | "required": [ 31 | "diagramS3Bucket", 32 | "diagramS3Key" 33 | ] 34 | } 35 | } 36 | } 37 | }, 38 | "responses": { 39 | "200": { 40 | "description": "Analyzed the diagram", 41 | "content": { 42 | "application/json": { 43 | "schema": { 44 | "type": "object", 45 | "properties": { 46 | "message": { 47 | "type": "string" 48 | } 49 | } 50 | } 51 | } 52 | } 53 | }, 54 | "500": { 55 | "description": "Error occurred", 56 | "content": { 57 | "application/json": { 58 | "schema": { 59 | "type": "object", 60 | "properties": { 61 | "error": { 62 | "type": "string" 63 | }, 64 | "details": { 65 | "type": "string" 66 | } 67 | } 68 | } 69 | } 70 | } 71 | } 72 | } 73 | } 74 | } 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /Analysis-Query-Generation-Action-Group/lamda_function.py: -------------------------------------------------------------------------------- 1 | import 
json 2 | import base64 3 | import boto3 4 | 5 | # Initialize AWS clients 6 | s3_client = boto3.client('s3') 7 | bedrock = boto3.client("bedrock-runtime") 8 | 9 | # Specify the AWS Bedrock model ID for Claude-3 10 | model_id = "anthropic.claude-3-sonnet-20240229-v1:0" 11 | #Specify the prompt for bedrock 12 | prompt_text = "Please analyze this architecture diagram in order to create Infrastructure-As-Code. Please provide a structured summary of the diagram and ask necessary questions about missing configuration components, dependencies, or unclear connections that are not present in the diagram and that are required to create the IaC. DO NOT assume or ask additional unrelated questions" 13 | 14 | def lambda_handler(event, context): 15 | print(event) 16 | # Get S3 bucket details from the event 17 | properties = {prop["name"]: prop["value"] for prop in event["requestBody"]["content"]["application/json"]["properties"]} 18 | bucket_name = properties['diagramS3Bucket'] 19 | bucket_object = properties['diagramS3Key'] 20 | try: 21 | # Fetch diagram from S3 22 | response = s3_client.get_object(Bucket=bucket_name, Key=bucket_object) 23 | diagram_bytes = response['Body'].read() 24 | except Exception as e: 25 | print(f"Error fetching image from S3: {e}") 26 | return {'statusCode': 500, 'body': json.dumps(f"Error fetching image from S3: {e}")} 27 | 28 | # Encode the image in base64 29 | encoded_diagram = base64.b64encode(diagram_bytes).decode("utf-8") 30 | 31 | # Prepare the payload for invoking the Claude-3 model 32 | body = json.dumps({ 33 | "anthropic_version": "bedrock-2023-05-31", 34 | "max_tokens": 4096, 35 | "messages": [ 36 | { 37 | "role": "user", 38 | "content": [ 39 | { 40 | "type": "image", 41 | "source": { 42 | "type": "base64", 43 | "media_type": "image/png", 44 | "data": encoded_diagram, 45 | }, 46 | }, 47 | {"type": "text", "text": prompt_text}, 48 | ], 49 | } 50 | ], 51 | }) 52 | 53 | # Invoke the Claude-3 model 54 | try: 55 | response = bedrock.invoke_model( 
56 | modelId=model_id, 57 | body=body) 58 | 59 | # Decode response body from bytes to string and parse to JSON 60 | response_body = json.loads(response['body'].read().decode("utf-8")) 61 | print(response_body) 62 | 63 | return { 64 | 'messageVersion': '1.0', 65 | 'response': { 66 | 'actionGroup': event['actionGroup'], 67 | 'apiPath': event['apiPath'], 68 | 'httpMethod': event['httpMethod'], 69 | 'httpStatusCode': 200, 70 | 'responseBody': { 71 | 'application/json': { 72 | 'body': json.dumps({ 73 | "message": f"Summary and questions created are {response_body}" 74 | }) 75 | } 76 | }, 77 | 'sessionAttributes': event.get('sessionAttributes', {}), 78 | 'promptSessionAttributes': event.get('promptSessionAttributes', {}) 79 | } 80 | } 81 | except Exception as e: 82 | print(f"Error invoking Claude-3 model: {e}") 83 | return {'statusCode': 500, 'body': json.dumps(f"Error invoking Claude-3 model: {e}")} 84 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | ## Code of Conduct 2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 4 | opensource-codeofconduct@amazon.com with any additional questions or comments. 5 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing Guidelines 2 | 3 | Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional 4 | documentation, we greatly value feedback and contributions from our community. 
5 | 6 | Please read through this document before submitting any issues or pull requests to ensure we have all the necessary 7 | information to effectively respond to your bug report or contribution. 8 | 9 | 10 | ## Reporting Bugs/Feature Requests 11 | 12 | We welcome you to use the GitHub issue tracker to report bugs or suggest features. 13 | 14 | When filing an issue, please check existing open, or recently closed, issues to make sure somebody else hasn't already 15 | reported the issue. Please try to include as much information as you can. Details like these are incredibly useful: 16 | 17 | * A reproducible test case or series of steps 18 | * The version of our code being used 19 | * Any modifications you've made relevant to the bug 20 | * Anything unusual about your environment or deployment 21 | 22 | 23 | ## Contributing via Pull Requests 24 | Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that: 25 | 26 | 1. You are working against the latest source on the *main* branch. 27 | 2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already. 28 | 3. You open an issue to discuss any significant work - we would hate for your time to be wasted. 29 | 30 | To send us a pull request, please: 31 | 32 | 1. Fork the repository. 33 | 2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change. 34 | 3. Ensure local tests pass. 35 | 4. Commit to your fork using clear commit messages. 36 | 5. Send us a pull request, answering any default questions in the pull request interface. 37 | 6. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation. 
38 | 39 | GitHub provides additional documentation on [forking a repository](https://help.github.com/articles/fork-a-repo/) and 40 | [creating a pull request](https://help.github.com/articles/creating-a-pull-request/). 41 | 42 | 43 | ## Finding contributions to work on 44 | Looking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any 'help wanted' issues is a great place to start. 45 | 46 | 47 | ## Code of Conduct 48 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 49 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 50 | opensource-codeofconduct@amazon.com with any additional questions or comments. 51 | 52 | 53 | ## Security issue notifications 54 | If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public GitHub issue. 55 | 56 | 57 | ## Licensing 58 | 59 | See the [LICENSE](LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution. 
60 | -------------------------------------------------------------------------------- /IaC-Generation-Deployment-Action-Group/OpenAPI-schema.json: -------------------------------------------------------------------------------- 1 | { 2 | "openapi": "3.0.0", 3 | "info": { 4 | "title": "Bedrock Agent Lambda API", 5 | "version": "1.0.0" 6 | }, 7 | "paths": { 8 | "/process-diagram": { 9 | "post": { 10 | "summary": "Architecture diagram to IaC", 11 | "description": "Architecture diagram analysis to create IaC", 12 | "operationId": "DiagramToIaC", 13 | "tags": [ 14 | "Diagram to IaC" 15 | ], 16 | "requestBody": { 17 | "required": true, 18 | "content": { 19 | "application/json": { 20 | "schema": { 21 | "type": "object", 22 | "properties": { 23 | "diagramS3Bucket": { 24 | "type": "string" 25 | }, 26 | "diagramS3Key": { 27 | "type": "string" 28 | }, 29 | "final_draft": { 30 | "type": "string" 31 | } 32 | }, 33 | "required": [ 34 | "diagramS3Bucket", 35 | "diagramS3Key", 36 | "final_draft" 37 | ] 38 | } 39 | } 40 | } 41 | }, 42 | "responses": { 43 | "200": { 44 | "description": "Analyzed the diagram and created IaC", 45 | "content": { 46 | "application/json": { 47 | "schema": { 48 | "type": "object", 49 | "properties": { 50 | "message": { 51 | "type": "string" 52 | } 53 | } 54 | } 55 | } 56 | } 57 | }, 58 | "500": { 59 | "description": "Error occurred", 60 | "content": { 61 | "application/json": { 62 | "schema": { 63 | "type": "object", 64 | "properties": { 65 | "error": { 66 | "type": "string" 67 | }, 68 | "details": { 69 | "type": "string" 70 | } 71 | } 72 | } 73 | } 74 | } 75 | } 76 | } 77 | } 78 | } 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /IaC-Generation-Deployment-Action-Group/lamda_function.py: -------------------------------------------------------------------------------- 1 | import json 2 | import logging 3 | import base64 4 | import os 5 | import boto3 6 | from botocore.exceptions import ClientError 7 | 
import requests 8 | 9 | # Set up logging 10 | logger = logging.getLogger() 11 | logger.setLevel(logging.INFO) 12 | 13 | # Initialize a Boto3 client for S3 14 | s3_client = boto3.client('s3') 15 | 16 | # Initialize a Boto3 client for Bedrock 17 | bedrock = boto3.client(service_name='bedrock-runtime') 18 | bedrock_client = boto3.client(service_name='bedrock-agent-runtime') 19 | model_id = "anthropic.claude-3-sonnet-20240229-v1:0" 20 | 21 | def invoke_bedrock_model(prompt, bucket_name, bucket_object, final_draft): 22 | try: 23 | # Fetch diagram from S3 24 | response = s3_client.get_object(Bucket=bucket_name, Key=bucket_object) 25 | diagram_bytes = response['Body'].read() 26 | except Exception as e: 27 | print(f"Error fetching image from S3: {e}") 28 | return {'statusCode': 500, 'body': json.dumps(f"Error fetching image from S3: {e}")} 29 | 30 | # Encode the image in base64 31 | encoded_diagram = base64.b64encode(diagram_bytes).decode("utf-8") 32 | 33 | # Prepare the payload for invoking the Claude-3 model 34 | body = json.dumps({ 35 | "anthropic_version": "bedrock-2023-05-31", 36 | "max_tokens": 4096, 37 | "temperature": 0, 38 | "top_k": 250, 39 | "top_p": 1, 40 | "messages": [ 41 | { 42 | "role": "user", 43 | "content": [ 44 | { 45 | "type": "image", 46 | "source": { 47 | "type": "base64", 48 | "media_type": "image/png", 49 | "data": encoded_diagram, 50 | }, 51 | }, 52 | {"type": "text", "text": prompt}, 53 | ], 54 | } 55 | ], 56 | }) 57 | 58 | # Invoke the Claude-3 model 59 | try: 60 | response = bedrock.invoke_model( 61 | modelId=model_id, 62 | body=body, 63 | accept="*/*", 64 | contentType="application/json" 65 | ) 66 | 67 | # Decode response body from bytes to string and parse to JSON 68 | response_body = json.loads(response.get('body').read()) 69 | # Return generated Terraform code 70 | tf_code = response_body['content'][0]['text'] 71 | return tf_code 72 | 73 | except Exception as e: 74 | print(f"Error invoking Claude-3 model") 75 | return {'statusCode': 500, 
'body': json.dumps(f"Error invoking Claude-3 model")} 76 | 77 | def create_and_commit_file(repo_owner, repo_name, path, token, commit_message, content): 78 | # Construct the URL for GitHub API 79 | url = f'https://api.github.com/repos/{repo_owner}/{repo_name}/contents/{path}' 80 | 81 | # Headers for GitHub API 82 | headers = { 83 | "Authorization": f'token {token}', 84 | "Accept": 'application/json' 85 | } 86 | 87 | # Check if the file exists and get its sha (necessary for updating the file) 88 | get_response = requests.get(url, headers=headers, timeout=30) 89 | sha = None 90 | if get_response.status_code == 200: 91 | sha = get_response.json()['sha'] 92 | elif get_response.status_code != 404: 93 | get_response.raise_for_status() 94 | 95 | # Encode the content to base64 96 | encoded_content = base64.b64encode(content.encode('utf-8')).decode('utf-8') 97 | 98 | # Prepare the payload for the GitHub API request 99 | data = { 100 | 'message': commit_message, 101 | 'content': encoded_content, 102 | 'sha': sha 103 | } 104 | if sha: 105 | data['sha'] = sha 106 | 107 | # Make the PUT request to GitHub API to create the file 108 | response = requests.put(url, headers=headers, data=json.dumps(data), timeout=30) 109 | 110 | # Check the response from GitHub 111 | if response.status_code in [200, 201]: 112 | logger.info(f'{path} successfully created/updated in GitHub repo.') 113 | else: 114 | logger.error(f'Failed to create/update {path}', response.json()) 115 | response.raise_for_status() 116 | 117 | def retrieve_module_definitions(knowledge_base_id, model_arn): 118 | query_text = f"Retrieve Terraform module sources for AWS services" 119 | try: 120 | response = bedrock_client.retrieve_and_generate( 121 | input={ 122 | 'text': query_text 123 | }, 124 | retrieveAndGenerateConfiguration={ 125 | 'type': 'KNOWLEDGE_BASE', 126 | 'knowledgeBaseConfiguration': { 127 | 'knowledgeBaseId': knowledge_base_id, 128 | 'modelArn': model_arn 129 | } 130 | } 131 | ) 132 | 133 | # Extracting the 
text from the response 134 | print("KB Response 1:", response) 135 | response_text = response['output']['text'] 136 | print("KB Response:", response_text) # Print the response text 137 | 138 | # Assuming the response text contains a JSON string with module definitions 139 | module_definitions = response_text #json.loads(response_text) 140 | return module_definitions 141 | 142 | except ClientError as e: 143 | print("An error occurred:", e) 144 | return {} 145 | except json.JSONDecodeError as json_err: 146 | print("JSON parsing error:", json_err) 147 | return {} 148 | 149 | def lambda_handler(event, context): 150 | # Print the entire event 151 | print("Received event: " + json.dumps(event)) 152 | try: 153 | properties = {prop["name"]: prop["value"] for prop in event["requestBody"]["content"]["application/json"]["properties"]} 154 | bucket_name = properties['diagramS3Bucket'] 155 | bucket_object = properties['diagramS3Key'] 156 | final_draft = properties['final_draft'] 157 | 158 | # GitHub information 159 | repo_owner = 'input-repo-owner-name' 160 | repo_name = 'input-repo-name' 161 | token = os.environ['GITHUB_TOKEN'] 162 | account_email = 'input-email' 163 | commit_message = 'Initial terraform code' 164 | 165 | #Knowledge base ID 166 | kb_id = os.environ['KNOWLEDGE_BASE_ID'] 167 | 168 | # Define the directory path and file names 169 | main_tf_path = f'test/main.tf' 170 | main_tf_path_url = f'https://github.com/{repo_owner}/{repo_name}/blob/main/{main_tf_path}' 171 | 172 | # Generate Terraform config using Bedrock model 173 | module_definitions = retrieve_module_definitions(kb_id, "arn:aws:bedrock:us-east-1::foundation-model/anthropic.claude-v2") 174 | 175 | # Construct the prompt with module definitions 176 | terraform_prompt = f"Please analyze the architecture diagram in order to create Infrastrucute-As-A-code. 
Please use {final_draft} and create the necessary IaC in Terraform: " 177 | #For CloudFormation : terraform_prompt = f"Please analyze the architecture diagram in order to create Infrastrucute-As-A-code. Please use {final_draft} and create the necessary IaC in CloudFormation: " 178 | terraform_prompt += ". Use the following module definitions whereever applicable: " 179 | terraform_prompt += json.dumps(module_definitions) 180 | terraform_prompt += " Give only Terraform code as the output response." 181 | #For CloudFormation : terraform_prompt += " Give only CloudFormation code as the output response." 182 | print("terraform_prompt", terraform_prompt) # Print the response text 183 | 184 | # Invoke the model or method to generate Terraform configuration based on the prompt 185 | main_tf_content = invoke_bedrock_model(terraform_prompt, bucket_name, bucket_object, final_draft) 186 | # Commit main.tf to GitHub 187 | create_and_commit_file(repo_owner, repo_name, main_tf_path, token, f'{commit_message}', main_tf_content) 188 | 189 | return { 190 | 'messageVersion': '1.0', 191 | 'response': { 192 | 'actionGroup': event['actionGroup'], 193 | 'apiPath': event['apiPath'], 194 | 'httpMethod': event['httpMethod'], 195 | 'httpStatusCode': 200, 196 | 'responseBody': { 197 | 'application/json': { 198 | 'body': json.dumps({ 199 | "message": f"Terraform code updated successfully", 200 | "main_tf_path": main_tf_path_url 201 | }) 202 | } 203 | }, 204 | 'sessionAttributes': event.get('sessionAttributes', {}), 205 | 'promptSessionAttributes': event.get('promptSessionAttributes', {}) 206 | } 207 | } 208 | 209 | except Exception as e: 210 | logger.error(f"An error occurred: {e}", exc_info=True) 211 | # Ensure that error responses also align with the OpenAPI schema 212 | return { 213 | 'statusCode': 500, 214 | 'headers': { 215 | 'Content-Type': 'application/json' 216 | }, 217 | 'body': json.dumps({ 218 | "error": "An error occurred during the process.", 219 | "details": str(e) 220 | }) 221 
| } 222 | -------------------------------------------------------------------------------- /Knowledge-Base/terraform-modules.json: -------------------------------------------------------------------------------- 1 | { 2 | "AWSModules": [ 3 | { 4 | "ModuleName": "vpc", 5 | "ModuleSource": "https://github.com/terraform-aws-modules/terraform-aws-vpc", 6 | "Description": "Creates VPC resources, providing a logically isolated section of the AWS cloud.", 7 | "RequiredParameters": ["cidr", "name"], 8 | "Dependencies": [], 9 | "BestPractices": "Define multiple subnets to segregate resources, ensure proper route tables and network ACLs are in place." 10 | }, 11 | { 12 | "ModuleName": "subnet", 13 | "ModuleSource": "https://github.com/terraform-aws-modules/terraform-aws-subnet", 14 | "Description": "Provisions subnets within a specified VPC.", 15 | "RequiredParameters": ["vpc_id", "cidr_block"], 16 | "Dependencies": ["vpc"], 17 | "BestPractices": "Use different subnets for different layers (e.g., public, private, database) for enhanced security." 18 | }, 19 | { 20 | "ModuleName": "autoscaling-group", 21 | "ModuleSource": "https://github.com/terraform-aws-modules/terraform-aws-autoscaling", 22 | "Description": "Manages autoscaling groups that automatically adjust the number of EC2 instances.", 23 | "RequiredParameters": ["launch_configuration", "min_size", "max_size"], 24 | "Dependencies": ["ec2-instance", "launch-template"], 25 | "BestPractices": "Implement scaling policies based on specific metrics to optimize cost and performance." 
26 | }, 27 | { 28 | "ModuleName": "application-load-balancer", 29 | "ModuleSource": "https://github.com/terraform-aws-modules/terraform-aws-alb", 30 | "Description": "Deploys an Application Load Balancer to distribute incoming traffic across multiple targets.", 31 | "RequiredParameters": ["load_balancer_type", "subnets"], 32 | "Dependencies": ["vpc", "subnet"], 33 | "BestPractices": "Enable access logs, use SSL/TLS certificates for secure connections." 34 | }, 35 | { 36 | "ModuleName": "target-group", 37 | "ModuleSource": "https://github.com/terraform-aws-modules/terraform-aws-target-group", 38 | "Description": "Sets up target groups for routing requests to one or more registered targets.", 39 | "RequiredParameters": ["vpc_id", "port", "protocol"], 40 | "Dependencies": ["vpc"], 41 | "BestPractices": "Use health checks to ensure traffic is routed to healthy instances." 42 | }, 43 | { 44 | "ModuleName": "launch-template", 45 | "ModuleSource": "https://github.com/terraform-aws-modules/terraform-aws-launch-template", 46 | "Description": "Configures launch templates for EC2 instances to provide launch parameters.", 47 | "RequiredParameters": ["name", "image_id", "instance_type"], 48 | "Dependencies": [], 49 | "BestPractices": "Regularly update the AMI and review security group settings." 50 | }, 51 | { 52 | "ModuleName": "secrets-manager", 53 | "ModuleSource": "https://github.com/terraform-aws-modules/terraform-aws-secrets-manager", 54 | "Description": "Manages AWS Secrets Manager to protect secrets needed to access your applications, services, and IT resources.", 55 | "RequiredParameters": ["name", "secret_string"], 56 | "Dependencies": [], 57 | "BestPractices": "Enable automatic rotation for secrets that are used frequently or are particularly sensitive." 
58 | }, 59 | { 60 | "ModuleName": "s3-bucket", 61 | "ModuleSource": "https://github.com/terraform-aws-modules/terraform-aws-s3-bucket", 62 | "Description": "Creates S3 buckets for object storage to store and protect any amount of data.", 63 | "RequiredParameters": ["bucket"], 64 | "Dependencies": [], 65 | "BestPractices": "Enable versioning and encryption, configure lifecycle policies appropriately." 66 | }, 67 | { 68 | "ModuleName": "ec2-instance", 69 | "ModuleSource": "https://github.com/terraform-aws-modules/terraform-aws-ec2-instance", 70 | "Description": "Provisions EC2 instances, allowing for customizable computing capacity.", 71 | "RequiredParameters": ["instance_type", "ami"], 72 | "Dependencies": ["vpc", "subnet"], 73 | "BestPractices": "Use the latest AMI for enhanced security, configure monitoring, and scalability features." 74 | }, 75 | { 76 | "ModuleName": "rds-instance", 77 | "ModuleSource": "https://github.com/terraform-aws-modules/terraform-aws-rds", 78 | "Description": "Deploys RDS instances for managed relational database services with various DB engines.", 79 | "RequiredParameters": ["instance_class", "engine", "username", "password"], 80 | "Dependencies": ["vpc", "db-subnet-group"], 81 | "BestPractices": "Encrypt database instances, ensure multi-AZ deployment for production workloads for high availability." 82 | } 83 | ] 84 | } 85 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT No Attribution 2 | 3 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of 6 | this software and associated documentation files (the "Software"), to deal in 7 | the Software without restriction, including without limitation the rights to 8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software is furnished to do so. 10 | 11 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 12 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 13 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 14 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 15 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 16 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | 18 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ### Using Agents for Amazon Bedrock to Interactively Generate Infrastructure as Code 2 | 3 | --- 4 | 5 | #### Overview 6 | This README documents two AWS Lambda functions designed for creating IaC from architecture diagrams along with a Knowledge Base (KB). This solution follows AWS security reference architecture(https://docs.aws.amazon.com/prescriptive-guidance/latest/security-reference-architecture/architecture.html), and can be utilized to create customized, compliant, Terraform and CloudFormation code. 7 | 8 | #### Solution Overview 9 | - The user utilizes the bedrock agent chat console to input the name of their S3 Bucket and the Object (key) name where the architecture diagram is stored. 10 | - After receiving these details, the Bedrock Agent forwards them to an action group that triggers an AWS Lambda function. 
This function retrieves the architecture diagram from the specified S3 bucket, analyzes it, and produces a summary of the diagram. It also generates questions regarding any missing components, dependencies, or parameter values that are needed to create IaC for AWS services. This detailed response is then sent back to the Bedrock Agent. 11 | - The Bedrock Agent displays the generated questions to the user and records their responses. After addressing all the questions, the agent provides a comprehensive summary of the analyzed infrastructure component configuration for user review. Users then have the opportunity to approve this configuration or suggest any necessary adjustments. Once the details are finalized, this information is passed to another action group, which activates an AWS Lambda function to proceed with the process. 12 | - The Lambda function processes the user's finalized inputs, utilizes a knowledge base with modules that adhere to company standards as a baseline, and generates the necessary Infrastructure as Code (IaC). Once generated, the IaC is automatically pushed to a designated GitHub repository. 13 | 14 | #### Analysis Query Generation Lambda (Triggered by action groups) 15 | - **Description**: Analyses the input architecture diagram and generates questions for missing components/dependencies. 16 | - **Dependencies**: Python 3.x, Resource based policy (Principal: bedrock.amazonaws.com, Action: lambda:InvokeFunction) 17 | - **Logical Flow**: 18 | 1. Receives an event with S3 bucket and Object name. 19 | 2. Fetch diagram from S3. 20 | 3. Analyses the diagram. 21 | 4. Creates a summary of the services present in diagram and also questions regarding missing components/dependencies. 22 | 5. Returns the information back to Bedrock agent. 23 | 24 | #### IaC Generation and Deployment Lambda (Triggered by action groups) 25 | - **Description**: Generates and commits Terraform configurations for AWS services to a GitHub repository. 
26 | - **Environment Variables**: 27 | - `GITHUB_TOKEN`: Token for GitHub API authentication. 28 | - `KNOWLEDGE_BASE_ID`: ID of created Knowledge base 29 | - **Dependencies**: Python 3.x, `boto3`, `requests`, `logging`, `base64` libraries. 30 | - **Logical Flow**: 31 | 1. Receives an event with S3 bucket, Object name, Final approved changes. 32 | 2. Fetch diagram from S3. 33 | 3. Analyses the diagram. 34 | 4. Retrieves modules from provided Knowledge base. 35 | 5. Creates IaC and publishes it to GitHub repository. 36 | 6. Returns success message with GitHub URLs or error information. 37 | 38 | #### Knowledge Base (KB) 39 | - **Description**: A structured repository containing AWS service and Terraform module information. 40 | - **Structure**: JSON format categorizing services and modules. 41 | - **Configure Knowledge Base**: Configuring a Knowledge Base (KB) enables your Bedrock agents to access a repository of information for AWS Terraform modules. Follow these steps to set up your KB: 42 | 1. Access the Amazon Bedrock Console: Log in and go directly to the 'Knowledge Base' section. This is your starting point for creating a new KB. 43 | 2. Name Your Knowledge Base: Choose a clear and descriptive name that reflects the purpose of your KB, such as "AWS Terraform Modules" 44 | 3. Select an IAM Role: Assign a pre-configured IAM role with the necessary permissions. 45 | 4. Define the Data Source: Upload a JSON file to an S3 bucket with encryption enabled for security. This file should contain a structured list of AWS services and Terraform modules. For the JSON structure, use the example provided in this repository 46 | 5. Choose the Default Embeddings Model: For most use cases, the Amazon Bedrock Titan G1 Embeddings - Text model will suffice. It's pre-configured and ready to use, simplifying the process. 47 | 6. Opt for the Managed Vector Store: Allow Amazon Bedrock to create and manage the vector store for you in Amazon OpenSearch Service. 48 | 7. 
Review and Finalize: Double-check all entered information for accuracy. Pay special attention to the S3 bucket URI and IAM role details. 49 | 50 | #### Updating and Maintenance 51 | - **Lambda Functions**: 52 | - Regularly update dependencies and environment variables. 53 | - Monitor Lambda logs for troubleshooting. 54 | - **Knowledge Base**: 55 | - Regularly update with new AWS services and modules. 56 | - Validate JSON structure after updates. 57 | 58 | ## License 59 | 60 | This library is licensed under the MIT-0 License. See the LICENSE file. 61 | --------------------------------------------------------------------------------