├── .config
├── .checkov.yml
├── .mdlrc
├── .terraform-docs.yaml
├── .tflint.hcl
├── .tfsec.yml
├── .tfsec
│ ├── launch_configuration_imdsv2_tfchecks.json
│ ├── launch_template_imdsv2_tfchecks.json
│ ├── no_launch_config_tfchecks.json
│ ├── sg_no_embedded_egress_rules_tfchecks.json
│ └── sg_no_embedded_ingress_rules_tfchecks.json
├── functional_tests
│ ├── post-entrypoint-helpers.sh
│ └── pre-entrypoint-helpers.sh
└── static_tests
│ ├── post-entrypoint-helpers.sh
│ └── pre-entrypoint-helpers.sh
├── .copier-answers.yml
├── .gitignore
├── .header.md
├── .pre-commit-config.yaml
├── .project_automation
├── deprecation
│ └── entrypoint.sh
├── deprovision
│ └── entrypoint.sh
├── functional_tests
│ ├── Dockerfile
│ ├── entrypoint.sh
│ └── functional_tests.sh
├── init
│ └── noop.sh
├── provision
│ └── entrypoint.sh
├── publication
│ ├── Dockerfile
│ └── entrypoint.sh
├── static_tests
│ ├── Dockerfile
│ ├── entrypoint.sh
│ └── static_tests.sh
└── update
│ └── noop.sh
├── .project_config.yml
├── CODEOWNERS
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── LICENSE
├── NOTICE.txt
├── README.md
├── VERSION
├── bda.tf
├── data-source.tf
├── data.tf
├── examples
├── .DS_Store
├── agent-collaborator
│ ├── .header.md
│ ├── README.md
│ ├── main.tf
│ └── providers.tf
├── agent-only
│ ├── .header.md
│ ├── README.md
│ ├── main.tf
│ └── providers.tf
├── agent-with-guardrails
│ ├── .header.md
│ ├── README.md
│ ├── main.tf
│ ├── providers.tf
│ └── variables.tf
├── agent-with-inference-profile
│ ├── .header.md
│ ├── README.md
│ ├── main.tf
│ └── providers.tf
├── agent-with-knowledge-base
│ ├── .header.md
│ ├── README.md
│ ├── main.tf
│ └── providers.tf
├── application-inference-profile
│ ├── .header.md
│ ├── README.md
│ ├── main.tf
│ └── providers.tf
├── bda
│ ├── .header.md
│ ├── README.md
│ ├── main.tf
│ └── providers.tf
├── kendra-kb
│ ├── .header.md
│ ├── README.md
│ ├── main.tf
│ └── providers.tf
├── knowledge-base-only
│ ├── .header.md
│ ├── README.md
│ ├── main.tf
│ └── providers.tf
└── prompt-management
│ ├── .header.md
│ ├── README.md
│ ├── main.tf
│ └── providers.tf
├── iam.tf
├── inference-profile.tf
├── kendra.tf
├── knowledge-base.tf
├── main.tf
├── opensearch.tf
├── outputs.tf
├── prompt.tf
├── providers.tf
├── tests
├── 01_mandatory.tftest.hcl
├── 02_guardrails.tftest.hcl
├── 03_prompt_management.tftest.hcl
├── 04_inference_profile.tftest.hcl
├── 05_agent_collaborator.tftest.hcl
├── 06_kendra_kb.tftest.hcl
├── 07_knowledge_base_only_tftest.hcl
└── 08_bda_tftest.hcl
└── variables.tf
/.config/.checkov.yml:
--------------------------------------------------------------------------------
1 | download-external-modules: False
2 | evaluate-variables: true
3 | directory:
4 | - ./
5 | framework:
6 | - terraform
7 | skip-check:
8 | - CKV2_GCP*
9 | - CKV_AZURE*
10 | - CKV2_AZURE*
11 | - CKV_TF_1 # default to Terraform registry instead of Git
12 | summary-position: bottom
13 | output: 'cli'
14 | compact: True
15 | quiet: True
--------------------------------------------------------------------------------
/.config/.mdlrc:
--------------------------------------------------------------------------------
1 | # Ignoring the following rules
2 | # MD007 Unordered list indentation
3 | # MD013 Line length
4 | # MD029 Ordered list item prefix
5 | rules "~MD007", "~MD013", "~MD029"
--------------------------------------------------------------------------------
/.config/.terraform-docs.yaml:
--------------------------------------------------------------------------------
1 | formatter: markdown
2 | header-from: .header.md
3 | settings:
4 | anchor: true
5 | color: true
6 | default: true
7 | escape: true
8 | html: true
9 | indent: 2
10 | required: true
11 | sensitive: true
12 | type: true
13 |
14 | sort:
15 | enabled: true
16 | by: required
17 |
18 | output:
19 | file: README.md
20 | mode: replace
21 |
--------------------------------------------------------------------------------
/.config/.tflint.hcl:
--------------------------------------------------------------------------------
1 | # https://github.com/terraform-linters/tflint/blob/master/docs/user-guide/module-inspection.md
2 | # borrowed & modified indefinitely from https://github.com/ksatirli/building-infrastructure-you-can-mostly-trust/blob/main/.tflint.hcl
3 |
4 | plugin "aws" {
5 | enabled = true
6 | version = "0.22.1"
7 | source = "github.com/terraform-linters/tflint-ruleset-aws"
8 | }
9 |
10 | config {
11 | call_module_type = "all"
12 | force = false
13 | }
14 |
15 | rule "terraform_required_providers" {
16 | enabled = true
17 | }
18 |
19 | rule "terraform_required_version" {
20 | enabled = true
21 | }
22 |
23 | rule "terraform_naming_convention" {
24 | enabled = true
25 | format = "snake_case"
26 | }
27 |
28 | rule "terraform_typed_variables" {
29 | enabled = true
30 | }
31 |
32 | rule "terraform_unused_declarations" {
33 | enabled = true
34 | }
35 |
36 | rule "terraform_comment_syntax" {
37 | enabled = true
38 | }
39 |
40 | rule "terraform_deprecated_index" {
41 | enabled = true
42 | }
43 |
44 | rule "terraform_deprecated_interpolation" {
45 | enabled = true
46 | }
47 |
48 | rule "terraform_documented_outputs" {
49 | enabled = true
50 | }
51 |
52 | rule "terraform_documented_variables" {
53 | enabled = true
54 | }
55 |
56 | rule "terraform_module_pinned_source" {
57 | enabled = true
58 | }
59 |
60 | rule "terraform_standard_module_structure" {
61 | enabled = true
62 | }
63 |
64 | rule "terraform_workspace_remote" {
65 | enabled = true
66 | }
67 |
--------------------------------------------------------------------------------
/.config/.tfsec.yml:
--------------------------------------------------------------------------------
1 | {
2 | "minimum_severity": "MEDIUM"
3 | }
--------------------------------------------------------------------------------
/.config/.tfsec/launch_configuration_imdsv2_tfchecks.json:
--------------------------------------------------------------------------------
1 | {
2 | "checks": [
3 | {
4 | "code": "CUS002",
5 | "description": "Check that IMDSv2 is required on EC2 instances created by this Launch Configuration",
6 | "impact": "Instance metadata service can be interacted with freely",
7 | "resolution": "Enable HTTP token requirement for IMDS",
8 | "requiredTypes": [
9 | "resource"
10 | ],
11 | "requiredLabels": [
12 | "aws_launch_configuration"
13 | ],
14 | "severity": "CRITICAL",
15 | "matchSpec": {
16 | "action": "isPresent",
17 | "name": "metadata_options",
18 | "subMatch": {
19 | "action": "and",
20 | "predicateMatchSpec": [
21 | {
22 | "action": "equals",
23 | "name": "http_tokens",
24 | "value": "required"
25 |
26 | }
27 | ]
28 | }
29 | },
30 |
31 | "errorMessage": "is missing `metadata_options` block - it is required with `http_tokens` set to `required` to make Instance Metadata Service more secure.",
32 | "relatedLinks": [
33 | "https://tfsec.dev/docs/aws/ec2/enforce-http-token-imds#aws/ec2",
34 | "https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/launch_configuration#metadata-options",
35 | "https://aws.amazon.com/blogs/security/defense-in-depth-open-firewalls-reverse-proxies-ssrf-vulnerabilities-ec2-instance-metadata-service"
36 | ]
37 | }
38 | ]
39 | }
40 |
--------------------------------------------------------------------------------
/.config/.tfsec/launch_template_imdsv2_tfchecks.json:
--------------------------------------------------------------------------------
1 | {
2 | "checks": [
3 | {
4 | "code": "CUS001",
5 | "description": "Check that IMDSv2 is required on EC2 instances created by this Launch Template",
6 | "impact": "Instance metadata service can be interacted with freely",
7 | "resolution": "Enable HTTP token requirement for IMDS",
8 | "requiredTypes": [
9 | "resource"
10 | ],
11 | "requiredLabels": [
12 | "aws_launch_template"
13 | ],
14 | "severity": "CRITICAL",
15 | "matchSpec": {
16 | "action": "isPresent",
17 | "name": "metadata_options",
18 | "subMatch": {
19 | "action": "and",
20 | "predicateMatchSpec": [
21 | {
22 | "action": "equals",
23 | "name": "http_tokens",
24 | "value": "required"
25 |
26 | }
27 | ]
28 | }
29 | },
30 |
31 | "errorMessage": "is missing `metadata_options` block - it is required with `http_tokens` set to `required` to make Instance Metadata Service more secure.",
32 | "relatedLinks": [
33 | "https://tfsec.dev/docs/aws/ec2/enforce-http-token-imds#aws/ec2",
34 | "https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/launch_template#metadata-options",
35 | "https://aws.amazon.com/blogs/security/defense-in-depth-open-firewalls-reverse-proxies-ssrf-vulnerabilities-ec2-instance-metadata-service"
36 | ]
37 | }
38 | ]
39 | }
40 |
--------------------------------------------------------------------------------
/.config/.tfsec/no_launch_config_tfchecks.json:
--------------------------------------------------------------------------------
1 | {
2 | "checks": [
3 | {
4 | "code": "CUS003",
5 | "description": "Use `aws_launch_template` over `aws_launch_configuration`",
6 | "impact": "Launch configurations do not support versioning",
7 | "resolution": "Convert resource type and attributes to `aws_launch_template`",
8 | "requiredTypes": [
9 | "resource"
10 | ],
11 | "requiredLabels": [
12 | "aws_launch_configuration"
13 | ],
14 | "severity": "MEDIUM",
15 | "matchSpec": {
16 | "action": "notPresent",
17 | "name": "image_id"
18 | },
19 |
20 | "errorMessage": "should be changed to `aws_launch_template` since the functionality is the same but templates can be versioned.",
21 | "relatedLinks": [
22 | "https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/launch_template",
23 | "https://aws.amazon.com/blogs/security/defense-in-depth-open-firewalls-reverse-proxies-ssrf-vulnerabilities-ec2-instance-metadata-service"
24 | ]
25 | }
26 | ]
27 | }
28 |
--------------------------------------------------------------------------------
/.config/.tfsec/sg_no_embedded_egress_rules_tfchecks.json:
--------------------------------------------------------------------------------
1 | {
2 | "checks": [
3 | {
4 | "code": "CUS005",
5 | "description": "Security group rules should be defined with `aws_security_group_rule` instead of embedded.",
6 | "impact": "Embedded security group rules can cause issues during configuration updates.",
7 | "resolution": "Move `egress` rules to `aws_security_group_rule` and attach to `aws_security_group`.",
8 | "requiredTypes": [
9 | "resource"
10 | ],
11 | "requiredLabels": [
12 | "aws_security_group"
13 | ],
14 | "severity": "MEDIUM",
15 | "matchSpec": {
16 | "action": "notPresent",
17 | "name": "egress"
18 | },
19 |
20 | "errorMessage": "`egress` rules should be moved to `aws_security_group_rule` and attached to `aws_security_group` instead of embedded.",
21 | "relatedLinks": [
22 | "https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/security_group_rule",
23 | "https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/security_group"
24 | ]
25 | }
26 | ]
27 | }
28 |
--------------------------------------------------------------------------------
/.config/.tfsec/sg_no_embedded_ingress_rules_tfchecks.json:
--------------------------------------------------------------------------------
1 | {
2 | "checks": [
3 | {
4 | "code": "CUS004",
5 | "description": "Security group rules should be defined with `aws_security_group_rule` instead of embedded.",
6 | "impact": "Embedded security group rules can cause issues during configuration updates.",
7 | "resolution": "Move `ingress` rules to `aws_security_group_rule` and attach to `aws_security_group`.",
8 | "requiredTypes": [
9 | "resource"
10 | ],
11 | "requiredLabels": [
12 | "aws_security_group"
13 | ],
14 | "severity": "MEDIUM",
15 | "matchSpec": {
16 | "action": "notPresent",
17 | "name": "ingress"
18 | },
19 |
20 | "errorMessage": "`ingress` rules should be moved to `aws_security_group_rule` and attached to `aws_security_group` instead of embedded.",
21 | "relatedLinks": [
22 | "https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/security_group_rule",
23 | "https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/security_group"
24 | ]
25 | }
26 | ]
27 | }
28 |
--------------------------------------------------------------------------------
/.config/functional_tests/post-entrypoint-helpers.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | ## NOTE: this script runs at the end of functional test
3 | ## Use this to load any configurations after the functional test
4 | ## TIPS: avoid modifying the .project_automation/functional_test/entrypoint.sh
5 | ## migrate any customization you did on entrypoint.sh to this helper script
6 | echo "Executing Post-Entrypoint Helpers"
--------------------------------------------------------------------------------
/.config/functional_tests/pre-entrypoint-helpers.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | ## NOTE: this script runs at the start of functional test
3 | ## use this to load any configuration before the functional test
4 | ## TIPS: avoid modifying the .project_automation/functional_test/entrypoint.sh
5 | ## migrate any customization you did on entrypoint.sh to this helper script
6 | echo "Executing Pre-Entrypoint Helpers"
--------------------------------------------------------------------------------
/.config/static_tests/post-entrypoint-helpers.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | ## NOTE: this script runs at the end of static test
3 | ## Use this to load any configurations after the static test
4 | ## TIPS: avoid modifying the .project_automation/static_test/entrypoint.sh
5 | ## migrate any customization you did on entrypoint.sh to this helper script
6 | echo "Executing Post-Entrypoint Helpers"
--------------------------------------------------------------------------------
/.config/static_tests/pre-entrypoint-helpers.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | ## NOTE: this script runs at the start of static test
3 | ## use this to load any configuration before the static test
4 | ## TIPS: avoid modifying the .project_automation/static_test/entrypoint.sh
5 | ## migrate any customization you did on entrypoint.sh to this helper script
6 | echo "Executing Pre-Entrypoint Helpers"
--------------------------------------------------------------------------------
/.copier-answers.yml:
--------------------------------------------------------------------------------
1 | # This file is auto-generated, changes will be overwritten
2 | _commit: v0.1.4
3 | _src_path: /task/68bbcffe-0834-11f0-993f-8654aa3f6246/projecttype
4 | starting_version: v0.0.0
5 | version_file: VERSION
6 |
7 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | ### OSX ###
2 | # General
3 | .DS_Store
4 | .AppleDouble
5 | .LSOverride
6 |
7 | # Icon must end with two \r
8 | Icon
9 |
10 |
11 | # Thumbnails
12 | ._*
13 |
14 | # Files that might appear in the root of a volume
15 | .DocumentRevisions-V100
16 | .fseventsd
17 | .Spotlight-V100
18 | .TemporaryItems
19 | .Trashes
20 | .VolumeIcon.icns
21 | .com.apple.timemachine.donotpresent
22 |
23 | # Directories potentially created on remote AFP share
24 | .AppleDB
25 | .AppleDesktop
26 | Network Trash Folder
27 | Temporary Items
28 | .apdisk
29 |
30 | build/
31 | plan.out
32 | plan.out.json
33 |
34 | # Local .terraform directories
35 | **/.terraform/*
36 |
37 | # .tfstate files
38 | *.tfstate
39 | *.tfstate.*
40 |
41 | # Crash log files
42 | crash.log
43 |
44 | # Exclude all .tfvars files, which are likely to contain sensitive data, such as
45 | # passwords, private keys, and other secrets. These should not be part of version
46 | # control as they are data points which are potentially sensitive and subject
47 | # to change depending on the environment.
48 | #
49 | *.tfvars
50 |
51 | # Ignore override files as they are usually used to override resources locally and so
52 | # are not checked in
53 | override.tf
54 | override.tf.json
55 | *_override.tf
56 | *_override.tf.json
57 |
58 | # Include override files you do wish to add to version control using negated pattern
59 | #
60 | # !example_override.tf
61 |
62 | # Include tfplan files to ignore the plan output of command: terraform plan -out=tfplan
63 | # example: *tfplan*
64 |
65 | # Ignore CLI configuration files
66 | .terraformrc
67 | terraform.rc
68 | .terraform.lock.hcl
69 |
70 | go.mod
71 | go.sum
72 |
73 | .venv
74 | .ruby-version
--------------------------------------------------------------------------------
/.header.md:
--------------------------------------------------------------------------------
1 | # Terraform Bedrock Module
2 |
3 | Amazon Bedrock is a fully managed service that offers a choice of foundation models (FMs) along with a broad set of capabilities for building generative AI applications.
4 |
5 | This module includes resources to deploy Bedrock features.
6 |
7 | You can control which features to use with your input variables. The resources are created based on boolean logic. The default behavior is to deploy a Bedrock Agent. To disable this behavior you can turn `create_agent` to false. To deploy other features such as guardrails or knowledge bases, you can use the input variables to set their respective create booleans to `true` and then pass in the appropriate values.
8 |
9 | The main features of the Bedrock module include:
10 |
11 | - Bedrock Agents
12 | - Agent Action Groups
13 | - Agent Alias
14 | - Agent Collaborators
15 | - Knowledge Bases
16 | - Vector Knowledge Base (OpenSearch Serverless, Neptune Analytics, MongoDB Atlas, Pinecone, RDS)
17 | - Kendra Knowledge Base
18 | - SQL Knowledge Base
19 | - Guardrails
20 | - Prompt Management
21 | - Prompt Versions
22 | - Application Inference Profiles
23 | - Custom Models
24 | - Bedrock Data Automation
25 |
26 | ## Bedrock Agents
27 |
28 | Enable generative AI applications to execute multistep tasks across company systems and data sources.
29 |
30 | ### Create an Agent
31 |
32 | The following example creates an Agent, where you must define at a minimum the desired foundation model and the instruction for the agent.
33 |
34 | ```hcl
35 | module "bedrock" {
36 | source = "aws-ia/bedrock/aws"
37 | version = "0.0.20"
38 | foundation_model = "anthropic.claude-v2"
39 | instruction = "You are an automotive assistant who can provide detailed information about cars to a customer."
40 | }
41 | ```
42 |
43 | See the additional input variables for deploying an Agent [here](https://github.com/aws-ia/terraform-aws-bedrock/blob/12b2681ce9a0ee5c7acd6d44289e5e1b98203a8a/variables.tf#L7)
44 |
45 | ### Action Groups
46 |
47 | An action group defines functions your agent can call. The functions are Lambda functions. The action group uses an OpenAPI schema to tell the agent what your functions do and how to call them. You can configure an action group by setting `create_ag` to `true` and passing in the appropriate input variables. You can see an example of an agent being deployed with an action group in [this samples repository](https://github.com/aws-samples/aws-generative-ai-terraform-samples/blob/main/samples/bedrock-agent/main.tf)
48 |
49 | ### Prepare the Agent
50 |
51 | The Agent constructs take an optional parameter shouldPrepareAgent to indicate that the Agent should be prepared after any updates to an agent, Knowledge Base association, or action group. This may increase the time to create and update those resources. By default, this value is true.
52 |
53 | ### Prompt Overrides
54 |
55 | Bedrock Agents allows you to customize the prompts and LLM configuration for its different steps. You can disable steps or create a new prompt template. Prompt templates can be inserted from plain text files.
56 |
57 | ### Agent Alias
58 |
59 | After you have sufficiently iterated on your working draft and are satisfied with the behavior of your agent, you can set it up for deployment and integration into your application by creating aliases of your agent.
60 |
61 | To deploy your agent, you need to create an alias. During alias creation, Amazon Bedrock automatically creates a version of your agent. The alias points to this newly created version. You can point the alias to a previously created version if necessary. You then configure your application to make API calls to that alias.
62 |
63 | By default, the Agent resource does not create any aliases, and you can use the 'DRAFT' version.
64 |
65 | You can create an Agent Alias by setting `create_agent_alias` to `true`.
66 |
67 | See the additional input variables for deploying an Agent Alias [here](https://github.com/aws-ia/terraform-aws-bedrock/blob/12b2681ce9a0ee5c7acd6d44289e5e1b98203a8a/variables.tf#L183)
68 |
69 | ### Agent Collaborators
70 |
71 | Multi-agent collaboration in Amazon Bedrock enables you to create teams of specialized agents that work together to solve complex tasks. You can designate a supervisor agent to coordinate with collaborator agents, each optimized for specific functions.
72 |
73 | To set up agent collaboration, you'll need:
74 |
75 | - A supervisor agent that coordinates the team
76 | - One or more collaborator agents with specialized capabilities
77 | - Collaboration instructions that define when each agent should be used
78 |
79 | Example configuration with a supervisor agent and a collaborator agent:
80 |
81 | ```hcl
82 | module "bedrock" {
83 | source = "aws-ia/bedrock/aws"
84 | version = "0.0.20"
85 | create_agent_alias = true
86 | foundation_model = "anthropic.claude-3-5-sonnet-20241022-v2:0"
87 | instruction = "You are an agent. Do what the supervisor tells you to do"
88 |
89 | # Setting up the collaboration
90 | create_collaborator = true
91 | collaboration_instruction = "Tell the other agent what to do"
92 | supervisor_model = "anthropic.claude-3-5-sonnet-20241022-v2:0"
93 | supervisor_instruction = "You are a supervisor who can provide detailed information about cars to an agent."
94 | }
95 | ```
96 |
97 | See the additional input variables for deploying Agent Collaborators [here](https://github.com/aws-ia/terraform-aws-bedrock/blob/12b2681ce9a0ee5c7acd6d44289e5e1b98203a8a/variables.tf#L221)
98 |
99 | ## Knowledge Bases
100 |
101 | With Knowledge Bases for Amazon Bedrock, you can give FMs and agents contextual information from your company’s private data sources for Retrieval Augmented Generation (RAG) to deliver more relevant, accurate, and customized responses.
102 |
103 | ### Create a Vector Knowledge Base
104 |
105 | A vector index on a vector store is required to create a vector Knowledge Base. This construct supports multiple vector store options:
106 |
107 | - **Amazon OpenSearch Serverless**: Default option with automatic collection and index creation
108 | - **Neptune Analytics**: For graph database integration
109 | - **MongoDB Atlas**: For MongoDB vector search
110 | - **Pinecone**: For Pinecone vector database
111 | - **Amazon RDS Aurora PostgreSQL**: For PostgreSQL with pgvector
112 |
113 | By default, this resource will create an OpenSearch Serverless vector collection and index for each Knowledge Base you create, but you can provide an existing collection to have more control. For other resources you need to have the vector stores already created and credentials stored in AWS Secrets Manager.
114 |
115 | The resource accepts an instruction prop that is provided to any Bedrock Agent it is associated with so the agent can decide when to query the Knowledge Base.
116 |
117 | To create different types of knowledge bases, set the appropriate variable to `true`:
118 |
119 | - OpenSearch Serverless: `create_default_kb = true`
120 | - Neptune Analytics: `create_neptune_analytics_config = true`
121 | - MongoDB Atlas: `create_mongo_config = true`
122 | - Pinecone: `create_pinecone_config = true`
123 | - RDS: `create_rds_config = true`
124 |
125 | #### Advanced Vector Knowledge Base Features
126 |
127 | This module supports advanced vector knowledge base features:
128 |
129 | - **Embedding Model Configuration**: Fine-tune your embedding model settings with:
130 | - `embedding_model_dimensions`: Specify vector dimensions explicitly
131 | - `embedding_data_type`: Define the data type for vectors
132 |
133 | - **Supplemental Data Storage**: Store additional data alongside vector embeddings:
134 | - `create_supplemental_data_storage = true`
135 | - `supplemental_data_s3_uri`: S3 URI for supplemental data storage
136 |
137 | Example default Opensearch Serverless Agent with Knowledge Base:
138 |
139 | ```hcl
140 | provider "opensearch" {
141 | url = module.bedrock.default_collection.collection_endpoint
142 | healthcheck = false
143 | }
144 |
145 | module "bedrock" {
146 | source = "aws-ia/bedrock/aws"
147 | version = "0.0.20"
148 | create_default_kb = true
149 | create_s3_data_source = true
150 | foundation_model = "anthropic.claude-v2"
151 | instruction = "You are an automotive assistant who can provide detailed information about cars to a customer."
152 | }
153 | ```
154 |
155 | Example using Neptune Analytics with advanced features:
156 |
157 | ```hcl
158 | module "bedrock" {
159 | source = "aws-ia/bedrock/aws"
160 | version = "0.0.20"
161 |
162 | # Create Neptune Analytics knowledge base
163 | create_neptune_analytics_config = true
164 | graph_arn = "arn:aws:neptune-graph:us-east-1:123456789012:graph/my-graph"
165 |
166 | # Advanced embedding model configuration
167 | embedding_model_dimensions = 1024
168 | embedding_data_type = "FLOAT32"
169 |
170 | # Supplemental data storage
171 | create_supplemental_data_storage = true
172 | supplemental_data_s3_uri = "s3://my-bucket/supplemental-data/"
173 |
174 | # Agent configuration
175 | foundation_model = "anthropic.claude-3-sonnet-20240229-v1:0"
176 | instruction = "You are a graph database expert who can analyze relationships in data."
177 | }
178 | ```
179 |
180 | See the additional input variables for deploying Knowledge Bases [here](https://github.com/aws-ia/terraform-aws-bedrock/blob/12b2681ce9a0ee5c7acd6d44289e5e1b98203a8a/variables.tf#L693)
181 |
182 | ### Vector Knowledge Base - Data Sources
183 |
184 | Data sources are the various repositories or systems from which information is extracted and ingested into the knowledge base. These sources provide the raw content that will be processed, indexed, and made available for querying within the knowledge base system. Data sources can include various types of systems such as document management systems, databases, file storage systems, and content management platforms. Supported Data Sources include Amazon S3 buckets, Web Crawlers, SharePoint sites, Salesforce instances, and Confluence spaces.
185 |
186 | - Amazon S3. You can either create a new data source by passing in the existing data source arn to the input variable `kb_s3_data_source` or create a new one by setting `create_s3_data_source` to true.
187 |
188 | - Web Crawler. You can create a new web crawler data source by setting the `create_web_crawler` input variable to true and passing in the necessary variables for urls, scope, etc.
189 |
190 | - SharePoint. You can create a new SharePoint data source by setting the `create_sharepoint` input variable to true and passing in the necessary variables for site urls, filter patterns, etc.
191 |
192 | - Salesforce. You can create a new Salesforce data source by setting the `create_salesforce` input variable to true and passing in the necessary variables for site urls, filter patterns, etc.
193 |
194 | - Confluence. You can create a new Confluence data source by setting the `create_confluence` input variable to true and passing in the necessary variables for site urls, filter patterns, etc.
195 |
196 | See the additional input variables for deploying Knowledge Base Data Sources [here](https://github.com/aws-ia/terraform-aws-bedrock/blob/12b2681ce9a0ee5c7acd6d44289e5e1b98203a8a/variables.tf#L423)
197 |
198 | ### Create a Kendra Knowledge Base
199 |
200 | With Amazon Bedrock Knowledge Bases, you can build a knowledge base from an Amazon Kendra GenAI index to create more sophisticated and accurate Retrieval Augmented Generation (RAG)-powered digital assistants. By combining an Amazon Kendra GenAI index with Amazon Bedrock Knowledge Bases, you can:
201 |
202 | - Reuse your indexed content across multiple Amazon Bedrock applications without rebuilding indexes or re-ingesting data.
203 | - Leverage the advanced GenAI capabilities of Amazon Bedrock while benefiting from the high-accuracy information retrieval of Amazon Kendra.
204 | - Customize your digital assistant's behavior using the tools of Amazon Bedrock while maintaining the semantic accuracy of an Amazon Kendra GenAI index.
205 |
206 | Example Kendra Knowledge Base:
207 |
208 | ```
209 | module "bedrock" {
210 | source = "aws-ia/bedrock/aws"
211 | version = "0.0.20"
212 | create_kendra_config = true
213 | create_kendra_s3_data_source = true
214 | create_agent = false
215 | }
216 | ```
217 |
218 | See the additional input variables for deploying a Kendra Knowledge Base [here](https://github.com/aws-ia/terraform-aws-bedrock/blob/12b2681ce9a0ee5c7acd6d44289e5e1b98203a8a/variables.tf#L1199)
219 |
220 | ### Create a SQL Knowledge Base
221 |
222 | Amazon Bedrock Knowledge Bases provides direct integration with structured data stores, allowing natural language queries to be automatically converted into SQL queries for data retrieval. This feature enables you to query your structured data sources without the need for vector embeddings or data preprocessing.
223 |
224 | - Amazon Bedrock Knowledge Bases analyzes:
225 | - Query patterns
226 | - Query history
227 | - Schema metadata
228 | - Converts natural language queries into SQL
229 | - Retrieves relevant information directly from supported data sources
230 |
231 | See the additional input variables for deploying a SQL Knowledge Base [here](https://github.com/aws-ia/terraform-aws-bedrock/blob/12b2681ce9a0ee5c7acd6d44289e5e1b98203a8a/variables.tf#L1398)
232 |
233 | ### Using an Existing Knowledge Base
234 |
235 | If you already have an Amazon Bedrock Knowledge Base created and want to attach it to a Bedrock Agent using this module, you can configure the module to reference the existing resource instead of creating a new one.
236 |
237 | #### Configuration
238 |
239 | To use an existing Knowledge Base:
240 |
241 | ```hcl
242 | module "bedrock_agent" {
243 | source = "aws-ia/bedrock/aws"
244 | version = "0.0.20"
245 | # ID of the existing Knowledge Base
246 | existing_kb = "kb-abc123" # Required
247 | kb_state = "ENABLED"
248 | # ... other required variables
249 | }
250 | ```
251 |
252 | #### Notes
253 |
254 | - existing_kb: The Knowledge Base ID (e.g., kb-abc123) that you want to attach to the Bedrock Agent.
255 |
256 | - kb_state: Set this to the current state of the KB (typically "ENABLED").
257 |
258 | ## Bedrock Guardrails
259 |
260 | Amazon Bedrock's Guardrails feature enables you to implement robust governance and control mechanisms for your generative AI applications, ensuring alignment with your specific use cases and responsible AI policies. Guardrails empowers you to create multiple tailored policy configurations, each designed to address the unique requirements and constraints of different use cases. These policy configurations can then be seamlessly applied across multiple foundation models (FMs) and Agents, ensuring a consistent user experience and standardizing safety, security, and privacy controls throughout your generative AI ecosystem.
261 |
262 | With Guardrails, you can define and enforce granular, customizable policies to precisely govern the behavior of your generative AI applications. You can configure the following policies in a guardrail to avoid undesirable and harmful content and remove sensitive information for privacy protection.
263 |
264 | Content filters – Adjust filter strengths to block input prompts or model responses containing harmful content.
265 |
266 | Denied topics – Define a set of topics that are undesirable in the context of your application. These topics will be blocked if detected in user queries or model responses.
267 |
268 | Word filters – Configure filters to block undesirable words, phrases, and profanity. Such words can include offensive terms, competitor names etc.
269 |
270 | Sensitive information filters – Block or mask sensitive information such as personally identifiable information (PII) or custom regex in user inputs and model responses.
271 |
272 | You can create a Guardrail by setting `create_guardrail` to true and passing in the appropriate input variables:
273 |
274 | ```hcl
275 | module "bedrock" {
276 | source = "aws-ia/bedrock/aws"
277 | version = "0.0.20"
278 | create_guardrail = true
279 | blocked_input = "I can provide general info about services, but can't fully address your request here. For personalized help or detailed questions, please contact our customer service team directly. For security reasons, avoid sharing sensitive information through this channel. If you have a general product question, feel free to ask without including personal details."
280 | blocked_output = "I can provide general info about services, but can't fully address your request here. For personalized help or detailed questions, please contact our customer service team directly. For security reasons, avoid sharing sensitive information through this channel. If you have a general product question, feel free to ask without including personal details."
281 | filters_config = [
282 | {
283 | input_strength = "MEDIUM"
284 | output_strength = "MEDIUM"
285 | type = "HATE"
286 | },
287 | {
288 | input_strength = "HIGH"
289 | output_strength = "HIGH"
290 | type = "VIOLENCE"
291 | }
292 | ]
293 | pii_entities_config = [
294 | {
295 | action = "BLOCK"
296 | type = "NAME"
297 | },
298 | {
299 | action = "BLOCK"
300 | type = "DRIVER_ID"
301 | },
302 | {
303 | action = "ANONYMIZE"
304 | type = "USERNAME"
305 | },
306 | ]
307 | regexes_config = [{
308 | action = "BLOCK"
309 | description = "example regex"
310 | name = "regex_example"
311 | pattern = "^\\d{3}-\\d{2}-\\d{4}$"
312 | }]
313 | managed_word_lists_config = [{
314 | type = "PROFANITY"
315 | }]
316 | words_config = [{
317 | text = "HATE"
318 | }]
319 | topics_config = [{
320 | name = "investment_topic"
321 | examples = ["Where should I invest my money?"]
322 | type = "DENY"
323 | definition = "Investment advice refers to inquiries, guidance, or recommendations regarding the management or allocation of funds or assets with the goal of generating returns."
324 | }]
325 | foundation_model = "anthropic.claude-v2"
326 | instruction = "You are an automotive assistant who can provide detailed information about cars to a customer."
327 | }
328 | ```
329 |
330 | See the additional input variables for deploying guardrails [here](https://github.com/aws-ia/terraform-aws-bedrock/blob/12b2681ce9a0ee5c7acd6d44289e5e1b98203a8a/variables.tf#L317)
331 |
332 | ## Prompt Management
333 |
334 | Amazon Bedrock provides the ability to create and save prompts using Prompt management so that you can save time by applying the same prompt to different workflows. You can include variables in the prompt so that you can adjust the prompt for different use cases. To create a prompt, you set the `create_prompt` variable to `true` and pass in the appropriate values.
335 |
336 | ### Prompt Variants
337 |
338 | Prompt variants in the context of Amazon Bedrock refer to alternative configurations of a prompt, including its message or the model and inference configurations used. Prompt variants allow you to create different versions of a prompt, test them, and save the variant that works best for your use case. You can add prompt variants to a prompt by passing in the values for the `variants_list` variable:
339 |
340 | ```hcl
341 | variants_list = [
342 | {
343 | name = "variant-example"
344 | template_type = "TEXT"
345 | model_id = "amazon.titan-text-express-v1"
346 | inference_configuration = {
347 | text = {
348 | temperature = 1
349 | top_p = 0.9900000095367432
350 | max_tokens = 300
351 | stop_sequences = ["User:"]
352 | top_k = 250
353 | }
354 | }
355 | template_configuration = {
356 | text = {
357 | input_variables = [
358 | {
359 | name = "genre"
360 | }
361 | ]
362 | text = "Make me a {{genre}} playlist consisting of 20 songs."
363 | }
364 | }
365 | }
366 | ]
367 | ```
368 |
369 | ### Prompt Version
370 |
371 | A prompt version is a snapshot of a prompt at a specific point in time that you create when you are satisfied with a set of configurations. Versions allow you to deploy your prompt and easily switch between different configurations for your prompt and update your application with the most appropriate version for your use-case.
372 |
373 | You can create a Prompt version by setting `create_prompt_version` to `true` and adding an optional `prompt_version_description` and optional `prompt_version_tags`.
374 |
375 | Creating a prompt with a prompt version would look like:
376 |
377 | ```hcl
378 | module "bedrock" {
379 | source = "aws-ia/bedrock/aws"
380 | version = "0.0.20"
381 | create_agent = false
382 |
383 | # Prompt Management
384 | prompt_name = "prompt"
385 | default_variant = "variant-example"
386 | create_prompt = true
387 | create_prompt_version = true
388 | prompt_version_description = "Example prompt version"
389 | variants_list = [
390 | {
391 | name = "variant-example"
392 | template_type = "TEXT"
393 | model_id = "amazon.titan-text-express-v1"
394 | inference_configuration = {
395 | text = {
396 | temperature = 1
397 | top_p = 0.9900000095367432
398 | max_tokens = 300
399 | stop_sequences = ["User:"]
400 | top_k = 250
401 | }
402 | }
403 | template_configuration = {
404 | text = {
405 | input_variables = [
406 | {
407 | name = "genre"
408 | }
409 | ]
410 | text = "Make me a {{genre}} playlist consisting of 20 songs."
411 | }
412 | }
413 | }
414 |
415 | ]
416 |
417 | }
418 | ```
419 |
420 | See the additional input variables for deploying prompt management [here](https://github.com/aws-ia/terraform-aws-bedrock/blob/12b2681ce9a0ee5c7acd6d44289e5e1b98203a8a/variables.tf#L971)
421 |
422 | ## Application Inference Profile
423 |
424 | You can create an application inference profile with one or more Regions to track usage and costs when invoking a model.
425 |
426 | To create an application inference profile for one Region, specify a foundation model. Usage and costs for requests made to that Region with that model will be tracked.
427 |
428 | To create an application inference profile for multiple Regions, specify a cross region (system-defined) inference profile. The inference profile will route requests to the Regions defined in the cross region (system-defined) inference profile that you choose. Usage and costs for requests made to the Regions in the inference profile will be tracked. You can find the system defined inference profiles by navigating to your console (Amazon Bedrock -> Cross-region inference).
429 |
430 | ```hcl
431 | # Get current AWS account ID
432 | data "aws_caller_identity" "current" {}
433 |
434 | # Get current AWS region
435 | data "aws_region" "current" {}
436 |
437 | module "bedrock" {
438 | source = "aws-ia/bedrock/aws"
439 | version = "0.0.20"
440 | create_agent = false
441 |
442 | # Application Inference Profile
443 | create_app_inference_profile = true
444 | app_inference_profile_model_source = "arn:aws:bedrock:${data.aws_region.current.name}::foundation-model/anthropic.claude-3-sonnet-20240229-v1:0"
445 | }
446 | ```
447 |
448 | See the additional input variables for deploying application inference profiles [here](https://github.com/aws-ia/terraform-aws-bedrock/blob/12b2681ce9a0ee5c7acd6d44289e5e1b98203a8a/variables.tf#L1057)
449 |
450 | ## Custom Models
451 |
452 | Model customization is the process of providing training data to a base model in order to improve its performance for specific use-cases. Custom models help improve performance on domain-specific tasks while maintaining the base capabilities of the foundation model. With custom models, you can do a continued pre-training or fine-tuning job which is started when the Terraform resource is created.
453 |
454 | To create a custom model, set the `create_custom_model` variable to `true` and pass in the necessary values for custom models:
455 |
456 | - `custom_model_id`
457 | - Defaults to `amazon.titan-text-express-v1`
458 | - `custom_model_name`
459 | - Defaults to `custom-model`
460 | - `custom_model_job_name`
461 | - Defaults to `custom-model-job`
462 | - `customization_type`
463 | - Defaults to `FINE_TUNING` but the other valid value is `CONTINUED_PRE_TRAINING`
464 | - `custom_model_hyperparameters`
465 | - Defaults to:
466 | {
467 | "epochCount" = "2",
468 | "batchSize" = "1",
469 | "learningRate" = "0.00001",
470 | "learningRateWarmupSteps" = "10"
471 | }
472 | - `custom_model_training_uri`
473 |
474 | See the additional input variables for deploying custom models [here](https://github.com/aws-ia/terraform-aws-bedrock/blob/12b2681ce9a0ee5c7acd6d44289e5e1b98203a8a/variables.tf#L1127)
475 |
476 | ## Bedrock Data Automation (BDA)
477 |
478 | ### BDA Project
479 |
480 | Amazon Bedrock Data Automation (BDA) helps you extract information and insights from your documents, images, videos, and audio files using foundation models (FMs). BDA provides both standard output and custom output through blueprints.
481 |
482 | BDA supports different extraction capabilities for each file type:
483 |
484 | - Documents
485 | - Text extraction with different granularity levels (word, line, page)
486 | - Bounding box information
487 | - Custom output formats
488 | - Images
489 | - Object and scene detection
490 | - Text extraction
491 | - Bounding box information
492 | - Custom generative fields
493 | - Video
494 | - Object and action detection
495 | - Scene analysis
496 | - Bounding box tracking
497 | - Custom generative fields
498 | - Audio
499 | - Speaker identification
500 | - Sentiment analysis
501 | - Language detection
502 | - Transcription
503 | - Custom generative fields
504 |
505 | ### Standard Output
506 |
507 | Standard output is pre-defined extraction managed by Bedrock. It can extract information from documents, images, videos, and audio files. You can configure what information to extract for each file type.
508 |
509 | ```hcl
510 | module "bedrock" {
511 | source = "aws-ia/bedrock/aws"
512 | version = "0.0.20"
513 | create_agent = false
514 | create_bda = true
515 |
516 | bda_standard_output_configuration = {
517 | document = {
518 | extraction = {
519 | bounding_box = {
520 | state = "ENABLED"
521 | }
522 | granularity = {
523 | types = ["WORD", "PAGE"]
524 | }
525 | }
526 | generative_field = {
527 | state = "ENABLED"
528 | }
529 | output_format = {
530 | additional_file_format = {
531 | state = "ENABLED"
532 | }
533 | text_format = {
534 | types = ["PLAIN_TEXT"]
535 | }
536 | }
537 | }
538 | }
539 | }
540 | ```
541 |
542 | ### Blueprints
543 |
544 | Blueprints allow you to define custom extraction schemas for your specific use cases. You can specify what information to extract and how to structure the output.
545 |
546 | ```hcl
547 | module "bedrock" {
548 | source = "aws-ia/bedrock/aws"
549 | version = "0.0.20"
550 | create_agent = false
551 |
552 | create_blueprint = true
553 | blueprint_name = "advertisement-analysis"
554 | blueprint_schema = jsonencode({
555 | "$schema" = "http://json-schema.org/draft-07/schema#"
556 | description = "Extract key information from advertisement images"
557 | class = "advertisement image"
558 | type = "object"
559 | properties = {
560 | image_sentiment = {
561 | type = "string"
562 | inferenceType = "explicit"
563 | instruction = "What is the overall sentiment of the image?"
564 | }
565 | # Additional properties as needed
566 | }
567 | })
568 | }
569 | ```
570 |
571 | See the additional input variables for deploying BDA projects and blueprints [here](https://github.com/aws-ia/terraform-aws-bedrock/blob/12b2681ce9a0ee5c7acd6d44289e5e1b98203a8a/variables.tf#L1530)
572 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | fail_fast: false
3 | minimum_pre_commit_version: "2.6.0"
4 | repos:
5 | -
6 | repo: https://github.com/terraform-docs/terraform-docs
7 | # To update run:
8 | # pre-commit autoupdate --freeze
9 | rev: 212db41760d7fc45d736d5eb94a483d0d2a12049 # frozen: v0.16.0
10 | hooks:
11 | - id: terraform-docs-go
12 | args:
13 | - "--config=.config/.terraform-docs.yaml"
14 | - "--lockfile=false"
15 | - "--recursive"
16 | - "--recursive-path=examples/"
17 | - "./"
--------------------------------------------------------------------------------
/.project_automation/deprecation/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash -ex
2 |
3 | ## NOTE: paths may differ when running in a managed task. To ensure behavior is consistent between
4 | # managed and local tasks always use these variables for the project and project type path
5 | PROJECT_PATH=${BASE_PATH}/project
6 | PROJECT_TYPE_PATH=${BASE_PATH}/projecttype
7 |
--------------------------------------------------------------------------------
/.project_automation/deprovision/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash -ex
2 |
3 | ## NOTE: paths may differ when running in a managed task. To ensure behavior is consistent between
4 | # managed and local tasks always use these variables for the project and project type path
5 | PROJECT_PATH=${BASE_PATH}/project
6 | PROJECT_TYPE_PATH=${BASE_PATH}/projecttype
7 |
--------------------------------------------------------------------------------
/.project_automation/functional_tests/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM public.ecr.aws/codebuild/amazonlinux2-x86_64-standard:4.0
2 | ENV TERRAFORM_VERSION=1.7.4
3 | RUN cd /tmp && \
4 | wget --quiet https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip && \
5 | unzip terraform_${TERRAFORM_VERSION}_linux_amd64.zip -d /usr/local/bin && chmod 755 /usr/local/bin/terraform
--------------------------------------------------------------------------------
/.project_automation/functional_tests/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | ## WARNING: DO NOT modify the content of entrypoint.sh
4 | # Use ./config/functional_tests/pre-entrypoint-helpers.sh or ./config/functional_tests/post-entrypoint-helpers.sh
5 | # to load any customizations or additional configurations
6 |
7 | ## NOTE: paths may differ when running in a managed task. To ensure behavior is consistent between
8 | # managed and local tasks always use these variables for the project and project type path
9 | PROJECT_PATH=${BASE_PATH}/project
10 | PROJECT_TYPE_PATH=${BASE_PATH}/projecttype
11 |
12 | #********** helper functions *************
13 | pre_entrypoint() {
14 | if [ -f ${PROJECT_PATH}/.config/functional_tests/pre-entrypoint-helpers.sh ]; then
15 | echo "Pre-entrypoint helper found"
16 | source ${PROJECT_PATH}/.config/functional_tests/pre-entrypoint-helpers.sh
17 | echo "Pre-entrypoint helper loaded"
18 | else
19 | echo "Pre-entrypoint helper not found - skipped"
20 | fi
21 | }
22 | post_entrypoint() {
23 | if [ -f ${PROJECT_PATH}/.config/functional_tests/post-entrypoint-helpers.sh ]; then
24 | echo "Post-entrypoint helper found"
25 | source ${PROJECT_PATH}/.config/functional_tests/post-entrypoint-helpers.sh
26 | echo "Post-entrypoint helper loaded"
27 | else
28 | echo "Post-entrypoint helper not found - skipped"
29 | fi
30 | }
31 |
32 | #********** Pre-entrypoint helper *************
33 | pre_entrypoint
34 |
35 | #********** Functional Test *************
36 | /bin/bash ${PROJECT_PATH}/.project_automation/functional_tests/functional_tests.sh
37 | if [ $? -eq 0 ]
38 | then
39 | echo "Functional test completed"
40 | EXIT_CODE=0
41 | else
42 | echo "Functional test failed"
43 | EXIT_CODE=1
44 | fi
45 |
46 | #********** Post-entrypoint helper *************
47 | post_entrypoint
48 |
49 | #********** Exit Code *************
50 | exit $EXIT_CODE
--------------------------------------------------------------------------------
/.project_automation/functional_tests/functional_tests.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | ## NOTE: paths may differ when running in a managed task. To ensure behavior is consistent between
4 | # managed and local tasks always use these variables for the project and project type path
5 | PROJECT_PATH=${BASE_PATH}/project
6 | PROJECT_TYPE_PATH=${BASE_PATH}/projecttype
7 |
8 | echo "Starting Functional Tests"
9 | cd ${PROJECT_PATH}
10 | echo "---------------------------------------------------------------------------"
11 | git log --max-count=1
12 | echo "---------------------------------------------------------------------------"
13 |
14 |
15 | #********** Terraform Test **********
16 |
17 | # Look up the mandatory test file
18 | MANDATORY_TEST_PATH="./tests/01_mandatory.tftest.hcl"
19 | if test -f ${MANDATORY_TEST_PATH}; then
20 | echo "File ${MANDATORY_TEST_PATH} is found, resuming test"
21 | # Run Terraform test
22 | terraform init -no-color
23 | terraform test -no-color
24 | else
25 | echo "File ${MANDATORY_TEST_PATH} not found. You must include at least one test run in file ${MANDATORY_TEST_PATH}"
26 | (exit 1)
27 | fi
28 |
29 | if [ $? -eq 0 ]; then
30 | echo "Terraform Test Successful"
31 | else
32 | echo "Terraform Test Failed"
33 | exit 1
34 | fi
35 |
36 | echo "End of Functional Tests"
--------------------------------------------------------------------------------
/.project_automation/init/noop.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | echo "Not Supported!"
3 |
--------------------------------------------------------------------------------
/.project_automation/provision/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash -ex
2 |
3 | ## NOTE: paths may differ when running in a managed task. To ensure behavior is consistent between
4 | # managed and local tasks always use these variables for the project and project type path
5 | PROJECT_PATH=${BASE_PATH}/project
6 | PROJECT_TYPE_PATH=${BASE_PATH}/projecttype
7 |
--------------------------------------------------------------------------------
/.project_automation/publication/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM public.ecr.aws/codebuild/amazonlinux2-x86_64-standard:4.0
2 | RUN yum install -y yum-utils && yum-config-manager --add-repo https://cli.github.com/packages/rpm/gh-cli.repo && yum install -y gh
3 | RUN pip install awscli
4 |
--------------------------------------------------------------------------------
/.project_automation/publication/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash -ex
2 |
3 | ## NOTE: paths may differ when running in a managed task. To ensure behavior is consistent between
4 | # managed and local tasks always use these variables for the project and project type path
5 | PROJECT_PATH=${BASE_PATH}/project
6 | PROJECT_TYPE_PATH=${BASE_PATH}/projecttype
7 |
8 | echo "[STAGE: Publication]"
9 | VERSION=$(cat VERSION)
10 | echo $VERSION
11 | BRANCH=main
12 | EXISTING_GIT_VERSION="$(git tag -l)"
13 |
14 | if [[ $(echo $EXISTING_GIT_VERSION | grep $VERSION) ]]
15 | then
16 | echo "Version already exists - skipping release creation. Hint: bump the version in the VERSION file"
17 | else
18 | echo "creating new version"
19 | gh release create ${VERSION} --target ${BRANCH} --generate-notes
20 | fi
21 |
--------------------------------------------------------------------------------
/.project_automation/static_tests/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM public.ecr.aws/codebuild/amazonlinux2-x86_64-standard:4.0
2 | ENV TERRAFORM_VERSION=1.7.4
3 | RUN cd /tmp && \
4 | wget --quiet https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip && \
5 | unzip terraform_${TERRAFORM_VERSION}_linux_amd64.zip -d /usr/local/bin && chmod 755 /usr/local/bin/terraform
6 |
7 | ENV TFLINT_VERSION=v0.45.0
8 |
9 | RUN cd /tmp && \
10 | wget --quiet https://github.com/terraform-linters/tflint/releases/download/${TFLINT_VERSION}/tflint_linux_amd64.zip && \
11 | unzip tflint_linux_amd64.zip -d /usr/local/bin && chmod 755 /usr/local/bin/tflint
12 |
13 | RUN mkdir -p ~/.tflint.d/plugins
14 |
15 | ENV TFLINT_VERSION=v0.22.1
16 |
17 | RUN wget --quiet -O /tmp/tflint-ruleset-aws.zip https://github.com/terraform-linters/tflint-ruleset-aws/releases/download/${TFLINT_VERSION}/tflint-ruleset-aws_linux_amd64.zip \
18 | && unzip /tmp/tflint-ruleset-aws.zip -d ~/.tflint.d/plugins \
19 | && rm /tmp/tflint-ruleset-aws.zip
20 |
21 | RUN curl -s https://raw.githubusercontent.com/aquasecurity/tfsec/master/scripts/install_linux.sh | bash
22 |
23 | RUN pip3 install checkov
24 |
25 | RUN gem install mdl
26 |
27 | ENV TERRAFORM_DOCS_VERSION=v0.16.0
28 | RUN wget --quiet https://github.com/terraform-docs/terraform-docs/releases/download/${TERRAFORM_DOCS_VERSION}/terraform-docs-${TERRAFORM_DOCS_VERSION}-linux-amd64.tar.gz && \
29 | tar -C /usr/local/bin -xzf terraform-docs-${TERRAFORM_DOCS_VERSION}-linux-amd64.tar.gz && chmod +x /usr/local/bin/terraform-docs
--------------------------------------------------------------------------------
/.project_automation/static_tests/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | ## WARNING: DO NOT modify the content of entrypoint.sh
4 | # Use ./config/static_tests/pre-entrypoint-helpers.sh or ./config/static_tests/post-entrypoint-helpers.sh
5 | # to load any customizations or additional configurations
6 |
7 | ## NOTE: paths may differ when running in a managed task. To ensure behavior is consistent between
8 | # managed and local tasks always use these variables for the project and project type path
9 | PROJECT_PATH=${BASE_PATH}/project
10 | PROJECT_TYPE_PATH=${BASE_PATH}/projecttype
11 |
12 | #********** helper functions *************
13 | pre_entrypoint() {
14 | if [ -f ${PROJECT_PATH}/.config/static_tests/pre-entrypoint-helpers.sh ]; then
15 | echo "Pre-entrypoint helper found"
16 | source ${PROJECT_PATH}/.config/static_tests/pre-entrypoint-helpers.sh
17 | echo "Pre-entrypoint helper loaded"
18 | else
19 | echo "Pre-entrypoint helper not found - skipped"
20 | fi
21 | }
22 | post_entrypoint() {
23 | if [ -f ${PROJECT_PATH}/.config/static_tests/post-entrypoint-helpers.sh ]; then
24 | echo "Post-entrypoint helper found"
25 | source ${PROJECT_PATH}/.config/static_tests/post-entrypoint-helpers.sh
26 | echo "Post-entrypoint helper loaded"
27 | else
28 | echo "Post-entrypoint helper not found - skipped"
29 | fi
30 | }
31 |
32 | #********** Pre-entrypoint helper *************
33 | pre_entrypoint
34 |
35 | #********** Static Test *************
36 | /bin/bash ${PROJECT_PATH}/.project_automation/static_tests/static_tests.sh
37 | if [ $? -eq 0 ]
38 | then
39 | echo "Static test completed"
40 | EXIT_CODE=0
41 | else
42 | echo "Static test failed"
43 | EXIT_CODE=1
44 | fi
45 |
46 | #********** Post-entrypoint helper *************
47 | post_entrypoint
48 |
49 | #********** Exit Code *************
50 | exit $EXIT_CODE
--------------------------------------------------------------------------------
/.project_automation/static_tests/static_tests.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | ## NOTE: paths may differ when running in a managed task. To ensure behavior is consistent between
4 | # managed and local tasks always use these variables for the project and project type path
5 | PROJECT_PATH=${BASE_PATH}/project
6 | PROJECT_TYPE_PATH=${BASE_PATH}/projecttype
7 |
8 | echo "Starting Static Tests"
9 |
10 | #********** Terraform Validate *************
11 | cd ${PROJECT_PATH}
12 | echo "---------------------------------------------------------------------------"
13 | git log --max-count=1
14 | echo "---------------------------------------------------------------------------"
15 |
16 | terraform init -no-color
17 | terraform validate -no-color
18 | if [ $? -eq 0 ]
19 | then
20 | echo "Success - Terraform validate"
21 | else
22 | echo "Failure - Terraform validate"
23 | exit 1
24 | fi
25 |
26 | #********** tflint ********************
27 | echo 'Starting tflint'
28 | tflint --no-color --init --config ${PROJECT_PATH}/.config/.tflint.hcl
29 | MYLINT=$(tflint --no-color --force --config ${PROJECT_PATH}/.config/.tflint.hcl)
30 | if [ -z "$MYLINT" ]
31 | then
32 | echo "Success - tflint found no linting issues!"
33 | else
34 | echo "Failure - tflint found linting issues!"
35 | echo "$MYLINT"
36 | exit 1
37 | fi
38 |
39 | #********** tfsec *********************
40 | echo 'Starting tfsec'
41 | MYTFSEC=$(tfsec . --no-color --config-file ${PROJECT_PATH}/.config/.tfsec.yml --custom-check-dir ${PROJECT_PATH}/.config/.tfsec)
42 | if [[ $MYTFSEC == *"No problems detected!"* ]];
43 | then
44 | echo "Success - tfsec found no security issues!"
45 | echo "$MYTFSEC"
46 | else
47 | echo "Failure - tfsec found security issues!"
48 | echo "$MYTFSEC"
49 | exit 1
50 | fi
51 |
52 | #********** Checkov Analysis *************
53 | echo "Running Checkov Analysis"
54 | checkov --config-file ${PROJECT_PATH}/.config/.checkov.yml
55 | if [ $? -eq 0 ]
56 | then
57 | echo "Success - Checkov found no issues!"
58 | else
59 | echo "Failure - Checkov found issues!"
60 | exit 1
61 | fi
62 |
63 | #********** Markdown Lint **************
64 | echo 'Starting markdown lint'
65 | MYMDL=$(mdl --config ${PROJECT_PATH}/.config/.mdlrc .header.md examples/*/.header.md)
66 | if [ -z "$MYMDL" ]
67 | then
68 | echo "Success - markdown lint found no linting issues!"
69 | else
70 | echo "Failure - markdown lint found linting issues!"
71 | echo "$MYMDL"
72 | exit 1
73 | fi
74 |
75 | #********** Terraform Docs *************
76 | echo 'Starting terraform-docs'
77 | TDOCS="$(terraform-docs --config ${PROJECT_PATH}/.config/.terraform-docs.yaml --lockfile=false ./)"
78 | git add -N README.md
79 | GDIFF="$(git diff --compact-summary)"
80 | if [ -z "$GDIFF" ]
81 | then
82 | echo "Success - Terraform Docs creation verified!"
83 | else
84 | echo "Failure - Terraform Docs creation failed, ensure you have precommit installed and running before submitting the Pull Request. TIPS: false error may occur if you have unstaged files in your repo"
85 | echo "$GDIFF"
86 | exit 1
87 | fi
88 |
89 | #***************************************
90 | echo "End of Static Tests"
--------------------------------------------------------------------------------
/.project_automation/update/noop.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | echo "Not Supported!"
3 |
--------------------------------------------------------------------------------
/.project_config.yml:
--------------------------------------------------------------------------------
1 | version: "1.0.0"
2 |
3 | init:
4 | entrypoint: .project_automation/init/noop.sh
5 | update:
6 | entrypoint: .project_automation/update/noop.sh
7 | static_tests:
8 | dockerfile: .project_automation/static_tests/Dockerfile
9 | entrypoint: .project_automation/static_tests/entrypoint.sh
10 | functional_tests:
11 | github_permissions:
12 | contents: write
13 | dockerfile: .project_automation/functional_tests/Dockerfile
14 | entrypoint: .project_automation/functional_tests/entrypoint.sh
15 | publication:
16 | github_permissions:
17 | contents: write
18 | dockerfile: .project_automation/publication/Dockerfile
19 | entrypoint: .project_automation/publication/entrypoint.sh
20 | deprecation:
21 | entrypoint: .project_automation/deprecation/entrypoint.sh
22 |
--------------------------------------------------------------------------------
/CODEOWNERS:
--------------------------------------------------------------------------------
1 | * @aws-ia/aws-ia
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | ## Code of Conduct
2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
4 | opensource-codeofconduct@amazon.com with any additional questions or comments.
5 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Creating modules for Terraform
2 |
3 | This repository contains code for an application that is published using the Application Builder Platform (ABP).
4 |
5 | ## Module Standards
6 |
7 | For best practices and information on developing with Terraform, see the [I&A Module Standards](https://aws-ia.github.io/standards-terraform/)
8 |
9 | ## Contributing Code
10 |
11 | In order to contribute code to this repository, you must submit a *[Pull Request](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request)*. To do so, you must *[fork](https://docs.github.com/en/get-started/quickstart/fork-a-repo)* this repository, make your changes in your forked version and submit a *Pull Request*.
12 |
13 | ## Writing Documentation
14 |
15 | > :bangbang: **Do not manually update README.md**.
16 |
17 | README.md is automatically generated by pulling in content from other files. For instructions, including a fill-in-the-blank content template, see [Create readmes for Terraform-based Partner Solutions.](https://aws-ia-us-west-2.s3.us-west-2.amazonaws.com/docs/content/index.html#/lessons/8rpYWWL59M7dcS-NsjYmaISUu-L_UqEv)
18 |
19 | ## Checks and Validation
20 |
21 | Pull Requests (PRs) submitted against this repository undergo a series of static and functional checks.
22 |
23 | > :exclamation: Note: Failures during functional or static checks will prevent a pull request from being accepted.
24 |
25 | It is a best practice to perform these checks locally prior to submitting a pull request.
26 |
27 | ## Customizing static and functional test
28 |
29 | Details about the static and functional test can be found at `./.project_automation/{test-name}/entrypoint.sh`.
30 | TIPS: **do not** modify the `./.project_automation/{test-name}/entrypoint.sh`, instead use the helper script located at `.config/{test-name}/`
31 |
32 | ## Checks Performed
33 |
34 | - TFLint
35 | - tfsec
36 | - Markdown Lint
37 | - Checkov
38 | - Terratest
39 |
40 | > :bangbang: The readme.md file will be created after all checks have completed successfully, it is recommended that you install terraform-docs locally in order to preview your readme.md file prior to publication.
41 |
42 | ## Install the required tools
43 |
44 | Prerequisites:
45 |
46 | - [Python](https://docs.python.org/3/using/index.html)
47 | - [Pip](https://pip.pypa.io/en/stable/installation/)
48 | - [golang](https://go.dev/doc/install) (for macos you can use `brew`)
49 | - [tflint](https://github.com/terraform-linters/tflint)
50 | - [tfsec](https://aquasecurity.github.io/tfsec/v1.0.11/)
51 | - [Markdown Lint](https://github.com/markdownlint/markdownlint)
52 | - [Checkov](https://www.checkov.io/2.Basics/Installing%20Checkov.html)
53 | - [terraform-docs](https://github.com/terraform-docs/terraform-docs)
54 | - [coreutils](https://www.gnu.org/software/coreutils/)
55 |
56 | ## Performing Checks manually
57 |
58 | Preparation
59 |
60 | ```sh
61 | terraform init
62 | terraform validate
63 | ```
64 |
65 | ## Checks
66 |
67 | ### tflint
68 |
69 | ```sh
70 | tflint --init --config ${PROJECT_PATH}/.config/.tflint.hcl
71 | tflint --force --config ${PROJECT_PATH}/.config/.tflint.hcl
72 | ```
73 |
74 | ### tfsec
75 |
76 | ```sh
77 | tfsec . --config-file ${PROJECT_PATH}/.config/.tfsec.yml
78 | ```
79 |
80 | ### Markdown Lint
81 |
82 | ```sh
83 | mdl --config ${PROJECT_PATH}/.config/.mdlrc .header.md examples/*/.header.md
84 | ```
85 |
86 | ### Checkov
87 |
88 | ```sh
89 | checkov --config-file ${PROJECT_PATH}/.config/.checkov.yml
90 | ```
91 |
92 | ### Terratest
93 |
Include tests that validate your `examples/*` root modules, at a minimum. This can usually be accomplished with only slight modifications to the [boilerplate test provided in this template](./test/examples_basic_test.go).
95 |
96 | ```sh
97 | # from the root of the repository
98 | cd test
99 | go mod init github.com/aws-ia/terraform-project-ephemeral
100 | go mod tidy
101 | go install github.com/gruntwork-io/terratest/modules/terraform
102 | go test -timeout 45m
103 | ```
104 |
105 | ## Documentation
106 |
107 | ### terraform-docs
108 |
109 | ```sh
110 | # from the root of the repository
111 | terraform-docs --config ${PROJECT_PATH}/.config/.terraform-docs.yaml --lockfile=false ./
112 | ```
113 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/NOTICE.txt:
--------------------------------------------------------------------------------
1 | Copyright 2016-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at
4 |
5 | http://aws.amazon.com/apache2.0/
6 |
7 | or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
8 |
--------------------------------------------------------------------------------
/VERSION:
--------------------------------------------------------------------------------
1 | v0.0.26
2 |
--------------------------------------------------------------------------------
/bda.tf:
--------------------------------------------------------------------------------
# Bedrock Data Automation (BDA) project, created only when `create_bda` is
# true. The project name is prefixed with a random string to reduce the
# chance of naming collisions across deployments.
resource "awscc_bedrock_data_automation_project" "bda_project" {
  count = var.create_bda ? 1 : 0
  project_name = "${random_string.solution_prefix.result}-${var.bda_project_name}"
  project_description = var.bda_project_description
  kms_encryption_context = var.bda_kms_encryption_context
  kms_key_id = var.bda_kms_key_id
  tags = var.bda_tags
  standard_output_configuration = var.bda_standard_output_configuration
  # Optional custom output driven by caller-supplied blueprints.
  custom_output_configuration = {
    blueprints = var.bda_custom_output_config
  }
  # Document splitter toggle for the project's override configuration.
  override_configuration = {
    document = {
      splitter = {
        state = var.bda_override_config_state
      }
    }
  }
}
20 |
# Custom BDA blueprint (caller-supplied schema describing the output to
# extract), created only when `create_blueprint` is true. Name is prefixed
# with the same random string as the project above.
resource "awscc_bedrock_blueprint" "bda_blueprint" {
  count = var.create_blueprint ? 1 : 0
  blueprint_name = "${random_string.solution_prefix.result}-${var.blueprint_name}"
  schema = var.blueprint_schema
  type = var.blueprint_type
  kms_encryption_context = var.blueprint_kms_encryption_context
  kms_key_id = var.blueprint_kms_key_id
  tags = var.blueprint_tags
}
--------------------------------------------------------------------------------
/data-source.tf:
--------------------------------------------------------------------------------
locals {
  # Create our own CloudWatch log group only when a default KB is created and
  # KB logging was requested.
  create_cwl = var.create_default_kb && var.create_kb_log_group
  # A log delivery is needed either for our own log group or for a
  # caller-supplied monitoring destination ARN.
  create_delivery = local.create_cwl || var.kb_monitoring_arn != null
  # Shared vector-ingestion settings, reused verbatim by every data source in
  # this file. Each nested object collapses to null when its driving
  # variables are unset so the provider receives no empty configuration.
  vector_ingestion_configuration = {
    chunking_configuration = var.chunking_strategy == null ? null : {
      chunking_strategy = var.chunking_strategy
      fixed_size_chunking_configuration = var.chunking_strategy_max_tokens == null ? null : {
        max_tokens = var.chunking_strategy_max_tokens
        overlap_percentage = var.chunking_strategy_overlap_percentage
      }
      # NOTE(review): the variable names misspell "hierarchical"
      # ("heirarchical"); kept as-is since renaming would be a breaking
      # interface change for module callers.
      hierarchical_chunking_configuration = var.heirarchical_overlap_tokens == null && var.level_configurations_list == null ? null : {
        level_configurations = var.level_configurations_list
        overlap_tokens = var.heirarchical_overlap_tokens
      }
      semantic_chunking_configuration = var.breakpoint_percentile_threshold == null && var.semantic_buffer_size == null && var.semantic_max_tokens == null ? null : {
        breakpoint_percentile_threshold = var.breakpoint_percentile_threshold
        buffer_size = var.semantic_buffer_size
        max_tokens = var.semantic_max_tokens
      }
    }
    # Optional custom transformation step with intermediate S3 storage.
    # NOTE(review): gating variable name misspells "transformation"
    # ("tranformation"); kept for interface compatibility.
    custom_transformation_configuration = var.create_custom_tranformation_config == false ? null : {
      intermediate_storage = {
        s3_location = {
          uri = var.s3_location_uri
        }
      }
      transformations = var.transformations_list
    }
    # Optional foundation-model-based parsing of ingested documents.
    parsing_configuration = var.create_parsing_configuration == false ? null : {
      bedrock_foundation_model_configuration = {
        model_arn = var.parsing_config_model_arn
        parsing_prompt = {
          parsing_prompt_text = var.parsing_prompt_text
        }
      }
      parsing_strategy = var.parsing_strategy
    }
  }
}
40 |
# – Knowledge Base S3 Data Source –

# Default S3 bucket for KB documents. Created for either the Bedrock KB or
# the Kendra data source, unless the caller opted to use an existing bucket.
resource "awscc_s3_bucket" "s3_data_source" {
  count = (var.create_s3_data_source || var.create_kendra_s3_data_source) && var.use_existing_s3_data_source == false ? 1 : 0
  bucket_name = "${random_string.solution_prefix.result}-${var.kb_name}-default-bucket"

  # Block every form of public access.
  public_access_block_configuration = {
    block_public_acls = true
    block_public_policy = true
    ignore_public_acls = true
    restrict_public_buckets = true
  }

  # SSE-KMS when a CMK ARN is supplied, otherwise SSE-S3 (AES256).
  bucket_encryption = {
    server_side_encryption_configuration = [{
      bucket_key_enabled = true
      server_side_encryption_by_default = {
        sse_algorithm = var.kb_s3_data_source_kms_arn == null ? "AES256" : "aws:kms"
        kms_master_key_id = var.kb_s3_data_source_kms_arn
      }
    }]
  }

  # awscc expects tags as a list of {key, value} objects; default to a Name tag.
  tags = var.kb_tags != null ? [for k, v in var.kb_tags : { key = k, value = v }] : [{
    key = "Name"
    value = "S3 Data Source"
  }]
}
68 |
# S3 data source attached to the default (or a caller-provided) knowledge base.
# NOTE(review): bucket creation above is gated on `use_existing_s3_data_source`
# while selection here keys on `kb_s3_data_source` being null — confirm the
# two variables stay in sync in variables.tf.
resource "awscc_bedrock_data_source" "knowledge_base_ds" {
  count = var.create_s3_data_source ? 1 : 0
  knowledge_base_id = var.create_default_kb ? awscc_bedrock_knowledge_base.knowledge_base_default[0].id : var.existing_kb
  name = "${random_string.solution_prefix.result}-${var.kb_name}DataSource"
  data_deletion_policy = var.data_deletion_policy
  data_source_configuration = {
    type = "S3"
    s3_configuration = {
      bucket_arn = var.kb_s3_data_source == null ? awscc_s3_bucket.s3_data_source[0].arn : var.kb_s3_data_source # Create an S3 bucket or reference existing
      bucket_owner_account_id = var.bucket_owner_account_id
      inclusion_prefixes = var.s3_inclusion_prefixes
    }
  }
  # Shared chunking/transformation/parsing settings from data-source.tf locals.
  vector_ingestion_configuration = var.create_vector_ingestion_configuration == false ? null : local.vector_ingestion_configuration
}
84 |
# CloudWatch log group for KB application logs; the /aws/vendedlogs/ name
# prefix matches the vended-logs delivery convention used below.
resource "aws_cloudwatch_log_group" "knowledge_base_cwl" {
  #tfsec:ignore:log-group-customer-key
  #checkov:skip=CKV_AWS_158:Encryption not required for log group
  count = local.create_cwl ? 1 : 0
  name = "/aws/vendedlogs/bedrock/knowledge-base/APPLICATION_LOGS/${awscc_bedrock_knowledge_base.knowledge_base_default[0].id}"
  retention_in_days = var.kb_log_group_retention_in_days
}
92 |
# Delivery source: the default KB emits APPLICATION_LOGS for delivery.
# NOTE(review): created whenever kb_monitoring_arn is set, but references the
# default KB — presumably requires create_default_kb as well; verify.
resource "awscc_logs_delivery_source" "knowledge_base_log_source" {
  count = local.create_delivery ? 1 : 0
  name = "${random_string.solution_prefix.result}-${var.kb_name}-delivery-source"
  log_type = "APPLICATION_LOGS"
  resource_arn = awscc_bedrock_knowledge_base.knowledge_base_default[0].knowledge_base_arn
}
99 |
# Delivery destination: our own log group when one was created, otherwise the
# caller-supplied monitoring ARN.
resource "awscc_logs_delivery_destination" "knowledge_base_log_destination" {
  count = local.create_delivery ? 1 : 0
  name = "${random_string.solution_prefix.result}-${var.kb_name}-delivery-destination"
  output_format = "json"
  destination_resource_arn = local.create_cwl ? aws_cloudwatch_log_group.knowledge_base_cwl[0].arn : var.kb_monitoring_arn
  tags = var.kb_tags != null ? [for k, v in var.kb_tags : { key = k, value = v }] : [{
    key = "Name"
    value = "${random_string.solution_prefix.result}-${var.kb_name}-delivery-destination"
  }]
}
110 |
# Delivery: wires the source above to the destination above.
resource "awscc_logs_delivery" "knowledge_base_log_delivery" {
  count = local.create_delivery ? 1 : 0
  delivery_destination_arn = awscc_logs_delivery_destination.knowledge_base_log_destination[0].arn
  delivery_source_name = awscc_logs_delivery_source.knowledge_base_log_source[0].name
  tags = var.kb_tags != null ? [for k, v in var.kb_tags : { key = k, value = v }] : [{
    key = "Name"
    value = "${random_string.solution_prefix.result}-${var.kb_name}-delivery"
  }]
}
120 |
# – Knowledge Base Web Crawler Data Source

# Web crawler data source: crawls the given seed URLs subject to rate,
# scope, and inclusion/exclusion filter settings.
resource "awscc_bedrock_data_source" "knowledge_base_web_crawler" {
  count = var.create_web_crawler ? 1 : 0
  knowledge_base_id = var.create_default_kb ? awscc_bedrock_knowledge_base.knowledge_base_default[0].id : var.existing_kb
  name = "${random_string.solution_prefix.result}-${var.kb_name}DataSourceWebCrawler"
  data_source_configuration = {
    type = "WEB"
    web_configuration = {
      crawler_configuration = {
        crawler_limits = {
          rate_limit = var.rate_limit
        }
        exclusion_filters = var.exclusion_filters
        inclusion_filters = var.inclusion_filters
        scope = var.crawler_scope
      }
      source_configuration = {
        url_configuration = {
          seed_urls = var.seed_urls
        }
      }
    }
  }
  # Shared chunking/transformation/parsing settings from data-source.tf locals.
  vector_ingestion_configuration = var.create_vector_ingestion_configuration == false ? null : local.vector_ingestion_configuration
}
146 |
# – Knowledge Base Confluence Data Source

# Confluence data source. Shares the generic crawl-filter and auth variables
# with the SharePoint and Salesforce data sources below.
resource "awscc_bedrock_data_source" "knowledge_base_confluence" {
  count = var.create_confluence ? 1 : 0
  knowledge_base_id = var.create_default_kb ? awscc_bedrock_knowledge_base.knowledge_base_default[0].id : var.existing_kb
  name = "${random_string.solution_prefix.result}-${var.kb_name}DataSourceConfluence"
  data_source_configuration = {
    type = "CONFLUENCE"
    confluence_configuration = {
      crawler_configuration = {
        filter_configuration = {
          pattern_object_filter = {
            filters = var.pattern_object_filter_list
          }
          type = var.crawl_filter_type
        }
      }
      source_configuration = {
        auth_type = var.auth_type
        credentials_secret_arn = var.confluence_credentials_secret_arn
        host_type = var.host_type
        host_url = var.host_url
      }
    }
  }
  # Shared chunking/transformation/parsing settings from data-source.tf locals.
  vector_ingestion_configuration = var.create_vector_ingestion_configuration == false ? null : local.vector_ingestion_configuration
}
173 |
# – Knowledge Base Sharepoint Data Source

# SharePoint data source. Shares the generic crawl-filter and auth variables
# with the Confluence and Salesforce data sources.
resource "awscc_bedrock_data_source" "knowledge_base_sharepoint" {
  count = var.create_sharepoint ? 1 : 0
  knowledge_base_id = var.create_default_kb ? awscc_bedrock_knowledge_base.knowledge_base_default[0].id : var.existing_kb
  name = "${random_string.solution_prefix.result}-${var.kb_name}DataSourceSharepoint"
  data_source_configuration = {
    type = "SHAREPOINT"
    share_point_configuration = {
      crawler_configuration = {
        filter_configuration = {
          # Fixed missing space before "=" (was `pattern_object_filter ={`),
          # bringing the block in line with `terraform fmt` and the sibling
          # Confluence/Salesforce resources.
          pattern_object_filter = {
            filters = var.pattern_object_filter_list
          }
          type = var.crawl_filter_type
        }
      }
      source_configuration = {
        auth_type = var.auth_type
        credentials_secret_arn = var.share_point_credentials_secret_arn
        domain = var.share_point_domain
        host_type = var.host_type
        site_urls = var.share_point_site_urls
        tenant_id = var.tenant_id
      }
    }
  }
  # Shared chunking/transformation/parsing settings from data-source.tf locals.
  vector_ingestion_configuration = var.create_vector_ingestion_configuration == false ? null : local.vector_ingestion_configuration
}
202 |
# – Knowledge Base Salesforce Data Source

# Salesforce data source. Shares the generic crawl-filter and auth variables
# with the Confluence and SharePoint data sources above.
resource "awscc_bedrock_data_source" "knowledge_base_salesforce" {
  count = var.create_salesforce ? 1 : 0
  knowledge_base_id = var.create_default_kb ? awscc_bedrock_knowledge_base.knowledge_base_default[0].id : var.existing_kb
  name = "${random_string.solution_prefix.result}-${var.kb_name}DataSourceSalesforce"
  data_source_configuration = {
    type = "SALESFORCE"
    salesforce_configuration = {
      crawler_configuration = {
        filter_configuration = {
          pattern_object_filter = {
            filters = var.pattern_object_filter_list
          }
          type = var.crawl_filter_type
        }
      }
      source_configuration = {
        auth_type = var.auth_type
        credentials_secret_arn = var.salesforce_credentials_secret_arn
        host_url = var.host_url
      }
    }
  }
  # Shared chunking/transformation/parsing settings from data-source.tf locals.
  vector_ingestion_configuration = var.create_vector_ingestion_configuration == false ? null : local.vector_ingestion_configuration
}
228 |
--------------------------------------------------------------------------------
/data.tf:
--------------------------------------------------------------------------------
# Current account, partition, and region context used to build ARNs below.
data "aws_caller_identity" "current" {}
data "aws_partition" "current" {}
data "aws_region" "current" {}

locals {
  region = data.aws_region.current.name
  account_id = data.aws_caller_identity.current.account_id
  partition = data.aws_partition.current.partition
  # KB IAM permissions are needed whenever any KB flavor or vector-store
  # configuration is requested.
  create_kb = var.create_default_kb || var.create_rds_config || var.create_mongo_config || var.create_pinecone_config || var.create_opensearch_config || var.create_kb || var.create_kendra_config
  # The agent's model takes precedence; otherwise use the supervisor's model,
  # or null when neither an agent nor a supervisor is created.
  foundation_model = var.create_agent ? var.foundation_model : (var.create_supervisor ? var.supervisor_model : null)
}
12 |
# Trust policy allowing the Bedrock service principal to assume the agent
# role, with confused-deputy protections scoping it to agents in this
# account/region.
data "aws_iam_policy_document" "agent_trust" {
  count = var.create_agent || var.create_supervisor ? 1 : 0
  statement {
    actions = ["sts:AssumeRole"]
    principals {
      identifiers = ["bedrock.amazonaws.com"]
      type = "Service"
    }
    # Only honor AssumeRole requests originating from this account...
    condition {
      test = "StringEquals"
      values = [local.account_id]
      variable = "aws:SourceAccount"
    }
    # ...and made on behalf of Bedrock agents in this region/account.
    condition {
      test = "ArnLike"
      values = ["arn:${local.partition}:bedrock:${local.region}:${local.account_id}:agent/*"]
      variable = "AWS:SourceArn"
    }
  }
}
33 |
# Model-invocation permissions for the agent role. With an application
# inference profile: the source model, the profile itself, any application
# inference profile, and the models the profile resolves to. Without one: the
# configured foundation model (this region and cross-region) plus its
# cross-region inference profiles.
data "aws_iam_policy_document" "agent_permissions" {
  count = var.create_agent || var.create_supervisor ? 1 : 0
  statement {
    actions = [
      "bedrock:InvokeModel*",
      "bedrock:UseInferenceProfile",
    ]
    resources = distinct(concat(
      var.create_app_inference_profile ? [
        var.app_inference_profile_model_source,
        awscc_bedrock_application_inference_profile.application_inference_profile[0].inference_profile_arn,
        # Use the detected partition (was hardcoded "aws") so the policy also
        # renders correctly in GovCloud/China, consistent with the other ARNs
        # in this file.
        "arn:${local.partition}:bedrock:*:*:application-inference-profile/*",
      ] : [
        "arn:${local.partition}:bedrock:${local.region}::foundation-model/${local.foundation_model}",
        "arn:${local.partition}:bedrock:*::foundation-model/${local.foundation_model}",
        "arn:${local.partition}:bedrock:${local.region}:${local.account_id}:inference-profile/*.${local.foundation_model}",
      ],
      var.create_app_inference_profile ?
      awscc_bedrock_application_inference_profile.application_inference_profile[0].models[*].model_arn : []
    ))
  }
}
56 |
# Permissions to read and invoke agent aliases in this account/region, needed
# when an alias is created or when a supervisor invokes collaborator aliases.
data "aws_iam_policy_document" "agent_alias_permissions" {
  count = var.create_agent_alias || var.create_supervisor ? 1 : 0
  statement {
    actions = [
      "bedrock:GetAgentAlias",
      "bedrock:InvokeAgent"
    ]
    resources = [
      "arn:${local.partition}:bedrock:${local.region}:${local.account_id}:agent/*",
      "arn:${local.partition}:bedrock:${local.region}:${local.account_id}:agent-alias/*"
    ]
  }
}
70 |
71 |
# Permission to retrieve from any knowledge base in this account/region.
data "aws_iam_policy_document" "knowledge_base_permissions" {
  count = local.create_kb ? 1 : 0

  statement {
    actions = ["bedrock:Retrieve"]
    resources = ["arn:${local.partition}:bedrock:${local.region}:${local.account_id}:knowledge-base/*"]
  }
}
80 |
# Trust policy for the custom-model role: Bedrock may assume it, but only on
# behalf of model-customization jobs in this account/region (confused-deputy
# protection, mirroring agent_trust above).
data "aws_iam_policy_document" "custom_model_trust" {
  count = var.create_custom_model ? 1 : 0
  statement {
    actions = ["sts:AssumeRole"]
    principals {
      identifiers = ["bedrock.amazonaws.com"]
      type = "Service"
    }
    condition {
      test = "StringEquals"
      values = [local.account_id]
      variable = "aws:SourceAccount"
    }
    condition {
      test = "ArnLike"
      values = ["arn:${local.partition}:bedrock:${local.region}:${local.account_id}:model-customization-job/*"]
      variable = "AWS:SourceArn"
    }
  }
}
101 |
# Permissions to read and use inference profiles (system-defined and
# application-defined) in any account/region.
data "aws_iam_policy_document" "app_inference_profile_permission" {
  count = var.create_app_inference_profile ? 1 : 0
  statement {
    actions = [
      "bedrock:GetInferenceProfile",
      "bedrock:ListInferenceProfiles",
      "bedrock:UseInferenceProfile",
    ]
    # Use the detected partition (was hardcoded "aws") so the policy also
    # renders correctly in GovCloud/China, consistent with the other ARNs in
    # this file.
    resources = [
      "arn:${local.partition}:bedrock:*:*:inference-profile/*",
      "arn:${local.partition}:bedrock:*:*:application-inference-profile/*"
    ]
  }
}
116 |
# Resolves the base foundation model used for model customization.
data "aws_bedrock_foundation_model" "model_identifier" {
  count = var.create_custom_model ? 1 : 0
  model_id = var.custom_model_id
}
121 |
--------------------------------------------------------------------------------
/examples/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-ia/terraform-aws-bedrock/6a918316b1bdec23505c8fe6a982e6fe34174036/examples/.DS_Store
--------------------------------------------------------------------------------
/examples/agent-collaborator/.header.md:
--------------------------------------------------------------------------------
1 | This example shows how to deploy a basic Bedrock agent collaborator with a supervisor agent and a collaborator agent with agent alias.
2 |
--------------------------------------------------------------------------------
/examples/agent-collaborator/README.md:
--------------------------------------------------------------------------------
1 |
2 | This example shows how to deploy a basic Bedrock agent collaborator with a supervisor agent and a collaborator agent with agent alias.
3 |
4 | ## Requirements
5 |
6 | | Name | Version |
7 | |------|---------|
8 | | [terraform](#requirement\_terraform) | >= 1.0.7 |
9 | | [aws](#requirement\_aws) | ~>5.0 |
10 | | [awscc](#requirement\_awscc) | >= 1.35.0 |
11 | | [opensearch](#requirement\_opensearch) | = 2.2.0 |
12 | | [random](#requirement\_random) | >= 3.6.0 |
13 | | [time](#requirement\_time) | ~> 0.6 |
14 |
15 | ## Providers
16 |
17 | No providers.
18 |
19 | ## Modules
20 |
21 | | Name | Source | Version |
22 | |------|--------|---------|
23 | | [agent\_collaborator1](#module\_agent\_collaborator1) | ../.. | n/a |
24 | | [agent\_collaborator2](#module\_agent\_collaborator2) | ../.. | n/a |
25 | | [agent\_supervisor](#module\_agent\_supervisor) | ../.. | n/a |
26 |
27 | ## Resources
28 |
29 | No resources.
30 |
31 | ## Inputs
32 |
33 | | Name | Description | Type | Default | Required |
34 | |------|-------------|------|---------|:--------:|
35 | | [region](#input\_region) | AWS region to deploy the resources | `string` | `"us-east-1"` | no |
36 |
37 | ## Outputs
38 |
39 | No outputs.
40 |
--------------------------------------------------------------------------------
/examples/agent-collaborator/main.tf:
--------------------------------------------------------------------------------
#####################################################################################
# Terraform module examples are meant to show an _example_ on how to use a module
# per use-case. The code below should not be copied directly but referenced in order
# to build your own root module that invokes this module
#####################################################################################

# Supervisor agent that the collaborator agents below register with.
module "agent_supervisor" {
  source = "../.."
  create_agent = false
  create_supervisor = true
  supervisor_model = "anthropic.claude-3-5-sonnet-20241022-v2:0"
  supervisor_instruction = "You are a supervisor who can provide detailed information about cars to an agent."
}

# First collaborator agent, associated with the supervisor via supervisor_id.
# Typo fixed in the instruction prompt ("assisant" -> "assistant").
module "agent_collaborator1" {
  source = "../.."
  create_agent_alias = true
  foundation_model = "anthropic.claude-v2"
  instruction = "You are an automotive assistant who can provide detailed information about cars to a customer."
  supervisor_id = module.agent_supervisor.supervisor_id
  create_collaborator = true
  collaborator_name = "AgentA"
  collaboration_instruction = "Handle customer inquiries"

  depends_on = [module.agent_supervisor]
}

# Second collaborator agent. depends_on serializes it after the first
# collaborator — presumably to avoid concurrent association with the
# supervisor; confirm before removing.
module "agent_collaborator2" {
  source = "../.."
  create_agent_alias = true
  foundation_model = "anthropic.claude-v2"
  instruction = "You are an automotive assistant who can provide detailed information about cars to a customer."
  supervisor_id = module.agent_supervisor.supervisor_id
  create_collaborator = true
  collaborator_name = "AgentB"
  collaboration_instruction = "Process backend tasks"

  depends_on = [module.agent_supervisor, module.agent_collaborator1]
}
--------------------------------------------------------------------------------
/examples/agent-collaborator/providers.tf:
--------------------------------------------------------------------------------
# Terraform and provider version requirements for this example. The
# constraints mirror the root module so `terraform init` resolves a single
# compatible provider set for both.
terraform {
  required_version = ">= 1.0.7"
  required_providers {
    aws = {
      source  = "hashicorp/aws"
      version = "~>5.0"
    }
    awscc = {
      source  = "hashicorp/awscc"
      version = ">= 1.35.0"
    }
    # Required by the root module's provider contract even though this
    # example creates no OpenSearch collection.
    opensearch = {
      source  = "opensearch-project/opensearch"
      version = "= 2.2.0"
    }
    time = {
      source  = "hashicorp/time"
      version = "~> 0.6"
    }
    random = {
      source  = "hashicorp/random"
      version = ">= 3.6.0"
    }
  }
}

# AWS region used by both the aws and awscc providers below.
variable "region" {
  type        = string
  description = "AWS region to deploy the resources"
  default     = "us-east-1"
}

provider "aws" {
  region = var.region
}

provider "awscc" {
  region = var.region
}

# No OpenSearch collection is used in this example, so the provider is given a
# placeholder URL with health checks disabled to keep it inert.
provider "opensearch" {
  url         = "n/a"
  healthcheck = false
}
--------------------------------------------------------------------------------
/examples/agent-only/.header.md:
--------------------------------------------------------------------------------
1 | This example shows how to deploy a basic Bedrock agent with an agent alias, leaving the default values and without creating an action group or a knowledge base.
2 |
--------------------------------------------------------------------------------
/examples/agent-only/README.md:
--------------------------------------------------------------------------------
1 |
2 | This example shows how to deploy a basic Bedrock agent with an agent alias, leaving the default values and without creating an action group or a knowledge base.
3 |
4 | ## Requirements
5 |
6 | | Name | Version |
7 | |------|---------|
8 | | [terraform](#requirement\_terraform) | >= 1.0.7 |
9 | | [aws](#requirement\_aws) | ~>5.0 |
10 | | [awscc](#requirement\_awscc) | >= 1.35.0 |
11 | | [opensearch](#requirement\_opensearch) | = 2.2.0 |
12 | | [random](#requirement\_random) | >= 3.6.0 |
13 | | [time](#requirement\_time) | ~> 0.6 |
14 |
15 | ## Providers
16 |
17 | No providers.
18 |
19 | ## Modules
20 |
21 | | Name | Source | Version |
22 | |------|--------|---------|
23 | | [bedrock](#module\_bedrock) | ../.. | n/a |
24 |
25 | ## Resources
26 |
27 | No resources.
28 |
29 | ## Inputs
30 |
31 | | Name | Description | Type | Default | Required |
32 | |------|-------------|------|---------|:--------:|
33 | | [region](#input\_region) | AWS region to deploy the resources | `string` | `"us-east-1"` | no |
34 |
35 | ## Outputs
36 |
37 | No outputs.
38 |
--------------------------------------------------------------------------------
/examples/agent-only/main.tf:
--------------------------------------------------------------------------------
#####################################################################################
# Terraform module examples are meant to show an _example_ on how to use a module
# per use-case. The code below should not be copied directly but referenced in order
# to build your own root module that invokes this module
#####################################################################################

# Minimal agent: foundation model and instruction only — no action groups,
# knowledge bases, or guardrails. An agent alias is created alongside it.
module "bedrock" {
  source             = "../.." # local example
  create_agent_alias = true
  foundation_model   = "anthropic.claude-v2"
  instruction        = "You are an automotive assistant who can provide detailed information about cars to a customer."
}
--------------------------------------------------------------------------------
/examples/agent-only/providers.tf:
--------------------------------------------------------------------------------
# Terraform and provider version requirements for this example. The
# constraints mirror the root module so `terraform init` resolves a single
# compatible provider set for both.
terraform {
  required_version = ">= 1.0.7"
  required_providers {
    aws = {
      source  = "hashicorp/aws"
      version = "~>5.0"
    }
    awscc = {
      source  = "hashicorp/awscc"
      version = ">= 1.35.0"
    }
    # Required by the root module's provider contract even though this
    # example creates no OpenSearch collection.
    opensearch = {
      source  = "opensearch-project/opensearch"
      version = "= 2.2.0"
    }
    time = {
      source  = "hashicorp/time"
      version = "~> 0.6"
    }
    random = {
      source  = "hashicorp/random"
      version = ">= 3.6.0"
    }
  }
}

# AWS region used by both the aws and awscc providers below.
variable "region" {
  type        = string
  description = "AWS region to deploy the resources"
  default     = "us-east-1"
}

provider "aws" {
  region = var.region
}

provider "awscc" {
  region = var.region
}

# No OpenSearch collection is used in this example, so the provider is given a
# placeholder URL with health checks disabled to keep it inert.
provider "opensearch" {
  url         = "n/a"
  healthcheck = false
}
--------------------------------------------------------------------------------
/examples/agent-with-guardrails/.header.md:
--------------------------------------------------------------------------------
1 | This example shows how to deploy a basic Bedrock agent with guardrails, leaving the default values and without creating an action group or a knowledge base.
2 |
--------------------------------------------------------------------------------
/examples/agent-with-guardrails/README.md:
--------------------------------------------------------------------------------
1 |
2 | This example shows how to deploy a basic Bedrock agent with guardrails, leaving the default values and without creating an action group or a knowledge base.
3 |
4 | ## Requirements
5 |
6 | | Name | Version |
7 | |------|---------|
8 | | [terraform](#requirement\_terraform) | >= 1.0.7 |
9 | | [aws](#requirement\_aws) | ~>5.0 |
10 | | [awscc](#requirement\_awscc) | >= 1.35.0 |
11 | | [opensearch](#requirement\_opensearch) | = 2.2.0 |
12 | | [random](#requirement\_random) | >= 3.6.0 |
13 | | [time](#requirement\_time) | ~> 0.6 |
14 |
15 | ## Providers
16 |
17 | No providers.
18 |
19 | ## Modules
20 |
21 | | Name | Source | Version |
22 | |------|--------|---------|
23 | | [bedrock](#module\_bedrock) | ../.. | n/a |
24 |
25 | ## Resources
26 |
27 | No resources.
28 |
29 | ## Inputs
30 |
31 | | Name | Description | Type | Default | Required |
32 | |------|-------------|------|---------|:--------:|
33 | | [region](#input\_region) | AWS region to deploy the resources | `string` | `"us-east-1"` | no |
34 |
35 | ## Outputs
36 |
37 | No outputs.
38 |
--------------------------------------------------------------------------------
/examples/agent-with-guardrails/main.tf:
--------------------------------------------------------------------------------
#####################################################################################
# Terraform module examples are meant to show an _example_ on how to use a module
# per use-case. The code below should not be copied directly but referenced in order
# to build your own root module that invokes this module
#####################################################################################

# Agent with a Bedrock guardrail attached. The guardrail combines content
# filters, PII handling, a regex filter, word lists, and a denied topic.
module "bedrock" {
  source           = "../.." # local example
  create_guardrail = true

  # Content filters: moderate strength for hate speech, strict for violence,
  # applied to both user input and model output.
  filters_config = [
    {
      input_strength  = "MEDIUM"
      output_strength = "MEDIUM"
      type            = "HATE"
    },
    {
      input_strength  = "HIGH"
      output_strength = "HIGH"
      type            = "VIOLENCE"
    }
  ]

  # PII handling: block names and driver IDs outright, anonymize usernames.
  pii_entities_config = [
    {
      action = "BLOCK"
      type   = "NAME"
    },
    {
      action = "BLOCK"
      type   = "DRIVER_ID"
    },
    {
      action = "ANONYMIZE"
      type   = "USERNAME"
    },
  ]

  # Block strings matching a US-SSN-style pattern (###-##-####).
  regexes_config = [{
    action      = "BLOCK"
    description = "example regex"
    name        = "regex_example"
    pattern     = "^\\d{3}-\\d{2}-\\d{4}$"
  }]

  # AWS-managed profanity list plus a custom blocked word.
  managed_word_lists_config = [{
    type = "PROFANITY"
  }]
  words_config = [{
    text = "HATE"
  }]

  # Deny any conversation that falls under the investment-advice topic.
  topics_config = [{
    name       = "investment_topic"
    examples   = ["Where should I invest my money ?"]
    type       = "DENY"
    definition = "Investment advice refers to inquiries, guidance, or recommendations regarding the management or allocation of funds or assets with the goal of generating returns ."
  }]

  foundation_model = "anthropic.claude-v2"
  instruction      = "You are an automotive assistant who can provide detailed information about cars to a customer."
}
--------------------------------------------------------------------------------
/examples/agent-with-guardrails/providers.tf:
--------------------------------------------------------------------------------
# Terraform and provider version requirements for this example. The
# constraints mirror the root module so `terraform init` resolves a single
# compatible provider set for both.
terraform {
  required_version = ">= 1.0.7"
  required_providers {
    aws = {
      source  = "hashicorp/aws"
      version = "~>5.0"
    }
    awscc = {
      source  = "hashicorp/awscc"
      version = ">= 1.35.0"
    }
    # Required by the root module's provider contract even though this
    # example creates no OpenSearch collection.
    opensearch = {
      source  = "opensearch-project/opensearch"
      version = "= 2.2.0"
    }
    time = {
      source  = "hashicorp/time"
      version = "~> 0.6"
    }
    random = {
      source  = "hashicorp/random"
      version = ">= 3.6.0"
    }
  }
}

# var.region is declared in variables.tf for this example.
provider "aws" {
  region = var.region
}

provider "awscc" {
  region = var.region
}

# No OpenSearch collection is used in this example, so the provider is given a
# placeholder URL with health checks disabled to keep it inert.
provider "opensearch" {
  url         = "n/a"
  healthcheck = false
}
39 |
--------------------------------------------------------------------------------
/examples/agent-with-guardrails/variables.tf:
--------------------------------------------------------------------------------
# AWS region used by the aws and awscc provider configurations in providers.tf.
variable "region" {
  type        = string
  description = "AWS region to deploy the resources"
  default     = "us-east-1"
}
--------------------------------------------------------------------------------
/examples/agent-with-inference-profile/.header.md:
--------------------------------------------------------------------------------
1 | This example demonstrates how to create a supervisor agent using Claude 3.7 Sonnet, which is only available with inference profiles.
2 |
--------------------------------------------------------------------------------
/examples/agent-with-inference-profile/README.md:
--------------------------------------------------------------------------------
1 |
2 | This example demonstrates how to create a supervisor agent using Claude 3.7 Sonnet, which is only available with inference profiles.
3 |
4 | ## Requirements
5 |
6 | | Name | Version |
7 | |------|---------|
8 | | [terraform](#requirement\_terraform) | >= 1.0.7 |
9 | | [aws](#requirement\_aws) | ~>5.0 |
10 | | [awscc](#requirement\_awscc) | >= 1.35.0 |
11 | | [opensearch](#requirement\_opensearch) | = 2.2.0 |
12 | | [random](#requirement\_random) | >= 3.6.0 |
13 | | [time](#requirement\_time) | ~> 0.6 |
14 |
15 | ## Providers
16 |
17 | | Name | Version |
18 | |------|---------|
19 | | [aws](#provider\_aws) | ~>5.0 |
20 |
21 | ## Modules
22 |
23 | | Name | Source | Version |
24 | |------|--------|---------|
25 | | [agent\_supervisor](#module\_agent\_supervisor) | ../.. | n/a |
26 |
27 | ## Resources
28 |
29 | | Name | Type |
30 | |------|------|
31 | | [aws_caller_identity.current](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/caller_identity) | data source |
32 | | [aws_region.current](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/region) | data source |
33 |
34 | ## Inputs
35 |
36 | No inputs.
37 |
38 | ## Outputs
39 |
40 | No outputs.
41 |
--------------------------------------------------------------------------------
/examples/agent-with-inference-profile/main.tf:
--------------------------------------------------------------------------------
#####################################################################################
# Terraform module examples are meant to show an _example_ on how to use a module
# per use-case. The code below should not be copied directly but referenced in order
# to build your own root module that invokes this module
#####################################################################################

# Get current AWS account ID
data "aws_caller_identity" "current" {}

# Get current AWS region
data "aws_region" "current" {}

# Convenience locals used to assemble the inference-profile ARN below.
locals {
  region     = data.aws_region.current.name
  account_id = data.aws_caller_identity.current.account_id
}

# Supervisor agent backed by an application inference profile. Claude 3.7
# Sonnet is only available through inference profiles, hence this setup.
module "agent_supervisor" {
  source = "../.."

  create_agent      = false
  create_supervisor = true
  supervisor_name   = "SupervisorTF"

  # The model source points at the `us.*` cross-region inference profile for
  # Claude 3.7 Sonnet in the current account/region (derived via the data
  # sources above so the example works in any account).
  create_app_inference_profile       = true
  app_inference_profile_name         = "Claude37SonnetProfile"
  app_inference_profile_description  = "Inference profile for Claude 3.7 Sonnet"
  app_inference_profile_model_source = "arn:aws:bedrock:${local.region}:${local.account_id}:inference-profile/us.anthropic.claude-3-7-sonnet-20250219-v1:0"

  supervisor_instruction = "You are a supervisor who can provide detailed information about cars and trucks to an agent. You can also provide feedback to the agent."

}
33 |
--------------------------------------------------------------------------------
/examples/agent-with-inference-profile/providers.tf:
--------------------------------------------------------------------------------
# Terraform and provider version requirements for this example. The
# constraints mirror the root module so `terraform init` resolves a single
# compatible provider set for both.
terraform {
  required_version = ">= 1.0.7"
  required_providers {
    aws = {
      source  = "hashicorp/aws"
      version = "~>5.0"
    }
    awscc = {
      source  = "hashicorp/awscc"
      version = ">= 1.35.0"
    }
    # Required by the root module's provider contract even though this
    # example creates no OpenSearch collection.
    opensearch = {
      source  = "opensearch-project/opensearch"
      version = "= 2.2.0"
    }
    time = {
      source  = "hashicorp/time"
      version = "~> 0.6"
    }
    random = {
      source  = "hashicorp/random"
      version = ">= 3.6.0"
    }
  }
}

# Region is configurable like in the other examples; the default preserves the
# previously hard-coded us-east-1, so behavior is unchanged by default.
variable "region" {
  type        = string
  description = "AWS region to deploy the resources"
  default     = "us-east-1"
}

provider "aws" {
  region = var.region
}

provider "awscc" {
  region = var.region
}

# No OpenSearch collection is used in this example, so the provider is given a
# placeholder URL with health checks disabled to keep it inert.
provider "opensearch" {
  url         = "n/a"
  healthcheck = false
}
39 |
--------------------------------------------------------------------------------
/examples/agent-with-knowledge-base/.header.md:
--------------------------------------------------------------------------------
1 | This example shows how to deploy an extended Bedrock agent, creating a default OpenSearch Serverless knowledge base with an S3 data source.
2 |
--------------------------------------------------------------------------------
/examples/agent-with-knowledge-base/README.md:
--------------------------------------------------------------------------------
1 |
2 | This example shows how to deploy an extended Bedrock agent, creating a default OpenSearch Serverless knowledge base with an S3 data source.
3 |
4 | ## Requirements
5 |
6 | | Name | Version |
7 | |------|---------|
8 | | [terraform](#requirement\_terraform) | >= 1.0.7 |
9 | | [aws](#requirement\_aws) | ~>5.0 |
10 | | [awscc](#requirement\_awscc) | >= 1.35.0 |
11 | | [opensearch](#requirement\_opensearch) | = 2.2.0 |
12 | | [random](#requirement\_random) | >= 3.6.0 |
13 | | [time](#requirement\_time) | ~> 0.6 |
14 |
15 | ## Providers
16 |
17 | No providers.
18 |
19 | ## Modules
20 |
21 | | Name | Source | Version |
22 | |------|--------|---------|
23 | | [bedrock](#module\_bedrock) | ../.. | n/a |
24 |
25 | ## Resources
26 |
27 | No resources.
28 |
29 | ## Inputs
30 |
31 | | Name | Description | Type | Default | Required |
32 | |------|-------------|------|---------|:--------:|
33 | | [region](#input\_region) | AWS region to deploy the resources | `string` | `"us-east-1"` | no |
34 |
35 | ## Outputs
36 |
37 | No outputs.
38 |
--------------------------------------------------------------------------------
/examples/agent-with-knowledge-base/main.tf:
--------------------------------------------------------------------------------
#####################################################################################
# Terraform module examples are meant to show an _example_ on how to use a module
# per use-case. The code below should not be copied directly but referenced in order
# to build your own root module that invokes this module
#####################################################################################

# Agent plus a default OpenSearch Serverless knowledge base fed by an S3
# data source created by the module.
module "bedrock" {
  source                = "../.." # local example
  create_default_kb     = true
  create_s3_data_source = true
  foundation_model      = "anthropic.claude-v2"
  instruction           = "You are an automotive assistant who can provide detailed information about cars to a customer."
}
--------------------------------------------------------------------------------
/examples/agent-with-knowledge-base/providers.tf:
--------------------------------------------------------------------------------
# Terraform and provider version requirements for this example. The
# constraints mirror the root module so `terraform init` resolves a single
# compatible provider set for both.
terraform {
  required_version = ">= 1.0.7"
  required_providers {
    aws = {
      source  = "hashicorp/aws"
      version = "~>5.0"
    }
    awscc = {
      source  = "hashicorp/awscc"
      version = ">= 1.35.0"
    }
    opensearch = {
      source  = "opensearch-project/opensearch"
      version = "= 2.2.0"
    }
    time = {
      source  = "hashicorp/time"
      version = "~> 0.6"
    }
    random = {
      source  = "hashicorp/random"
      version = ">= 3.6.0"
    }
  }
}

# AWS region used by both the aws and awscc providers below.
variable "region" {
  type        = string
  description = "AWS region to deploy the resources"
  default     = "us-east-1"
}

provider "aws" {
  region = var.region
}

provider "awscc" {
  region = var.region
}

# Points the OpenSearch provider at the serverless collection created by the
# module, so index-level resources can be managed against it. Health checks
# are disabled because the endpoint is unknown until the collection exists.
provider "opensearch" {
  url         = module.bedrock.default_collection.collection_endpoint
  healthcheck = false
}
--------------------------------------------------------------------------------
/examples/application-inference-profile/.header.md:
--------------------------------------------------------------------------------
1 | This example shows how to deploy an application inference profile.
2 |
--------------------------------------------------------------------------------
/examples/application-inference-profile/README.md:
--------------------------------------------------------------------------------
1 |
2 | This example shows how to deploy an application inference profile.
3 |
4 | ## Requirements
5 |
6 | | Name | Version |
7 | |------|---------|
8 | | [terraform](#requirement\_terraform) | >= 1.0.7 |
9 | | [aws](#requirement\_aws) | ~>5.0 |
10 | | [awscc](#requirement\_awscc) | >= 1.35.0 |
11 | | [opensearch](#requirement\_opensearch) | = 2.2.0 |
12 | | [random](#requirement\_random) | >= 3.6.0 |
13 | | [time](#requirement\_time) | ~> 0.6 |
14 |
15 | ## Providers
16 |
17 | | Name | Version |
18 | |------|---------|
19 | | [aws](#provider\_aws) | ~>5.0 |
20 |
21 | ## Modules
22 |
23 | | Name | Source | Version |
24 | |------|--------|---------|
25 | | [bedrock](#module\_bedrock) | ../.. | n/a |
26 |
27 | ## Resources
28 |
29 | | Name | Type |
30 | |------|------|
31 | | [aws_caller_identity.current](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/caller_identity) | data source |
32 | | [aws_region.current](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/region) | data source |
33 |
34 | ## Inputs
35 |
36 | | Name | Description | Type | Default | Required |
37 | |------|-------------|------|---------|:--------:|
38 | | [region](#input\_region) | AWS region to deploy the resources | `string` | `"us-east-1"` | no |
39 |
40 | ## Outputs
41 |
42 | No outputs.
43 |
--------------------------------------------------------------------------------
/examples/application-inference-profile/main.tf:
--------------------------------------------------------------------------------
#####################################################################################
# Terraform module examples are meant to show an _example_ on how to use a module
# per use-case. The code below should not be copied directly but referenced in order
# to build your own root module that invokes this module
#####################################################################################

# Get current AWS account ID
data "aws_caller_identity" "current" {}

# Get current AWS region
data "aws_region" "current" {}

# Creates only an application inference profile (create_agent = false). The
# model source is the foundation-model ARN for Claude 3 Sonnet in the current
# region (foundation-model ARNs have an empty account-id segment).
module "bedrock" {
  source       = "../.." # local example
  create_agent = false

  # Application Inference Profile
  create_app_inference_profile       = true
  app_inference_profile_model_source = "arn:aws:bedrock:${data.aws_region.current.name}::foundation-model/anthropic.claude-3-sonnet-20240229-v1:0"
}
--------------------------------------------------------------------------------
/examples/application-inference-profile/providers.tf:
--------------------------------------------------------------------------------
# Terraform and provider version requirements for this example. The
# constraints mirror the root module so `terraform init` resolves a single
# compatible provider set for both.
terraform {
  required_version = ">= 1.0.7"
  required_providers {
    aws = {
      source  = "hashicorp/aws"
      version = "~>5.0"
    }
    awscc = {
      source  = "hashicorp/awscc"
      version = ">= 1.35.0"
    }
    # Required by the root module's provider contract even though this
    # example creates no OpenSearch collection.
    opensearch = {
      source  = "opensearch-project/opensearch"
      version = "= 2.2.0"
    }
    time = {
      source  = "hashicorp/time"
      version = "~> 0.6"
    }
    random = {
      source  = "hashicorp/random"
      version = ">= 3.6.0"
    }
  }
}

# AWS region used by both the aws and awscc providers below.
variable "region" {
  type        = string
  description = "AWS region to deploy the resources"
  default     = "us-east-1"
}

provider "aws" {
  region = var.region
}

provider "awscc" {
  region = var.region
}

# No OpenSearch collection is used in this example, so the provider is given a
# placeholder URL with health checks disabled to keep it inert.
provider "opensearch" {
  url         = "n/a"
  healthcheck = false
}
45 |
--------------------------------------------------------------------------------
/examples/bda/.header.md:
--------------------------------------------------------------------------------
1 | This example shows how to deploy a Bedrock data automation (BDA) project and blueprint.
2 |
--------------------------------------------------------------------------------
/examples/bda/README.md:
--------------------------------------------------------------------------------
1 |
2 | This example shows how to deploy a Bedrock data automation (BDA) project and blueprint.
3 |
4 | ## Requirements
5 |
6 | | Name | Version |
7 | |------|---------|
8 | | [terraform](#requirement\_terraform) | >= 1.0.7 |
9 | | [aws](#requirement\_aws) | ~>5.0 |
10 | | [awscc](#requirement\_awscc) | >= 1.35.0 |
11 | | [opensearch](#requirement\_opensearch) | = 2.2.0 |
12 | | [random](#requirement\_random) | >= 3.6.0 |
13 | | [time](#requirement\_time) | ~> 0.6 |
14 |
15 | ## Providers
16 |
17 | No providers.
18 |
19 | ## Modules
20 |
21 | | Name | Source | Version |
22 | |------|--------|---------|
23 | | [bda](#module\_bda) | ../.. | n/a |
24 | | [blueprint](#module\_blueprint) | ../.. | n/a |
25 |
26 | ## Resources
27 |
28 | No resources.
29 |
30 | ## Inputs
31 |
32 | | Name | Description | Type | Default | Required |
33 | |------|-------------|------|---------|:--------:|
34 | | [region](#input\_region) | AWS region to deploy the resources | `string` | `"us-east-1"` | no |
35 |
36 | ## Outputs
37 |
38 | No outputs.
39 |
--------------------------------------------------------------------------------
/examples/bda/main.tf:
--------------------------------------------------------------------------------
#####################################################################################
# Terraform module examples are meant to show an _example_ on how to use a module
# per use-case. The code below should not be copied directly but referenced in order
# to build your own root module that invokes this module
#####################################################################################

# Bedrock Data Automation (BDA) project. Standard output extracts image
# bounding boxes, text/logo categories, and a generated image summary; the
# custom output applies the blueprint defined in the second module below.
module "bda" {
  source       = "../.." # local example
  create_agent = false

  # BDA project config
  create_bda = true
  bda_standard_output_configuration = {
    image = {
      extraction = {
        bounding_box = {
          state = "ENABLED"
        }
        category = {
          state = "ENABLED"
          types = ["TEXT_DETECTION", "LOGOS"]
        }
      }
      generative_field = {
        state = "ENABLED"
        types = ["IMAGE_SUMMARY"]
      }
    }
  }
  # Attach the custom blueprint created by the "blueprint" module invocation.
  bda_custom_output_config = [{
    blueprint_arn   = module.blueprint.bda_blueprint.blueprint_arn
    blueprint_stage = module.blueprint.bda_blueprint.blueprint_stage
  }]
}

# BDA blueprint: a JSON-Schema (draft-07) document telling BDA which fields to
# extract from advertisement images (product details, sentiment, style, etc.).
module "blueprint" {
  source       = "../.."
  create_agent = false

  # Blueprint config
  create_blueprint = true
  blueprint_schema = jsonencode({
    "$schema"   = "http://json-schema.org/draft-07/schema#"
    description = "This blueprint is to extract key information from advertisement images."
    class       = "advertisement image"
    type        = "object"
    # Reusable sub-schema referenced from properties via "$ref".
    definitions = {
      ProductDetails = {
        type = "object"
        properties = {
          product_category = {
            type          = "string"
            inferenceType = "explicit"
            instruction   = "The broad category or type of product being advertised, e.g., appliances, electronics, clothing, etc."
          }
          product_name = {
            type          = "string"
            inferenceType = "explicit"
            instruction   = "The specific name or model of the product being advertised, if visible in the image."
          }
          product_placement = {
            type          = "string"
            inferenceType = "explicit"
            instruction   = "How the product is positioned or placed within the advertisement image. Limit the field values to enum['Front and center', 'In the background', 'Held/used by a person', 'Others']"
          }
        }
      }
    }
    # Top-level fields BDA should extract for each image.
    properties = {
      product_details = {
        "$ref" = "#/definitions/ProductDetails"
      }
      image_sentiment = {
        type          = "string"
        inferenceType = "explicit"
        instruction   = "What is the overall sentiment of the image? Limit the field values to enum['Positive', 'Negative', 'Neutral']"
      }
      image_background = {
        type          = "string"
        inferenceType = "explicit"
        instruction   = "What is the background of the ad image? For example, 'Solid color', 'Natural landscape', 'Indoor', 'Urban', 'Abstract'"
      }
      image_style = {
        type          = "string"
        inferenceType = "explicit"
        instruction   = "Classify the image style of the ad. For example, 'Product image', 'Lifestyle', 'Portrait', 'Retro', 'Infographic', 'None of the above'"
      }
      image_humor = {
        type          = "boolean"
        inferenceType = "explicit"
        instruction   = "Does the advertisement use any humor or wit in its messaging?"
      }
      key_visuals = {
        type          = "array"
        inferenceType = "explicit"
        instruction   = "A list of key visual elements or objects present in the advertisement image, apart from the main product."
        items = {
          type = "string"
        }
      }
      ad_copy = {
        type          = "string"
        inferenceType = "explicit"
        instruction   = "Any text or copy present in the advertisement image, excluding the brand name and promotional offer."
      }
    }
  })
}
--------------------------------------------------------------------------------
/examples/bda/providers.tf:
--------------------------------------------------------------------------------
# Terraform and provider version requirements for this example. The
# constraints mirror the root module so `terraform init` resolves a single
# compatible provider set for both.
terraform {
  required_version = ">= 1.0.7"
  required_providers {
    aws = {
      source  = "hashicorp/aws"
      version = "~>5.0"
    }
    awscc = {
      source  = "hashicorp/awscc"
      version = ">= 1.35.0"
    }
    # Required by the root module's provider contract even though this
    # example creates no OpenSearch collection.
    opensearch = {
      source  = "opensearch-project/opensearch"
      version = "= 2.2.0"
    }
    time = {
      source  = "hashicorp/time"
      version = "~> 0.6"
    }
    random = {
      source  = "hashicorp/random"
      version = ">= 3.6.0"
    }
  }
}

# AWS region used by both the aws and awscc providers below.
variable "region" {
  type        = string
  description = "AWS region to deploy the resources"
  default     = "us-east-1"
}

provider "aws" {
  region = var.region
}

provider "awscc" {
  region = var.region
}

# No OpenSearch collection is used in this example, so the provider is given a
# placeholder URL with health checks disabled to keep it inert.
provider "opensearch" {
  url         = "n/a"
  healthcheck = false
}
45 |
--------------------------------------------------------------------------------
/examples/kendra-kb/.header.md:
--------------------------------------------------------------------------------
1 | This example shows how to deploy a Kendra GenAI Knowledge Base without an agent.
2 |
--------------------------------------------------------------------------------
/examples/kendra-kb/README.md:
--------------------------------------------------------------------------------
1 |
2 | This example shows how to deploy a Kendra GenAI Knowledge Base without an agent.
3 |
4 | ## Requirements
5 |
6 | | Name | Version |
7 | |------|---------|
8 | | [terraform](#requirement\_terraform) | >= 1.0.7 |
9 | | [aws](#requirement\_aws) | ~>5.0 |
10 | | [awscc](#requirement\_awscc) | >= 1.35.0 |
11 | | [opensearch](#requirement\_opensearch) | = 2.2.0 |
12 | | [random](#requirement\_random) | >= 3.6.0 |
13 | | [time](#requirement\_time) | ~> 0.6 |
14 |
15 | ## Providers
16 |
17 | No providers.
18 |
19 | ## Modules
20 |
21 | | Name | Source | Version |
22 | |------|--------|---------|
23 | | [bedrock](#module\_bedrock) | ../.. | n/a |
24 |
25 | ## Resources
26 |
27 | No resources.
28 |
29 | ## Inputs
30 |
31 | | Name | Description | Type | Default | Required |
32 | |------|-------------|------|---------|:--------:|
33 | | [region](#input\_region) | AWS region to deploy the resources | `string` | `"us-east-1"` | no |
34 |
35 | ## Outputs
36 |
37 | No outputs.
38 |
--------------------------------------------------------------------------------
/examples/kendra-kb/main.tf:
--------------------------------------------------------------------------------
#####################################################################################
# Terraform module examples are meant to show an _example_ on how to use a module
# per use-case. The code below should not be copied directly but referenced in order
# to build your own root module that invokes this module
#####################################################################################

# Kendra GenAI knowledge base with an S3 data source; no Bedrock agent is
# created in this example.
module "bedrock" {
  source = "../.." # local example

  create_agent                 = false
  create_kendra_config         = true
  create_kendra_s3_data_source = true
}
--------------------------------------------------------------------------------
/examples/kendra-kb/providers.tf:
--------------------------------------------------------------------------------
# Terraform and provider version requirements for this example. The
# constraints mirror the root module so `terraform init` resolves a single
# compatible provider set for both.
terraform {
  required_version = ">= 1.0.7"
  required_providers {
    aws = {
      source  = "hashicorp/aws"
      version = "~>5.0"
    }
    awscc = {
      source  = "hashicorp/awscc"
      version = ">= 1.35.0"
    }
    # Required by the root module's provider contract even though this
    # example creates no OpenSearch collection.
    opensearch = {
      source  = "opensearch-project/opensearch"
      version = "= 2.2.0"
    }
    time = {
      source  = "hashicorp/time"
      version = "~> 0.6"
    }
    random = {
      source  = "hashicorp/random"
      version = ">= 3.6.0"
    }
  }
}

# AWS region used by both the aws and awscc providers below.
variable "region" {
  type        = string
  description = "AWS region to deploy the resources"
  default     = "us-east-1"
}

provider "aws" {
  region = var.region
}

provider "awscc" {
  region = var.region
}

# No OpenSearch collection is used in this example, so the provider is given a
# placeholder URL with health checks disabled to keep it inert.
provider "opensearch" {
  url         = "n/a"
  healthcheck = false
}
--------------------------------------------------------------------------------
/examples/knowledge-base-only/.header.md:
--------------------------------------------------------------------------------
1 | This example shows how to deploy a Knowledge Base, creating a default OpenSearch Serverless vector store with an S3 data source.
2 |
--------------------------------------------------------------------------------
/examples/knowledge-base-only/README.md:
--------------------------------------------------------------------------------
1 |
2 | This example shows how to deploy a Knowledge Base, creating a default OpenSearch Serverless vector store with an S3 data source.
3 |
4 | ## Requirements
5 |
6 | | Name | Version |
7 | |------|---------|
8 | | [terraform](#requirement\_terraform) | >= 1.0.7 |
9 | | [aws](#requirement\_aws) | ~>5.0 |
10 | | [awscc](#requirement\_awscc) | >= 1.35.0 |
11 | | [opensearch](#requirement\_opensearch) | = 2.2.0 |
12 | | [random](#requirement\_random) | >= 3.6.0 |
13 | | [time](#requirement\_time) | ~> 0.6 |
14 |
15 | ## Providers
16 |
17 | No providers.
18 |
19 | ## Modules
20 |
21 | | Name | Source | Version |
22 | |------|--------|---------|
23 | | [bedrock](#module\_bedrock) | ../.. | n/a |
24 |
25 | ## Resources
26 |
27 | No resources.
28 |
29 | ## Inputs
30 |
31 | | Name | Description | Type | Default | Required |
32 | |------|-------------|------|---------|:--------:|
33 | | [region](#input\_region) | AWS region to deploy the resources | `string` | `"us-east-1"` | no |
34 |
35 | ## Outputs
36 |
37 | No outputs.
38 |
--------------------------------------------------------------------------------
/examples/knowledge-base-only/main.tf:
--------------------------------------------------------------------------------
1 | #####################################################################################
2 | # Terraform module examples are meant to show an _example_ on how to use a module
3 | # per use-case. The code below should not be copied directly but referenced in order
4 | # to build your own root module that invokes this module
5 | #####################################################################################
6 |
7 | module "bedrock" {
8 | source = "../.." # local example
9 | create_default_kb = true
10 | create_agent = false
11 | create_s3_data_source = true
12 | }
--------------------------------------------------------------------------------
/examples/knowledge-base-only/providers.tf:
--------------------------------------------------------------------------------
1 | terraform {
2 | required_version = ">= 1.0.7"
3 | required_providers {
4 | aws = {
5 | source = "hashicorp/aws"
6 | version = "~>5.0"
7 | }
8 | awscc = {
9 | source = "hashicorp/awscc"
10 | version = ">= 1.35.0"
11 | }
12 | opensearch = {
13 | source = "opensearch-project/opensearch"
14 | version = "= 2.2.0"
15 | }
16 | time = {
17 | source = "hashicorp/time"
18 | version = "~> 0.6"
19 | }
20 | random = {
21 | source = "hashicorp/random"
22 | version = ">= 3.6.0"
23 | }
24 | }
25 | }
26 |
27 | variable "region" {
28 | type = string
29 | description = "AWS region to deploy the resources"
30 | default = "us-east-1"
31 | }
32 |
33 | provider "aws" {
34 | region = var.region
35 | }
36 |
37 | provider "awscc" {
38 | region = var.region
39 | }
40 |
41 | provider "opensearch" {
42 | url = module.bedrock.default_collection.collection_endpoint
43 | healthcheck = false
44 | }
--------------------------------------------------------------------------------
/examples/prompt-management/.header.md:
--------------------------------------------------------------------------------
1 | This example shows how to deploy a basic Bedrock prompt with version.
2 |
--------------------------------------------------------------------------------
/examples/prompt-management/README.md:
--------------------------------------------------------------------------------
1 |
2 | This example shows how to deploy a basic Bedrock prompt with version.
3 |
4 | ## Requirements
5 |
6 | | Name | Version |
7 | |------|---------|
8 | | [terraform](#requirement\_terraform) | >= 1.0.7 |
9 | | [aws](#requirement\_aws) | ~>5.0 |
10 | | [awscc](#requirement\_awscc) | >= 1.0.0 |
11 | | [opensearch](#requirement\_opensearch) | = 2.2.0 |
12 | | [random](#requirement\_random) | >= 3.6.0 |
13 | | [time](#requirement\_time) | ~> 0.6 |
14 |
15 | ## Providers
16 |
17 | No providers.
18 |
19 | ## Modules
20 |
21 | | Name | Source | Version |
22 | |------|--------|---------|
23 | | [bedrock](#module\_bedrock) | ../.. | n/a |
24 |
25 | ## Resources
26 |
27 | No resources.
28 |
29 | ## Inputs
30 |
31 | | Name | Description | Type | Default | Required |
32 | |------|-------------|------|---------|:--------:|
33 | | [region](#input\_region) | AWS region to deploy the resources | `string` | `"us-east-1"` | no |
34 |
35 | ## Outputs
36 |
37 | No outputs.
38 |
--------------------------------------------------------------------------------
/examples/prompt-management/main.tf:
--------------------------------------------------------------------------------
1 | #####################################################################################
2 | # Terraform module examples are meant to show an _example_ on how to use a module
3 | # per use-case. The code below should not be copied directly but referenced in order
4 | # to build your own root module that invokes this module
5 | #####################################################################################
6 |
7 | module "bedrock" {
8 |   source = "../.." # local example
9 |   create_agent = false
10 | 
11 |   # Prompt Management
12 |   prompt_name = "prompt"
13 |   default_variant = "variant-example"
14 |   create_prompt = true
15 |   create_prompt_version = true
16 |   prompt_version_description = "Example prompt version"
17 |   variants_list = [
18 |     {
19 |       name          = "variant-example"
20 |       template_type = "TEXT"
21 |       model_id      = "amazon.titan-text-express-v1"
22 |       inference_configuration = {
23 |         text = {
24 |           temperature    = 1
25 |           top_p          = 0.9900000095367432
26 |           max_tokens     = 300
27 |           stop_sequences = ["User:"]
28 |           top_k          = 250
29 |         }
30 |       }
31 |       template_configuration = {
32 |         text = {
33 |           input_variables = [
34 |             {
35 |               name = "topic"
36 |             }
37 |           ]
38 |           text = "Make me a playlist about the following topic: {{topic}}." # fixed: template must reference the declared input variable ("topic"), not undeclared {{genre}}/{{number}}
39 |         }
40 |       }
41 |     }
42 | 
43 |   ]
44 | 
45 | }
--------------------------------------------------------------------------------
/examples/prompt-management/providers.tf:
--------------------------------------------------------------------------------
1 | terraform {
2 | required_version = ">= 1.0.7"
3 | required_providers {
4 | aws = {
5 | source = "hashicorp/aws"
6 | version = "~>5.0"
7 | }
8 | awscc = {
9 | source = "hashicorp/awscc"
10 | version = ">= 1.0.0"
11 | }
12 | opensearch = {
13 | source = "opensearch-project/opensearch"
14 | version = "= 2.2.0"
15 | }
16 | time = {
17 | source = "hashicorp/time"
18 | version = "~> 0.6"
19 | }
20 | random = {
21 | source = "hashicorp/random"
22 | version = ">= 3.6.0"
23 | }
24 | }
25 | }
26 |
27 | variable "region" {
28 | type = string
29 | description = "AWS region to deploy the resources"
30 | default = "us-east-1"
31 | }
32 |
33 | provider "aws" {
34 | region = var.region
35 | }
36 |
37 | provider "awscc" {
38 | region = var.region
39 | }
40 |
41 | provider "opensearch" {
42 | url = "n/a"
43 | healthcheck = false
44 | }
45 |
--------------------------------------------------------------------------------
/iam.tf:
--------------------------------------------------------------------------------
1 | # – IAM –
2 | locals {
3 | create_kb_role = var.kb_role_arn == null && local.create_kb
4 | kendra_index_id = var.create_kendra_config == true ? (var.kendra_index_id != null ? var.kendra_index_id : awscc_kendra_index.genai_kendra_index[0].id) : null
5 | kendra_data_source_bucket_arn = var.create_kendra_s3_data_source ? (var.kb_s3_data_source != null ? var.kb_s3_data_source : awscc_s3_bucket.s3_data_source[0].arn) : null
6 | action_group_names = concat(var.action_group_lambda_names_list, [var.lambda_action_group_executor])
7 | agent_role_name = var.agent_resource_role_arn != null ? split("/", var.agent_resource_role_arn)[1] : ((var.create_agent || var.create_supervisor) ? aws_iam_role.agent_role[0].name : null)
8 | }
9 |
10 | resource "aws_iam_role" "agent_role" {
11 | count = var.agent_resource_role_arn == null && (var.create_agent || var.create_supervisor) ? 1 : 0
12 | assume_role_policy = data.aws_iam_policy_document.agent_trust[0].json
13 | name_prefix = var.name_prefix
14 | permissions_boundary = var.permissions_boundary_arn
15 | }
16 |
17 | resource "aws_iam_role_policy" "agent_policy" {
18 | count = var.agent_resource_role_arn == null && (var.create_agent || var.create_supervisor) ? 1 : 0
19 | policy = data.aws_iam_policy_document.agent_permissions[0].json
20 | role = local.agent_role_name
21 | }
22 |
23 | resource "aws_iam_role_policy" "agent_alias_policy" {
24 | count = var.agent_resource_role_arn == null && (var.create_agent_alias || var.create_supervisor) ? 1 : 0
25 | policy = data.aws_iam_policy_document.agent_alias_permissions[0].json
26 | role = local.agent_role_name
27 | }
28 |
29 | resource "aws_iam_role_policy" "kb_policy" {
30 | count = var.agent_resource_role_arn == null && local.create_kb && var.create_agent ? 1 : 0
31 | policy = data.aws_iam_policy_document.knowledge_base_permissions[0].json
32 | role = local.agent_role_name
33 | }
34 |
35 | resource "aws_iam_role_policy" "app_inference_profile_policy" {
36 | count = var.create_app_inference_profile ? 1 : 0
37 | policy = data.aws_iam_policy_document.app_inference_profile_permission[0].json
38 | role = local.agent_role_name != null ? local.agent_role_name : aws_iam_role.application_inference_profile_role[0].id
39 | }
40 |
41 | # Define the IAM role for Amazon Bedrock Knowledge Base
42 | resource "aws_iam_role" "bedrock_knowledge_base_role" {
43 | count = var.kb_role_arn != null || (local.create_kb == false && var.create_sql_config == false) ? 0 : 1
44 | name = "AmazonBedrockExecutionRoleForKnowledgeBase-${random_string.solution_prefix.result}"
45 |
46 | assume_role_policy = jsonencode({
47 | "Version" : "2012-10-17",
48 | "Statement" : [
49 | {
50 | "Effect" : "Allow",
51 | "Principal" : {
52 | "Service" : "bedrock.amazonaws.com"
53 | },
54 | "Action" : "sts:AssumeRole"
55 | }
56 | ]
57 | })
58 | permissions_boundary = var.permissions_boundary_arn
59 | }
60 |
61 | # Attach a policy to allow necessary permissions for the Bedrock Knowledge Base
62 | resource "aws_iam_policy" "bedrock_knowledge_base_policy" {
63 | count = var.kb_role_arn != null || var.create_default_kb == false || var.create_kendra_config == true ? 0 : 1
64 | name = "AmazonBedrockKnowledgeBasePolicy-${random_string.solution_prefix.result}"
65 |
66 | policy = jsonencode({
67 | "Version" : "2012-10-17",
68 | "Statement" : [
69 | {
70 | "Effect" : "Allow",
71 | "Action" : [
72 | "aoss:APIAccessAll"
73 | ],
74 | "Resource" : module.oss_knowledgebase[0].opensearch_serverless_collection.arn
75 | },
76 | {
77 | "Effect" : "Allow",
78 | "Action" : [
79 | "bedrock:InvokeModel",
80 | ],
81 | "Resource" : var.kb_embedding_model_arn
82 | },
83 | {
84 | "Effect" : "Allow",
85 | "Action" : [
86 | "bedrock:ListFoundationModels",
87 | "bedrock:ListCustomModels"
88 | ],
89 | "Resource" : "*"
90 | },
91 | ]
92 | })
93 | }
94 |
95 | resource "aws_iam_policy" "bedrock_knowledge_base_policy_s3" {
96 | count = var.kb_role_arn != null || local.create_kb == false || var.create_s3_data_source == false ? 0 : 1
97 | name = "AmazonBedrockKnowledgeBasePolicyS3DataSource-${random_string.solution_prefix.result}"
98 |
99 | policy = jsonencode({
100 | "Version" : "2012-10-17",
101 | "Statement" : [
102 | {
103 | "Effect" : "Allow",
104 | "Action" : [
105 | "s3:ListBucket",
106 | ],
107 | "Resource" : var.kb_s3_data_source == null ? awscc_s3_bucket.s3_data_source[0].arn : var.kb_s3_data_source
108 | },
109 | {
110 | "Effect" : "Allow",
111 | "Action" : [
112 | "s3:GetObject",
113 | ],
114 | "Resource" : var.kb_s3_data_source == null ? "${awscc_s3_bucket.s3_data_source[0].arn}/*" : "${var.kb_s3_data_source}/*"
115 | }
116 | ]
117 | })
118 | }
119 |
120 | resource "aws_iam_policy" "bedrock_kb_s3_decryption_policy" {
121 | count = local.create_kb_role && var.kb_s3_data_source_kms_arn != null && var.create_s3_data_source ? 1 : 0
122 | name = "AmazonBedrockS3KMSPolicyForKnowledgeBase_${random_string.solution_prefix.result}"
123 |
124 | policy = jsonencode({
125 | "Version" : "2012-10-17",
126 | "Statement" : [
127 | {
128 | "Effect" : "Allow",
129 | "Action" : "kms:Decrypt",
130 | "Resource" : var.kb_s3_data_source_kms_arn
131 | "Condition" : {
132 | "StringEquals" : {
133 | "kms:ViaService" : ["s3.${data.aws_region.current.name}.amazonaws.com"]
134 | }
135 | }
136 | }
137 | ]
138 | })
139 | }
140 |
141 | # Attach the policies to the role
142 | resource "aws_iam_role_policy_attachment" "bedrock_knowledge_base_policy_attachment" {
143 | count = var.kb_role_arn != null || local.create_kb == false || var.create_kendra_config == true ? 0 : 1
144 | role = aws_iam_role.bedrock_knowledge_base_role[0].name
145 | policy_arn = aws_iam_policy.bedrock_knowledge_base_policy[0].arn
146 | }
147 |
148 | resource "aws_iam_role_policy_attachment" "bedrock_knowledge_base_kendra_policy_attachment" {
149 | count = var.kb_role_arn != null || var.create_kendra_config == false ? 0 : 1
150 | role = aws_iam_role.bedrock_knowledge_base_role[0].name
151 | policy_arn = aws_iam_policy.bedrock_kb_kendra[0].arn
152 | }
153 |
154 | resource "aws_iam_role_policy_attachment" "bedrock_knowledge_base_sql_policy_attachment" {
155 | count = var.kb_role_arn != null || var.create_sql_config == false ? 0 : 1
156 | role = aws_iam_role.bedrock_knowledge_base_role[0].name
157 | policy_arn = aws_iam_policy.bedrock_kb_sql[0].arn
158 | }
159 |
160 | resource "aws_iam_role_policy_attachment" "bedrock_knowledge_base_sql_serverless_policy_attachment" {
161 | count = var.kb_role_arn != null || var.create_sql_config == false || var.redshift_query_engine_type != "SERVERLESS" ? 0 : 1
162 | role = aws_iam_role.bedrock_knowledge_base_role[0].name
163 | policy_arn = aws_iam_policy.bedrock_kb_sql_serverless[0].arn
164 | }
165 |
166 | resource "aws_iam_role_policy_attachment" "bedrock_knowledge_base_sql_provision_policy_attachment" {
167 | count = var.kb_role_arn != null || var.create_sql_config == false || var.redshift_query_engine_type != "PROVISIONED" ? 0 : 1
168 | role = aws_iam_role.bedrock_knowledge_base_role[0].name
169 | policy_arn = aws_iam_policy.bedrock_kb_sql_provisioned[0].arn
170 | }
171 |
172 | resource "aws_iam_role_policy_attachment" "bedrock_kb_s3_decryption_policy_attachment" {
173 | count = local.create_kb_role && var.kb_s3_data_source_kms_arn != null && var.create_s3_data_source ? 1 : 0
174 | role = aws_iam_role.bedrock_knowledge_base_role[0].name
175 | policy_arn = aws_iam_policy.bedrock_kb_s3_decryption_policy[0].arn
176 | }
177 |
178 | resource "aws_iam_role_policy_attachment" "bedrock_knowledge_base_policy_s3_attachment" {
179 | count = var.kb_role_arn != null || local.create_kb == false || var.create_s3_data_source == false ? 0 : 1
180 | role = aws_iam_role.bedrock_knowledge_base_role[0].name
181 | policy_arn = aws_iam_policy.bedrock_knowledge_base_policy_s3[0].arn
182 | }
183 |
184 | resource "aws_iam_role_policy" "bedrock_kb_oss" {
185 | count = var.kb_role_arn != null || var.create_default_kb == false ? 0 : 1
186 | name = "AmazonBedrockOSSPolicyForKnowledgeBase_${var.kb_name}"
187 | role = aws_iam_role.bedrock_knowledge_base_role[count.index].name
188 | policy = jsonencode({
189 | Version = "2012-10-17"
190 | Statement = [
191 | {
192 | Action = ["aoss:*"]
193 | Effect = "Allow"
194 | Resource = ["arn:aws:aoss:${local.region}:${local.account_id}:*/*"]
195 | }
196 | ]
197 | })
198 | }
199 |
200 | # Guardrails Policies
201 |
202 | resource "aws_iam_role_policy" "guardrail_policy" {
203 | count = var.create_guardrail && var.create_agent ? 1 : 0
204 | policy = jsonencode({
205 | Version = "2012-10-17"
206 | Statement = [
207 | {
208 | Effect = "Allow"
209 | Action = [
210 | "bedrock:ApplyGuardrail",
211 | ]
212 | Resource = awscc_bedrock_agent.bedrock_agent[0].guardrail_configuration.guardrail_identifier
213 | }
214 | ]
215 | })
216 | role = aws_iam_role.agent_role[0].id
217 | }
218 |
219 | resource "aws_iam_role_policy" "guardrail_policy_supervisor_agent" {
220 | count = var.create_collaborator && var.create_supervisor_guardrail ? 1 : 0
221 | policy = jsonencode({
222 | Version = "2012-10-17"
223 | Statement = [
224 | {
225 | Effect = "Allow"
226 | Action = [
227 | "bedrock:ApplyGuardrail",
228 | ]
229 | Resource = aws_bedrockagent_agent.agent_supervisor[0].guardrail_configuration[0].guardrail_identifier
230 | }
231 | ]
232 | })
233 | role = aws_iam_role.agent_role[0].id
234 | }
235 |
236 |
237 | # Action Group Policies
238 |
239 | resource "aws_lambda_permission" "allow_bedrock_agent" {
240 | count = var.create_ag ? length(local.action_group_names) : 0
241 | action = "lambda:InvokeFunction"
242 | function_name = local.action_group_names[count.index]
243 | principal = "bedrock.amazonaws.com"
244 | source_arn = awscc_bedrock_agent.bedrock_agent[0].agent_arn
245 | }
246 |
247 | resource "aws_iam_role_policy" "action_group_policy" {
248 |   count = var.create_ag ? 1 : 0
249 |   policy = jsonencode({
250 |     Version = "2012-10-17"
251 |     Statement = [
252 |       {
253 |         Effect = "Allow"
254 |         Action = "lambda:InvokeFunction" # fixed: "lambda:InvokeModel" is not a valid Lambda IAM action
255 |         Resource = concat([var.lambda_action_group_executor], var.action_group_lambda_arns_list)
256 |       }
257 |     ]
258 |   })
259 |   role = aws_iam_role.agent_role[0].id
260 | }
261 |
262 | # Application Inference Profile Policies
263 |
264 | # Define the IAM role for Application Inference Profile
265 | resource "aws_iam_role" "application_inference_profile_role" {
266 | count = var.create_app_inference_profile ? 1 : 0
267 | name = "ApplicationInferenceProfile-${random_string.solution_prefix.result}"
268 |
269 | assume_role_policy = jsonencode({
270 | "Version" : "2012-10-17",
271 | "Statement" : [
272 | {
273 | "Effect" : "Allow",
274 | "Principal" : {
275 | "Service" : "bedrock.amazonaws.com"
276 | },
277 | "Action" : "sts:AssumeRole"
278 | }
279 | ]
280 | })
281 | permissions_boundary = var.permissions_boundary_arn
282 | }
283 |
284 | resource "aws_iam_role_policy" "app_inference_profile_role_policy" {
285 | count = var.create_app_inference_profile ? 1 : 0
286 | policy = jsonencode({
287 | "Version": "2012-10-17",
288 | "Statement": [
289 | {
290 | "Effect": "Allow",
291 | "Action": [
292 | "bedrock:InvokeModel*",
293 | "bedrock:CreateInferenceProfile"
294 | ],
295 | "Resource": [
296 | "arn:aws:bedrock:*::foundation-model/*",
297 | "arn:aws:bedrock:*:*:inference-profile/*",
298 | "arn:aws:bedrock:*:*:application-inference-profile/*"
299 | ]
300 | },
301 | {
302 | "Effect": "Allow",
303 | "Action": [
304 | "bedrock:GetInferenceProfile",
305 | "bedrock:ListInferenceProfiles",
306 | "bedrock:DeleteInferenceProfile",
307 | "bedrock:TagResource",
308 | "bedrock:UntagResource",
309 | "bedrock:ListTagsForResource"
310 | ],
311 | "Resource": [
312 | "arn:aws:bedrock:*:*:inference-profile/*",
313 | "arn:aws:bedrock:*:*:application-inference-profile/*"
314 | ]
315 | }
316 | ]
317 | })
318 | role = aws_iam_role.application_inference_profile_role[0].id
319 | }
320 |
321 | # Custom model
322 |
323 | resource "aws_iam_role" "custom_model_role" {
324 | count = var.create_custom_model ? 1 : 0
325 | assume_role_policy = data.aws_iam_policy_document.custom_model_trust[0].json
326 | permissions_boundary = var.permissions_boundary_arn
327 | name_prefix = "CustomModelRole"
328 | }
329 |
330 | resource "aws_iam_role_policy" "custom_model_policy" {
331 |   count = var.create_custom_model ? 1 : 0
332 |   policy = jsonencode({
333 |     "Version": "2012-10-17",
334 |     "Statement": [
335 |       {
336 |         "Effect": "Allow",
337 |         "Action": [
338 |           "s3:GetObject",
339 |           "s3:PutObject",
340 |           "s3:ListBucket",
341 |           "kms:Decrypt"
342 |         ],
343 |         "Resource": [
344 |           "arn:aws:s3:::${var.custom_model_training_uri}",
345 |           "arn:aws:s3:::${var.custom_model_training_uri}/*",
346 |         ],
347 |         "Condition": {
348 |           "StringEquals": {
349 |             "aws:PrincipalAccount": local.account_id
350 |           }
351 |         }
352 |       },
353 |       {
354 |         "Effect": "Allow",
355 |         "Action": [
356 |           "s3:GetObject",
357 |           "s3:PutObject",
358 |           "s3:ListBucket",
359 |           "kms:Decrypt"
360 |         ],
361 |         "Resource": var.custom_model_output_uri == null ? "arn:aws:s3:::${awscc_s3_bucket.custom_model_output[0].id}" : "arn:aws:s3:::${var.custom_model_output_uri}", # fixed: bucket ARN must not carry a trailing "/" for s3:ListBucket to match
362 | 
363 |         "Condition": {
364 |           "StringEquals": {
365 |             "aws:PrincipalAccount": local.account_id
366 |           }
367 |         }
368 |       },
369 |       {
370 |         "Effect": "Allow",
371 |         "Action": [
372 |           "s3:GetObject",
373 |           "s3:PutObject",
374 |           "s3:ListBucket",
375 |           "kms:Decrypt"
376 |         ],
377 |         "Resource": var.custom_model_output_uri == null ? "arn:aws:s3:::${awscc_s3_bucket.custom_model_output[0].id}/*" : "arn:aws:s3:::${var.custom_model_output_uri}/*",
378 |         "Condition": {
379 |           "StringEquals": {
380 |             "aws:PrincipalAccount": local.account_id
381 |           }
382 |         }
383 |       },
384 |     ]
385 |   })
386 |   role = aws_iam_role.custom_model_role[0].id
387 | }
388 |
389 | # Kendra IAM
390 | resource "aws_iam_policy" "bedrock_kb_kendra" {
391 | count = var.kb_role_arn != null || var.create_kendra_config == false ? 0 : 1
392 | name = "AmazonBedrockKnowledgeBaseKendraIndexAccessStatement_${var.kendra_index_name}"
393 |
394 | policy = jsonencode({
395 | "Version" = "2012-10-17"
396 | "Statement" = [
397 | {
398 | "Action" = [
399 | "kendra:Retrieve",
400 | "kendra:DescribeIndex"
401 | ]
402 | "Effect" = "Allow"
403 | "Resource" = ["arn:aws:kendra:${local.region}:${local.account_id}:index/${local.kendra_index_id}"]
404 | }
405 | ]
406 | })
407 | }
408 |
409 | resource "awscc_iam_role" "kendra_index_role" {
410 | count = var.create_kendra_config && var.kendra_index_arn == null ? 1 : 0
411 | role_name = "kendra_index_role_${random_string.solution_prefix.result}"
412 | description = "Role assigned to the Kendra index"
413 | assume_role_policy_document = jsonencode({
414 | Version = "2012-10-17"
415 | Statement = [
416 | {
417 | Action = "sts:AssumeRole"
418 | Effect = "Allow"
419 | Principal = {
420 | Service = "kendra.amazonaws.com"
421 | }
422 | }
423 | ]
424 | })
425 | }
426 |
427 | resource "awscc_iam_role_policy" "kendra_role_policy" {
428 | count = var.create_kendra_config && var.kendra_index_arn == null ? 1 : 0
429 | policy_name = "kendra_role_policy"
430 | role_name = awscc_iam_role.kendra_index_role[0].id
431 |
432 | policy_document = jsonencode({
433 | Version = "2012-10-17"
434 | Statement = [
435 | {
436 | Effect = "Allow"
437 | Action = "cloudwatch:PutMetricData"
438 | Resource = "*"
439 | Condition = {
440 | "StringEquals" : {
441 | "cloudwatch:namespace" : "AWS/Kendra"
442 | }
443 | }
444 | },
445 | {
446 | Effect = "Allow"
447 | Action = "logs:DescribeLogGroups"
448 | Resource = "*"
449 | },
450 | {
451 | Effect = "Allow"
452 | Action = "logs:CreateLogGroup",
453 | Resource = "arn:aws:logs:${local.region}:${data.aws_caller_identity.current.account_id}:log-group:/aws/kendra/*"
454 | },
455 | {
456 | Effect = "Allow"
457 | Action = [
458 | "logs:DescribeLogStreams",
459 | "logs:CreateLogStream",
460 | "logs:PutLogEvents"
461 | ],
462 | Resource = "arn:aws:logs:${local.region}:${data.aws_caller_identity.current.account_id}:log-group:/aws/kendra/*:log-stream:*"
463 | }
464 | ]
465 | })
466 | }
467 |
468 |
469 | # Create IAM role for Kendra Data Source
470 | resource "awscc_iam_role" "kendra_s3_datasource_role" {
471 |   count = var.create_kendra_s3_data_source ? 1 : 0
472 |   assume_role_policy_document = jsonencode({
473 |     Version = "2012-10-17"
474 |     Statement = [
475 |       {
476 |         Action = "sts:AssumeRole"
477 |         Effect = "Allow"
478 |         Principal = {
479 |           Service = "kendra.amazonaws.com"
480 |         }
481 |       }
482 |     ]
483 |   })
484 |   description = "IAM role for Kendra Data Source"
485 |   path        = "/"
486 |   role_name   = "kendra-datasource-role-${random_string.solution_prefix.result}" # suffixed for uniqueness, consistent with kendra_index_role; a fixed name collides across deployments in one account
487 | 
488 |   policies = [
489 |     {
490 |       policy_name = "kendra-datasource-policy"
491 |       policy_document = jsonencode({
492 |         Version = "2012-10-17"
493 |         Statement = [
494 |           {
495 |             Effect = "Allow"
496 |             Action = [
497 |               "s3:GetObject",
498 |               "s3:ListBucket"
499 |             ]
500 |             Resource = [
501 |               local.kendra_data_source_bucket_arn,
502 |               "${local.kendra_data_source_bucket_arn}/*"
503 |             ]
504 |           },
505 |           {
506 |             Effect: "Allow",
507 |             Action: [
508 |               "kendra:BatchPutDocument",
509 |               "kendra:BatchDeleteDocument"
510 |             ],
511 |             Resource: "arn:aws:kendra:${local.region}:${local.account_id}:index/${local.kendra_index_id}"
512 |           }
513 |         ]
514 |       })
515 |     }
516 |   ]
517 | }
518 |
519 | # SQL Knowledge Base IAM
520 | resource "aws_iam_policy" "bedrock_kb_sql" {
521 | count = var.kb_role_arn != null || var.create_sql_config == false ? 0 : 1
522 | name = "AmazonBedrockKnowledgeBaseRedshiftStatement_${var.kb_name}"
523 |
524 | policy = jsonencode({
525 | "Version": "2012-10-17",
526 | "Statement": [
527 | {
528 | "Sid": "RedshiftDataAPIStatementPermissions",
529 | "Effect": "Allow",
530 | "Action": [
531 | "redshift-data:GetStatementResult",
532 | "redshift-data:DescribeStatement",
533 | "redshift-data:CancelStatement"
534 | ],
535 | "Resource": [
536 | "*"
537 | ],
538 | "Condition": {
539 | "StringEquals": {
540 | "redshift-data:statement-owner-iam-userid": "$${aws:userid}"
541 | }
542 | }
543 | },
544 | {
545 | "Sid": "SqlWorkbenchAccess",
546 | "Effect": "Allow",
547 | "Action": [
548 | "sqlworkbench:GetSqlRecommendations",
549 | "sqlworkbench:PutSqlGenerationContext",
550 | "sqlworkbench:GetSqlGenerationContext",
551 | "sqlworkbench:DeleteSqlGenerationContext"
552 | ],
553 | "Resource": "*"
554 | },
555 | {
556 | "Sid": "KbAccess",
557 | "Effect": "Allow",
558 | "Action": [
559 | "bedrock:GenerateQuery"
560 | ],
561 | "Resource": "*"
562 | }
563 | ]
564 | })
565 | }
566 |
567 |
568 | resource "aws_iam_policy" "bedrock_kb_sql_serverless" {
569 | count = var.kb_role_arn != null || var.create_sql_config == false || var.redshift_query_engine_type != "SERVERLESS" ? 0 : 1
570 | name = "AmazonBedrockKnowledgeBaseRedshiftServerlessStatement_${var.kb_name}"
571 |
572 | policy = jsonencode({
573 | "Version": "2012-10-17",
574 | "Statement": [
575 |
576 | {
577 | "Sid": "RedshiftDataAPIExecutePermissions",
578 | "Effect": "Allow",
579 | "Action": [
580 | "redshift-data:ExecuteStatement"
581 | ],
582 | "Resource": [
583 | "arn:aws:redshift-serverless:${local.region}:${local.account_id}:workgroup:${split("/", var.sql_kb_workgroup_arn)[1]}"
584 | ]
585 | },
586 | {
587 | "Sid": "RedshiftServerlessGetCredentials",
588 | "Effect": "Allow",
589 | "Action": "redshift-serverless:GetCredentials",
590 | "Resource": [
591 | "arn:aws:redshift-serverless:${local.region}:${local.account_id}:workgroup:${split("/", var.sql_kb_workgroup_arn)[1]}"
592 | ]
593 | }
594 | ]
595 | })
596 | }
597 |
598 |
599 | resource "aws_iam_policy" "bedrock_kb_sql_provisioned" {
600 | count = var.kb_role_arn != null || var.create_sql_config == false || var.redshift_query_engine_type != "PROVISIONED" ? 0 : 1
601 | name = "AmazonBedrockKnowledgeBaseRedshiftProvisionedStatement_${var.kb_name}"
602 |
603 | policy = jsonencode({
604 | "Version": "2012-10-17",
605 | "Statement": [
606 | {
607 | "Sid": "RedshiftDataAPIExecutePermissions",
608 | "Effect": "Allow",
609 | "Action": [
610 | "redshift-data:ExecuteStatement"
611 | ],
612 | "Resource": [
613 | "arn:aws:redshift:${local.region}:${local.account_id}:cluster:${var.provisioned_config_cluster_identifier}"
614 | ]
615 | },
616 | {
617 | "Sid": "GetCredentialsWithFederatedIAMCredentials",
618 | "Effect": "Allow",
619 | "Action": "redshift:GetClusterCredentialsWithIAM",
620 | "Resource": [
621 | "arn:aws:redshift:${local.region}:${local.account_id}:dbname:${var.provisioned_config_cluster_identifier}/*"
622 | ]
623 | }
624 | ]
625 | })
626 | }
627 |
--------------------------------------------------------------------------------
/inference-profile.tf:
--------------------------------------------------------------------------------
1 | resource "awscc_bedrock_application_inference_profile" "application_inference_profile" {
2 | count = var.create_app_inference_profile ? 1 : 0
3 | inference_profile_name = "${random_string.solution_prefix.result}-${var.app_inference_profile_name}"
4 | description = var.app_inference_profile_description
5 | model_source = {
6 | copy_from = var.app_inference_profile_model_source
7 | }
8 | tags = var.app_inference_profile_tags
9 | }
10 |
--------------------------------------------------------------------------------
/kendra.tf:
--------------------------------------------------------------------------------
1 | # Kendra Index
2 | resource "awscc_kendra_index" "genai_kendra_index" {
3 | count = var.create_kendra_config && var.kendra_index_arn == null ? 1 : 0
4 | edition = var.kendra_index_edition
5 | name = "${random_string.solution_prefix.result}-${var.kendra_index_name}"
6 | role_arn = awscc_iam_role.kendra_index_role[0].arn
7 | description = var.kendra_index_description
8 | capacity_units = {
9 | query_capacity_units = var.kendra_index_query_capacity
10 | storage_capacity_units = var.kendra_index_storage_capacity
11 | }
12 | document_metadata_configurations = var.document_metadata_configurations
13 | server_side_encryption_configuration = {
14 | kms_key_id = var.kendra_kms_key_id
15 | }
16 | user_context_policy = var.kendra_index_user_context_policy
17 | user_token_configurations = var.user_token_configurations
18 | tags = var.kendra_index_tags
19 | }
20 |
21 | resource "time_sleep" "wait_after_kendra_index_creation" {
22 | count = var.create_kendra_config && var.kendra_index_arn == null ? 1 : 0
23 | depends_on = [ awscc_kendra_index.genai_kendra_index[0] ]
24 | create_duration = "60s" # Wait for 60 seconds
25 | }
26 |
27 |
28 | # Create Kendra Data Source
29 | resource "awscc_kendra_data_source" "kendra_s3_data_source" {
30 |   count = var.create_kendra_s3_data_source == true ? 1 : 0
31 |   index_id = var.kendra_index_arn != null ? var.kendra_index_arn : awscc_kendra_index.genai_kendra_index[0].id # NOTE(review): index_id expects a Kendra index ID; an ARN is passed when var.kendra_index_arn is set (iam.tf's local.kendra_index_id uses var.kendra_index_id instead) — verify
32 |   name = "${random_string.solution_prefix.result}-${var.kendra_data_source_name}"
33 |   type = "S3"
34 |   role_arn = awscc_iam_role.kendra_s3_datasource_role[0].arn
35 |   language_code = var.kendra_data_source_language_code
36 |   schedule = var.kendra_data_source_schedule
37 |   description = var.kendra_data_source_description
38 |   tags = var.kendra_data_source_tags
39 |   data_source_configuration = {
40 |     s3_configuration = {
41 |       bucket_name = var.s3_data_source_bucket_name != null ? var.s3_data_source_bucket_name : awscc_s3_bucket.s3_data_source[0].bucket_name
42 |       exclusion_patterns = var.s3_data_source_exclusion_patterns
43 |       inclusion_patterns = var.s3_data_source_inclusion_patterns
44 |       documents_metadata_configuration = {
45 |         s3_prefix = var.s3_data_source_document_metadata_prefix
46 |       }
47 |       access_control_list_documents = {
48 |         key_path = var.s3_data_source_key_path
49 |       }
50 |     }
51 |   }
52 | }
53 |
54 | resource "time_sleep" "wait_after_kendra_s3_data_source_creation" {
55 | count = var.create_kendra_s3_data_source ? 1 : 0
56 | depends_on = [ awscc_kendra_data_source.kendra_s3_data_source[0] ]
57 | create_duration = "60s" # Wait for 60 seconds
58 | }
--------------------------------------------------------------------------------
/knowledge-base.tf:
--------------------------------------------------------------------------------
1 | # - Knowledge Base Default OpenSearch -
2 | resource "awscc_bedrock_knowledge_base" "knowledge_base_default" { # Default vector KB backed by the module-managed OpenSearch Serverless collection
3 | count = var.create_default_kb ? 1 : 0
4 | name = "${random_string.solution_prefix.result}-${var.kb_name}"
5 | description = var.kb_description
6 | role_arn = var.kb_role_arn != null ? var.kb_role_arn : aws_iam_role.bedrock_knowledge_base_role[0].arn # caller-supplied role wins, else module-managed KB role
7 | tags = var.kb_tags
8 |
9 | storage_configuration = {
10 | type = "OPENSEARCH_SERVERLESS"
11 | opensearch_serverless_configuration = {
12 | collection_arn = module.oss_knowledgebase[0].opensearch_serverless_collection.arn # collection/index provisioned in opensearch.tf
13 | vector_index_name = module.oss_knowledgebase[0].vector_index.name
14 | field_mapping = {
15 | metadata_field = var.metadata_field
16 | text_field = var.text_field
17 | vector_field = var.vector_field
18 | }
19 | }
20 | }
21 | knowledge_base_configuration = {
22 | type = "VECTOR"
23 | vector_knowledge_base_configuration = {
24 | embedding_model_arn = var.kb_embedding_model_arn
25 | embedding_model_configuration = var.embedding_model_dimensions != null ? { # only set when the caller pins embedding dimensions
26 | bedrock_embedding_model_configuration = {
27 | dimensions = var.embedding_model_dimensions
28 | embedding_data_type = var.embedding_data_type
29 | }
30 | } : null
31 | supplemental_data_storage_configuration = var.create_supplemental_data_storage ? { # optional S3 location for supplemental (e.g. multimodal) data
32 | supplemental_data_storage_locations = [
33 | {
34 | supplemental_data_storage_location_type = "S3"
35 | s3_location = {
36 | uri = var.supplemental_data_s3_uri
37 | }
38 | }
39 | ]
40 | } : null
41 | }
42 | }
43 | depends_on = [ time_sleep.wait_after_index_creation ] # wait until the OSS vector index is usable
44 | }
45 |
46 | # – Existing Vector KBs –
47 |
48 | # - Mongo –
49 | resource "awscc_bedrock_knowledge_base" "knowledge_base_mongo" { # Vector KB backed by an existing MongoDB Atlas cluster
50 | count = var.create_mongo_config ? 1 : 0
51 | name = "${random_string.solution_prefix.result}-${var.kb_name}"
52 | description = var.kb_description
53 | role_arn = var.kb_role_arn != null ? var.kb_role_arn : aws_iam_role.bedrock_knowledge_base_role[0].arn # caller-supplied role wins, else module-managed KB role
54 | tags = var.kb_tags
55 |
56 | storage_configuration = {
57 | type = var.kb_storage_type
58 |
59 | mongo_db_atlas_configuration = {
60 | collection_name = var.collection_name
61 | credentials_secret_arn = var.credentials_secret_arn # Secrets Manager secret holding Atlas credentials
62 | database_name = var.database_name
63 | endpoint = var.endpoint
64 | vector_index_name = var.vector_index_name
65 | text_index_name = var.text_index_name
66 | field_mapping = {
67 | metadata_field = var.metadata_field
68 | text_field = var.text_field
69 | vector_field = var.vector_field
70 | }
71 | endpoint_service_name = var.endpoint_service_name
72 | }
73 | }
74 | knowledge_base_configuration = {
75 | type = var.kb_type
76 | vector_knowledge_base_configuration = {
77 | embedding_model_arn = var.kb_embedding_model_arn
78 | embedding_model_configuration = var.embedding_model_dimensions != null ? { # only set when the caller pins embedding dimensions
79 | bedrock_embedding_model_configuration = {
80 | dimensions = var.embedding_model_dimensions
81 | embedding_data_type = var.embedding_data_type
82 | }
83 | } : null
84 | supplemental_data_storage_configuration = var.create_supplemental_data_storage ? {
85 | supplemental_data_storage_locations = [
86 | {
87 | supplemental_data_storage_location_type = "S3"
88 | s3_location = {
89 | uri = var.supplemental_data_s3_uri
90 | }
91 | }
92 | ]
93 | } : null
94 | }
95 | }
96 | }
97 |
98 | # – OpenSearch Serverless –
99 | resource "awscc_bedrock_knowledge_base" "knowledge_base_opensearch" { # Vector KB backed by an existing (caller-provided) OpenSearch Serverless collection
100 | count = var.create_opensearch_config ? 1 : 0
101 | name = "${random_string.solution_prefix.result}-${var.kb_name}"
102 | description = var.kb_description
103 | role_arn = var.kb_role_arn != null ? var.kb_role_arn : aws_iam_role.bedrock_knowledge_base_role[0].arn # caller-supplied role wins, else module-managed KB role
104 | tags = var.kb_tags
105 |
106 | storage_configuration = {
107 | type = var.kb_storage_type
108 | opensearch_serverless_configuration = {
109 | collection_arn = var.collection_arn # pre-existing collection, unlike knowledge_base_default which provisions one
110 | vector_index_name = var.vector_index_name
111 | field_mapping = {
112 | metadata_field = var.metadata_field
113 | text_field = var.text_field
114 | vector_field = var.vector_field
115 | }
116 | }
117 | }
118 | knowledge_base_configuration = {
119 | type = var.kb_type
120 | vector_knowledge_base_configuration = {
121 | embedding_model_arn = var.kb_embedding_model_arn
122 | embedding_model_configuration = var.embedding_model_dimensions != null ? { # only set when the caller pins embedding dimensions
123 | bedrock_embedding_model_configuration = {
124 | dimensions = var.embedding_model_dimensions
125 | embedding_data_type = var.embedding_data_type
126 | }
127 | } : null
128 | supplemental_data_storage_configuration = var.create_supplemental_data_storage ? {
129 | supplemental_data_storage_locations = [
130 | {
131 | supplemental_data_storage_location_type = "S3"
132 | s3_location = {
133 | uri = var.supplemental_data_s3_uri
134 | }
135 | }
136 | ]
137 | } : null
138 | }
139 | }
140 | }
141 |
142 | # – Neptune Analytics –
143 | resource "awscc_bedrock_knowledge_base" "knowledge_base_neptune_analytics" { # Vector KB backed by an existing Neptune Analytics graph
144 | count = var.create_neptune_analytics_config ? 1 : 0
145 | name = "${random_string.solution_prefix.result}-${var.kb_name}"
146 | description = var.kb_description
147 | role_arn = var.kb_role_arn != null ? var.kb_role_arn : aws_iam_role.bedrock_knowledge_base_role[0].arn # caller-supplied role wins, else module-managed KB role
148 | tags = var.kb_tags
149 |
150 | storage_configuration = {
151 | type = "NEPTUNE_ANALYTICS"
152 | neptune_analytics_configuration = {
153 | graph_arn = var.graph_arn
154 | field_mapping = { # Neptune mapping has no vector_field, unlike the other vector stores here
155 | metadata_field = var.metadata_field
156 | text_field = var.text_field
157 | }
158 | }
159 | }
160 | knowledge_base_configuration = {
161 | type = var.kb_type
162 | vector_knowledge_base_configuration = {
163 | embedding_model_arn = var.kb_embedding_model_arn
164 | embedding_model_configuration = var.embedding_model_dimensions != null ? { # only set when the caller pins embedding dimensions
165 | bedrock_embedding_model_configuration = {
166 | dimensions = var.embedding_model_dimensions
167 | embedding_data_type = var.embedding_data_type
168 | }
169 | } : null
170 | supplemental_data_storage_configuration = var.create_supplemental_data_storage ? {
171 | supplemental_data_storage_locations = [
172 | {
173 | supplemental_data_storage_location_type = "S3"
174 | s3_location = {
175 | uri = var.supplemental_data_s3_uri
176 | }
177 | }
178 | ]
179 | } : null
180 | }
181 | }
182 | }
183 |
184 | # – Pinecone –
185 | resource "awscc_bedrock_knowledge_base" "knowledge_base_pinecone" { # Vector KB backed by an existing Pinecone index
186 | count = var.create_pinecone_config ? 1 : 0
187 | name = "${random_string.solution_prefix.result}-${var.kb_name}"
188 | description = var.kb_description
189 | role_arn = var.kb_role_arn != null ? var.kb_role_arn : aws_iam_role.bedrock_knowledge_base_role[0].arn # caller-supplied role wins, else module-managed KB role
190 | tags = var.kb_tags
191 |
192 | storage_configuration = {
193 | type = var.kb_storage_type
194 | pinecone_configuration = {
195 | connection_string = var.connection_string
196 | credentials_secret_arn = var.credentials_secret_arn # Secrets Manager secret holding the Pinecone API key
197 | field_mapping = {
198 | metadata_field = var.metadata_field
199 | text_field = var.text_field
200 | }
201 | namespace = var.namespace
202 | }
203 | }
204 | knowledge_base_configuration = {
205 | type = var.kb_type
206 | vector_knowledge_base_configuration = {
207 | embedding_model_arn = var.kb_embedding_model_arn
208 | embedding_model_configuration = var.embedding_model_dimensions != null ? { # only set when the caller pins embedding dimensions
209 | bedrock_embedding_model_configuration = {
210 | dimensions = var.embedding_model_dimensions
211 | embedding_data_type = var.embedding_data_type
212 | }
213 | } : null
214 | supplemental_data_storage_configuration = var.create_supplemental_data_storage ? {
215 | supplemental_data_storage_locations = [
216 | {
217 | supplemental_data_storage_location_type = "S3"
218 | s3_location = {
219 | uri = var.supplemental_data_s3_uri
220 | }
221 | }
222 | ]
223 | } : null
224 | }
225 | }
226 | }
227 |
228 | # – RDS –
229 | resource "awscc_bedrock_knowledge_base" "knowledge_base_rds" { # Vector KB backed by an existing Aurora/RDS (pgvector) database
230 | count = var.create_rds_config ? 1 : 0
231 | name = "${random_string.solution_prefix.result}-${var.kb_name}"
232 | description = var.kb_description
233 | role_arn = var.kb_role_arn != null ? var.kb_role_arn : aws_iam_role.bedrock_knowledge_base_role[0].arn # caller-supplied role wins, else module-managed KB role
234 | tags = var.kb_tags
235 |
236 | storage_configuration = {
237 | type = var.kb_storage_type
238 | rds_configuration = {
239 | credentials_secret_arn = var.credentials_secret_arn # Secrets Manager secret holding DB credentials
240 | database_name = var.database_name
241 | resource_arn = var.resource_arn # cluster ARN
242 | table_name = var.table_name
243 | field_mapping = { # RDS mapping additionally needs a primary key column
244 | metadata_field = var.metadata_field
245 | primary_key_field = var.primary_key_field
246 | text_field = var.text_field
247 | vector_field = var.vector_field
248 | custom_metadata_field = var.custom_metadata_field
249 | }
250 | }
251 | }
252 | knowledge_base_configuration = {
253 | type = var.kb_type
254 | vector_knowledge_base_configuration = {
255 | embedding_model_arn = var.kb_embedding_model_arn
256 | embedding_model_configuration = var.embedding_model_dimensions != null ? { # only set when the caller pins embedding dimensions
257 | bedrock_embedding_model_configuration = {
258 | dimensions = var.embedding_model_dimensions
259 | embedding_data_type = var.embedding_data_type
260 | }
261 | } : null
262 | supplemental_data_storage_configuration = var.create_supplemental_data_storage ? {
263 | supplemental_data_storage_locations = [
264 | {
265 | supplemental_data_storage_location_type = "S3"
266 | s3_location = {
267 | uri = var.supplemental_data_s3_uri
268 | }
269 | }
270 | ]
271 | } : null
272 | }
273 | }
274 | }
275 |
276 | # – Kendra Knowledge Base –
277 |
278 | resource "awscc_bedrock_knowledge_base" "knowledge_base_kendra" { # KB that delegates retrieval to an Amazon Kendra index (no vector store)
279 | count = var.create_kendra_config ? 1 : 0
280 | name = "${random_string.solution_prefix.result}-${var.kb_name}"
281 | description = var.kb_description
282 | role_arn = var.kb_role_arn != null ? var.kb_role_arn : aws_iam_role.bedrock_knowledge_base_role[0].arn # caller-supplied role wins, else module-managed KB role
283 | tags = var.kb_tags
284 |
285 | knowledge_base_configuration = {
286 | type = "KENDRA"
287 | kendra_knowledge_base_configuration = {
288 | kendra_index_arn = var.kendra_index_arn != null ? var.kendra_index_arn : awscc_kendra_index.genai_kendra_index[0].arn # external index ARN wins, else the module-created index
289 | }
290 | }
291 |
292 | depends_on = [ time_sleep.wait_after_kendra_index_creation, time_sleep.wait_after_kendra_s3_data_source_creation ] # give index/data source time to settle
293 | }
294 |
295 | # – SQL Knowledge Base –
296 |
297 | resource "awscc_bedrock_knowledge_base" "knowledge_base_sql" { # SQL KB over Redshift (serverless workgroup or provisioned cluster)
298 | count = var.create_sql_config ? 1 : 0
299 | name = "${random_string.solution_prefix.result}-${var.kb_name}"
300 | description = var.kb_description
301 | role_arn = var.kb_role_arn != null ? var.kb_role_arn : aws_iam_role.bedrock_knowledge_base_role[0].arn # caller-supplied role wins, else module-managed KB role
302 | tags = var.kb_tags
303 |
304 | knowledge_base_configuration = {
305 | type = "SQL"
306 | sql_knowledge_base_configuration = {
307 | type = "REDSHIFT"
308 | redshift_configuration = {
309 | query_engine_configuration = {
310 | serverless_configuration = var.sql_kb_workgroup_arn == null ? null : { # serverless branch: only when a workgroup ARN is given
311 | workgroup_arn = var.sql_kb_workgroup_arn
312 | auth_configuration = var.serverless_auth_configuration
313 | }
314 | provisioned_configuration = var.provisioned_config_cluster_identifier == null ? null : { # provisioned branch: only when a cluster id is given
315 | cluster_identifier = var.provisioned_config_cluster_identifier
316 | auth_configuration = var.provisioned_auth_configuration
317 | }
318 | type = var.redshift_query_engine_type # NOTE(review): assumes this agrees with whichever branch above is non-null — confirm variable validation covers it
319 | }
320 | query_generation_configuration = var.query_generation_configuration
321 | storage_configurations = var.redshift_storage_configuration
322 | }
323 |
324 | }
325 | }
326 |
327 | }
328 |
--------------------------------------------------------------------------------
/main.tf:
--------------------------------------------------------------------------------
1 | resource "random_string" "solution_prefix" { # 4-char lowercase alphanumeric prefix to keep resource names unique per deployment
2 | length = 4
3 | special = false
4 | upper = false
5 | }
6 |
7 | # – Bedrock Agent –
8 |
9 | locals {
10 | bedrock_agent_alias = var.create_agent_alias && var.use_aws_provider_alias ? aws_bedrockagent_agent_alias.bedrock_agent_alias : awscc_bedrock_agent_alias.bedrock_agent_alias # pick whichever provider's alias resource is in use
11 |
12 | counter_kb = local.create_kb || var.existing_kb != null ? [1] : [] # one-element list when any KB (new or existing) should be attached
13 | knowledge_base_id = local.create_kb ? (var.create_default_kb ? awscc_bedrock_knowledge_base.knowledge_base_default[0].id : (var.create_mongo_config ? awscc_bedrock_knowledge_base.knowledge_base_mongo[0].id : (var.create_opensearch_config ? awscc_bedrock_knowledge_base.knowledge_base_opensearch[0].id : (var.create_pinecone_config ? awscc_bedrock_knowledge_base.knowledge_base_pinecone[0].id : (var.create_rds_config ? awscc_bedrock_knowledge_base.knowledge_base_rds[0].id : null))))) : null # first-match over the mutually exclusive KB flavors
14 | knowledge_bases_value = {
15 | description = var.kb_description
16 | knowledge_base_id = local.create_kb ? local.knowledge_base_id : var.existing_kb
17 | knowledge_base_state = var.kb_state
18 | }
19 | kb_result = [for count in local.counter_kb : local.knowledge_bases_value] # [] or a single-entry list for the agent's knowledge_bases
20 |
21 |
22 | counter_action_group = var.create_ag ? [1] : []
23 | action_group_value = {
24 | action_group_name = var.action_group_name
25 | description = var.action_group_description
26 | action_group_state = var.action_group_state
27 | parent_action_group_signature = var.parent_action_group_signature
28 | skip_resource_in_use_check_on_delete = var.skip_resource_in_use
29 | api_schema = {
30 | payload = var.api_schema_payload
31 | s3 = {
32 | s3_bucket_name = var.api_schema_s3_bucket_name
33 | s3_object_key = var.api_schema_s3_object_key
34 | }
35 | }
36 | action_group_executor = {
37 | custom_control = var.custom_control
38 | lambda = var.lambda_action_group_executor
39 | }
40 | }
41 | action_group_result = [for count in local.counter_action_group : local.action_group_value]
42 |
43 | # Create a map with action_group_name as keys for stable sorting
44 | action_group_map = var.action_group_list != null ? {
45 | for idx, ag in var.action_group_list :
46 | # Use action_group_name as key, or index if name is null
47 | coalesce(try(ag.action_group_name, ""), format("%04d", idx)) => ag
48 | } : {}
49 |
50 | # Extract values from the sorted map (Terraform maps are sorted by keys)
51 | sorted_action_groups = [for k, v in local.action_group_map : v]
52 |
53 | # Combine action groups with consistent ordering
54 | action_group_list = concat(local.action_group_result, local.sorted_action_groups) # module-built group first, then caller-supplied groups
55 |
56 | counter_collaborator = var.create_agent && var.create_agent_alias && var.create_collaborator ? 1 : 0 # collaborator needs both an agent and an alias
57 |
58 | supervisor_guardrail = var.create_supervisor_guardrail == false || local.counter_collaborator == 0 ? null : [{
59 | guardrail_identifier = var.supervisor_guardrail_id
60 | guardrail_version = var.supervisor_guardrail_version
61 | }]
62 | }
63 |
64 | # Add a sleep after creating the inference profile to ensure it's fully available
65 | resource "time_sleep" "wait_for_inference_profile" { # short buffer so the application inference profile is resolvable when agents reference it
66 | count = var.create_app_inference_profile ? 1 : 0
67 | depends_on = [awscc_bedrock_application_inference_profile.application_inference_profile[0]]
68 | create_duration = "5s"
69 | }
70 |
71 | resource "awscc_bedrock_agent" "bedrock_agent" { # Primary Bedrock agent; optional KBs, action groups, guardrail, and prompt overrides attach here
72 | count = var.create_agent ? 1 : 0
73 | agent_name = "${random_string.solution_prefix.result}-${var.agent_name}"
74 | foundation_model = var.create_app_inference_profile ? awscc_bedrock_application_inference_profile.application_inference_profile[0].inference_profile_arn : var.foundation_model # profile ARN overrides the plain model id
75 | instruction = var.instruction
76 | description = var.agent_description
77 | idle_session_ttl_in_seconds = var.idle_session_ttl
78 | agent_resource_role_arn = var.agent_resource_role_arn != null ? var.agent_resource_role_arn : aws_iam_role.agent_role[0].arn # caller-supplied role wins, else module-managed role
79 |
80 | depends_on = [time_sleep.wait_for_inference_profile]
81 |
82 | customer_encryption_key_arn = var.kms_key_arn
83 | tags = var.tags
84 | prompt_override_configuration = var.prompt_override == false ? null : { # only materialized when prompt overriding is requested
85 | prompt_configurations = [{
86 | prompt_type = var.prompt_type
87 | inference_configuration = {
88 | temperature = var.temperature
89 | top_p = var.top_p
90 | top_k = var.top_k
91 | stop_sequences = var.stop_sequences
92 | maximum_length = var.max_length
93 | }
94 | base_prompt_template = var.base_prompt_template
95 | parser_mode = var.parser_mode
96 | prompt_creation_mode = var.prompt_creation_mode
97 | prompt_state = var.prompt_state
98 |
99 | }]
100 | override_lambda = var.override_lambda_arn
101 |
102 | }
103 | # open issue: https://github.com/hashicorp/terraform-provider-awscc/issues/2004
104 | # auto_prepare needs to be set to true
105 | auto_prepare = true
106 | knowledge_bases = length(local.kb_result) > 0 ? local.kb_result : null # null (not []) when nothing to attach
107 | action_groups = length(local.action_group_list) > 0 ? local.action_group_list : null
108 | guardrail_configuration = var.create_guardrail == false ? null : {
109 | guardrail_identifier = awscc_bedrock_guardrail.guardrail[0].id
110 | guardrail_version = awscc_bedrock_guardrail_version.guardrail[0].version
111 | }
112 | memory_configuration = var.memory_configuration
113 | }
114 |
115 | # Agent Alias
116 |
117 | resource "awscc_bedrock_agent_alias" "bedrock_agent_alias" { # alias via the awscc provider (default path)
118 | count = var.create_agent_alias && var.use_aws_provider_alias == false ? 1 : 0 # mutually exclusive with the aws-provider alias below
119 | agent_alias_name = var.agent_alias_name
120 | agent_id = var.create_agent ? awscc_bedrock_agent.bedrock_agent[0].id : var.agent_id # supports aliasing a pre-existing agent
121 | description = var.agent_alias_description
122 | routing_configuration = var.bedrock_agent_version == null ? null : [
123 | {
124 | agent_version = var.bedrock_agent_version
125 | }
126 | ]
127 | tags = var.agent_alias_tags
128 | }
129 |
130 | resource "aws_bedrockagent_agent_alias" "bedrock_agent_alias" { # alias via the aws provider; additionally supports provisioned throughput
131 | count = var.create_agent_alias && var.use_aws_provider_alias ? 1 : 0 # mutually exclusive with the awscc alias above
132 | agent_alias_name = var.agent_alias_name
133 | agent_id = var.create_agent ? awscc_bedrock_agent.bedrock_agent[0].id : var.agent_id
134 | description = var.agent_alias_description
135 | routing_configuration = var.bedrock_agent_version == null ? null : [
136 | {
137 | agent_version = var.bedrock_agent_version
138 | provisioned_throughput = var.bedrock_agent_alias_provisioned_throughput
139 | }
140 | ]
141 | tags = var.agent_alias_tags
142 | }
143 |
144 | # Agent Collaborator
145 |
146 | resource "aws_bedrockagent_agent_collaborator" "agent_collaborator" { # registers the module's agent (via its alias) as a collaborator under a supervisor agent
147 | count = local.counter_collaborator # requires agent + alias + create_collaborator (see locals)
148 | agent_id = var.create_supervisor ? aws_bedrockagent_agent.agent_supervisor[0].agent_id : var.supervisor_id # module supervisor or an external one
149 | collaboration_instruction = var.collaboration_instruction
150 | collaborator_name = "${random_string.solution_prefix.result}-${var.collaborator_name}"
151 | relay_conversation_history = "TO_COLLABORATOR"
152 |
153 | agent_descriptor {
154 | alias_arn = local.bedrock_agent_alias[0].agent_alias_arn # local resolves to whichever alias resource (aws/awscc) was created
155 | }
156 |
157 | depends_on = [awscc_bedrock_agent.bedrock_agent[0], local.bedrock_agent_alias] # NOTE(review): depends_on typically rejects local values and indexed instances — confirm this validates; otherwise reference both alias resources directly
158 | }
159 |
160 | resource "aws_bedrockagent_agent" "agent_supervisor" { # Supervisor agent that orchestrates collaborators
161 | count = var.create_supervisor ? 1 : 0
162 | agent_name = "${random_string.solution_prefix.result}-${var.supervisor_name}"
163 | agent_resource_role_arn = var.agent_resource_role_arn != null ? var.agent_resource_role_arn : aws_iam_role.agent_role[0].arn # shares the collaborator agent's role unless overridden
164 |
165 | agent_collaboration = var.agent_collaboration
166 | idle_session_ttl_in_seconds = var.supervisor_idle_session_ttl
167 | foundation_model = var.create_app_inference_profile ? awscc_bedrock_application_inference_profile.application_inference_profile[0].inference_profile_arn : var.supervisor_model
168 | instruction = var.supervisor_instruction
169 | customer_encryption_key_arn = var.supervisor_kms_key_arn
170 | #checkov:skip=CKV_AWS_383:The user can optionally associate agent with Bedrock guardrails
171 | guardrail_configuration = local.supervisor_guardrail
172 | prepare_agent = false # preparation is deferred until collaborators are attached
173 |
174 | depends_on = [time_sleep.wait_for_inference_profile]
175 | }
176 |
177 | # – Guardrail –
178 |
179 | resource "awscc_bedrock_guardrail" "guardrail" { # Guardrail bundling content, PII/regex, word, and topic policies
180 | count = var.create_guardrail ? 1 : 0
181 | name = "${random_string.solution_prefix.result}-${var.guardrail_name}"
182 | blocked_input_messaging = var.blocked_input_messaging
183 | blocked_outputs_messaging = var.blocked_outputs_messaging
184 | description = var.guardrail_description
185 | content_policy_config = {
186 | filters_config = var.filters_config
187 | }
188 | sensitive_information_policy_config = {
189 | pii_entities_config = var.pii_entities_config
190 | regexes_config = var.regexes_config
191 | }
192 | word_policy_config = {
193 | managed_word_lists_config = var.managed_word_lists_config
194 | words_config = var.words_config
195 | }
196 | topic_policy_config = var.topics_config == null ? null : { # topic policy is the only optional policy block
197 | topics_config = var.topics_config
198 | }
199 | tags = var.guardrail_tags
200 | kms_key_arn = var.guardrail_kms_key_arn
201 |
202 | }
203 |
204 | resource "awscc_bedrock_guardrail_version" "guardrail" { # immutable version snapshot consumed by the agent's guardrail_configuration
205 | count = var.create_guardrail ? 1 : 0
206 | guardrail_identifier = awscc_bedrock_guardrail.guardrail[0].guardrail_id
207 | description = "Guardrail version"
208 | }
209 |
210 | # – Bedrock Flow –
211 |
212 | resource "awscc_bedrock_flow_alias" "flow_alias" { # alias routing to a specific version of an existing Bedrock flow
213 | count = var.create_flow_alias ? 1 : 0
214 | name = var.flow_alias_name
215 | flow_arn = var.flow_arn
216 | description = var.flow_alias_description
217 | routing_configuration = [
218 | {
219 | flow_version = var.flow_version != null ? var.flow_version : awscc_bedrock_flow_version.flow_version[0].version # pinned version wins, else the version created below
220 | }
221 | ]
222 | }
223 |
224 | resource "awscc_bedrock_flow_version" "flow_version" { # auto-created only when the alias is wanted but no version was pinned
225 | count = var.flow_version == null && var.create_flow_alias ? 1 : 0
226 | flow_arn = var.flow_arn
227 | description = var.flow_version_description
228 | }
229 |
230 | # – Custom Model –
231 |
232 | resource "aws_bedrock_custom_model" "custom_model" { # fine-tuning/customization job producing a custom model
233 | count = var.create_custom_model ? 1 : 0
234 | custom_model_name = "${random_string.solution_prefix.result}-${var.custom_model_name}"
235 | job_name = "${random_string.solution_prefix.result}-${var.custom_model_job_name}"
236 | base_model_identifier = data.aws_bedrock_foundation_model.model_identifier[0].model_arn
237 | role_arn = aws_iam_role.custom_model_role[0].arn
238 | custom_model_kms_key_id = var.custom_model_kms_key_id
239 | customization_type = var.customization_type
240 | hyperparameters = var.custom_model_hyperparameters
241 | output_data_config {
242 | s3_uri = var.custom_model_output_uri == null ? "s3://${awscc_s3_bucket.custom_model_output[0].id}/" : "s3://${var.custom_model_output_uri}" # module-managed bucket unless caller supplies a URI (bucket/prefix, no scheme)
243 | }
244 | training_data_config {
245 | s3_uri = "s3://${var.custom_model_training_uri}" # var is bucket/key without the s3:// scheme
246 | }
247 | tags = var.custom_model_tags
248 | }
249 |
250 | resource "awscc_s3_bucket" "custom_model_output" { # module-managed output bucket for the customization job, created only when no output URI is supplied
251 | count = var.custom_model_output_uri == null && var.create_custom_model ? 1 : 0 # idiom fix: direct bool test, "== true" was redundant
252 | bucket_name = "${random_string.solution_prefix.result}-${var.custom_model_name}-output-bucket"
253 | public_access_block_configuration = { # block all public access
254 | block_public_acls = true
255 | block_public_policy = true
256 | ignore_public_acls = true
257 | restrict_public_buckets = true
258 | }
259 | bucket_encryption = {
260 | server_side_encryption_configuration = [{
261 | bucket_key_enabled = true
262 | server_side_encryption_by_default = {
263 | sse_algorithm = var.kb_s3_data_source_kms_arn == null ? "AES256" : "aws:kms" # NOTE(review): reuses the KB data-source KMS variable for the custom-model bucket — confirm intended
264 | kms_master_key_id = var.kb_s3_data_source_kms_arn
265 | }
266 | }]
267 | }
268 | tags = var.custom_model_tags != null ? [for k, v in var.custom_model_tags : { key = k, value = v }] : [{ # awscc expects key/value object list, not a map
269 | key = "Name"
270 | value = "${random_string.solution_prefix.result}-${var.custom_model_name}-output-bucket"
271 | }]
272 | }
273 |
--------------------------------------------------------------------------------
/opensearch.tf:
--------------------------------------------------------------------------------
1 | # – OpenSearch Serverless Default –
2 |
3 | module "oss_knowledgebase" { # provisions the OpenSearch Serverless collection + knn vector index for the default KB
4 | count = var.create_default_kb ? 1 : 0
5 | source = "aws-ia/opensearch-serverless/aws"
6 | version = "0.0.4"
7 | allow_public_access_network_policy = var.allow_opensearch_public_access
8 | number_of_shards = var.number_of_shards
9 | number_of_replicas = var.number_of_replicas
10 | create_vector_index = true
11 | collection_tags = var.kb_tags != null ? [for k, v in var.kb_tags : { key = k, value = v }] : [] # map -> key/value object list
12 | vector_index_mappings = <<-EOF
13 | {
14 | "properties": {
15 | "bedrock-knowledge-base-default-vector": {
16 | "type": "knn_vector",
17 | "dimension": ${var.vector_dimension},
18 | "method": {
19 | "name": "hnsw",
20 | "engine": "faiss",
21 | "parameters": {
22 | "m": 16,
23 | "ef_construction": 512
24 | },
25 | "space_type": "l2"
26 | }
27 | },
28 | "AMAZON_BEDROCK_METADATA": {
29 | "type": "text",
30 | "index": "false"
31 | },
32 | "AMAZON_BEDROCK_TEXT_CHUNK": {
33 | "type": "text",
34 | "index": "true"
35 | }
36 | }
37 | }
38 | EOF
39 | }
40 |
41 | resource "aws_opensearchserverless_access_policy" "updated_data_policy" { # grants the KB role data-plane access to the default collection and its indexes
42 | count = var.create_default_kb ? 1 : 0
43 |
44 | name = "os-access-policy-${random_string.solution_prefix.result}"
45 | type = "data"
46 |
47 | policy = jsonencode([
48 | {
49 | Rules = [
50 | { # index-level permissions (read/write documents, manage indexes)
51 | ResourceType = "index"
52 | Resource = [
53 | "index/${module.oss_knowledgebase[0].opensearch_serverless_collection.name}/*"
54 | ]
55 | Permission = [
56 | "aoss:UpdateIndex",
57 | "aoss:DeleteIndex",
58 | "aoss:DescribeIndex",
59 | "aoss:ReadDocument",
60 | "aoss:WriteDocument",
61 | "aoss:CreateIndex"
62 | ]
63 | },
64 | { # collection-level permissions
65 | ResourceType = "collection"
66 | Resource = [
67 | "collection/${module.oss_knowledgebase[0].opensearch_serverless_collection.name}"
68 | ]
69 | Permission = [
70 | "aoss:DescribeCollectionItems",
71 | "aoss:DeleteCollectionItems",
72 | "aoss:CreateCollectionItems",
73 | "aoss:UpdateCollectionItems"
74 | ]
75 | }
76 | ],
77 | Principal = [
78 | var.kb_role_arn != null ? var.kb_role_arn : aws_iam_role.bedrock_knowledge_base_role[0].arn # same role selection as the KB resources
79 | ]
80 | }
81 | ])
82 | }
83 |
84 | resource "time_sleep" "wait_after_index_creation" { # buffer so the vector index is ready before the default KB is created against it
85 | count = var.create_default_kb ? 1 : 0
86 | depends_on = [ module.oss_knowledgebase[0].vector_index ]
87 | create_duration = "60s" # Wait for 60 seconds after the vector index exists, before the KB uses it
88 | }
89 |
--------------------------------------------------------------------------------
/outputs.tf:
--------------------------------------------------------------------------------
1 | output "default_collection" {
2 | value = var.create_default_kb ? module.oss_knowledgebase[0].opensearch_serverless_collection : null
3 | description = "Opensearch default collection value."
4 | }
5 |
6 | output "default_kb_identifier" {
7 | value = length(awscc_bedrock_knowledge_base.knowledge_base_default) > 0 ? awscc_bedrock_knowledge_base.knowledge_base_default[0].id : null # length() guard mirrors the resource's count condition
8 | description = "The unique identifier of the default knowledge base that was created. If no default KB was requested, value will be null"
9 | }
10 |
11 | output "mongo_kb_identifier" {
12 | value = length(awscc_bedrock_knowledge_base.knowledge_base_mongo) > 0 ? awscc_bedrock_knowledge_base.knowledge_base_mongo[0].id : null
13 | description = "The unique identifier of the MongoDB knowledge base that was created. If no MongoDB KB was requested, value will be null"
14 | }
15 |
16 | output "opensearch_kb_identifier" {
17 | value = length(awscc_bedrock_knowledge_base.knowledge_base_opensearch) > 0 ? awscc_bedrock_knowledge_base.knowledge_base_opensearch[0].id : null
18 | description = "The unique identifier of the OpenSearch knowledge base that was created. If no OpenSearch KB was requested, value will be null"
19 | }
20 |
21 | output "pinecone_kb_identifier" {
22 | value = length(awscc_bedrock_knowledge_base.knowledge_base_pinecone) > 0 ? awscc_bedrock_knowledge_base.knowledge_base_pinecone[0].id : null
23 | description = "The unique identifier of the Pinecone knowledge base that was created. If no Pinecone KB was requested, value will be null"
24 | }
25 |
26 | output "rds_kb_identifier" {
27 | value = length(awscc_bedrock_knowledge_base.knowledge_base_rds) > 0 ? awscc_bedrock_knowledge_base.knowledge_base_rds[0].id : null
28 | description = "The unique identifier of the RDS knowledge base that was created. If no RDS KB was requested, value will be null"
29 | }
30 |
31 | output "datasource_identifier" {
32 | value = length(awscc_bedrock_data_source.knowledge_base_ds) > 0 ? awscc_bedrock_data_source.knowledge_base_ds[0].data_source_id : null
33 | description = "The unique identifier of the data source."
34 | }
35 |
36 | output "cloudwatch_log_group" {
37 | value = length(aws_cloudwatch_log_group.knowledge_base_cwl) > 0 ? aws_cloudwatch_log_group.knowledge_base_cwl[0].name : null
38 | description = "The name of the CloudWatch log group for the knowledge base. If no log group was requested, value will be null"
39 | }
40 |
41 | output "bedrock_agent" {
42 | value = var.create_agent ? awscc_bedrock_agent.bedrock_agent : null # idiom fix: direct bool test, "== true" was redundant
43 | description = "The Amazon Bedrock Agent if it is created."
44 | }
45 |
46 | output "bedrock_agent_alias" {
47 | value = var.create_agent_alias ? (var.use_aws_provider_alias ? aws_bedrockagent_agent_alias.bedrock_agent_alias : awscc_bedrock_agent_alias.bedrock_agent_alias) : null # idiom fix: direct bool test; inner ternary picks the provider-specific alias
48 | description = "The Amazon Bedrock Agent Alias if it is created."
49 | }
50 |
51 | output "s3_data_source_arn" {
52 | value = var.kb_s3_data_source != null ? var.kb_s3_data_source : var.create_default_kb ? length(awscc_s3_bucket.s3_data_source) > 0 ? awscc_s3_bucket.s3_data_source[0].arn : null : null
53 | description = "The Amazon Bedrock Data Source for S3."
54 | }
55 |
56 | output "s3_data_source_name" {
57 | value = var.kb_s3_data_source != null ? split(":", var.kb_s3_data_source)[5] : var.create_default_kb ? length(awscc_s3_bucket.s3_data_source) > 0 ? awscc_s3_bucket.s3_data_source[0].id : null : null # [5] = resource segment of an S3 bucket ARN (arn:aws:s3:::bucket)
58 | description = "The name of the Amazon Bedrock Data Source for S3."
59 | }
60 |
61 | output "supervisor_id" {
62 | value = var.create_supervisor ? aws_bedrockagent_agent.agent_supervisor[0].agent_id : null
63 | description = "The identifier of the supervisor agent."
64 | }
65 |
66 | output "bda_blueprint" {
67 | value = var.create_blueprint ? awscc_bedrock_blueprint.bda_blueprint[0] : null
68 | description = "The BDA blueprint."
69 | }
70 |
71 |
72 | output "agent_resource_role_arn" {
73 | value = var.agent_resource_role_arn != null ? var.agent_resource_role_arn : (var.create_agent ? aws_iam_role.agent_role[0].arn : null)
74 | description = "The ARN of the Bedrock agent resource role."
75 | }
76 |
77 | output "agent_resource_role_name" {
78 | value = var.agent_resource_role_arn != null ? split("/", var.agent_resource_role_arn)[1] : (var.create_agent ? aws_iam_role.agent_role[0].name : null) # NOTE(review): [1] assumes a path-less role ARN; roles with IAM paths would return a path segment
79 | description = "The name of the Bedrock agent resource role."
80 | }
81 |
82 | output "supervisor_role_arn" {
83 | value = var.agent_resource_role_arn != null ? var.agent_resource_role_arn : (var.create_supervisor ? aws_iam_role.agent_role[0].arn : null) # NOTE(review): intentionally the same role as the agent (see agent_supervisor) — confirm
84 | description = "The ARN of the Bedrock supervisor agent resource role."
85 | }
86 |
87 | output "custom_model" {
88 | value = var.create_custom_model ? aws_bedrock_custom_model.custom_model[0] : null
89 | description = "The custom model. If no custom model was requested, value will be null."
90 | }
91 |
92 | output "knowledge_base_role_name" {
93 | description = "The name of the IAM role used by the knowledge base."
94 | value = try(aws_iam_role.bedrock_knowledge_base_role[0].name, null) # try() covers the role not being created
95 | }
96 |
97 | output "application_inference_profile_arn" {
98 | description = "The ARN of the application inference profile."
99 | value = var.create_app_inference_profile ? awscc_bedrock_application_inference_profile.application_inference_profile[0].inference_profile_arn : null
100 | }
101 |
--------------------------------------------------------------------------------
/prompt.tf:
--------------------------------------------------------------------------------
1 | # – Prompt Management –
2 |
3 | resource "awscc_bedrock_prompt_version" "prompt_version" { # version snapshot of the module-managed prompt
4 | count = var.create_prompt_version ? 1 : 0
5 | prompt_arn = awscc_bedrock_prompt.prompt[0].arn # NOTE(review): requires create_prompt = true whenever create_prompt_version is set — confirm variable validation enforces this
6 | description = var.prompt_version_description
7 | tags = var.prompt_version_tags
8 | }
9 |
# Managed Bedrock prompt. The name is prefixed with the module-wide random
# solution prefix so repeated deployments do not collide.
resource "awscc_bedrock_prompt" "prompt" {
  count = var.create_prompt ? 1 : 0

  # Identity and contents.
  name            = "${random_string.solution_prefix.result}-${var.prompt_name}"
  description     = var.prompt_description
  default_variant = var.default_variant
  variants        = var.variants_list

  # Encryption and tagging.
  customer_encryption_key_arn = var.customer_encryption_key_arn
  tags                        = var.prompt_tags
}
19 |
--------------------------------------------------------------------------------
/providers.tf:
--------------------------------------------------------------------------------
terraform {
  # Minimum Terraform CLI version this module supports.
  required_version = ">= 1.0.7"

  required_providers {
    # Primary AWS provider; floor set by the newest resource/argument used.
    aws = {
      source  = "hashicorp/aws"
      version = ">= 5.93.0"
    }
    # Pinned to an exact release — presumably because Cloud Control resource
    # schemas can change between awscc versions; confirm before relaxing.
    awscc = {
      source  = "hashicorp/awscc"
      version = "= 1.35.0"
    }
    # Used for time-based resources (e.g. delays/rotations) elsewhere in the module.
    time = {
      source  = "hashicorp/time"
      version = "~> 0.6"
    }
    # Generates the unique solution prefix applied to resource names.
    random = {
      source  = "hashicorp/random"
      version = ">= 3.6.0"
    }
    # Community provider, also pinned exactly — NOTE(review): verify the pin is
    # intentional (compatibility with the OpenSearch Serverless setup) before bumping.
    opensearch = {
      source  = "opensearch-project/opensearch"
      version = "= 2.2.0"
    }
  }
}
26 |
--------------------------------------------------------------------------------
/tests/01_mandatory.tftest.hcl:
--------------------------------------------------------------------------------
## NOTE: This is the minimum mandatory test
# run at least one test using the ./examples directory as your module source
# create additional *.tftest.hcl for your own unit / integration tests
# use tests/*.auto.tfvars to add non-default variables

# Plan-only smoke test: catches syntax, schema, and validation errors
# without provisioning anything.
run "agent_only_plan" {
  command = plan
  module {
    source = "./examples/agent-only"
  }
}

# Full apply: provisions the example and fails on any runtime/API error.
run "agent_only_apply" {
  command = apply
  module {
    source = "./examples/agent-only"
  }
}
19 |
--------------------------------------------------------------------------------
/tests/02_guardrails.tftest.hcl:
--------------------------------------------------------------------------------
# Plan-only smoke test for the agent-with-guardrails example.
run "guardrails_plan_basic" {
  command = plan
  module {
    source = "./examples/agent-with-guardrails"
  }
}

# Full apply of the same example; fails on any runtime/API error.
run "guardrails_apply_basic" {
  command = apply
  module {
    source = "./examples/agent-with-guardrails"
  }
}
14 |
--------------------------------------------------------------------------------
/tests/03_prompt_management.tftest.hcl:
--------------------------------------------------------------------------------
# Plan-only smoke test for the prompt-management example.
run "prompt_management_plan" {
  command = plan
  module {
    source = "./examples/prompt-management"
  }
}

# Full apply of the same example; fails on any runtime/API error.
run "prompt_management_apply" {
  command = apply
  module {
    source = "./examples/prompt-management"
  }
}
14 |
--------------------------------------------------------------------------------
/tests/04_inference_profile.tftest.hcl:
--------------------------------------------------------------------------------
# Plan-only smoke test for the application-inference-profile example.
run "app_inference_profile_plan" {
  command = plan
  module {
    source = "./examples/application-inference-profile"
  }
}

# Full apply of the same example; fails on any runtime/API error.
run "app_inference_profile_apply" {
  command = apply
  module {
    source = "./examples/application-inference-profile"
  }
}
14 |
--------------------------------------------------------------------------------
/tests/05_agent_collaborator.tftest.hcl:
--------------------------------------------------------------------------------
# Plan-only smoke test for the agent-collaborator example.
run "agent_collaborator_plan" {
  command = plan
  module {
    source = "./examples/agent-collaborator"
  }
}

# Full apply of the same example; fails on any runtime/API error.
run "agent_collaborator_apply" {
  command = apply
  module {
    source = "./examples/agent-collaborator"
  }
}
14 |
--------------------------------------------------------------------------------
/tests/06_kendra_kb.tftest.hcl:
--------------------------------------------------------------------------------
# Plan-only smoke test for the kendra-kb example.
# NOTE(review): unlike the other test files, there is no apply run here —
# presumably because provisioning a Kendra index is slow/costly. Confirm the
# omission is intentional, and document it if so.
run "kendra_kb_plan" {
  command = plan
  module {
    source = "./examples/kendra-kb"
  }
}
7 |
--------------------------------------------------------------------------------
/tests/07_knowledge_base_only_tftest.hcl:
--------------------------------------------------------------------------------
# NOTE(review): this file is named "07_knowledge_base_only_tftest.hcl" — it is
# missing the "." before "tftest". `terraform test` only discovers files
# matching *.tftest.hcl, so these runs are silently skipped. Rename the file
# to 07_knowledge_base_only.tftest.hcl.

# Plan-only smoke test for the knowledge-base-only example.
run "kb_plan" {
  command = plan
  module {
    source = "./examples/knowledge-base-only"
  }
}

# Full apply of the same example; fails on any runtime/API error.
run "kb_apply" {
  command = apply
  module {
    source = "./examples/knowledge-base-only"
  }
}
14 |
--------------------------------------------------------------------------------
/tests/08_bda_tftest.hcl:
--------------------------------------------------------------------------------
# NOTE(review): this file is named "08_bda_tftest.hcl" — it is missing the "."
# before "tftest". `terraform test` only discovers files matching
# *.tftest.hcl, so these runs are silently skipped. Rename the file to
# 08_bda.tftest.hcl.

# Plan-only smoke test for the bda example.
run "bda_plan" {
  command = plan
  module {
    source = "./examples/bda"
  }
}

# Full apply of the same example; fails on any runtime/API error.
run "bda_apply" {
  command = apply
  module {
    source = "./examples/bda"
  }
}
14 |
--------------------------------------------------------------------------------