├── VERSION
├── CODEOWNERS
├── .config
│   ├── .tfsec.yml
│   ├── .mdlrc
│   ├── .checkov.yml
│   ├── static_tests
│   │   ├── post-entrypoint-helpers.sh
│   │   └── pre-entrypoint-helpers.sh
│   ├── .terraform-docs.yaml
│   ├── functional_tests
│   │   ├── post-entrypoint-helpers.sh
│   │   └── pre-entrypoint-helpers.sh
│   ├── .tfsec
│   │   ├── no_launch_config_tfchecks.json
│   │   ├── sg_no_embedded_egress_rules_tfchecks.json
│   │   ├── sg_no_embedded_ingress_rules_tfchecks.json
│   │   ├── launch_template_imdsv2_tfchecks.json
│   │   └── launch_configuration_imdsv2_tfchecks.json
│   └── .tflint.hcl
├── .project_automation
│   ├── init
│   │   └── noop.sh
│   ├── update
│   │   └── noop.sh
│   ├── publication
│   │   ├── Dockerfile
│   │   └── entrypoint.sh
│   ├── deprecation
│   │   └── entrypoint.sh
│   ├── deprovision
│   │   └── entrypoint.sh
│   ├── provision
│   │   └── entrypoint.sh
│   ├── functional_tests
│   │   ├── Dockerfile
│   │   ├── functional_tests.sh
│   │   └── entrypoint.sh
│   └── static_tests
│       ├── Dockerfile
│       ├── entrypoint.sh
│       └── static_tests.sh
├── examples
│   ├── .DS_Store
│   ├── prompt-management
│   │   ├── .header.md
│   │   ├── providers.tf
│   │   ├── README.md
│   │   └── main.tf
│   ├── bda
│   │   ├── .header.md
│   │   ├── providers.tf
│   │   ├── README.md
│   │   └── main.tf
│   ├── kendra-kb
│   │   ├── .header.md
│   │   ├── main.tf
│   │   ├── providers.tf
│   │   └── README.md
│   ├── application-inference-profile
│   │   ├── .header.md
│   │   ├── main.tf
│   │   ├── providers.tf
│   │   └── README.md
│   ├── knowledge-base-only
│   │   ├── .header.md
│   │   ├── main.tf
│   │   ├── providers.tf
│   │   └── README.md
│   ├── agent-collaborator
│   │   ├── .header.md
│   │   ├── providers.tf
│   │   ├── README.md
│   │   └── main.tf
│   ├── agent-with-guardrails
│   │   ├── variables.tf
│   │   ├── .header.md
│   │   ├── providers.tf
│   │   ├── README.md
│   │   └── main.tf
│   ├── agent-with-inference-profile
│   │   ├── .header.md
│   │   ├── providers.tf
│   │   ├── main.tf
│   │   └── README.md
│   ├── agent-with-knowledge-base
│   │   ├── .header.md
│   │   ├── main.tf
│   │   ├── providers.tf
│   │   └── README.md
│   └── agent-only
│       ├── .header.md
│       ├── main.tf
│       ├── providers.tf
│       └── README.md
├── tests
│   ├── 06_kendra_kb.tftest.hcl
│   ├── 08_bda_tftest.hcl
│   ├── 07_knowledge_base_only_tftest.hcl
│   ├── 02_guardrails.tftest.hcl
│   ├── 03_prompt_management.tftest.hcl
│   ├── 05_agent_collaborator.tftest.hcl
│   ├── 04_inference_profile.tftest.hcl
│   └── 01_mandatory.tftest.hcl
├── .copier-answers.yml
├── CODE_OF_CONDUCT.md
├── inference-profile.tf
├── .pre-commit-config.yaml
├── NOTICE.txt
├── providers.tf
├── .project_config.yml
├── prompt.tf
├── bda.tf
├── .gitignore
├── kendra.tf
├── opensearch.tf
├── CONTRIBUTING.md
├── data.tf
├── outputs.tf
├── LICENSE
├── data-source.tf
├── main.tf
├── knowledge-base.tf
├── iam.tf
└── .header.md
/VERSION:
--------------------------------------------------------------------------------
1 | v0.0.31
--------------------------------------------------------------------------------
/CODEOWNERS:
--------------------------------------------------------------------------------
1 | * @aws-ia/aws-ia
--------------------------------------------------------------------------------
/.config/.tfsec.yml:
--------------------------------------------------------------------------------
1 | {
2 | "minimum_severity": "MEDIUM"
3 | }
--------------------------------------------------------------------------------
/.project_automation/init/noop.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | echo "Not Supported!"
3 |
--------------------------------------------------------------------------------
/.project_automation/update/noop.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | echo "Not Supported!"
3 |
--------------------------------------------------------------------------------
/examples/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-ia/terraform-aws-bedrock/HEAD/examples/.DS_Store
--------------------------------------------------------------------------------
/examples/prompt-management/.header.md:
--------------------------------------------------------------------------------
1 | This example shows how to deploy a basic Bedrock prompt with a prompt version.
2 |
--------------------------------------------------------------------------------
/examples/bda/.header.md:
--------------------------------------------------------------------------------
1 | This example shows how to deploy a Bedrock Data Automation (BDA) project and blueprint.
2 |
--------------------------------------------------------------------------------
/examples/kendra-kb/.header.md:
--------------------------------------------------------------------------------
1 | This example shows how to deploy a Kendra GenAI Knowledge Base without an agent.
2 |
--------------------------------------------------------------------------------
/examples/application-inference-profile/.header.md:
--------------------------------------------------------------------------------
1 | This example shows how to deploy an application inference profile.
2 |
--------------------------------------------------------------------------------
/tests/06_kendra_kb.tftest.hcl:
--------------------------------------------------------------------------------
1 | run "kendra_kb_plan" {
2 | command = plan
3 | module {
4 | source = "./examples/kendra-kb"
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/examples/knowledge-base-only/.header.md:
--------------------------------------------------------------------------------
1 | This example shows how to deploy a Knowledge Base, creating a default OpenSearch Serverless vector store with an S3 data source.
2 |
--------------------------------------------------------------------------------
/examples/agent-collaborator/.header.md:
--------------------------------------------------------------------------------
1 | This example shows how to deploy a basic Bedrock agent-collaborator setup: a supervisor agent plus a collaborator agent with an agent alias.
2 |
--------------------------------------------------------------------------------
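A sketch of the invocation this example performs (its main.tf body is not reproduced in this dump; the input names below are taken from the agent-only and agent-with-inference-profile examples elsewhere in this repository, and wiring the collaborator to the supervisor requires additional inputs not shown here):

    # Hypothetical sketch only; see examples/agent-collaborator/main.tf for the real wiring.
    module "supervisor" {
      source                 = "../.." # local example
      create_agent           = false
      create_supervisor      = true
      supervisor_name        = "SupervisorAgent"
      supervisor_instruction = "Route automotive questions to the collaborator agent."
    }

    module "collaborator" {
      source             = "../.." # local example
      create_agent_alias = true
      foundation_model   = "anthropic.claude-v2"
      instruction        = "You answer detailed questions about cars."
    }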
/.config/.mdlrc:
--------------------------------------------------------------------------------
1 | # Ignoring the following rules
2 | # MD007 Unordered list indentation
3 | # MD013 Line length
4 | # MD029 Ordered list item prefix
5 | rules "~MD007", "~MD013", "~MD029"
--------------------------------------------------------------------------------
/examples/agent-with-guardrails/variables.tf:
--------------------------------------------------------------------------------
1 | variable "region" {
2 | type = string
3 | description = "AWS region to deploy the resources"
4 | default = "us-east-1"
5 | }
--------------------------------------------------------------------------------
/examples/agent-with-inference-profile/.header.md:
--------------------------------------------------------------------------------
1 | This example demonstrates how to create a supervisor agent using Claude 3.7 Sonnet, which is only available with inference profiles.
2 |
--------------------------------------------------------------------------------
/examples/agent-with-knowledge-base/.header.md:
--------------------------------------------------------------------------------
1 | This example shows how to deploy an extended Bedrock agent, creating a default OpenSearch Serverless knowledge base with an S3 data source.
2 |
--------------------------------------------------------------------------------
/examples/agent-only/.header.md:
--------------------------------------------------------------------------------
1 | This example shows how to deploy a basic Bedrock agent with an agent alias, leaving the default values and without creating an action group or a knowledge base.
2 |
--------------------------------------------------------------------------------
/examples/agent-with-guardrails/.header.md:
--------------------------------------------------------------------------------
1 | This example shows how to deploy a basic Bedrock agent with guardrails, leaving the default values and without creating an action group or a knowledge base.
2 |
--------------------------------------------------------------------------------
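This example's main.tf body is likewise not reproduced in this dump; a hypothetical sketch follows, with `foundation_model` and `instruction` confirmed by the agent-only example and the guardrail flag name assumed:

    module "bedrock" {
      source           = "../.." # local example
      foundation_model = "anthropic.claude-v2"
      instruction      = "You are an automotive assistant who can provide detailed information about cars to a customer."
      create_guardrail = true # assumed input name, not confirmed by this dump
      # guardrail content-filter and topic settings would follow here
    }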
/.copier-answers.yml:
--------------------------------------------------------------------------------
1 | # This file is auto-generated, changes will be overwritten
2 | _commit: v0.1.4
3 | _src_path: /task/68bbcffe-0834-11f0-993f-8654aa3f6246/projecttype
4 | starting_version: v0.0.0
5 | version_file: VERSION
6 |
7 |
--------------------------------------------------------------------------------
/tests/08_bda_tftest.hcl:
--------------------------------------------------------------------------------
1 | run "bda_plan" {
2 | command = plan
3 | module {
4 | source = "./examples/bda"
5 | }
6 | }
7 |
8 | run "bda_apply" {
9 | command = apply
10 | module {
11 | source = "./examples/bda"
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/.project_automation/publication/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM public.ecr.aws/codebuild/amazonlinux2-x86_64-standard:4.0
2 | RUN yum install -y yum-utils && yum-config-manager --add-repo https://cli.github.com/packages/rpm/gh-cli.repo && yum install -y gh
3 | RUN pip install awscli
4 |
--------------------------------------------------------------------------------
/tests/07_knowledge_base_only_tftest.hcl:
--------------------------------------------------------------------------------
1 | run "kb_plan" {
2 | command = plan
3 | module {
4 | source = "./examples/knowledge-base-only"
5 | }
6 | }
7 |
8 | run "kb_apply" {
9 | command = apply
10 | module {
11 | source = "./examples/knowledge-base-only"
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/tests/02_guardrails.tftest.hcl:
--------------------------------------------------------------------------------
1 | run "guardrails_plan_basic" {
2 | command = plan
3 | module {
4 | source = "./examples/agent-with-guardrails"
5 | }
6 | }
7 |
8 | run "guardrails_apply_basic" {
9 | command = apply
10 | module {
11 | source = "./examples/agent-with-guardrails"
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/tests/03_prompt_management.tftest.hcl:
--------------------------------------------------------------------------------
1 | run "prompt_management_plan" {
2 | command = plan
3 | module {
4 | source = "./examples/prompt-management"
5 | }
6 | }
7 |
8 | run "prompt_management_apply" {
9 | command = apply
10 | module {
11 | source = "./examples/prompt-management"
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/tests/05_agent_collaborator.tftest.hcl:
--------------------------------------------------------------------------------
1 | run "agent_collaborator_plan" {
2 | command = plan
3 | module {
4 | source = "./examples/agent-collaborator"
5 | }
6 | }
7 |
8 | run "agent_collaborator_apply" {
9 | command = apply
10 | module {
11 | source = "./examples/agent-collaborator"
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | ## Code of Conduct
2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
4 | opensource-codeofconduct@amazon.com with any additional questions or comments.
5 |
--------------------------------------------------------------------------------
/.project_automation/deprecation/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash -ex
2 |
3 | ## NOTE: paths may differ when running in a managed task. To ensure behavior is consistent between
4 | # managed and local tasks, always use these variables for the project and project type paths
5 | PROJECT_PATH=${BASE_PATH}/project
6 | PROJECT_TYPE_PATH=${BASE_PATH}/projecttype
7 |
--------------------------------------------------------------------------------
/.project_automation/deprovision/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash -ex
2 |
3 | ## NOTE: paths may differ when running in a managed task. To ensure behavior is consistent between
4 | # managed and local tasks, always use these variables for the project and project type paths
5 | PROJECT_PATH=${BASE_PATH}/project
6 | PROJECT_TYPE_PATH=${BASE_PATH}/projecttype
7 |
--------------------------------------------------------------------------------
/.project_automation/provision/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash -ex
2 |
3 | ## NOTE: paths may differ when running in a managed task. To ensure behavior is consistent between
4 | # managed and local tasks, always use these variables for the project and project type paths
5 | PROJECT_PATH=${BASE_PATH}/project
6 | PROJECT_TYPE_PATH=${BASE_PATH}/projecttype
7 |
--------------------------------------------------------------------------------
/tests/04_inference_profile.tftest.hcl:
--------------------------------------------------------------------------------
1 | run "app_inference_profile_plan" {
2 | command = plan
3 | module {
4 | source = "./examples/application-inference-profile"
5 | }
6 | }
7 |
8 | run "app_inference_profile_apply" {
9 | command = apply
10 | module {
11 | source = "./examples/application-inference-profile"
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/.config/.checkov.yml:
--------------------------------------------------------------------------------
1 | download-external-modules: False
2 | evaluate-variables: true
3 | directory:
4 | - ./
5 | framework:
6 | - terraform
7 | skip-check:
8 | - CKV2_GCP*
9 | - CKV_AZURE*
10 | - CKV2_AZURE*
11 | - CKV_TF_1 # default to Terraform registry instead of Git
12 | summary-position: bottom
13 | output: 'cli'
14 | compact: True
15 | quiet: True
--------------------------------------------------------------------------------
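This Checkov profile scans only the Terraform framework from the repository root, skips all GCP/Azure checks plus CKV_TF_1, and is consumed during the static-test stage; locally the equivalent run would be `checkov --config-file .config/.checkov.yml` (the exact invocation used by the static-test entrypoint is an assumption, as that script is not included in this dump).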
/.config/static_tests/post-entrypoint-helpers.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | ## NOTE: this script runs at the end of the static test
3 | ## Use it to load any configuration after the static test
4 | ## TIP: avoid modifying .project_automation/static_tests/entrypoint.sh;
5 | ## migrate any customization you made to entrypoint.sh into this helper script
6 | echo "Executing Post-Entrypoint Helpers"
--------------------------------------------------------------------------------
/.config/static_tests/pre-entrypoint-helpers.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | ## NOTE: this script runs at the start of the static test
3 | ## Use it to load any configuration before the static test
4 | ## TIP: avoid modifying .project_automation/static_tests/entrypoint.sh;
5 | ## migrate any customization you made to entrypoint.sh into this helper script
6 | echo "Executing Pre-Entrypoint Helpers"
--------------------------------------------------------------------------------
/.config/.terraform-docs.yaml:
--------------------------------------------------------------------------------
1 | formatter: markdown
2 | header-from: .header.md
3 | settings:
4 | anchor: true
5 | color: true
6 | default: true
7 | escape: true
8 | html: true
9 | indent: 2
10 | required: true
11 | sensitive: true
12 | type: true
13 |
14 | sort:
15 | enabled: true
16 | by: required
17 |
18 | output:
19 | file: README.md
20 | mode: replace
21 |
--------------------------------------------------------------------------------
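With this configuration, `terraform-docs -c .config/.terraform-docs.yaml ./` prepends each module's .header.md to the generated requirements/providers/inputs/outputs tables and writes the result into README.md (mode: replace), which is how the example READMEs later in this dump were produced; the pre-commit hook below passes the same file via `--config`.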
/.config/functional_tests/post-entrypoint-helpers.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | ## NOTE: this script runs at the end of the functional test
3 | ## Use it to load any configuration after the functional test
4 | ## TIP: avoid modifying .project_automation/functional_tests/entrypoint.sh;
5 | ## migrate any customization you made to entrypoint.sh into this helper script
6 | echo "Executing Post-Entrypoint Helpers"
--------------------------------------------------------------------------------
/.config/functional_tests/pre-entrypoint-helpers.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | ## NOTE: this script runs at the start of the functional test
3 | ## Use it to load any configuration before the functional test
4 | ## TIP: avoid modifying .project_automation/functional_tests/entrypoint.sh;
5 | ## migrate any customization you made to entrypoint.sh into this helper script
6 | echo "Executing Pre-Entrypoint Helpers"
--------------------------------------------------------------------------------
/.project_automation/functional_tests/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM public.ecr.aws/codebuild/amazonlinux2-x86_64-standard:4.0
2 | ENV TERRAFORM_VERSION=1.13.1
3 | RUN cd /tmp && \
4 | wget --quiet https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip && \
5 | unzip terraform_${TERRAFORM_VERSION}_linux_amd64.zip -d /usr/local/bin && chmod 755 /usr/local/bin/terraform
--------------------------------------------------------------------------------
/inference-profile.tf:
--------------------------------------------------------------------------------
1 | resource "awscc_bedrock_application_inference_profile" "application_inference_profile" {
2 | count = var.create_app_inference_profile ? 1 : 0
3 | inference_profile_name = "${random_string.solution_prefix.result}-${var.app_inference_profile_name}"
4 | description = var.app_inference_profile_description
5 | model_source = {
6 | copy_from = var.app_inference_profile_model_source
7 | }
8 | tags = var.app_inference_profile_tags
9 | }
10 |
--------------------------------------------------------------------------------
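A minimal root-module sketch for the resource above (input names come from this file and from the application-inference-profile example later in this dump; the profile name and model ARN are illustrative):

    module "bedrock" {
      source       = "../.." # path style used by the bundled examples
      create_agent = false

      create_app_inference_profile       = true
      app_inference_profile_name         = "claude-sonnet-profile" # the module prefixes this with a random solution prefix
      app_inference_profile_description  = "Example application inference profile"
      app_inference_profile_model_source = "arn:aws:bedrock:us-east-1::foundation-model/anthropic.claude-3-sonnet-20240229-v1:0"
    }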
/tests/01_mandatory.tftest.hcl:
--------------------------------------------------------------------------------
1 | ## NOTE: This is the minimum mandatory test
2 | # run at least one test using the ./examples directory as your module source
3 | # create additional *.tftest.hcl for your own unit / integration tests
4 | # use tests/*.auto.tfvars to add non-default variables
5 |
6 | run "agent_only_plan" {
7 | command = plan
8 | module {
9 | source = "./examples/agent-only"
10 | }
11 | }
12 |
13 | run "agent_only_apply" {
14 | command = apply
15 | module {
16 | source = "./examples/agent-only"
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
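For the tests/*.auto.tfvars mechanism mentioned in the comments above, a hypothetical tests/example.auto.tfvars overriding the examples' region input might contain:

    region = "us-west-2"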
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | fail_fast: false
3 | minimum_pre_commit_version: "2.6.0"
4 | repos:
5 | -
6 | repo: https://github.com/terraform-docs/terraform-docs
7 | # To update run:
8 | # pre-commit autoupdate --freeze
9 | rev: 212db41760d7fc45d736d5eb94a483d0d2a12049 # frozen: v0.16.0
10 | hooks:
11 | - id: terraform-docs-go
12 | args:
13 | - "--config=.config/.terraform-docs.yaml"
14 | - "--lockfile=false"
15 | - "--recursive"
16 | - "--recursive-path=examples/"
17 | - "./"
--------------------------------------------------------------------------------
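With this hook installed, `pre-commit run terraform-docs-go --all-files` regenerates the root and example READMEs locally (assuming pre-commit >= 2.6.0, per minimum_pre_commit_version above).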
/NOTICE.txt:
--------------------------------------------------------------------------------
1 | Copyright 2016-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at
4 |
5 | http://aws.amazon.com/apache2.0/
6 |
7 | or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
8 |
--------------------------------------------------------------------------------
/examples/kendra-kb/main.tf:
--------------------------------------------------------------------------------
1 | #####################################################################################
2 | # Terraform module examples are meant to show an _example_ of how to use a module
3 | # per use case. The code below should not be copied directly, but referenced in order
4 | # to build your own root module that invokes this module.
5 | #####################################################################################
6 |
7 | module "bedrock" {
8 | source = "../.." # local example
9 | create_kendra_config = true
10 | create_kendra_s3_data_source = true
11 | create_agent = false
12 | }
--------------------------------------------------------------------------------
/examples/knowledge-base-only/main.tf:
--------------------------------------------------------------------------------
1 | #####################################################################################
2 | # Terraform module examples are meant to show an _example_ of how to use a module
3 | # per use case. The code below should not be copied directly, but referenced in order
4 | # to build your own root module that invokes this module.
5 | #####################################################################################
6 |
7 | module "bedrock" {
8 | source = "../.." # local example
9 | create_default_kb = true
10 | create_agent = false
11 | create_s3_data_source = true
12 | }
--------------------------------------------------------------------------------
/providers.tf:
--------------------------------------------------------------------------------
1 | terraform {
2 | required_version = ">= 1.13.1"
3 | required_providers {
4 | aws = {
5 | source = "hashicorp/aws"
6 | version = ">= 5.0, ~> 6.2.0"
7 | }
8 | awscc = {
9 | source = "hashicorp/awscc"
10 | version = ">= 1.0.0"
11 | }
12 | time = {
13 | source = "hashicorp/time"
14 | version = "~> 0.6"
15 | }
16 | random = {
17 | source = "hashicorp/random"
18 | version = ">= 3.6.0"
19 | }
20 | opensearch = {
21 | source = "opensearch-project/opensearch"
22 | version = ">= 2.2.0"
23 | }
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/examples/agent-only/main.tf:
--------------------------------------------------------------------------------
1 | #####################################################################################
2 | # Terraform module examples are meant to show an _example_ of how to use a module
3 | # per use case. The code below should not be copied directly, but referenced in order
4 | # to build your own root module that invokes this module.
5 | #####################################################################################
6 |
7 | module "bedrock" {
8 | source = "../.." # local example
9 | create_agent_alias = true
10 | foundation_model = "anthropic.claude-v2"
11 | instruction = "You are an automotive assistant who can provide detailed information about cars to a customer."
12 | }
--------------------------------------------------------------------------------
/examples/agent-with-knowledge-base/main.tf:
--------------------------------------------------------------------------------
1 | #####################################################################################
2 | # Terraform module examples are meant to show an _example_ of how to use a module
3 | # per use case. The code below should not be copied directly, but referenced in order
4 | # to build your own root module that invokes this module.
5 | #####################################################################################
6 |
7 | module "bedrock" {
8 | source = "../.." # local example
9 | create_default_kb = true
10 | create_s3_data_source = true
11 | foundation_model = "anthropic.claude-v2"
12 | instruction = "You are an automotive assistant who can provide detailed information about cars to a customer."
13 | }
--------------------------------------------------------------------------------
/.project_automation/publication/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash -ex
2 |
3 | ## NOTE: paths may differ when running in a managed task. To ensure behavior is consistent between
4 | # managed and local tasks, always use these variables for the project and project type paths
5 | PROJECT_PATH=${BASE_PATH}/project
6 | PROJECT_TYPE_PATH=${BASE_PATH}/projecttype
7 |
8 | echo "[STAGE: Publication]"
9 | VERSION=$(cat VERSION)
10 | echo $VERSION
11 | BRANCH=main
12 | EXISTING_GIT_VERSION="$(git tag -l)"
13 |
14 | if echo "${EXISTING_GIT_VERSION}" | grep -q "${VERSION}"
15 | then
16 |   echo "Version ${VERSION} already exists, skipping release creation (hint: bump the VERSION file)"
17 | else
18 | echo "creating new version"
19 | gh release create ${VERSION} --target ${BRANCH} --generate-notes
20 | fi
21 |
--------------------------------------------------------------------------------
/.project_config.yml:
--------------------------------------------------------------------------------
1 | version: "1.0.0"
2 |
3 | init:
4 | entrypoint: .project_automation/init/noop.sh
5 | update:
6 | entrypoint: .project_automation/update/noop.sh
7 | static_tests:
8 | dockerfile: .project_automation/static_tests/Dockerfile
9 | entrypoint: .project_automation/static_tests/entrypoint.sh
10 | functional_tests:
11 | github_permissions:
12 | contents: write
13 | dockerfile: .project_automation/functional_tests/Dockerfile
14 | entrypoint: .project_automation/functional_tests/entrypoint.sh
15 | publication:
16 | github_permissions:
17 | contents: write
18 | dockerfile: .project_automation/publication/Dockerfile
19 | entrypoint: .project_automation/publication/entrypoint.sh
20 | deprecation:
21 | entrypoint: .project_automation/deprecation/entrypoint.sh
22 |
--------------------------------------------------------------------------------
/prompt.tf:
--------------------------------------------------------------------------------
1 | # – Prompt Management –
2 |
3 | resource "awscc_bedrock_prompt_version" "prompt_version" {
4 | count = var.create_prompt_version ? 1 : 0
5 | prompt_arn = awscc_bedrock_prompt.prompt[0].arn
6 | description = var.prompt_version_description
7 | tags = var.prompt_version_tags
8 | }
9 |
10 | resource "awscc_bedrock_prompt" "prompt" {
11 | count = var.create_prompt ? 1 : 0
12 | name = "${random_string.solution_prefix.result}-${var.prompt_name}"
13 | description = var.prompt_description
14 | customer_encryption_key_arn = var.customer_encryption_key_arn
15 | default_variant = var.default_variant
16 | tags = var.prompt_tags
17 | variants = var.variants_list
18 | }
19 |
--------------------------------------------------------------------------------
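Note the coupling between the two resources above: setting create_prompt_version = true only works together with create_prompt = true, because the version resource dereferences awscc_bedrock_prompt.prompt[0].arn; the prompt-management example later in this dump sets both flags.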
/examples/agent-with-guardrails/providers.tf:
--------------------------------------------------------------------------------
1 | terraform {
2 | required_version = ">= 1.13.1"
3 | required_providers {
4 | aws = {
5 | source = "hashicorp/aws"
6 | version = ">= 5.0"
7 | }
8 | awscc = {
9 | source = "hashicorp/awscc"
10 | version = ">= 1.0.0"
11 | }
12 | opensearch = {
13 | source = "opensearch-project/opensearch"
14 | version = "= 2.2.0"
15 | }
16 | time = {
17 | source = "hashicorp/time"
18 | version = "~> 0.6"
19 | }
20 | random = {
21 | source = "hashicorp/random"
22 | version = ">= 3.6.0"
23 | }
24 | }
25 | }
26 |
27 | provider "aws" {
28 | region = var.region
29 | }
30 |
31 | provider "awscc" {
32 | region = var.region
33 | }
34 |
35 | provider "opensearch" {
36 | url = "n/a"
37 | healthcheck = false
38 | }
39 |
--------------------------------------------------------------------------------
/examples/agent-with-inference-profile/providers.tf:
--------------------------------------------------------------------------------
1 | terraform {
2 | required_version = ">= 1.13.1"
3 | required_providers {
4 | aws = {
5 | source = "hashicorp/aws"
6 | version = ">= 5.0"
7 | }
8 | awscc = {
9 | source = "hashicorp/awscc"
10 | version = ">= 1.0.0"
11 | }
12 | opensearch = {
13 | source = "opensearch-project/opensearch"
14 | version = "= 2.2.0"
15 | }
16 | time = {
17 | source = "hashicorp/time"
18 | version = "~> 0.6"
19 | }
20 | random = {
21 | source = "hashicorp/random"
22 | version = ">= 3.6.0"
23 | }
24 | }
25 | }
26 |
27 | provider "aws" {
28 | region = "us-east-1"
29 | }
30 |
31 | provider "awscc" {
32 | region = "us-east-1"
33 | }
34 |
35 | provider "opensearch" {
36 | url = "n/a"
37 | healthcheck = false
38 | }
39 |
--------------------------------------------------------------------------------
/examples/application-inference-profile/main.tf:
--------------------------------------------------------------------------------
1 | #####################################################################################
2 | # Terraform module examples are meant to show an _example_ of how to use a module
3 | # per use case. The code below should not be copied directly, but referenced in order
4 | # to build your own root module that invokes this module.
5 | #####################################################################################
6 |
7 | # Get current AWS account ID
8 | data "aws_caller_identity" "current" {}
9 |
10 | # Get current AWS region
11 | data "aws_region" "current" {}
12 |
13 | module "bedrock" {
14 | source = "../.." # local example
15 | create_agent = false
16 |
17 | # Application Inference Profile
18 | create_app_inference_profile = true
19 | app_inference_profile_model_source = "arn:aws:bedrock:${data.aws_region.current.name}::foundation-model/anthropic.claude-3-sonnet-20240229-v1:0"
20 | }
--------------------------------------------------------------------------------
/examples/agent-only/providers.tf:
--------------------------------------------------------------------------------
1 | terraform {
2 | required_version = ">= 1.13.1"
3 | required_providers {
4 | aws = {
5 | source = "hashicorp/aws"
6 | version = ">= 5.0"
7 | }
8 | awscc = {
9 | source = "hashicorp/awscc"
10 | version = ">= 1.0.0"
11 | }
12 | opensearch = {
13 | source = "opensearch-project/opensearch"
14 | version = "= 2.2.0"
15 | }
16 | time = {
17 | source = "hashicorp/time"
18 | version = "~> 0.6"
19 | }
20 | random = {
21 | source = "hashicorp/random"
22 | version = ">= 3.6.0"
23 | }
24 | }
25 | }
26 |
27 | variable "region" {
28 | type = string
29 | description = "AWS region to deploy the resources"
30 | default = "us-east-1"
31 | }
32 |
33 | provider "aws" {
34 | region = var.region
35 | }
36 |
37 | provider "awscc" {
38 | region = var.region
39 | }
40 |
41 | provider "opensearch" {
42 | url = "n/a"
43 | healthcheck = false
44 | }
--------------------------------------------------------------------------------
/examples/bda/providers.tf:
--------------------------------------------------------------------------------
1 | terraform {
2 | required_version = ">= 1.13.1"
3 | required_providers {
4 | aws = {
5 | source = "hashicorp/aws"
6 | version = ">= 5.0"
7 | }
8 | awscc = {
9 | source = "hashicorp/awscc"
10 | version = ">= 1.0.0"
11 | }
12 | opensearch = {
13 | source = "opensearch-project/opensearch"
14 | version = "= 2.2.0"
15 | }
16 | time = {
17 | source = "hashicorp/time"
18 | version = "~> 0.6"
19 | }
20 | random = {
21 | source = "hashicorp/random"
22 | version = ">= 3.6.0"
23 | }
24 | }
25 | }
26 |
27 | variable "region" {
28 | type = string
29 | description = "AWS region to deploy the resources"
30 | default = "us-east-1"
31 | }
32 |
33 | provider "aws" {
34 | region = var.region
35 | }
36 |
37 | provider "awscc" {
38 | region = var.region
39 | }
40 |
41 | provider "opensearch" {
42 | url = "n/a"
43 | healthcheck = false
44 | }
45 |
--------------------------------------------------------------------------------
/examples/kendra-kb/providers.tf:
--------------------------------------------------------------------------------
1 | terraform {
2 | required_version = ">= 1.13.1"
3 | required_providers {
4 | aws = {
5 | source = "hashicorp/aws"
6 | version = ">= 5.0"
7 | }
8 | awscc = {
9 | source = "hashicorp/awscc"
10 | version = ">= 1.0.0"
11 | }
12 | opensearch = {
13 | source = "opensearch-project/opensearch"
14 | version = "= 2.2.0"
15 | }
16 | time = {
17 | source = "hashicorp/time"
18 | version = "~> 0.6"
19 | }
20 | random = {
21 | source = "hashicorp/random"
22 | version = ">= 3.6.0"
23 | }
24 | }
25 | }
26 |
27 | variable "region" {
28 | type = string
29 | description = "AWS region to deploy the resources"
30 | default = "us-east-1"
31 | }
32 |
33 | provider "aws" {
34 | region = var.region
35 | }
36 |
37 | provider "awscc" {
38 | region = var.region
39 | }
40 |
41 | provider "opensearch" {
42 | url = "n/a"
43 | healthcheck = false
44 | }
--------------------------------------------------------------------------------
/examples/agent-collaborator/providers.tf:
--------------------------------------------------------------------------------
1 | terraform {
2 | required_version = ">= 1.13.1"
3 | required_providers {
4 | aws = {
5 | source = "hashicorp/aws"
6 | version = ">= 5.0"
7 | }
8 | awscc = {
9 | source = "hashicorp/awscc"
10 | version = ">= 1.0.0"
11 | }
12 | opensearch = {
13 | source = "opensearch-project/opensearch"
14 | version = "= 2.2.0"
15 | }
16 | time = {
17 | source = "hashicorp/time"
18 | version = "~> 0.6"
19 | }
20 | random = {
21 | source = "hashicorp/random"
22 | version = ">= 3.6.0"
23 | }
24 | }
25 | }
26 |
27 | variable "region" {
28 | type = string
29 | description = "AWS region to deploy the resources"
30 | default = "us-east-1"
31 | }
32 |
33 | provider "aws" {
34 | region = var.region
35 | }
36 |
37 | provider "awscc" {
38 | region = var.region
39 | }
40 |
41 | provider "opensearch" {
42 | url = "n/a"
43 | healthcheck = false
44 | }
--------------------------------------------------------------------------------
/examples/prompt-management/providers.tf:
--------------------------------------------------------------------------------
1 | terraform {
2 | required_version = ">= 1.13.1"
3 | required_providers {
4 | aws = {
5 | source = "hashicorp/aws"
6 | version = ">= 5.0"
7 | }
8 | awscc = {
9 | source = "hashicorp/awscc"
10 | version = ">= 1.0.0"
11 | }
12 | opensearch = {
13 | source = "opensearch-project/opensearch"
14 | version = "= 2.2.0"
15 | }
16 | time = {
17 | source = "hashicorp/time"
18 | version = "~> 0.6"
19 | }
20 | random = {
21 | source = "hashicorp/random"
22 | version = ">= 3.6.0"
23 | }
24 | }
25 | }
26 |
27 | variable "region" {
28 | type = string
29 | description = "AWS region to deploy the resources"
30 | default = "us-east-1"
31 | }
32 |
33 | provider "aws" {
34 | region = var.region
35 | }
36 |
37 | provider "awscc" {
38 | region = var.region
39 | }
40 |
41 | provider "opensearch" {
42 | url = "n/a"
43 | healthcheck = false
44 | }
45 |
--------------------------------------------------------------------------------
/examples/application-inference-profile/providers.tf:
--------------------------------------------------------------------------------
1 | terraform {
2 | required_version = ">= 1.13.1"
3 | required_providers {
4 | aws = {
5 | source = "hashicorp/aws"
6 | version = ">= 5.0"
7 | }
8 | awscc = {
9 | source = "hashicorp/awscc"
10 | version = ">= 1.0.0"
11 | }
12 | opensearch = {
13 | source = "opensearch-project/opensearch"
14 | version = "= 2.2.0"
15 | }
16 | time = {
17 | source = "hashicorp/time"
18 | version = "~> 0.6"
19 | }
20 | random = {
21 | source = "hashicorp/random"
22 | version = ">= 3.6.0"
23 | }
24 | }
25 | }
26 |
27 | variable "region" {
28 | type = string
29 | description = "AWS region to deploy the resources"
30 | default = "us-east-1"
31 | }
32 |
33 | provider "aws" {
34 | region = var.region
35 | }
36 |
37 | provider "awscc" {
38 | region = var.region
39 | }
40 |
41 | provider "opensearch" {
42 | url = "n/a"
43 | healthcheck = false
44 | }
45 |
--------------------------------------------------------------------------------
/examples/knowledge-base-only/providers.tf:
--------------------------------------------------------------------------------
1 | terraform {
2 | required_version = ">= 1.13.1"
3 | required_providers {
4 | aws = {
5 | source = "hashicorp/aws"
6 | version = ">= 5.0"
7 | }
8 | awscc = {
9 | source = "hashicorp/awscc"
10 | version = ">= 1.0.0"
11 | }
12 | opensearch = {
13 | source = "opensearch-project/opensearch"
14 | version = "= 2.2.0"
15 | }
16 | time = {
17 | source = "hashicorp/time"
18 | version = "~> 0.6"
19 | }
20 | random = {
21 | source = "hashicorp/random"
22 | version = ">= 3.6.0"
23 | }
24 | }
25 | }
26 |
27 | variable "region" {
28 | type = string
29 | description = "AWS region to deploy the resources"
30 | default = "us-east-1"
31 | }
32 |
33 | provider "aws" {
34 | region = var.region
35 | }
36 |
37 | provider "awscc" {
38 | region = var.region
39 | }
40 |
41 | provider "opensearch" {
42 | url = module.bedrock.default_collection.collection_endpoint
43 | healthcheck = false
44 | }
--------------------------------------------------------------------------------
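Unlike the examples that stub the opensearch provider with url = "n/a", this one (and agent-with-knowledge-base below) points it at the module's default_collection output, since a default knowledge base actually provisions an OpenSearch Serverless collection whose endpoint the provider must reach.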
/examples/agent-with-knowledge-base/providers.tf:
--------------------------------------------------------------------------------
1 | terraform {
2 | required_version = ">= 1.13.1"
3 | required_providers {
4 | aws = {
5 | source = "hashicorp/aws"
6 | version = ">= 5.0"
7 | }
8 | awscc = {
9 | source = "hashicorp/awscc"
10 | version = ">= 1.0.0"
11 | }
12 | opensearch = {
13 | source = "opensearch-project/opensearch"
14 | version = "= 2.2.0"
15 | }
16 | time = {
17 | source = "hashicorp/time"
18 | version = "~> 0.6"
19 | }
20 | random = {
21 | source = "hashicorp/random"
22 | version = ">= 3.6.0"
23 | }
24 | }
25 | }
26 |
27 | variable "region" {
28 | type = string
29 | description = "AWS region to deploy the resources"
30 | default = "us-east-1"
31 | }
32 |
33 | provider "aws" {
34 | region = var.region
35 | }
36 |
37 | provider "awscc" {
38 | region = var.region
39 | }
40 |
41 | provider "opensearch" {
42 | url = module.bedrock.default_collection.collection_endpoint
43 | healthcheck = false
44 | }
--------------------------------------------------------------------------------
/.config/.tfsec/no_launch_config_tfchecks.json:
--------------------------------------------------------------------------------
1 | {
2 | "checks": [
3 | {
4 | "code": "CUS003",
5 | "description": "Use `aws_launch_template` over `aws_launch_configuration",
6 | "impact": "Launch configurations are not capable of versions",
7 | "resolution": "Convert resource type and attributes to `aws_launch_template`",
8 | "requiredTypes": [
9 | "resource"
10 | ],
11 | "requiredLabels": [
12 | "aws_launch_configuration"
13 | ],
14 | "severity": "MEDIUM",
15 | "matchSpec": {
16 | "action": "notPresent",
17 | "name": "image_id"
18 | },
19 |
20 | "errorMessage": "should be changed to `aws_launch_template` since the functionality is the same but templates can be versioned.",
21 | "relatedLinks": [
22 | "https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/launch_template",
23 | "https://aws.amazon.com/blogs/security/defense-in-depth-open-firewalls-reverse-proxies-ssrf-vulnerabilities-ec2-instance-metadata-service"
24 | ]
25 | }
26 | ]
27 | }
28 |
--------------------------------------------------------------------------------
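This file and the other *_tfchecks.json files under .config/.tfsec/ are tfsec custom checks; tfsec picks up such JSON definitions from a .tfsec directory (or a directory passed via --custom-check-dir) alongside its built-in rules.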
/.config/.tfsec/sg_no_embedded_egress_rules_tfchecks.json:
--------------------------------------------------------------------------------
1 | {
2 | "checks": [
3 | {
4 | "code": "CUS005",
5 | "description": "Security group rules should be defined with `aws_security_group_rule` instead of embedded.",
6 | "impact": "Embedded security group rules can cause issues during configuration updates.",
7 | "resolution": "Move `egress` rules to `aws_security_group_rule` and attach to `aws_security_group`.",
8 | "requiredTypes": [
9 | "resource"
10 | ],
11 | "requiredLabels": [
12 | "aws_security_group"
13 | ],
14 | "severity": "MEDIUM",
15 | "matchSpec": {
16 | "action": "notPresent",
17 | "name": "egress"
18 | },
19 |
20 | "errorMessage": "`egress` rules should be moved to `aws_security_group_rule` and attached to `aws_security_group` instead of embedded.",
21 | "relatedLinks": [
22 | "https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/security_group_rule",
23 | "https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/security_group"
24 | ]
25 | }
26 | ]
27 | }
28 |
--------------------------------------------------------------------------------
/.config/.tfsec/sg_no_embedded_ingress_rules_tfchecks.json:
--------------------------------------------------------------------------------
1 | {
2 | "checks": [
3 | {
4 | "code": "CUS004",
5 | "description": "Security group rules should be defined with `aws_security_group_rule` instead of embedded.",
6 | "impact": "Embedded security group rules can cause issues during configuration updates.",
7 | "resolution": "Move `ingress` rules to `aws_security_group_rule` and attach to `aws_security_group`.",
8 | "requiredTypes": [
9 | "resource"
10 | ],
11 | "requiredLabels": [
12 | "aws_security_group"
13 | ],
14 | "severity": "MEDIUM",
15 | "matchSpec": {
16 | "action": "notPresent",
17 | "name": "ingress"
18 | },
19 |
20 | "errorMessage": "`ingress` rules should be moved to `aws_security_group_rule` and attached to `aws_security_group` instead of embedded.",
21 | "relatedLinks": [
22 | "https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/security_group_rule",
23 | "https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/security_group"
24 | ]
25 | }
26 | ]
27 | }
28 |
--------------------------------------------------------------------------------
/examples/prompt-management/README.md:
--------------------------------------------------------------------------------
1 |
2 | This example shows how to deploy a basic Bedrock prompt with a prompt version.
3 |
4 | ## Requirements
5 |
6 | | Name | Version |
7 | |------|---------|
8 | | [terraform](#requirement\_terraform) | >= 1.13.1 |
9 | | [aws](#requirement\_aws) | >= 5.0 |
10 | | [awscc](#requirement\_awscc) | >= 1.0.0 |
11 | | [opensearch](#requirement\_opensearch) | = 2.2.0 |
12 | | [random](#requirement\_random) | >= 3.6.0 |
13 | | [time](#requirement\_time) | ~> 0.6 |
14 |
15 | ## Providers
16 |
17 | No providers.
18 |
19 | ## Modules
20 |
21 | | Name | Source | Version |
22 | |------|--------|---------|
23 | | [bedrock](#module\_bedrock) | ../.. | n/a |
24 |
25 | ## Resources
26 |
27 | No resources.
28 |
29 | ## Inputs
30 |
31 | | Name | Description | Type | Default | Required |
32 | |------|-------------|------|---------|:--------:|
33 | | [region](#input\_region) | AWS region to deploy the resources | `string` | `"us-east-1"` | no |
34 |
35 | ## Outputs
36 |
37 | No outputs.
38 |
--------------------------------------------------------------------------------
/examples/kendra-kb/README.md:
--------------------------------------------------------------------------------
1 |
2 | This example shows how to deploy a Kendra GenAI Knowledge Base without an agent.
3 |
4 | ## Requirements
5 |
6 | | Name | Version |
7 | |------|---------|
8 | | [terraform](#requirement\_terraform) | >= 1.13.1 |
9 | | [aws](#requirement\_aws) | >= 5.0 |
10 | | [awscc](#requirement\_awscc) | >= 1.0.0 |
11 | | [opensearch](#requirement\_opensearch) | = 2.2.0 |
12 | | [random](#requirement\_random) | >= 3.6.0 |
13 | | [time](#requirement\_time) | ~> 0.6 |
14 |
15 | ## Providers
16 |
17 | No providers.
18 |
19 | ## Modules
20 |
21 | | Name | Source | Version |
22 | |------|--------|---------|
23 | | [bedrock](#module\_bedrock) | ../.. | n/a |
24 |
25 | ## Resources
26 |
27 | No resources.
28 |
29 | ## Inputs
30 |
31 | | Name | Description | Type | Default | Required |
32 | |------|-------------|------|---------|:--------:|
33 | | [region](#input\_region) | AWS region to deploy the resources | `string` | `"us-east-1"` | no |
34 |
35 | ## Outputs
36 |
37 | No outputs.
38 |
--------------------------------------------------------------------------------
/.project_automation/functional_tests/functional_tests.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | ## NOTE: paths may differ when running in a managed task. To ensure behavior is consistent between
4 | # managed and local tasks, always use these variables for the project and project type paths
5 | PROJECT_PATH=${BASE_PATH}/project
6 | PROJECT_TYPE_PATH=${BASE_PATH}/projecttype
7 |
8 | echo "Starting Functional Tests"
9 | cd ${PROJECT_PATH}
10 | echo "---------------------------------------------------------------------------"
11 | git log --max-count=1
12 | echo "---------------------------------------------------------------------------"
13 |
14 |
15 | #********** Terraform Test **********
16 |
17 | # Look up the mandatory test file
18 | MANDATORY_TEST_PATH="./tests/01_mandatory.tftest.hcl"
19 | if test -f ${MANDATORY_TEST_PATH}; then
20 | echo "File ${MANDATORY_TEST_PATH} is found, resuming test"
21 | # Run Terraform test
22 | terraform init -no-color
23 | terraform test -no-color
24 | else
25 | echo "File ${MANDATORY_TEST_PATH} not found. You must include at least one test run in file ${MANDATORY_TEST_PATH}"
26 |   (exit 1) # force a non-zero exit status for the check below
27 | fi
28 |
29 | if [ $? -eq 0 ]; then
30 | echo "Terraform Test Successfull"
31 | else
32 | echo "Terraform Test Failed"
33 | exit 1
34 | fi
35 |
36 | echo "End of Functional Tests"
--------------------------------------------------------------------------------
/examples/knowledge-base-only/README.md:
--------------------------------------------------------------------------------
1 |
2 | This example shows how to deploy a Knowledge Base, creating a default OpenSearch Serverless vector store with an S3 data source.
3 |
4 | ## Requirements
5 |
6 | | Name | Version |
7 | |------|---------|
8 | | [terraform](#requirement\_terraform) | >= 1.13.1 |
9 | | [aws](#requirement\_aws) | >= 5.0 |
10 | | [awscc](#requirement\_awscc) | >= 1.0.0 |
11 | | [opensearch](#requirement\_opensearch) | = 2.2.0 |
12 | | [random](#requirement\_random) | >= 3.6.0 |
13 | | [time](#requirement\_time) | ~> 0.6 |
14 |
15 | ## Providers
16 |
17 | No providers.
18 |
19 | ## Modules
20 |
21 | | Name | Source | Version |
22 | |------|--------|---------|
23 | | [bedrock](#module\_bedrock) | ../.. | n/a |
24 |
25 | ## Resources
26 |
27 | No resources.
28 |
29 | ## Inputs
30 |
31 | | Name | Description | Type | Default | Required |
32 | |------|-------------|------|---------|:--------:|
33 | | [region](#input\_region) | AWS region to deploy the resources | `string` | `"us-east-1"` | no |
34 |
35 | ## Outputs
36 |
37 | No outputs.
38 |
--------------------------------------------------------------------------------
/bda.tf:
--------------------------------------------------------------------------------
1 | resource "awscc_bedrock_data_automation_project" "bda_project" {
2 | count = var.create_bda ? 1 : 0
3 | project_name = "${random_string.solution_prefix.result}-${var.bda_project_name}"
4 | project_description = var.bda_project_description
5 | kms_encryption_context = var.bda_kms_encryption_context
6 | kms_key_id = var.bda_kms_key_id
7 | tags = var.bda_tags
8 | standard_output_configuration = var.bda_standard_output_configuration
9 | custom_output_configuration = {
10 | blueprints = var.bda_custom_output_config
11 | }
12 | override_configuration = {
13 | document = {
14 | splitter = {
15 | state = var.bda_override_config_state
16 | }
17 | }
18 | }
19 | }
20 |
21 | resource "awscc_bedrock_blueprint" "bda_blueprint" {
22 | count = var.create_blueprint ? 1 : 0
23 | blueprint_name = "${random_string.solution_prefix.result}-${var.blueprint_name}"
24 | schema = var.blueprint_schema
25 | type = var.blueprint_type
26 | kms_encryption_context = var.blueprint_kms_encryption_context
27 | kms_key_id = var.blueprint_kms_key_id
28 | tags = var.blueprint_tags
29 | }
--------------------------------------------------------------------------------
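A minimal invocation sketch for the two resources above (input names come from this file; the blueprint schema is an illustrative placeholder, and unlisted inputs are assumed to have workable defaults):

    module "bedrock" {
      source       = "../.." # path style used by the bundled examples
      create_agent = false

      create_bda       = true
      bda_project_name = "bda-project" # the module prefixes this with a random solution prefix

      create_blueprint = true
      blueprint_name   = "bda-blueprint"
      blueprint_schema = jsonencode({}) # supply a real blueprint document schema here
    }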
/examples/agent-only/README.md:
--------------------------------------------------------------------------------
1 |
2 | This example shows how to deploy a basic Bedrock agent with an agent alias, leaving the default values and without creating an action group or a knowledge base.
3 |
4 | ## Requirements
5 |
6 | | Name | Version |
7 | |------|---------|
8 | | [terraform](#requirement\_terraform) | >= 1.13.1 |
9 | | [aws](#requirement\_aws) | >= 5.0 |
10 | | [awscc](#requirement\_awscc) | >= 1.0.0 |
11 | | [opensearch](#requirement\_opensearch) | = 2.2.0 |
12 | | [random](#requirement\_random) | >= 3.6.0 |
13 | | [time](#requirement\_time) | ~> 0.6 |
14 |
15 | ## Providers
16 |
17 | No providers.
18 |
19 | ## Modules
20 |
21 | | Name | Source | Version |
22 | |------|--------|---------|
23 | | [bedrock](#module\_bedrock) | ../.. | n/a |
24 |
25 | ## Resources
26 |
27 | No resources.
28 |
29 | ## Inputs
30 |
31 | | Name | Description | Type | Default | Required |
32 | |------|-------------|------|---------|:--------:|
33 | | [region](#input\_region) | AWS region to deploy the resources | `string` | `"us-east-1"` | no |
34 |
35 | ## Outputs
36 |
37 | No outputs.
38 |
--------------------------------------------------------------------------------
/examples/agent-with-knowledge-base/README.md:
--------------------------------------------------------------------------------
1 |
2 | This example shows how to deploy an extended Bedrock agent, creating a default OpenSearch Serverless knowledge base with an S3 data source.
3 |
4 | ## Requirements
5 |
6 | | Name | Version |
7 | |------|---------|
8 | | [terraform](#requirement\_terraform) | >= 1.13.1 |
9 | | [aws](#requirement\_aws) | >= 5.0 |
10 | | [awscc](#requirement\_awscc) | >= 1.0.0 |
11 | | [opensearch](#requirement\_opensearch) | = 2.2.0 |
12 | | [random](#requirement\_random) | >= 3.6.0 |
13 | | [time](#requirement\_time) | ~> 0.6 |
14 |
15 | ## Providers
16 |
17 | No providers.
18 |
19 | ## Modules
20 |
21 | | Name | Source | Version |
22 | |------|--------|---------|
23 | | [bedrock](#module\_bedrock) | ../.. | n/a |
24 |
25 | ## Resources
26 |
27 | No resources.
28 |
29 | ## Inputs
30 |
31 | | Name | Description | Type | Default | Required |
32 | |------|-------------|------|---------|:--------:|
33 | | [region](#input\_region) | AWS region to deploy the resources | `string` | `"us-east-1"` | no |
34 |
35 | ## Outputs
36 |
37 | No outputs.
38 |
--------------------------------------------------------------------------------
/examples/bda/README.md:
--------------------------------------------------------------------------------
1 |
2 | This example shows how to deploy a Bedrock Data Automation (BDA) project and blueprint.
3 |
4 | ## Requirements
5 |
6 | | Name | Version |
7 | |------|---------|
8 | | [terraform](#requirement\_terraform) | >= 1.13.1 |
9 | | [aws](#requirement\_aws) | >= 5.0 |
10 | | [awscc](#requirement\_awscc) | >= 1.0.0 |
11 | | [opensearch](#requirement\_opensearch) | = 2.2.0 |
12 | | [random](#requirement\_random) | >= 3.6.0 |
13 | | [time](#requirement\_time) | ~> 0.6 |
14 |
15 | ## Providers
16 |
17 | No providers.
18 |
19 | ## Modules
20 |
21 | | Name | Source | Version |
22 | |------|--------|---------|
23 | | [bda](#module\_bda) | ../.. | n/a |
24 | | [blueprint](#module\_blueprint) | ../.. | n/a |
25 |
26 | ## Resources
27 |
28 | No resources.
29 |
30 | ## Inputs
31 |
32 | | Name | Description | Type | Default | Required |
33 | |------|-------------|------|---------|:--------:|
34 | | [region](#input\_region) | AWS region to deploy the resources | `string` | `"us-east-1"` | no |
35 |
36 | ## Outputs
37 |
38 | No outputs.
39 |
--------------------------------------------------------------------------------
/examples/agent-with-guardrails/README.md:
--------------------------------------------------------------------------------
1 |
2 | This example shows how to deploy a basic Bedrock agent with guardrails, leaving the default values and without creating an action group or a knowledge base.
3 |
4 | ## Requirements
5 |
6 | | Name | Version |
7 | |------|---------|
8 | | [terraform](#requirement\_terraform) | >= 1.13.1 |
9 | | [aws](#requirement\_aws) | >= 5.0 |
10 | | [awscc](#requirement\_awscc) | >= 1.0.0 |
11 | | [opensearch](#requirement\_opensearch) | = 2.2.0 |
12 | | [random](#requirement\_random) | >= 3.6.0 |
13 | | [time](#requirement\_time) | ~> 0.6 |
14 |
15 | ## Providers
16 |
17 | No providers.
18 |
19 | ## Modules
20 |
21 | | Name | Source | Version |
22 | |------|--------|---------|
23 | | [bedrock](#module\_bedrock) | ../.. | n/a |
24 |
25 | ## Resources
26 |
27 | No resources.
28 |
29 | ## Inputs
30 |
31 | | Name | Description | Type | Default | Required |
32 | |------|-------------|------|---------|:--------:|
33 | | [region](#input\_region) | AWS region to deploy the resources | `string` | `"us-east-1"` | no |
34 |
35 | ## Outputs
36 |
37 | No outputs.
38 |
--------------------------------------------------------------------------------
/examples/agent-with-inference-profile/main.tf:
--------------------------------------------------------------------------------
1 | #####################################################################################
2 | # Terraform module examples are meant to show an _example_ of how to use a module
3 | # per use case. The code below should not be copied directly, but referenced in order
4 | # to build your own root module that invokes this module.
5 | #####################################################################################
6 |
7 | # Get current AWS account ID
8 | data "aws_caller_identity" "current" {}
9 |
10 | # Get current AWS region
11 | data "aws_region" "current" {}
12 |
13 | locals {
14 | region = data.aws_region.current.name
15 | account_id = data.aws_caller_identity.current.account_id
16 | }
17 |
18 | module "agent_supervisor" {
19 | source = "../.."
20 |
21 | create_agent = false
22 | create_supervisor = true
23 | supervisor_name = "SupervisorTF"
24 |
25 | create_app_inference_profile = true
26 | app_inference_profile_name = "Claude37SonnetProfile"
27 | app_inference_profile_description = "Inference profile for Claude 3.7 Sonnet"
28 | app_inference_profile_model_source = "arn:aws:bedrock:${local.region}:${local.account_id}:inference-profile/us.anthropic.claude-3-7-sonnet-20250219-v1:0"
29 |
30 | supervisor_instruction = "You are a supervisor who can provide detailed information about cars and trucks to an agent. You can also provide feedback to the agent."
31 |
32 | }
33 |
--------------------------------------------------------------------------------
/.config/.tflint.hcl:
--------------------------------------------------------------------------------
1 | # https://github.com/terraform-linters/tflint/blob/master/docs/user-guide/module-inspection.md
2 | # borrowed & modified indefinitely from https://github.com/ksatirli/building-infrastructure-you-can-mostly-trust/blob/main/.tflint.hcl
3 |
4 | plugin "aws" {
5 | enabled = true
6 | version = "0.34.0"
7 | source = "github.com/terraform-linters/tflint-ruleset-aws"
8 | }
9 |
10 | config {
11 | module = true
12 | force = false
13 | }
14 |
15 | rule "terraform_required_providers" {
16 | enabled = true
17 | }
18 |
19 | rule "terraform_required_version" {
20 | enabled = true
21 | }
22 |
23 | rule "terraform_naming_convention" {
24 | enabled = true
25 | format = "snake_case"
26 | }
27 |
28 | rule "terraform_typed_variables" {
29 | enabled = true
30 | }
31 |
32 | rule "terraform_unused_declarations" {
33 | enabled = true
34 | }
35 |
36 | rule "terraform_comment_syntax" {
37 | enabled = true
38 | }
39 |
40 | rule "terraform_deprecated_index" {
41 | enabled = true
42 | }
43 |
44 | rule "terraform_deprecated_interpolation" {
45 | enabled = true
46 | }
47 |
48 | rule "terraform_documented_outputs" {
49 | enabled = true
50 | }
51 |
52 | rule "terraform_documented_variables" {
53 | enabled = true
54 | }
55 |
56 | rule "terraform_module_pinned_source" {
57 | enabled = true
58 | }
59 |
60 | rule "terraform_standard_module_structure" {
61 | enabled = true
62 | }
63 |
64 | rule "terraform_workspace_remote" {
65 | enabled = true
66 | }
67 |
--------------------------------------------------------------------------------
/.config/.tfsec/launch_template_imdsv2_tfchecks.json:
--------------------------------------------------------------------------------
1 | {
2 | "checks": [
3 | {
4 | "code": "CUS001",
5 | "description": "Check to IMDSv2 is required on EC2 instances created by this Launch Template",
6 | "impact": "Instance metadata service can be interacted with freely",
7 | "resolution": "Enable HTTP token requirement for IMDS",
8 | "requiredTypes": [
9 | "resource"
10 | ],
11 | "requiredLabels": [
12 | "aws_launch_template"
13 | ],
14 | "severity": "CRITICAL",
15 | "matchSpec": {
16 | "action": "isPresent",
17 | "name": "metadata_options",
18 | "subMatch": {
19 | "action": "and",
20 | "predicateMatchSpec": [
21 | {
22 | "action": "equals",
23 | "name": "http_tokens",
24 | "value": "required"
25 |
26 | }
27 | ]
28 | }
29 | },
30 |
31 | "errorMessage": "is missing `metadata_options` block - it is required with `http_tokens` set to `required` to make Instance Metadata Service more secure.",
32 | "relatedLinks": [
33 | "https://tfsec.dev/docs/aws/ec2/enforce-http-token-imds#aws/ec2",
34 | "https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/launch_template#metadata-options",
35 | "https://aws.amazon.com/blogs/security/defense-in-depth-open-firewalls-reverse-proxies-ssrf-vulnerabilities-ec2-instance-metadata-service"
36 | ]
37 | }
38 | ]
39 | }
40 |
--------------------------------------------------------------------------------
/examples/prompt-management/main.tf:
--------------------------------------------------------------------------------
1 | #####################################################################################
2 | # Terraform module examples are meant to show an _example_ of how to use a module
3 | # per use case. The code below should not be copied directly, but referenced in order
4 | # to build your own root module that invokes this module
5 | #####################################################################################
6 |
7 | module "bedrock" {
8 | source = "../.." # local example
9 | create_agent = false
10 |
11 | # Prompt Management
12 | prompt_name = "prompt"
13 | default_variant = "variant-example"
14 | create_prompt = true
15 | create_prompt_version = true
16 | prompt_version_description = "Example prompt version"
17 | variants_list = [
18 | {
19 | name = "variant-example"
20 | template_type = "TEXT"
21 | model_id = "amazon.titan-text-express-v1"
22 | inference_configuration = {
23 | text = {
24 | temperature = 1
25 | top_p = 0.9900000095367432
26 | max_tokens = 300
27 | stop_sequences = ["User:"]
28 | top_k = 250
29 | }
30 | }
31 | template_configuration = {
32 | text = {
33 |             input_variables = [
34 |               { name = "genre" },
35 |               { name = "number" }
36 |             ]
37 |
38 | text = "Make me a {{genre}} playlist consisting of the following number of songs: {{number}}."
39 | }
40 | }
41 | }
42 |
43 | ]
44 |
45 | }
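46 |
47 | # Note: with create_prompt_version = true, the module also snapshots the prompt
48 | # as a static version (described by prompt_version_description above); prompt
49 | # versions in Bedrock Prompt Management are immutable snapshots of the prompt.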
--------------------------------------------------------------------------------
/.config/.tfsec/launch_configuration_imdsv2_tfchecks.json:
--------------------------------------------------------------------------------
1 | {
2 | "checks": [
3 | {
4 | "code": "CUS002",
5 |       "description": "Check that IMDSv2 is required on EC2 instances created by this Launch Configuration",
6 | "impact": "Instance metadata service can be interacted with freely",
7 | "resolution": "Enable HTTP token requirement for IMDS",
8 | "requiredTypes": [
9 | "resource"
10 | ],
11 | "requiredLabels": [
12 | "aws_launch_configuration"
13 | ],
14 | "severity": "CRITICAL",
15 | "matchSpec": {
16 | "action": "isPresent",
17 | "name": "metadata_options",
18 | "subMatch": {
19 | "action": "and",
20 | "predicateMatchSpec": [
21 | {
22 | "action": "equals",
23 | "name": "http_tokens",
24 | "value": "required"
25 |
26 | }
27 | ]
28 | }
29 | },
30 |
31 | "errorMessage": "is missing `metadata_options` block - it is required with `http_tokens` set to `required` to make Instance Metadata Service more secure.",
32 | "relatedLinks": [
33 | "https://tfsec.dev/docs/aws/ec2/enforce-http-token-imds#aws/ec2",
34 | "https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/launch_configuration#metadata-options",
35 | "https://aws.amazon.com/blogs/security/defense-in-depth-open-firewalls-reverse-proxies-ssrf-vulnerabilities-ec2-instance-metadata-service"
36 | ]
37 | }
38 | ]
39 | }
40 |
--------------------------------------------------------------------------------
/examples/agent-with-inference-profile/README.md:
--------------------------------------------------------------------------------
1 |
2 | This example demonstrates how to create a supervisor agent using Claude 3.7 Sonnet, which is only available with inference profiles.
3 |
4 | ## Requirements
5 |
6 | | Name | Version |
7 | |------|---------|
8 | | [terraform](#requirement\_terraform) | >= 1.13.1 |
9 | | [aws](#requirement\_aws) | >= 5.0 |
10 | | [awscc](#requirement\_awscc) | >= 1.0.0 |
11 | | [opensearch](#requirement\_opensearch) | = 2.2.0 |
12 | | [random](#requirement\_random) | >= 3.6.0 |
13 | | [time](#requirement\_time) | ~> 0.6 |
14 |
15 | ## Providers
16 |
17 | | Name | Version |
18 | |------|---------|
19 | | [aws](#provider\_aws) | >= 5.0 |
20 |
21 | ## Modules
22 |
23 | | Name | Source | Version |
24 | |------|--------|---------|
25 | | [agent\_supervisor](#module\_agent\_supervisor) | ../.. | n/a |
26 |
27 | ## Resources
28 |
29 | | Name | Type |
30 | |------|------|
31 | | [aws_caller_identity.current](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/caller_identity) | data source |
32 | | [aws_region.current](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/region) | data source |
33 |
34 | ## Inputs
35 |
36 | No inputs.
37 |
38 | ## Outputs
39 |
40 | No outputs.
41 |
--------------------------------------------------------------------------------
/.project_automation/static_tests/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM public.ecr.aws/codebuild/amazonlinux2-x86_64-standard:4.0
2 | ENV TERRAFORM_VERSION=1.13.1
3 | RUN cd /tmp && \
4 | wget --quiet https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip && \
5 | unzip terraform_${TERRAFORM_VERSION}_linux_amd64.zip -d /usr/local/bin && chmod 755 /usr/local/bin/terraform
6 |
7 | ENV TFLINT_VERSION=v0.45.0
8 |
9 | RUN cd /tmp && \
10 | wget --quiet https://github.com/terraform-linters/tflint/releases/download/${TFLINT_VERSION}/tflint_linux_amd64.zip && \
11 | unzip tflint_linux_amd64.zip -d /usr/local/bin && chmod 755 /usr/local/bin/tflint
12 |
13 | RUN mkdir -p ~/.tflint.d/plugins
14 |
15 | ENV TFLINT_AWS_RULESET_VERSION=v0.22.1
16 |
17 | RUN wget --quiet -O /tmp/tflint-ruleset-aws.zip https://github.com/terraform-linters/tflint-ruleset-aws/releases/download/${TFLINT_AWS_RULESET_VERSION}/tflint-ruleset-aws_linux_amd64.zip \
18 | && unzip /tmp/tflint-ruleset-aws.zip -d ~/.tflint.d/plugins \
19 | && rm /tmp/tflint-ruleset-aws.zip
20 |
21 | RUN curl -s https://raw.githubusercontent.com/aquasecurity/tfsec/master/scripts/install_linux.sh | bash
22 |
23 | RUN pip3 install checkov
24 |
25 | RUN gem install mdl
26 |
27 | ENV TERRAFORM_DOCS_VERSION=v0.16.0
28 | RUN wget --quiet https://github.com/terraform-docs/terraform-docs/releases/download/${TERRAFORM_DOCS_VERSION}/terraform-docs-${TERRAFORM_DOCS_VERSION}-linux-amd64.tar.gz && \
29 | tar -C /usr/local/bin -xzf terraform-docs-${TERRAFORM_DOCS_VERSION}-linux-amd64.tar.gz && chmod +x /usr/local/bin/terraform-docs
--------------------------------------------------------------------------------
/examples/agent-collaborator/README.md:
--------------------------------------------------------------------------------
1 |
2 | This example shows how to deploy a basic Bedrock multi-agent collaboration: a supervisor agent plus two collaborator agents, each with an agent alias.
3 |
4 | ## Requirements
5 |
6 | | Name | Version |
7 | |------|---------|
8 | | [terraform](#requirement\_terraform) | >= 1.13.1 |
9 | | [aws](#requirement\_aws) | >= 5.0 |
10 | | [awscc](#requirement\_awscc) | >= 1.0.0 |
11 | | [opensearch](#requirement\_opensearch) | = 2.2.0 |
12 | | [random](#requirement\_random) | >= 3.6.0 |
13 | | [time](#requirement\_time) | ~> 0.6 |
14 |
15 | ## Providers
16 |
17 | No providers.
18 |
19 | ## Modules
20 |
21 | | Name | Source | Version |
22 | |------|--------|---------|
23 | | [agent\_collaborator1](#module\_agent\_collaborator1) | ../.. | n/a |
24 | | [agent\_collaborator2](#module\_agent\_collaborator2) | ../.. | n/a |
25 | | [agent\_supervisor](#module\_agent\_supervisor) | ../.. | n/a |
26 |
27 | ## Resources
28 |
29 | No resources.
30 |
31 | ## Inputs
32 |
33 | | Name | Description | Type | Default | Required |
34 | |------|-------------|------|---------|:--------:|
35 | | [region](#input\_region) | AWS region to deploy the resources | `string` | `"us-east-1"` | no |
36 |
37 | ## Outputs
38 |
39 | No outputs.
40 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | ### OSX ###
2 | # General
3 | .DS_Store
4 | .AppleDouble
5 | .LSOverride
6 |
7 | # Icon must end with two \r
8 | Icon
9 |
10 |
11 | # Thumbnails
12 | ._*
13 |
14 | # Files that might appear in the root of a volume
15 | .DocumentRevisions-V100
16 | .fseventsd
17 | .Spotlight-V100
18 | .TemporaryItems
19 | .Trashes
20 | .VolumeIcon.icns
21 | .com.apple.timemachine.donotpresent
22 |
23 | # Directories potentially created on remote AFP share
24 | .AppleDB
25 | .AppleDesktop
26 | Network Trash Folder
27 | Temporary Items
28 | .apdisk
29 |
30 | build/
31 | plan.out
32 | plan.out.json
33 |
34 | # Local .terraform directories
35 | **/.terraform/*
36 |
37 | # .tfstate files
38 | *.tfstate
39 | *.tfstate.*
40 |
41 | # Crash log files
42 | crash.log
43 |
44 | # Exclude all .tfvars files, which are likely to contain sensitive data, such as
45 | # passwords, private keys, and other secrets. These should not be part of version
46 | # control, as they are potentially sensitive and subject
47 | # to change depending on the environment.
48 | #
49 | *.tfvars
50 |
51 | # Ignore override files as they are usually used to override resources locally and so
52 | # are not checked in
53 | override.tf
54 | override.tf.json
55 | *_override.tf
56 | *_override.tf.json
57 |
58 | # Include override files you do wish to add to version control using negated pattern
59 | #
60 | # !example_override.tf
61 |
62 | # Include tfplan files to ignore the plan output of command: terraform plan -out=tfplan
63 | # example: *tfplan*
64 |
65 | # Ignore CLI configuration files
66 | .terraformrc
67 | terraform.rc
68 | .terraform.lock.hcl
69 |
70 | go.mod
71 | go.sum
72 |
73 | .venv
74 | .ruby-version
--------------------------------------------------------------------------------
/examples/application-inference-profile/README.md:
--------------------------------------------------------------------------------
1 |
2 | This example shows how to deploy an application inference profile.
3 |
4 | ## Requirements
5 |
6 | | Name | Version |
7 | |------|---------|
8 | | [terraform](#requirement\_terraform) | >= 1.13.1 |
9 | | [aws](#requirement\_aws) | >= 5.0 |
10 | | [awscc](#requirement\_awscc) | >= 1.0.0 |
11 | | [opensearch](#requirement\_opensearch) | = 2.2.0 |
12 | | [random](#requirement\_random) | >= 3.6.0 |
13 | | [time](#requirement\_time) | ~> 0.6 |
14 |
15 | ## Providers
16 |
17 | | Name | Version |
18 | |------|---------|
19 | | [aws](#provider\_aws) | >= 5.0 |
20 |
21 | ## Modules
22 |
23 | | Name | Source | Version |
24 | |------|--------|---------|
25 | | [bedrock](#module\_bedrock) | ../.. | n/a |
26 |
27 | ## Resources
28 |
29 | | Name | Type |
30 | |------|------|
31 | | [aws_caller_identity.current](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/caller_identity) | data source |
32 | | [aws_region.current](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/region) | data source |
33 |
34 | ## Inputs
35 |
36 | | Name | Description | Type | Default | Required |
37 | |------|-------------|------|---------|:--------:|
38 | | [region](#input\_region) | AWS region to deploy the resources | `string` | `"us-east-1"` | no |
39 |
40 | ## Outputs
41 |
42 | No outputs.
43 |
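44 | ## Example Usage
45 |
46 | A minimal, hypothetical invocation sketch; the input names mirror the `create_app_inference_profile` settings used elsewhere in this repository, and all values shown are illustrative. Consult this example's `main.tf` for the authoritative configuration.
47 |
48 | ```hcl
49 | module "bedrock" {
50 |   source = "../.." # local example
51 |
52 |   create_agent = false
53 |
54 |   create_app_inference_profile       = true
55 |   app_inference_profile_name         = "Claude37SonnetProfile"
56 |   app_inference_profile_description  = "Inference profile for Claude 3.7 Sonnet"
57 |   app_inference_profile_model_source = "arn:aws:bedrock:REGION:ACCOUNT_ID:inference-profile/us.anthropic.claude-3-7-sonnet-20250219-v1:0"
58 | }
59 | ```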
--------------------------------------------------------------------------------
/.project_automation/static_tests/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | ## WARNING: DO NOT modify the content of entrypoint.sh
4 | # Use ./.config/static_tests/pre-entrypoint-helpers.sh or ./.config/static_tests/post-entrypoint-helpers.sh
5 | # to load any customizations or additional configurations
6 |
7 | ## NOTE: paths may differ when running in a managed task. To ensure behavior is consistent between
8 | # managed and local tasks, always use these variables for the project and project type path
9 | PROJECT_PATH=${BASE_PATH}/project
10 | PROJECT_TYPE_PATH=${BASE_PATH}/projecttype
11 |
12 | #********** helper functions *************
13 | pre_entrypoint() {
14 | if [ -f ${PROJECT_PATH}/.config/static_tests/pre-entrypoint-helpers.sh ]; then
15 | echo "Pre-entrypoint helper found"
16 | source ${PROJECT_PATH}/.config/static_tests/pre-entrypoint-helpers.sh
17 | echo "Pre-entrypoint helper loaded"
18 | else
19 | echo "Pre-entrypoint helper not found - skipped"
20 | fi
21 | }
22 | post_entrypoint() {
23 | if [ -f ${PROJECT_PATH}/.config/static_tests/post-entrypoint-helpers.sh ]; then
24 | echo "Post-entrypoint helper found"
25 | source ${PROJECT_PATH}/.config/static_tests/post-entrypoint-helpers.sh
26 | echo "Post-entrypoint helper loaded"
27 | else
28 | echo "Post-entrypoint helper not found - skipped"
29 | fi
30 | }
31 |
32 | #********** Pre-entrypoint helper *************
33 | pre_entrypoint
34 |
35 | #********** Static Test *************
36 | /bin/bash ${PROJECT_PATH}/.project_automation/static_tests/static_tests.sh
37 | if [ $? -eq 0 ]
38 | then
39 | echo "Static test completed"
40 | EXIT_CODE=0
41 | else
42 | echo "Static test failed"
43 | EXIT_CODE=1
44 | fi
45 |
46 | #********** Post-entrypoint helper *************
47 | post_entrypoint
48 |
49 | #********** Exit Code *************
50 | exit $EXIT_CODE
--------------------------------------------------------------------------------
/examples/agent-collaborator/main.tf:
--------------------------------------------------------------------------------
1 | #####################################################################################
2 | # Terraform module examples are meant to show an _example_ of how to use a module
3 | # per use case. The code below should not be copied directly, but referenced in order
4 | # to build your own root module that invokes this module
5 | #####################################################################################
6 |
7 | module "agent_supervisor" {
8 | source = "../.."
9 | create_agent = false
10 | create_supervisor = true
11 | supervisor_model = "anthropic.claude-3-5-sonnet-20241022-v2:0"
12 | supervisor_instruction = "You are a supervisor who can provide detailed information about cars to an agent."
13 | }
14 |
15 | module "agent_collaborator1" {
16 | source = "../.."
17 | create_agent_alias = true
18 | foundation_model = "anthropic.claude-v2"
19 |   instruction = "You are an automotive assistant who can provide detailed information about cars to a customer."
20 | supervisor_id = module.agent_supervisor.supervisor_id
21 | create_collaborator = true
22 | collaborator_name = "AgentA"
23 | collaboration_instruction = "Handle customer inquiries"
24 |
25 | depends_on = [module.agent_supervisor]
26 | }
27 |
28 | module "agent_collaborator2" {
29 | source = "../.."
30 | create_agent_alias = true
31 | foundation_model = "anthropic.claude-v2"
32 |   instruction = "You are an automotive assistant who can provide detailed information about cars to a customer."
33 | supervisor_id = module.agent_supervisor.supervisor_id
34 | create_collaborator = true
35 | collaborator_name = "AgentB"
36 | collaboration_instruction = "Process backend tasks"
37 |
38 | depends_on = [module.agent_supervisor, module.agent_collaborator1]
39 | }
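40 |
41 | # A minimal sketch (not part of the original example): each collaborator module
42 | # above creates an agent alias, which can be surfaced via the module's
43 | # `bedrock_agent_alias` output when it needs to be referenced elsewhere. The
44 | # output name below is illustrative.
45 | output "collaborator1_alias" {
46 |   description = "Alias of the first collaborator agent."
47 |   value       = module.agent_collaborator1.bedrock_agent_alias
48 | }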
--------------------------------------------------------------------------------
/.project_automation/functional_tests/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | ## WARNING: DO NOT modify the content of entrypoint.sh
4 | # Use ./.config/functional_tests/pre-entrypoint-helpers.sh or ./.config/functional_tests/post-entrypoint-helpers.sh
5 | # to load any customizations or additional configurations
6 |
7 | ## NOTE: paths may differ when running in a managed task. To ensure behavior is consistent between
8 | # managed and local tasks, always use these variables for the project and project type path
9 | PROJECT_PATH=${BASE_PATH}/project
10 | PROJECT_TYPE_PATH=${BASE_PATH}/projecttype
11 |
12 | #********** helper functions *************
13 | pre_entrypoint() {
14 | if [ -f ${PROJECT_PATH}/.config/functional_tests/pre-entrypoint-helpers.sh ]; then
15 | echo "Pre-entrypoint helper found"
16 | source ${PROJECT_PATH}/.config/functional_tests/pre-entrypoint-helpers.sh
17 | echo "Pre-entrypoint helper loaded"
18 | else
19 | echo "Pre-entrypoint helper not found - skipped"
20 | fi
21 | }
22 | post_entrypoint() {
23 | if [ -f ${PROJECT_PATH}/.config/functional_tests/post-entrypoint-helpers.sh ]; then
24 | echo "Post-entrypoint helper found"
25 | source ${PROJECT_PATH}/.config/functional_tests/post-entrypoint-helpers.sh
26 | echo "Post-entrypoint helper loaded"
27 | else
28 | echo "Post-entrypoint helper not found - skipped"
29 | fi
30 | }
31 |
32 | #********** Pre-entrypoint helper *************
33 | pre_entrypoint
34 |
35 | #********** Functional Test *************
36 | /bin/bash ${PROJECT_PATH}/.project_automation/functional_tests/functional_tests.sh
37 | if [ $? -eq 0 ]
38 | then
39 | echo "Functional test completed"
40 | EXIT_CODE=0
41 | else
42 | echo "Functional test failed"
43 | EXIT_CODE=1
44 | fi
45 |
46 | #********** Post-entrypoint helper *************
47 | post_entrypoint
48 |
49 | #********** Exit Code *************
50 | exit $EXIT_CODE
--------------------------------------------------------------------------------
/examples/agent-with-guardrails/main.tf:
--------------------------------------------------------------------------------
1 | #####################################################################################
2 | # Terraform module examples are meant to show an _example_ of how to use a module
3 | # per use case. The code below should not be copied directly, but referenced in order
4 | # to build your own root module that invokes this module
5 | #####################################################################################
6 |
7 | module "bedrock" {
8 | source = "../.." # local example
9 | create_guardrail = true
10 | filters_config = [
11 | {
12 | input_strength = "MEDIUM"
13 | output_strength = "MEDIUM"
14 | type = "HATE"
15 | },
16 | {
17 | input_strength = "HIGH"
18 | output_strength = "HIGH"
19 | type = "VIOLENCE"
20 | }
21 | ]
22 | pii_entities_config = [
23 | {
24 | action = "BLOCK"
25 | type = "NAME"
26 | },
27 | {
28 | action = "BLOCK"
29 | type = "DRIVER_ID"
30 | },
31 | {
32 | action = "ANONYMIZE"
33 | type = "USERNAME"
34 | },
35 | ]
36 | regexes_config = [{
37 | action = "BLOCK"
38 | description = "example regex"
39 | name = "regex_example"
40 | pattern = "^\\d{3}-\\d{2}-\\d{4}$"
41 | }]
42 | managed_word_lists_config = [{
43 | type = "PROFANITY"
44 | }]
45 | words_config = [{
46 | text = "HATE"
47 | }]
48 | topics_config = [{
49 | name = "investment_topic"
50 |     examples = ["Where should I invest my money?"]
51 | type = "DENY"
52 |     definition = "Investment advice refers to inquiries, guidance, or recommendations regarding the management or allocation of funds or assets with the goal of generating returns."
53 | }]
54 | foundation_model = "anthropic.claude-v2"
55 |   instruction = "You are an automotive assistant who can provide detailed information about cars to a customer."
56 | }
--------------------------------------------------------------------------------
/kendra.tf:
--------------------------------------------------------------------------------
1 | # Kendra Index
2 | resource "awscc_kendra_index" "genai_kendra_index" {
3 | count = var.create_kendra_config && var.kendra_index_arn == null ? 1 : 0
4 | edition = var.kendra_index_edition
5 | name = "${random_string.solution_prefix.result}-${var.kendra_index_name}"
6 | role_arn = awscc_iam_role.kendra_index_role[0].arn
7 | description = var.kendra_index_description
8 | capacity_units = {
9 | query_capacity_units = var.kendra_index_query_capacity
10 | storage_capacity_units = var.kendra_index_storage_capacity
11 | }
12 | document_metadata_configurations = var.document_metadata_configurations
13 | server_side_encryption_configuration = var.kendra_kms_key_id != null ? {
14 | kms_key_id = var.kendra_kms_key_id
15 | } : null
16 | user_context_policy = var.kendra_index_user_context_policy
17 | user_token_configurations = var.user_token_configurations
18 | tags = var.kendra_index_tags
19 | }
20 |
21 | resource "time_sleep" "wait_after_kendra_index_creation" {
22 | count = var.create_kendra_config && var.kendra_index_arn == null ? 1 : 0
23 | depends_on = [ awscc_kendra_index.genai_kendra_index[0] ]
24 | create_duration = "60s" # Wait for 60 seconds
25 | }
26 |
27 |
28 | # Create Kendra Data Source
29 | resource "awscc_kendra_data_source" "kendra_s3_data_source" {
30 |   count = var.create_kendra_s3_data_source ? 1 : 0
31 | index_id = var.kendra_index_arn != null ? var.kendra_index_arn : awscc_kendra_index.genai_kendra_index[0].id
32 | name = "${random_string.solution_prefix.result}-${var.kendra_data_source_name}"
33 | type = "S3"
34 | role_arn = awscc_iam_role.kendra_s3_datasource_role[0].arn
35 | language_code = var.kendra_data_source_language_code
36 | schedule = var.kendra_data_source_schedule
37 | description = var.kendra_data_source_description
38 | tags = var.kendra_data_source_tags
39 | data_source_configuration = {
40 | s3_configuration = {
41 | bucket_name = var.s3_data_source_bucket_name != null ? var.s3_data_source_bucket_name : awscc_s3_bucket.s3_data_source[0].bucket_name
42 | exclusion_patterns = var.s3_data_source_exclusion_patterns
43 | inclusion_patterns = var.s3_data_source_inclusion_patterns
44 | documents_metadata_configuration = {
45 | s3_prefix = var.s3_data_source_document_metadata_prefix
46 | }
47 | access_control_list_documents = {
48 | key_path = var.s3_data_source_key_path
49 | }
50 | }
51 | }
52 | }
53 |
54 | resource "time_sleep" "wait_after_kendra_s3_data_source_creation" {
55 | count = var.create_kendra_s3_data_source ? 1 : 0
56 | depends_on = [ awscc_kendra_data_source.kendra_s3_data_source[0] ]
57 | create_duration = "60s" # Wait for 60 seconds
58 | }
59 |
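60 | # Hypothetical root-module sketch: supplying an existing index via
61 | # `kendra_index_arn` skips creation of the index above, and the S3 data source
62 | # attaches to the supplied index instead (see the conditionals on `count` and
63 | # `index_id`). All values below are illustrative.
64 | #
65 | # module "bedrock" {
66 | #   source                       = "../.."
67 | #   create_kendra_config         = true
68 | #   create_kendra_s3_data_source = true
69 | #   kendra_index_arn             = "arn:aws:kendra:us-east-1:111122223333:index/EXAMPLE"
70 | #   s3_data_source_bucket_name   = "my-existing-bucket"
71 | # }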
--------------------------------------------------------------------------------
/opensearch.tf:
--------------------------------------------------------------------------------
1 | # – OpenSearch Serverless Default –
2 |
3 | module "oss_knowledgebase" {
4 | count = var.create_default_kb ? 1 : 0
5 | source = "aws-ia/opensearch-serverless/aws"
6 | version = "0.0.5"
7 | allow_public_access_network_policy = var.allow_opensearch_public_access
8 | number_of_shards = var.number_of_shards
9 | number_of_replicas = var.number_of_replicas
10 | create_vector_index = true
11 | collection_tags = var.kb_tags != null ? [for k, v in var.kb_tags : { key = k, value = v }] : []
12 | vector_index_mappings = <<-EOF
13 | {
14 | "properties": {
15 | "bedrock-knowledge-base-default-vector": {
16 | "type": "knn_vector",
17 | "dimension": ${var.vector_dimension},
18 | "method": {
19 | "name": "hnsw",
20 | "engine": "faiss",
21 | "parameters": {
22 | "m": 16,
23 | "ef_construction": 512
24 | },
25 | "space_type": "l2"
26 | }
27 | },
28 | "AMAZON_BEDROCK_METADATA": {
29 | "type": "text",
30 | "index": "false"
31 | },
32 | "AMAZON_BEDROCK_TEXT_CHUNK": {
33 | "type": "text",
34 | "index": "true"
35 | }
36 | }
37 | }
38 | EOF
39 | }
40 |
41 | resource "aws_opensearchserverless_access_policy" "updated_data_policy" {
42 | count = var.create_default_kb ? 1 : 0
43 |
44 | name = "os-access-policy-${random_string.solution_prefix.result}"
45 | type = "data"
46 |
47 | policy = jsonencode([
48 | {
49 | Rules = [
50 | {
51 | ResourceType = "index"
52 | Resource = [
53 | "index/${module.oss_knowledgebase[0].opensearch_serverless_collection.name}/*"
54 | ]
55 | Permission = [
56 | "aoss:UpdateIndex",
57 | "aoss:DeleteIndex",
58 | "aoss:DescribeIndex",
59 | "aoss:ReadDocument",
60 | "aoss:WriteDocument",
61 | "aoss:CreateIndex"
62 | ]
63 | },
64 | {
65 | ResourceType = "collection"
66 | Resource = [
67 | "collection/${module.oss_knowledgebase[0].opensearch_serverless_collection.name}"
68 | ]
69 | Permission = [
70 | "aoss:DescribeCollectionItems",
71 | "aoss:DeleteCollectionItems",
72 | "aoss:CreateCollectionItems",
73 | "aoss:UpdateCollectionItems"
74 | ]
75 | }
76 | ],
77 | Principal = [
78 | var.kb_role_arn != null ? var.kb_role_arn : aws_iam_role.bedrock_knowledge_base_role[0].arn
79 | ]
80 | }
81 | ])
82 | }
83 |
84 | resource "time_sleep" "wait_after_index_creation" {
85 | count = var.create_default_kb ? 1 : 0
86 | depends_on = [ module.oss_knowledgebase[0].vector_index ]
87 |   create_duration = "60s" # Wait for 60 seconds after index creation before it is used
88 | }
89 |
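90 | # Hypothetical root-module sketch: pass `kb_role_arn` to grant an existing IAM
91 | # role access through the data access policy above, instead of the
92 | # module-created `bedrock_knowledge_base_role`. Values are illustrative.
93 | #
94 | # module "bedrock" {
95 | #   source            = "../.."
96 | #   create_default_kb = true
97 | #   kb_role_arn       = "arn:aws:iam::111122223333:role/my-kb-role"
98 | # }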
--------------------------------------------------------------------------------
/.project_automation/static_tests/static_tests.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | ## NOTE: paths may differ when running in a managed task. To ensure behavior is consistent between
4 | # managed and local tasks, always use these variables for the project and project type path
5 | PROJECT_PATH=${BASE_PATH}/project
6 | PROJECT_TYPE_PATH=${BASE_PATH}/projecttype
7 |
8 | echo "Starting Static Tests"
9 |
10 | #********** Terraform Validate *************
11 | cd ${PROJECT_PATH}
12 | echo "---------------------------------------------------------------------------"
13 | git log --max-count=1
14 | echo "---------------------------------------------------------------------------"
15 |
16 | terraform init -no-color
17 | terraform validate -no-color
18 | if [ $? -eq 0 ]
19 | then
20 | echo "Success - Terraform validate"
21 | else
22 | echo "Failure - Terraform validate"
23 | exit 1
24 | fi
25 |
26 | #********** tflint ********************
27 | echo 'Starting tflint'
28 | tflint --no-color --init --config ${PROJECT_PATH}/.config/.tflint.hcl
29 | MYLINT=$(tflint --no-color --force --config ${PROJECT_PATH}/.config/.tflint.hcl)
30 | if [ -z "$MYLINT" ]
31 | then
32 | echo "Success - tflint found no linting issues!"
33 | else
34 | echo "Failure - tflint found linting issues!"
35 | echo "$MYLINT"
36 | exit 1
37 | fi
38 |
39 | #********** tfsec *********************
40 | echo 'Starting tfsec'
41 | MYTFSEC=$(tfsec . --no-color --config-file ${PROJECT_PATH}/.config/.tfsec.yml --custom-check-dir ${PROJECT_PATH}/.config/.tfsec)
42 | if [[ $MYTFSEC == *"No problems detected!"* ]];
43 | then
44 | echo "Success - tfsec found no security issues!"
45 | echo "$MYTFSEC"
46 | else
47 | echo "Failure - tfsec found security issues!"
48 | echo "$MYTFSEC"
49 | exit 1
50 | fi
51 |
52 | #********** Checkov Analysis *************
53 | echo "Running Checkov Analysis"
54 | checkov --config-file ${PROJECT_PATH}/.config/.checkov.yml
55 | if [ $? -eq 0 ]
56 | then
57 | echo "Success - Checkov found no issues!"
58 | else
59 | echo "Failure - Checkov found issues!"
60 | exit 1
61 | fi
62 |
63 | #********** Markdown Lint **************
64 | echo 'Starting markdown lint'
65 | MYMDL=$(mdl --config ${PROJECT_PATH}/.config/.mdlrc .header.md examples/*/.header.md)
66 | if [ -z "$MYMDL" ]
67 | then
68 | echo "Success - markdown lint found no linting issues!"
69 | else
70 | echo "Failure - markdown lint found linting issues!"
71 | echo "$MYMDL"
72 | exit 1
73 | fi
74 |
75 | #********** Terraform Docs *************
76 | echo 'Starting terraform-docs'
77 | TDOCS="$(terraform-docs --config ${PROJECT_PATH}/.config/.terraform-docs.yaml --lockfile=false ./)"
78 | git add -N README.md
79 | GDIFF="$(git diff --compact-summary)"
80 | if [ -z "$GDIFF" ]
81 | then
82 | echo "Success - Terraform Docs creation verified!"
83 | else
84 |   echo "Failure - Terraform Docs creation failed; ensure you have pre-commit installed and running before submitting the Pull Request. Tip: a false error may occur if you have unstaged files in your repo"
85 | echo "$GDIFF"
86 | exit 1
87 | fi
88 |
89 | #***************************************
90 | echo "End of Static Tests"
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Creating modules for Terraform
2 |
3 | This repository contains code for a Terraform module that is published using the Application Builder Platform (ABP).
4 |
5 | ## Module Standards
6 |
7 | For best practices and information on developing with Terraform, see the [I&A Module Standards](https://aws-ia.github.io/standards-terraform/)
8 |
9 | ## Contributing Code
10 |
11 | In order to contribute code to this repository, you must submit a *[Pull Request](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request)*. To do so, you must *[fork](https://docs.github.com/en/get-started/quickstart/fork-a-repo)* this repository, make your changes in your forked version and submit a *Pull Request*.
12 |
13 | ## Writing Documentation
14 |
15 | > :bangbang: **Do not manually update README.md**.
16 |
17 | README.md is automatically generated by pulling in content from other files. For instructions, including a fill-in-the-blank content template, see [Create readmes for Terraform-based Partner Solutions.](https://aws-ia-us-west-2.s3.us-west-2.amazonaws.com/docs/content/index.html#/lessons/8rpYWWL59M7dcS-NsjYmaISUu-L_UqEv)
18 |
19 | ## Checks and Validation
20 |
21 | Pull Requests (PRs) submitted against this repository undergo a series of static and functional checks.
22 |
23 | > :exclamation: Note: Failures during functional or static checks will prevent a pull request from being accepted.
24 |
25 | It is a best practice to perform these checks locally prior to submitting a pull request.
26 |
27 | ## Customizing static and functional tests
28 |
29 | Details about the static and functional tests can be found in `./.project_automation/{test-name}/entrypoint.sh`.
30 | Tip: **do not** modify `./.project_automation/{test-name}/entrypoint.sh`; instead, use the helper scripts located at `.config/{test-name}/`.
31 |
32 | ## Checks Performed
33 |
34 | - TFLint
35 | - tfsec
36 | - Markdown Lint
37 | - Checkov
38 | - Terratest
39 |
40 | > :bangbang: The README.md file will be created only after all checks have completed successfully. It is recommended that you install terraform-docs locally in order to preview your README.md file prior to publication.
41 |
42 | ## Install the required tools
43 |
44 | Prerequisites:
45 |
46 | - [Python](https://docs.python.org/3/using/index.html)
47 | - [Pip](https://pip.pypa.io/en/stable/installation/)
48 | - [golang](https://go.dev/doc/install) (for macOS you can use `brew`)
49 | - [tflint](https://github.com/terraform-linters/tflint)
50 | - [tfsec](https://aquasecurity.github.io/tfsec/v1.0.11/)
51 | - [Markdown Lint](https://github.com/markdownlint/markdownlint)
52 | - [Checkov](https://www.checkov.io/2.Basics/Installing%20Checkov.html)
53 | - [terraform-docs](https://github.com/terraform-docs/terraform-docs)
54 | - [coreutils](https://www.gnu.org/software/coreutils/)
55 |
56 | ## Performing Checks manually
57 |
58 | Preparation
59 |
60 | ```sh
61 | terraform init
62 | terraform validate
63 | ```
64 |
65 | ## Checks
66 |
67 | ### tflint
68 |
69 | ```sh
70 | tflint --init --config ${PROJECT_PATH}/.config/.tflint.hcl
71 | tflint --force --config ${PROJECT_PATH}/.config/.tflint.hcl
72 | ```
73 |
74 | ### tfsec
75 |
76 | ```sh
77 | tfsec . --config-file ${PROJECT_PATH}/.config/.tfsec.yml
78 | ```
79 |
80 | ### Markdown Lint
81 |
82 | ```sh
83 | mdl --config ${PROJECT_PATH}/.config/.mdlrc .header.md examples/*/.header.md
84 | ```
85 |
86 | ### Checkov
87 |
88 | ```sh
89 | checkov --config-file ${PROJECT_PATH}/.config/.checkov.yml
90 | ```
91 |
92 | ### Terratest
93 |
94 | Include tests to validate your examples/<> root modules, at a minimum. This can usually be accomplished with only slight modifications to the [boilerplate test provided in this template](./test/examples\_basic\_test.go).
95 |
96 | ```sh
97 | # from the root of the repository
98 | cd test
99 | go mod init github.com/aws-ia/terraform-project-ephemeral
100 | go mod tidy
101 | go install github.com/gruntwork-io/terratest/modules/terraform
102 | go test -timeout 45m
103 | ```
104 |
105 | ## Documentation
106 |
107 | ### terraform-docs
108 |
109 | ```sh
110 | # from the root of the repository
111 | terraform-docs --config ${PROJECT_PATH}/.config/.terraform-docs.yaml --lockfile=false ./
112 | ```
113 |
--------------------------------------------------------------------------------
/examples/bda/main.tf:
--------------------------------------------------------------------------------
1 | #####################################################################################
2 | # Terraform module examples are meant to show an _example_ of how to use a module
3 | # per use case. The code below should not be copied directly, but referenced in order
4 | # to build your own root module that invokes this module
5 | #####################################################################################
6 |
7 | module "bda" {
8 | source = "../.." # local example
9 | create_agent = false
10 |
11 | # BDA project config
12 | create_bda = true
13 | bda_standard_output_configuration = {
14 | image = {
15 | extraction = {
16 | bounding_box = {
17 | state = "ENABLED"
18 | }
19 | category = {
20 | state = "ENABLED"
21 | types = ["TEXT_DETECTION", "LOGOS"]
22 | }
23 | }
24 | generative_field = {
25 | state = "ENABLED"
26 | types = ["IMAGE_SUMMARY"]
27 | }
28 | }
29 | }
30 | bda_custom_output_config = [{
31 | blueprint_arn = module.blueprint.bda_blueprint.blueprint_arn
32 | blueprint_stage = module.blueprint.bda_blueprint.blueprint_stage
33 | }]
34 | }
35 |
36 | module "blueprint" {
37 | source = "../.."
38 | create_agent = false
39 |
40 | # Blueprint config
41 | create_blueprint = true
42 | blueprint_schema = jsonencode({
43 | "$schema" = "http://json-schema.org/draft-07/schema#"
44 | description = "This blueprint is to extract key information from advertisement images."
45 | class = "advertisement image"
46 | type = "object"
47 | definitions = {
48 | ProductDetails = {
49 | type = "object"
50 | properties = {
51 | product_category = {
52 | type = "string"
53 | inferenceType = "explicit"
54 | instruction = "The broad category or type of product being advertised, e.g., appliances, electronics, clothing, etc."
55 | }
56 | product_name = {
57 | type = "string"
58 | inferenceType = "explicit"
59 | instruction = "The specific name or model of the product being advertised, if visible in the image."
60 | }
61 | product_placement = {
62 | type = "string"
63 | inferenceType = "explicit"
64 | instruction = "How the product is positioned or placed within the advertisement image. Limit the field values to enum['Front and center', 'In the background', 'Held/used by a person', 'Others']"
65 | }
66 | }
67 | }
68 | }
69 | properties = {
70 | product_details = {
71 | "$ref" = "#/definitions/ProductDetails"
72 | }
73 | image_sentiment = {
74 | type = "string"
75 | inferenceType = "explicit"
76 | instruction = "What is the overall sentiment of the image? Limit the field values to enum['Positive', 'Negative', 'Neutral']"
77 | }
78 | image_background = {
79 | type = "string"
80 | inferenceType = "explicit"
81 | instruction = "What is the background of the ad image? For example, 'Solid color', 'Natural landscape', 'Indoor', 'Urban', 'Abstract'"
82 | }
83 | image_style = {
84 | type = "string"
85 | inferenceType = "explicit"
86 | instruction = "Classify the image style of the ad. For example, 'Product image', 'Lifestyle', 'Portrait', 'Retro', 'Infographic', 'None of the above'"
87 | }
88 | image_humor = {
89 | type = "boolean"
90 | inferenceType = "explicit"
91 | instruction = "Does the advertisement use any humor or wit in its messaging?"
92 | }
93 | key_visuals = {
94 | type = "array"
95 | inferenceType = "explicit"
96 | instruction = "A list of key visual elements or objects present in the advertisement image, apart from the main product."
97 | items = {
98 | type = "string"
99 | }
100 | }
101 | ad_copy = {
102 | type = "string"
103 | inferenceType = "explicit"
104 | instruction = "Any text or copy present in the advertisement image, excluding the brand name and promotional offer."
105 | }
106 | }
107 | })
108 | }
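109 |
110 | # Note: referencing module.blueprint outputs in bda_custom_output_config above
111 | # creates an implicit dependency, so Terraform provisions the blueprint module
112 | # before the BDA project that consumes it.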
--------------------------------------------------------------------------------
/data.tf:
--------------------------------------------------------------------------------
1 | data "aws_caller_identity" "current" {}
2 | data "aws_partition" "current" {}
3 | data "aws_region" "current" {}
4 |
5 | locals {
6 | region = data.aws_region.current.region
7 | account_id = data.aws_caller_identity.current.account_id
8 | partition = data.aws_partition.current.partition
9 | create_kb = var.create_default_kb || var.create_rds_config || var.create_mongo_config || var.create_pinecone_config || var.create_opensearch_config || var.create_opensearch_managed_config || var.create_kb || var.create_kendra_config
10 | foundation_model = var.create_agent ? var.foundation_model : (var.create_supervisor ? var.supervisor_model : null)
11 | }
12 |
13 | data "aws_iam_policy_document" "agent_trust" {
14 | count = var.create_agent || var.create_supervisor ? 1 : 0
15 | statement {
16 | actions = ["sts:AssumeRole"]
17 | principals {
18 | identifiers = ["bedrock.amazonaws.com"]
19 | type = "Service"
20 | }
21 | condition {
22 | test = "StringEquals"
23 | values = [local.account_id]
24 | variable = "aws:SourceAccount"
25 | }
26 | condition {
27 | test = "ArnLike"
28 | values = ["arn:${local.partition}:bedrock:${local.region}:${local.account_id}:agent/*"]
29 | variable = "AWS:SourceArn"
30 | }
31 | }
32 | }
33 |
34 | data "aws_iam_policy_document" "agent_permissions" {
35 | count = var.create_agent || var.create_supervisor ? 1 : 0
36 | statement {
37 | actions = [
38 | "bedrock:InvokeModel*", # For "bedrock:InvokeModel" & "bedrock:InvokeModelWithResponseStream"
39 | "bedrock:UseInferenceProfile",
40 | "bedrock:GetInferenceProfile",
41 | ]
42 | resources = distinct(concat(
43 | var.use_app_inference_profile ? [
44 | var.app_inference_profile_model_source,
45 |       "arn:${local.partition}:bedrock:*:*:inference-profile/*",
46 |       "arn:${local.partition}:bedrock:*::foundation-model/*", # Too broad
47 |       "arn:${local.partition}:bedrock:*:*:application-inference-profile/*",
48 | ] : [],
49 | var.create_app_inference_profile ? [
50 | var.app_inference_profile_model_source,
51 | awscc_bedrock_application_inference_profile.application_inference_profile[0].inference_profile_arn,
52 | "arn:${local.partition}:bedrock:*:*:application-inference-profile/*",
53 | ] : [],
54 | var.create_app_inference_profile ?
55 | awscc_bedrock_application_inference_profile.application_inference_profile[0].models[*].model_arn : [],
56 | !var.create_app_inference_profile && !var.use_app_inference_profile ?
57 | [
58 | "arn:${local.partition}:bedrock:${local.region}::foundation-model/${local.foundation_model}",
59 | "arn:${local.partition}:bedrock:*::foundation-model/${local.foundation_model}",
60 | "arn:${local.partition}:bedrock:${local.region}:${local.account_id}:inference-profile/*.${local.foundation_model}",
61 | ]: []
62 | ))
63 | }
64 | }
65 |
66 | data "aws_iam_policy_document" "agent_alias_permissions" {
67 | count = var.create_agent_alias || var.create_supervisor ? 1 : 0
68 | statement {
69 | actions = [
70 | "bedrock:GetAgentAlias",
71 | "bedrock:InvokeAgent"
72 | ]
73 | resources = [
74 | "arn:${local.partition}:bedrock:${local.region}:${local.account_id}:agent/*",
75 | "arn:${local.partition}:bedrock:${local.region}:${local.account_id}:agent-alias/*"
76 | ]
77 | }
78 | }
79 |
80 |
81 | data "aws_iam_policy_document" "knowledge_base_permissions" {
82 | count = local.create_kb ? 1 : 0
83 |
84 | statement {
85 | actions = ["bedrock:Retrieve"]
86 | resources = ["arn:${local.partition}:bedrock:${local.region}:${local.account_id}:knowledge-base/*"]
87 | }
88 | }
89 |
90 | data "aws_iam_policy_document" "custom_model_trust" {
91 | count = var.create_custom_model ? 1 : 0
92 | statement {
93 | actions = ["sts:AssumeRole"]
94 | principals {
95 | identifiers = ["bedrock.amazonaws.com"]
96 | type = "Service"
97 | }
98 | condition {
99 | test = "StringEquals"
100 | values = [local.account_id]
101 | variable = "aws:SourceAccount"
102 | }
103 | condition {
104 | test = "ArnLike"
105 | values = ["arn:${local.partition}:bedrock:${local.region}:${local.account_id}:model-customization-job/*"]
106 | variable = "AWS:SourceArn"
107 | }
108 | }
109 | }
110 |
111 | data "aws_iam_policy_document" "app_inference_profile_permission" {
112 | count = var.create_app_inference_profile || var.use_app_inference_profile ? 1 : 0
113 | statement {
114 | actions = [
115 | "bedrock:GetInferenceProfile",
116 | "bedrock:ListInferenceProfiles",
117 | "bedrock:UseInferenceProfile",
118 | ]
119 | resources = [
120 | "arn:${local.partition}:bedrock:*:*:inference-profile/*",
121 | "arn:${local.partition}:bedrock:*:*:application-inference-profile/*"
122 | ]
123 | }
124 | }
125 |
126 | data "aws_bedrock_foundation_model" "model_identifier" {
127 | count = var.create_custom_model ? 1 : 0
128 | model_id = var.custom_model_id
129 | }
130 |
--------------------------------------------------------------------------------
/outputs.tf:
--------------------------------------------------------------------------------
1 | output "default_collection" {
2 | value = var.create_default_kb ? module.oss_knowledgebase[0].opensearch_serverless_collection : null
3 | description = "Opensearch default collection value."
4 | }
5 |
6 | output "default_kb_identifier" {
7 | value = length(awscc_bedrock_knowledge_base.knowledge_base_default) > 0 ? awscc_bedrock_knowledge_base.knowledge_base_default[0].id : null
8 | description = "The unique identifier of the default knowledge base that was created. If no default KB was requested, value will be null"
9 | }
10 |
11 | output "mongo_kb_identifier" {
12 | value = length(awscc_bedrock_knowledge_base.knowledge_base_mongo) > 0 ? awscc_bedrock_knowledge_base.knowledge_base_mongo[0].id : null
13 | description = "The unique identifier of the MongoDB knowledge base that was created. If no MongoDB KB was requested, value will be null"
14 | }
15 |
16 | output "opensearch_kb_identifier" {
17 | value = length(awscc_bedrock_knowledge_base.knowledge_base_opensearch) > 0 ? awscc_bedrock_knowledge_base.knowledge_base_opensearch[0].id : null
18 | description = "The unique identifier of the OpenSearch knowledge base that was created. If no OpenSearch KB was requested, value will be null"
19 | }
20 |
21 | output "pinecone_kb_identifier" {
22 | value = length(awscc_bedrock_knowledge_base.knowledge_base_pinecone) > 0 ? awscc_bedrock_knowledge_base.knowledge_base_pinecone[0].id : null
23 | description = "The unique identifier of the Pinecone knowledge base that was created. If no Pinecone KB was requested, value will be null"
24 | }
25 |
26 | output "rds_kb_identifier" {
27 | value = length(awscc_bedrock_knowledge_base.knowledge_base_rds) > 0 ? awscc_bedrock_knowledge_base.knowledge_base_rds[0].id : null
28 | description = "The unique identifier of the RDS knowledge base that was created. If no RDS KB was requested, value will be null"
29 | }
30 |
31 | output "datasource_identifier" {
32 | value = length(awscc_bedrock_data_source.knowledge_base_ds) > 0 ? awscc_bedrock_data_source.knowledge_base_ds[0].data_source_id : null
33 | description = "The unique identifier of the data source."
34 | }
35 |
36 | output "cloudwatch_log_group" {
37 | value = length(aws_cloudwatch_log_group.knowledge_base_cwl) > 0 ? aws_cloudwatch_log_group.knowledge_base_cwl[0].name : null
38 | description = "The name of the CloudWatch log group for the knowledge base. If no log group was requested, value will be null"
39 | }
40 |
41 | output "bedrock_agent" {
42 | value = var.create_agent == true ? awscc_bedrock_agent.bedrock_agent : null
43 | description = "The Amazon Bedrock Agent if it is created."
44 | }
45 |
46 | output "bedrock_agent_alias" {
47 | value = var.create_agent_alias == true ? (var.use_aws_provider_alias ? aws_bedrockagent_agent_alias.bedrock_agent_alias : awscc_bedrock_agent_alias.bedrock_agent_alias) : null
48 | description = "The Amazon Bedrock Agent Alias if it is created."
49 | }
50 |
51 | output "s3_data_source_arn" {
52 | value = var.kb_s3_data_source != null ? var.kb_s3_data_source : var.create_default_kb ? length(awscc_s3_bucket.s3_data_source) > 0 ? awscc_s3_bucket.s3_data_source[0].arn : null : null
53 | description = "The Amazon Bedrock Data Source for S3."
54 | }
55 |
56 | output "s3_data_source_name" {
57 | value = var.kb_s3_data_source != null ? split(":", var.kb_s3_data_source)[5] : var.create_default_kb ? length(awscc_s3_bucket.s3_data_source) > 0 ? awscc_s3_bucket.s3_data_source[0].id : null : null
58 | description = "The name of the Amazon Bedrock Data Source for S3."
59 | }
60 |
61 | output "supervisor_id" {
62 | value = var.create_supervisor ? aws_bedrockagent_agent.agent_supervisor[0].agent_id : null
63 | description = "The identifier of the supervisor agent."
64 | }
65 |
66 | output "bda_blueprint" {
67 | value = var.create_blueprint ? awscc_bedrock_blueprint.bda_blueprint[0] : null
68 | description = "The BDA blueprint."
69 | }
70 |
71 |
72 | output "agent_resource_role_arn" {
73 | value = var.agent_resource_role_arn != null ? var.agent_resource_role_arn : (var.create_agent ? aws_iam_role.agent_role[0].arn : null)
74 | description = "The ARN of the Bedrock agent resource role."
75 | }
76 |
77 | output "agent_resource_role_name" {
78 | value = var.agent_resource_role_arn != null ? split("/", var.agent_resource_role_arn)[1] : (var.create_agent ? aws_iam_role.agent_role[0].name : null)
79 | description = "The name of the Bedrock agent resource role."
80 | }
81 |
82 | output "supervisor_role_arn" {
83 | value = var.agent_resource_role_arn != null ? var.agent_resource_role_arn : (var.create_supervisor ? aws_iam_role.agent_role[0].arn : null)
84 | description = "The ARN of the Bedrock supervisor agent resource role."
85 | }
86 |
87 | output "custom_model" {
88 | value = var.create_custom_model ? aws_bedrock_custom_model.custom_model[0] : null
89 | description = "The custom model. If no custom model was requested, value will be null."
90 | }
91 |
92 | output "knowledge_base_role_name" {
93 | description = "The name of the IAM role used by the knowledge base."
94 | value = try(aws_iam_role.bedrock_knowledge_base_role[0].name, null)
95 | }
96 |
97 | output "application_inference_profile_arn" {
98 | description = "The ARN of the application inference profile."
99 | value = var.create_app_inference_profile ? awscc_bedrock_application_inference_profile.application_inference_profile[0].inference_profile_arn : null
100 | }
101 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/data-source.tf:
--------------------------------------------------------------------------------
1 | locals {
2 | create_cwl = var.create_default_kb && var.create_kb_log_group
3 | create_delivery = local.create_cwl || var.kb_monitoring_arn != null
4 | vector_ingestion_configuration = {
5 | chunking_configuration = var.chunking_strategy == null ? null : {
6 | chunking_strategy = var.chunking_strategy
7 | fixed_size_chunking_configuration = var.chunking_strategy_max_tokens == null ? null : {
8 | max_tokens = var.chunking_strategy_max_tokens
9 | overlap_percentage = var.chunking_strategy_overlap_percentage
10 | }
11 | hierarchical_chunking_configuration = var.heirarchical_overlap_tokens == null && var.level_configurations_list == null ? null : {
12 | level_configurations = var.level_configurations_list
13 | overlap_tokens = var.heirarchical_overlap_tokens
14 | }
15 | semantic_chunking_configuration = var.breakpoint_percentile_threshold == null && var.semantic_buffer_size == null && var.semantic_max_tokens == null ? null : {
16 | breakpoint_percentile_threshold = var.breakpoint_percentile_threshold
17 | buffer_size = var.semantic_buffer_size
18 | max_tokens = var.semantic_max_tokens
19 | }
20 | }
21 | context_enrichment_configuration = var.create_context_enrichment_config == false ? null : {
22 | type = var.context_enrichment_type
23 | bedrock_foundation_model_configuration = {
24 | model_arn = var.context_enrichment_model_arn
25 | enrichment_strategy_configuration = {
26 | method = var.enrichment_strategy_method
27 | }
28 | }
29 | }
30 | custom_transformation_configuration = var.create_custom_tranformation_config == false ? null : {
31 | intermediate_storage = {
32 | s3_location = {
33 | uri = var.s3_location_uri
34 | }
35 | }
36 | transformations = var.transformations_list
37 | }
38 | parsing_configuration = var.create_parsing_configuration == false ? null : {
39 | bedrock_foundation_model_configuration = {
40 | model_arn = var.parsing_config_model_arn
41 | parsing_prompt = {
42 | parsing_prompt_text = var.parsing_prompt_text
43 | }
44 | parsing_modality = var.parsing_modality
45 | }
46 | bedrock_data_automation_configuration = var.create_bedrock_data_automation_config == false ? null : {
47 | parsing_modality = var.parsing_modality
48 | }
49 | parsing_strategy = var.parsing_strategy
50 | }
51 | }
52 |
53 | server_side_encryption_configuration = var.create_server_side_encryption_config == false ? null : {
54 | kms_key_arn = var.data_source_kms_key_arn
55 | }
56 | }
57 |
58 | # – Knowledge Base S3 Data Source –
59 | resource "awscc_s3_bucket" "s3_data_source" {
60 | count = (var.create_s3_data_source || var.create_kendra_s3_data_source) && var.use_existing_s3_data_source == false ? 1 : 0
61 | bucket_name = "${random_string.solution_prefix.result}-${var.kb_name}-default-bucket"
62 |
63 | public_access_block_configuration = {
64 | block_public_acls = true
65 | block_public_policy = true
66 | ignore_public_acls = true
67 | restrict_public_buckets = true
68 | }
69 |
70 | bucket_encryption = {
71 | server_side_encryption_configuration = [{
72 | bucket_key_enabled = true
73 | server_side_encryption_by_default = {
74 | sse_algorithm = var.kb_s3_data_source_kms_arn == null ? "AES256" : "aws:kms"
75 | kms_master_key_id = var.kb_s3_data_source_kms_arn
76 | }
77 | }]
78 | }
79 |
80 | tags = var.kb_tags != null ? [for k, v in var.kb_tags : { key = k, value = v }] : [{
81 | key = "Name"
82 | value = "S3 Data Source"
83 | }]
84 | }
85 |
86 | resource "awscc_bedrock_data_source" "knowledge_base_ds" {
87 | count = var.create_s3_data_source ? 1 : 0
88 | knowledge_base_id = var.create_default_kb ? awscc_bedrock_knowledge_base.knowledge_base_default[0].id : var.existing_kb
89 | name = "${random_string.solution_prefix.result}-${var.kb_name}DataSource"
90 | description = var.data_source_description
91 | data_deletion_policy = var.data_deletion_policy
92 | data_source_configuration = {
93 | type = "S3"
94 | s3_configuration = {
95 |       bucket_arn = var.kb_s3_data_source == null ? awscc_s3_bucket.s3_data_source[0].arn : var.kb_s3_data_source # use the module-created bucket or reference an existing one
96 | bucket_owner_account_id = var.bucket_owner_account_id
97 | inclusion_prefixes = var.s3_inclusion_prefixes
98 | }
99 | }
100 | vector_ingestion_configuration = var.create_vector_ingestion_configuration == false ? null : local.vector_ingestion_configuration
101 | server_side_encryption_configuration = local.server_side_encryption_configuration
102 | }
103 |
104 | resource "aws_cloudwatch_log_group" "knowledge_base_cwl" {
105 | #tfsec:ignore:log-group-customer-key
106 | #checkov:skip=CKV_AWS_158:Encryption not required for log group
107 | count = local.create_cwl ? 1 : 0
108 | name = "/aws/vendedlogs/bedrock/knowledge-base/APPLICATION_LOGS/${awscc_bedrock_knowledge_base.knowledge_base_default[0].id}"
109 | retention_in_days = var.kb_log_group_retention_in_days
110 | }
111 |
112 | resource "awscc_logs_delivery_source" "knowledge_base_log_source" {
113 | count = local.create_delivery ? 1 : 0
114 | name = "${random_string.solution_prefix.result}-${var.kb_name}-delivery-source"
115 | log_type = "APPLICATION_LOGS"
116 | resource_arn = awscc_bedrock_knowledge_base.knowledge_base_default[0].knowledge_base_arn
117 | }
118 |
119 | resource "awscc_logs_delivery_destination" "knowledge_base_log_destination" {
120 | count = local.create_delivery ? 1 : 0
121 | name = "${random_string.solution_prefix.result}-${var.kb_name}-delivery-destination"
122 | output_format = "json"
123 | destination_resource_arn = local.create_cwl ? aws_cloudwatch_log_group.knowledge_base_cwl[0].arn : var.kb_monitoring_arn
124 | tags = var.kb_tags != null ? [for k, v in var.kb_tags : { key = k, value = v }] : [{
125 | key = "Name"
126 | value = "${random_string.solution_prefix.result}-${var.kb_name}-delivery-destination"
127 | }]
128 | }
129 |
130 | resource "awscc_logs_delivery" "knowledge_base_log_delivery" {
131 | count = local.create_delivery ? 1 : 0
132 | delivery_destination_arn = awscc_logs_delivery_destination.knowledge_base_log_destination[0].arn
133 | delivery_source_name = awscc_logs_delivery_source.knowledge_base_log_source[0].name
134 | tags = var.kb_tags != null ? [for k, v in var.kb_tags : { key = k, value = v }] : [{
135 | key = "Name"
136 | value = "${random_string.solution_prefix.result}-${var.kb_name}-delivery"
137 | }]
138 | }
139 |
140 | # – Knowledge Base Web Crawler Data Source
141 | resource "awscc_bedrock_data_source" "knowledge_base_web_crawler" {
142 | count = var.create_web_crawler ? 1 : 0
143 | knowledge_base_id = var.create_default_kb ? awscc_bedrock_knowledge_base.knowledge_base_default[0].id : var.existing_kb
144 | name = "${random_string.solution_prefix.result}-${var.kb_name}DataSourceWebCrawler"
145 | description = var.data_source_description
146 | data_source_configuration = {
147 | type = "WEB"
148 | web_configuration = {
149 | crawler_configuration = {
150 | crawler_limits = {
151 | rate_limit = var.rate_limit
152 | max_pages = var.max_pages
153 | }
154 | exclusion_filters = var.exclusion_filters
155 | inclusion_filters = var.inclusion_filters
156 | scope = var.crawler_scope
157 | user_agent = var.user_agent
158 | }
159 | source_configuration = {
160 | url_configuration = {
161 | seed_urls = var.seed_urls
162 | }
163 | }
164 | }
165 | }
166 | vector_ingestion_configuration = var.create_vector_ingestion_configuration == false ? null : local.vector_ingestion_configuration
167 | server_side_encryption_configuration = local.server_side_encryption_configuration
168 | }
169 |
170 | # – Knowledge Base Confluence Data Source
171 | resource "awscc_bedrock_data_source" "knowledge_base_confluence" {
172 | count = var.create_confluence ? 1 : 0
173 | knowledge_base_id = var.create_default_kb ? awscc_bedrock_knowledge_base.knowledge_base_default[0].id : var.existing_kb
174 | name = "${random_string.solution_prefix.result}-${var.kb_name}DataSourceConfluence"
175 | description = var.data_source_description
176 | data_source_configuration = {
177 | type = "CONFLUENCE"
178 | confluence_configuration = {
179 | crawler_configuration = {
180 | filter_configuration = {
181 | pattern_object_filter = {
182 | filters = var.pattern_object_filter_list
183 | }
184 | type = var.crawl_filter_type
185 | }
186 | }
187 | source_configuration = {
188 | auth_type = var.auth_type
189 | credentials_secret_arn = var.confluence_credentials_secret_arn
190 | host_type = var.host_type
191 | host_url = var.host_url
192 | }
193 | }
194 | }
195 | vector_ingestion_configuration = var.create_vector_ingestion_configuration == false ? null : local.vector_ingestion_configuration
196 | server_side_encryption_configuration = local.server_side_encryption_configuration
197 | }
198 |
199 | # – Knowledge Base SharePoint Data Source
200 | resource "awscc_bedrock_data_source" "knowledge_base_sharepoint" {
201 | count = var.create_sharepoint ? 1 : 0
202 | knowledge_base_id = var.create_default_kb ? awscc_bedrock_knowledge_base.knowledge_base_default[0].id : var.existing_kb
203 | name = "${random_string.solution_prefix.result}-${var.kb_name}DataSourceSharepoint"
204 | description = var.data_source_description
205 | data_source_configuration = {
206 | type = "SHAREPOINT"
207 | share_point_configuration = {
208 | crawler_configuration = {
209 | filter_configuration = {
210 |             pattern_object_filter = {
211 | filters = var.pattern_object_filter_list
212 | }
213 | type = var.crawl_filter_type
214 | }
215 | }
216 | source_configuration = {
217 | auth_type = var.auth_type
218 | credentials_secret_arn = var.share_point_credentials_secret_arn
219 | domain = var.share_point_domain
220 | host_type = var.host_type
221 | site_urls = var.share_point_site_urls
222 | tenant_id = var.tenant_id
223 | }
224 | }
225 | }
226 | vector_ingestion_configuration = var.create_vector_ingestion_configuration == false ? null : local.vector_ingestion_configuration
227 | server_side_encryption_configuration = local.server_side_encryption_configuration
228 | }
229 |
230 | # – Knowledge Base Salesforce Data Source
231 | resource "awscc_bedrock_data_source" "knowledge_base_salesforce" {
232 | count = var.create_salesforce ? 1 : 0
233 | knowledge_base_id = var.create_default_kb ? awscc_bedrock_knowledge_base.knowledge_base_default[0].id : var.existing_kb
234 | name = "${random_string.solution_prefix.result}-${var.kb_name}DataSourceSalesforce"
235 | description = var.data_source_description
236 | data_source_configuration = {
237 | type = "SALESFORCE"
238 | salesforce_configuration = {
239 | crawler_configuration = {
240 | filter_configuration = {
241 | pattern_object_filter = {
242 | filters = var.pattern_object_filter_list
243 | }
244 | type = var.crawl_filter_type
245 | }
246 | }
247 | source_configuration = {
248 | auth_type = var.auth_type
249 | credentials_secret_arn = var.salesforce_credentials_secret_arn
250 | host_url = var.host_url
251 | }
252 | }
253 | }
254 | vector_ingestion_configuration = var.create_vector_ingestion_configuration == false ? null : local.vector_ingestion_configuration
255 | server_side_encryption_configuration = local.server_side_encryption_configuration
256 | }
257 |
--------------------------------------------------------------------------------
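
A minimal usage sketch for the data sources defined above, assuming the module is
consumed from the Terraform registry (the source address and the chunking token
sizes are illustrative assumptions, not values taken from this repository):

module "bedrock" {
  source  = "aws-ia/bedrock/aws" # assumed registry address
  version = ">= 0.0.31"

  # Default OpenSearch Serverless knowledge base plus the managed S3 data source
  create_default_kb     = true
  create_s3_data_source = true
  kb_name               = "docs-kb"

  # Feeds local.vector_ingestion_configuration (fixed-size chunking branch)
  create_vector_ingestion_configuration = true
  chunking_strategy                     = "FIXED_SIZE" # illustrative strategy value
  chunking_strategy_max_tokens          = 512          # illustrative
  chunking_strategy_overlap_percentage  = 20           # illustrative
}

Because kb_s3_data_source is left null here, awscc_bedrock_data_source.knowledge_base_ds
points at the bucket the module itself creates (awscc_s3_bucket.s3_data_source).
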
/main.tf:
--------------------------------------------------------------------------------
1 | resource "random_string" "solution_prefix" {
2 | length = 4
3 | special = false
4 | upper = false
5 | }
6 |
7 | # – Bedrock Agent –
8 |
9 | locals {
10 | bedrock_agent_alias = var.create_agent_alias && var.use_aws_provider_alias ? aws_bedrockagent_agent_alias.bedrock_agent_alias : awscc_bedrock_agent_alias.bedrock_agent_alias
11 |
12 | counter_kb = local.create_kb || var.existing_kb != null ? [1] : []
13 | knowledge_base_id = local.create_kb ? (var.create_default_kb ? awscc_bedrock_knowledge_base.knowledge_base_default[0].id : (var.create_mongo_config ? awscc_bedrock_knowledge_base.knowledge_base_mongo[0].id : (var.create_opensearch_config ? awscc_bedrock_knowledge_base.knowledge_base_opensearch[0].id : (var.create_opensearch_managed_config ? awscc_bedrock_knowledge_base.knowledge_base_opensearch_managed[0].id : (var.create_pinecone_config ? awscc_bedrock_knowledge_base.knowledge_base_pinecone[0].id : (var.create_rds_config ? awscc_bedrock_knowledge_base.knowledge_base_rds[0].id : (var.create_kendra_config ? awscc_bedrock_knowledge_base.knowledge_base_kendra[0].id : (var.create_sql_config ? awscc_bedrock_knowledge_base.knowledge_base_sql[0].id : null)))))))) : null
14 | knowledge_bases_value = {
15 | description = var.kb_description
16 | knowledge_base_id = local.create_kb ? local.knowledge_base_id : var.existing_kb
17 | knowledge_base_state = var.kb_state
18 | }
19 | kb_result = [for count in local.counter_kb : local.knowledge_bases_value]
20 |
21 |
22 | counter_action_group = var.create_ag ? [1] : []
23 | action_group_value = {
24 | action_group_name = var.action_group_name
25 | description = var.parent_action_group_signature != null ? null : var.action_group_description
26 | action_group_state = var.action_group_state
27 | parent_action_group_signature = var.parent_action_group_signature
28 | skip_resource_in_use_check_on_delete = var.skip_resource_in_use
29 | api_schema = {
30 | payload = var.api_schema_payload
31 | s3 = {
32 | s3_bucket_name = var.api_schema_s3_bucket_name
33 | s3_object_key = var.api_schema_s3_object_key
34 | }
35 | }
36 | action_group_executor = {
37 | custom_control = var.custom_control
38 | lambda = var.lambda_action_group_executor
39 | }
40 | }
41 | action_group_result = [for count in local.counter_action_group : local.action_group_value]
42 |
43 | # Create a map with action_group_name as keys for stable sorting
44 | action_group_map = var.action_group_list != null ? {
45 | for idx, ag in var.action_group_list :
46 |     # Use action_group_name as the key, or a zero-padded index when the name is null or empty (coalesce skips both)
47 | coalesce(try(ag.action_group_name, ""), format("%04d", idx)) => ag
48 | } : {}
49 |
50 | # Extract values from the sorted map (Terraform maps are sorted by keys)
51 | # Also handle the description/parent_action_group_signature conflict
52 | sorted_action_groups = [for k, v in local.action_group_map : merge(v, {
53 | description = try(v.parent_action_group_signature, null) != null ? null : try(v.description, null)
54 | })]
55 |
56 | # Combine action groups with consistent ordering
57 | action_group_list = concat(local.action_group_result, local.sorted_action_groups)
58 |
59 | counter_collaborator = var.create_agent && var.create_agent_alias && var.create_collaborator ? 1 : 0
60 |
61 | supervisor_guardrail = var.create_supervisor_guardrail == false || local.counter_collaborator == 0 ? null : [{
62 | guardrail_identifier = var.supervisor_guardrail_id
63 | guardrail_version = var.supervisor_guardrail_version
64 | }]
65 | }
66 |
67 | # Add a sleep after creating the inference profile to ensure it's fully available
68 | resource "time_sleep" "wait_for_inference_profile" {
69 | count = var.create_app_inference_profile ? 1 : 0
70 | depends_on = [awscc_bedrock_application_inference_profile.application_inference_profile[0]]
71 | create_duration = "5s"
72 | }
73 |
74 | resource "time_sleep" "wait_for_use_inference_profile_role_policy" {
75 | count = var.use_app_inference_profile ? 1 : 0
76 | depends_on = [aws_iam_role_policy.app_inference_profile_role_policy]
77 | create_duration = "10s"
78 | }
79 |
80 | resource "awscc_bedrock_agent" "bedrock_agent" {
81 | count = var.create_agent ? 1 : 0
82 | agent_name = "${random_string.solution_prefix.result}-${var.agent_name}"
83 | foundation_model = var.use_app_inference_profile ? var.app_inference_profile_model_source : (var.create_app_inference_profile ? awscc_bedrock_application_inference_profile.application_inference_profile[0].inference_profile_arn : var.foundation_model)
84 | instruction = var.instruction
85 | description = var.agent_description
86 | idle_session_ttl_in_seconds = var.idle_session_ttl
87 | agent_resource_role_arn = var.agent_resource_role_arn != null ? var.agent_resource_role_arn : aws_iam_role.agent_role[0].arn
88 | orchestration_type = var.orchestration_type
89 | custom_orchestration = var.orchestration_type == "CUSTOM" ? {
90 | executor = {
91 | lambda = var.custom_orchestration_lambda_arn
92 | }
93 | } : null
94 |
95 | depends_on = [time_sleep.wait_for_inference_profile, time_sleep.wait_for_use_inference_profile_role_policy]
96 |
97 | customer_encryption_key_arn = var.kms_key_arn
98 | tags = var.tags
99 | prompt_override_configuration = var.prompt_override == false ? null : {
100 | prompt_configurations = [{
101 | prompt_type = var.prompt_type
102 | inference_configuration = {
103 | temperature = var.temperature
104 | top_p = var.top_p
105 | top_k = var.top_k
106 | stop_sequences = var.stop_sequences
107 | maximum_length = var.max_length
108 | }
109 | base_prompt_template = var.base_prompt_template
110 | parser_mode = var.parser_mode
111 | prompt_creation_mode = var.prompt_creation_mode
112 | prompt_state = var.prompt_state
113 | additional_model_request_fields = var.additional_model_request_fields
114 | }]
115 | override_lambda = var.override_lambda_arn
116 | }
117 | # open issue: https://github.com/hashicorp/terraform-provider-awscc/issues/2004
118 | # auto_prepare needs to be set to true
119 | auto_prepare = true
120 | knowledge_bases = length(local.kb_result) > 0 ? local.kb_result : null
121 | action_groups = length(local.action_group_list) > 0 ? local.action_group_list : null
122 | guardrail_configuration = var.create_guardrail == false ? null : {
123 | guardrail_identifier = awscc_bedrock_guardrail.guardrail[0].id
124 | guardrail_version = awscc_bedrock_guardrail_version.guardrail[0].version
125 | }
126 | memory_configuration = var.memory_configuration
127 | }
128 |
129 | # Agent Alias
130 |
131 | resource "awscc_bedrock_agent_alias" "bedrock_agent_alias" {
132 | count = var.create_agent_alias && var.use_aws_provider_alias == false ? 1 : 0
133 | agent_alias_name = var.agent_alias_name
134 | agent_id = var.create_agent ? awscc_bedrock_agent.bedrock_agent[0].id : var.agent_id
135 | description = var.agent_alias_description
136 | routing_configuration = var.bedrock_agent_version == null ? null : [
137 | {
138 | agent_version = var.bedrock_agent_version
139 | }
140 | ]
141 | tags = var.agent_alias_tags
142 | }
143 |
144 | resource "aws_bedrockagent_agent_alias" "bedrock_agent_alias" {
145 | count = var.create_agent_alias && var.use_aws_provider_alias ? 1 : 0
146 | agent_alias_name = var.agent_alias_name
147 | agent_id = var.create_agent ? awscc_bedrock_agent.bedrock_agent[0].id : var.agent_id
148 | description = var.agent_alias_description
149 | routing_configuration = var.bedrock_agent_version == null ? null : [
150 | {
151 | agent_version = var.bedrock_agent_version
152 | provisioned_throughput = var.bedrock_agent_alias_provisioned_throughput
153 | }
154 | ]
155 | tags = var.agent_alias_tags
156 | }
157 |
158 | # Agent Collaborator
159 |
160 | resource "aws_bedrockagent_agent_collaborator" "agent_collaborator" {
161 | count = local.counter_collaborator
162 | agent_id = var.create_supervisor ? aws_bedrockagent_agent.agent_supervisor[0].agent_id : var.supervisor_id
163 | collaboration_instruction = var.collaboration_instruction
164 | collaborator_name = "${random_string.solution_prefix.result}-${var.collaborator_name}"
165 | relay_conversation_history = var.relay_conversation_history
166 |
167 | agent_descriptor {
168 | alias_arn = local.bedrock_agent_alias[0].agent_alias_arn
169 | }
170 |
171 | depends_on = [awscc_bedrock_agent.bedrock_agent[0], local.bedrock_agent_alias]
172 | }
173 |
174 | resource "aws_bedrockagent_agent" "agent_supervisor" {
175 | count = var.create_supervisor ? 1 : 0
176 | agent_name = "${random_string.solution_prefix.result}-${var.supervisor_name}"
177 | agent_resource_role_arn = var.agent_resource_role_arn != null ? var.agent_resource_role_arn : aws_iam_role.agent_role[0].arn
178 |
179 | agent_collaboration = var.agent_collaboration
180 | idle_session_ttl_in_seconds = var.supervisor_idle_session_ttl
181 | foundation_model = var.use_app_inference_profile ? var.app_inference_profile_model_source : (var.create_app_inference_profile ? awscc_bedrock_application_inference_profile.application_inference_profile[0].inference_profile_arn : var.supervisor_model)
182 | instruction = var.supervisor_instruction
183 | customer_encryption_key_arn = var.supervisor_kms_key_arn
184 | #checkov:skip=CKV_AWS_383:The user can optionally associate agent with Bedrock guardrails
185 | guardrail_configuration = local.supervisor_guardrail
186 | prepare_agent = false
187 |
188 | depends_on = [time_sleep.wait_for_inference_profile, time_sleep.wait_for_use_inference_profile_role_policy]
189 | }
190 |
191 | # – Guardrail –
192 |
193 | resource "awscc_bedrock_guardrail" "guardrail" {
194 | count = var.create_guardrail ? 1 : 0
195 | name = "${random_string.solution_prefix.result}-${var.guardrail_name}"
196 | blocked_input_messaging = var.blocked_input_messaging
197 | blocked_outputs_messaging = var.blocked_outputs_messaging
198 | description = var.guardrail_description
199 |
200 | # Cross region configuration
201 | cross_region_config = var.guardrail_cross_region_config
202 |
203 | # Content policy configuration
204 | content_policy_config = {
205 | filters_config = var.filters_config
206 | content_filters_tier_config = var.content_filters_tier_config
207 | }
208 |
209 | # Contextual grounding policy configuration
210 | contextual_grounding_policy_config = var.contextual_grounding_policy_filters != null ? {
211 | filters_config = var.contextual_grounding_policy_filters
212 | } : null
213 |
214 | # Sensitive information policy configuration
215 | sensitive_information_policy_config = {
216 | pii_entities_config = var.pii_entities_config
217 | regexes_config = var.regexes_config
218 | }
219 |
220 | # Word policy configuration
221 | word_policy_config = {
222 | managed_word_lists_config = var.managed_word_lists_config
223 | words_config = var.words_config
224 | }
225 |
226 | # Topic policy configuration
227 | topic_policy_config = var.topics_config == null ? null : {
228 | topics_config = var.topics_config
229 | topics_tier_config = var.topics_tier_config
230 | }
231 |
232 | tags = var.guardrail_tags
233 | kms_key_arn = var.guardrail_kms_key_arn
234 | }
235 |
236 | resource "awscc_bedrock_guardrail_version" "guardrail" {
237 | count = var.create_guardrail ? 1 : 0
238 | guardrail_identifier = awscc_bedrock_guardrail.guardrail[0].guardrail_id
239 | description = "Guardrail version"
240 | }
241 |
242 | # – Bedrock Flow –
243 |
244 | resource "awscc_bedrock_flow_alias" "flow_alias" {
245 | count = var.create_flow_alias ? 1 : 0
246 | name = var.flow_alias_name
247 | flow_arn = var.flow_arn
248 | description = var.flow_alias_description
249 | routing_configuration = [
250 | {
251 | flow_version = var.flow_version != null ? var.flow_version : awscc_bedrock_flow_version.flow_version[0].version
252 | }
253 | ]
254 | }
255 |
256 | resource "awscc_bedrock_flow_version" "flow_version" {
257 | count = var.flow_version == null && var.create_flow_alias ? 1 : 0
258 | flow_arn = var.flow_arn
259 | description = var.flow_version_description
260 | }
261 |
262 | # – Custom Model –
263 |
264 | resource "aws_bedrock_custom_model" "custom_model" {
265 | count = var.create_custom_model ? 1 : 0
266 | custom_model_name = "${random_string.solution_prefix.result}-${var.custom_model_name}"
267 | job_name = "${random_string.solution_prefix.result}-${var.custom_model_job_name}"
268 | base_model_identifier = data.aws_bedrock_foundation_model.model_identifier[0].model_arn
269 | role_arn = aws_iam_role.custom_model_role[0].arn
270 | custom_model_kms_key_id = var.custom_model_kms_key_id
271 | customization_type = var.customization_type
272 | hyperparameters = var.custom_model_hyperparameters
273 | output_data_config {
274 | s3_uri = var.custom_model_output_uri == null ? "s3://${awscc_s3_bucket.custom_model_output[0].id}/" : "s3://${var.custom_model_output_uri}"
275 | }
276 | training_data_config {
277 | s3_uri = "s3://${var.custom_model_training_uri}"
278 | }
279 | tags = var.custom_model_tags
280 | }
281 |
282 | resource "awscc_s3_bucket" "custom_model_output" {
283 | count = var.custom_model_output_uri == null && var.create_custom_model == true ? 1 : 0
284 | bucket_name = "${random_string.solution_prefix.result}-${var.custom_model_name}-output-bucket"
285 | public_access_block_configuration = {
286 | block_public_acls = true
287 | block_public_policy = true
288 | ignore_public_acls = true
289 | restrict_public_buckets = true
290 | }
291 | bucket_encryption = {
292 | server_side_encryption_configuration = [{
293 | bucket_key_enabled = true
294 | server_side_encryption_by_default = {
295 | sse_algorithm = var.kb_s3_data_source_kms_arn == null ? "AES256" : "aws:kms"
296 | kms_master_key_id = var.kb_s3_data_source_kms_arn
297 | }
298 | }]
299 | }
300 | tags = var.custom_model_tags != null ? [for k, v in var.custom_model_tags : { key = k, value = v }] : [{
301 | key = "Name"
302 | value = "${random_string.solution_prefix.result}-${var.custom_model_name}-output-bucket"
303 | }]
304 | }
305 |
--------------------------------------------------------------------------------
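
A minimal sketch of the agent path in main.tf, assuming registry consumption (the
source address, model identifier, and blocked-message strings are illustrative
assumptions): create an agent with an alias and an attached guardrail.

module "bedrock_agent" {
  source = "aws-ia/bedrock/aws" # assumed registry address

  create_agent     = true
  agent_name       = "support-agent"
  foundation_model = "anthropic.claude-3-5-sonnet-20240620-v1:0" # illustrative model ID
  instruction      = "You answer questions about internal product documentation only."

  create_agent_alias = true
  agent_alias_name   = "prod"

  create_guardrail          = true
  guardrail_name            = "support-guardrail"
  blocked_input_messaging   = "Sorry, I cannot help with that request."  # illustrative
  blocked_outputs_messaging = "Sorry, I cannot provide that response."   # illustrative
}

With create_guardrail = true, the agent's guardrail_configuration is wired to
awscc_bedrock_guardrail.guardrail[0] and its pinned awscc_bedrock_guardrail_version,
so callers do not pass a guardrail identifier themselves.
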
/knowledge-base.tf:
--------------------------------------------------------------------------------
1 | # – Knowledge Base Default OpenSearch –
2 | resource "awscc_bedrock_knowledge_base" "knowledge_base_default" {
3 | count = var.create_default_kb ? 1 : 0
4 | name = "${random_string.solution_prefix.result}-${var.kb_name}"
5 | description = var.kb_description
6 | role_arn = var.kb_role_arn != null ? var.kb_role_arn : aws_iam_role.bedrock_knowledge_base_role[0].arn
7 | tags = var.kb_tags
8 |
9 | storage_configuration = {
10 | type = "OPENSEARCH_SERVERLESS"
11 | opensearch_serverless_configuration = {
12 | collection_arn = module.oss_knowledgebase[0].opensearch_serverless_collection.arn
13 | vector_index_name = module.oss_knowledgebase[0].vector_index.name
14 | field_mapping = {
15 | metadata_field = var.metadata_field
16 | text_field = var.text_field
17 | vector_field = var.vector_field
18 | }
19 | }
20 | }
21 | knowledge_base_configuration = {
22 | type = "VECTOR"
23 | vector_knowledge_base_configuration = {
24 | embedding_model_arn = local.kb_embedding_model_arn
25 | embedding_model_configuration = var.embedding_model_dimensions != null ? {
26 | bedrock_embedding_model_configuration = {
27 | dimensions = var.embedding_model_dimensions
28 | embedding_data_type = var.embedding_data_type
29 | }
30 | } : null
31 | supplemental_data_storage_configuration = var.create_supplemental_data_storage ? {
32 | supplemental_data_storage_locations = [
33 | {
34 | supplemental_data_storage_location_type = "S3"
35 | s3_location = {
36 | uri = var.supplemental_data_s3_uri
37 | }
38 | }
39 | ]
40 | } : null
41 | }
42 | }
43 | depends_on = [ time_sleep.wait_after_index_creation ]
44 | }
45 |
46 | # – Existing Vector KBs –
47 |
48 | # – Mongo –
49 | resource "awscc_bedrock_knowledge_base" "knowledge_base_mongo" {
50 | count = var.create_mongo_config ? 1 : 0
51 | name = "${random_string.solution_prefix.result}-${var.kb_name}"
52 | description = var.kb_description
53 | role_arn = var.kb_role_arn != null ? var.kb_role_arn : aws_iam_role.bedrock_knowledge_base_role[0].arn
54 | tags = var.kb_tags
55 |
56 | storage_configuration = {
57 | type = var.kb_storage_type
58 |
59 | mongo_db_atlas_configuration = {
60 | collection_name = var.collection_name
61 | credentials_secret_arn = var.credentials_secret_arn
62 | database_name = var.database_name
63 | endpoint = var.endpoint
64 | vector_index_name = var.vector_index_name
65 | text_index_name = var.text_index_name
66 | field_mapping = {
67 | metadata_field = var.metadata_field
68 | text_field = var.text_field
69 | vector_field = var.vector_field
70 | }
71 | endpoint_service_name = var.endpoint_service_name
72 | }
73 | }
74 | knowledge_base_configuration = {
75 | type = var.kb_type
76 | vector_knowledge_base_configuration = {
77 | embedding_model_arn = local.kb_embedding_model_arn
78 | embedding_model_configuration = var.embedding_model_dimensions != null ? {
79 | bedrock_embedding_model_configuration = {
80 | dimensions = var.embedding_model_dimensions
81 | embedding_data_type = var.embedding_data_type
82 | }
83 | } : null
84 | supplemental_data_storage_configuration = var.create_supplemental_data_storage ? {
85 | supplemental_data_storage_locations = [
86 | {
87 | supplemental_data_storage_location_type = "S3"
88 | s3_location = {
89 | uri = var.supplemental_data_s3_uri
90 | }
91 | }
92 | ]
93 | } : null
94 | }
95 | }
96 | }
97 |
98 | # – OpenSearch Managed Cluster –
99 | resource "awscc_bedrock_knowledge_base" "knowledge_base_opensearch_managed" {
100 | count = var.create_opensearch_managed_config ? 1 : 0
101 | name = "${random_string.solution_prefix.result}-${var.kb_name}"
102 | description = var.kb_description
103 | role_arn = var.kb_role_arn != null ? var.kb_role_arn : aws_iam_role.bedrock_knowledge_base_role[0].arn
104 | tags = var.kb_tags
105 |
106 | storage_configuration = {
107 | type = "OPENSEARCH_MANAGED_CLUSTER"
108 | opensearch_managed_cluster_configuration = {
109 | domain_arn = var.domain_arn
110 | domain_endpoint = var.domain_endpoint
111 | vector_index_name = var.vector_index_name
112 | field_mapping = {
113 | metadata_field = var.metadata_field
114 | text_field = var.text_field
115 | vector_field = var.vector_field
116 | }
117 | }
118 | }
119 | knowledge_base_configuration = {
120 | type = var.kb_type
121 | vector_knowledge_base_configuration = {
122 | embedding_model_arn = local.kb_embedding_model_arn
123 | embedding_model_configuration = var.embedding_model_dimensions != null ? {
124 | bedrock_embedding_model_configuration = {
125 | dimensions = var.embedding_model_dimensions
126 | embedding_data_type = var.embedding_data_type
127 | }
128 | } : null
129 | supplemental_data_storage_configuration = var.create_supplemental_data_storage ? {
130 | supplemental_data_storage_locations = [
131 | {
132 | supplemental_data_storage_location_type = "S3"
133 | s3_location = {
134 | uri = var.supplemental_data_s3_uri
135 | }
136 | }
137 | ]
138 | } : null
139 | }
140 | }
141 | }
142 |
143 | # – OpenSearch Serverless –
144 | resource "awscc_bedrock_knowledge_base" "knowledge_base_opensearch" {
145 | count = var.create_opensearch_config ? 1 : 0
146 | name = "${random_string.solution_prefix.result}-${var.kb_name}"
147 | description = var.kb_description
148 | role_arn = var.kb_role_arn != null ? var.kb_role_arn : aws_iam_role.bedrock_knowledge_base_role[0].arn
149 | tags = var.kb_tags
150 |
151 | storage_configuration = {
152 | type = var.kb_storage_type
153 | opensearch_serverless_configuration = {
154 | collection_arn = var.collection_arn
155 | vector_index_name = var.vector_index_name
156 | field_mapping = {
157 | metadata_field = var.metadata_field
158 | text_field = var.text_field
159 | vector_field = var.vector_field
160 | }
161 | }
162 | }
163 | knowledge_base_configuration = {
164 | type = var.kb_type
165 | vector_knowledge_base_configuration = {
166 | embedding_model_arn = local.kb_embedding_model_arn
167 | embedding_model_configuration = var.embedding_model_dimensions != null ? {
168 | bedrock_embedding_model_configuration = {
169 | dimensions = var.embedding_model_dimensions
170 | embedding_data_type = var.embedding_data_type
171 | }
172 | } : null
173 | supplemental_data_storage_configuration = var.create_supplemental_data_storage ? {
174 | supplemental_data_storage_locations = [
175 | {
176 | supplemental_data_storage_location_type = "S3"
177 | s3_location = {
178 | uri = var.supplemental_data_s3_uri
179 | }
180 | }
181 | ]
182 | } : null
183 | }
184 | }
185 | }
186 |
187 | # – Neptune Analytics –
188 | resource "awscc_bedrock_knowledge_base" "knowledge_base_neptune_analytics" {
189 | count = var.create_neptune_analytics_config ? 1 : 0
190 | name = "${random_string.solution_prefix.result}-${var.kb_name}"
191 | description = var.kb_description
192 | role_arn = var.kb_role_arn != null ? var.kb_role_arn : aws_iam_role.bedrock_knowledge_base_role[0].arn
193 | tags = var.kb_tags
194 |
195 | storage_configuration = {
196 | type = "NEPTUNE_ANALYTICS"
197 | neptune_analytics_configuration = {
198 | graph_arn = var.graph_arn
199 | field_mapping = {
200 | metadata_field = var.metadata_field
201 | text_field = var.text_field
202 | }
203 | }
204 | }
205 | knowledge_base_configuration = {
206 | type = var.kb_type
207 | vector_knowledge_base_configuration = {
208 | embedding_model_arn = local.kb_embedding_model_arn
209 | embedding_model_configuration = var.embedding_model_dimensions != null ? {
210 | bedrock_embedding_model_configuration = {
211 | dimensions = var.embedding_model_dimensions
212 | embedding_data_type = var.embedding_data_type
213 | }
214 | } : null
215 | supplemental_data_storage_configuration = var.create_supplemental_data_storage ? {
216 | supplemental_data_storage_locations = [
217 | {
218 | supplemental_data_storage_location_type = "S3"
219 | s3_location = {
220 | uri = var.supplemental_data_s3_uri
221 | }
222 | }
223 | ]
224 | } : null
225 | }
226 | }
227 | }
228 |
229 | # – Pinecone –
230 | resource "awscc_bedrock_knowledge_base" "knowledge_base_pinecone" {
231 | count = var.create_pinecone_config ? 1 : 0
232 | name = "${random_string.solution_prefix.result}-${var.kb_name}"
233 | description = var.kb_description
234 | role_arn = var.kb_role_arn != null ? var.kb_role_arn : aws_iam_role.bedrock_knowledge_base_role[0].arn
235 | tags = var.kb_tags
236 |
237 | storage_configuration = {
238 | type = var.kb_storage_type
239 | pinecone_configuration = {
240 | connection_string = var.connection_string
241 | credentials_secret_arn = var.credentials_secret_arn
242 | field_mapping = {
243 | metadata_field = var.metadata_field
244 | text_field = var.text_field
245 | }
246 | namespace = var.namespace
247 | }
248 | }
249 | knowledge_base_configuration = {
250 | type = var.kb_type
251 | vector_knowledge_base_configuration = {
252 | embedding_model_arn = local.kb_embedding_model_arn
253 | embedding_model_configuration = var.embedding_model_dimensions != null ? {
254 | bedrock_embedding_model_configuration = {
255 | dimensions = var.embedding_model_dimensions
256 | embedding_data_type = var.embedding_data_type
257 | }
258 | } : null
259 | supplemental_data_storage_configuration = var.create_supplemental_data_storage ? {
260 | supplemental_data_storage_locations = [
261 | {
262 | supplemental_data_storage_location_type = "S3"
263 | s3_location = {
264 | uri = var.supplemental_data_s3_uri
265 | }
266 | }
267 | ]
268 | } : null
269 | }
270 | }
271 | }
272 |
273 | # – RDS –
274 | resource "awscc_bedrock_knowledge_base" "knowledge_base_rds" {
275 | count = var.create_rds_config ? 1 : 0
276 | name = "${random_string.solution_prefix.result}-${var.kb_name}"
277 | description = var.kb_description
278 | role_arn = var.kb_role_arn != null ? var.kb_role_arn : aws_iam_role.bedrock_knowledge_base_role[0].arn
279 | tags = var.kb_tags
280 |
281 | storage_configuration = {
282 | type = var.kb_storage_type
283 | rds_configuration = {
284 | credentials_secret_arn = var.credentials_secret_arn
285 | database_name = var.database_name
286 | resource_arn = var.resource_arn
287 | table_name = var.table_name
288 | field_mapping = {
289 | metadata_field = var.metadata_field
290 | primary_key_field = var.primary_key_field
291 | text_field = var.text_field
292 | vector_field = var.vector_field
293 | custom_metadata_field = var.custom_metadata_field
294 | }
295 | }
296 | }
297 | knowledge_base_configuration = {
298 | type = var.kb_type
299 | vector_knowledge_base_configuration = {
300 | embedding_model_arn = local.kb_embedding_model_arn
301 | embedding_model_configuration = var.embedding_model_dimensions != null ? {
302 | bedrock_embedding_model_configuration = {
303 | dimensions = var.embedding_model_dimensions
304 | embedding_data_type = var.embedding_data_type
305 | }
306 | } : null
307 | supplemental_data_storage_configuration = var.create_supplemental_data_storage ? {
308 | supplemental_data_storage_locations = [
309 | {
310 | supplemental_data_storage_location_type = "S3"
311 | s3_location = {
312 | uri = var.supplemental_data_s3_uri
313 | }
314 | }
315 | ]
316 | } : null
317 | }
318 | }
319 | }
320 |
321 | # – Kendra Knowledge Base –
322 |
323 | resource "awscc_bedrock_knowledge_base" "knowledge_base_kendra" {
324 | count = var.create_kendra_config ? 1 : 0
325 | name = "${random_string.solution_prefix.result}-${var.kb_name}"
326 | description = var.kb_description
327 | role_arn = var.kb_role_arn != null ? var.kb_role_arn : aws_iam_role.bedrock_knowledge_base_role[0].arn
328 | tags = var.kb_tags
329 |
330 | knowledge_base_configuration = {
331 | type = "KENDRA"
332 | kendra_knowledge_base_configuration = {
333 | kendra_index_arn = var.kendra_index_arn != null ? var.kendra_index_arn : awscc_kendra_index.genai_kendra_index[0].arn
334 | }
335 | }
336 |
337 | depends_on = [ time_sleep.wait_after_kendra_index_creation, time_sleep.wait_after_kendra_s3_data_source_creation ]
338 | }
339 |
340 | # – SQL Knowledge Base –
341 |
342 | resource "awscc_bedrock_knowledge_base" "knowledge_base_sql" {
343 | count = var.create_sql_config ? 1 : 0
344 | name = "${random_string.solution_prefix.result}-${var.kb_name}"
345 | description = var.kb_description
346 | role_arn = var.kb_role_arn != null ? var.kb_role_arn : aws_iam_role.bedrock_knowledge_base_role[0].arn
347 | tags = var.kb_tags
348 |
349 | knowledge_base_configuration = {
350 | type = "SQL"
351 | sql_knowledge_base_configuration = {
352 | type = "REDSHIFT"
353 | redshift_configuration = {
354 | query_engine_configuration = {
355 | serverless_configuration = var.sql_kb_workgroup_arn == null ? null : {
356 | workgroup_arn = var.sql_kb_workgroup_arn
357 | auth_configuration = var.serverless_auth_configuration != null ? {
358 | type = var.serverless_auth_configuration.type
359 | username_password_secret_arn = var.serverless_auth_configuration.username_password_secret_arn
360 | } : null
361 | }
362 | provisioned_configuration = var.provisioned_config_cluster_identifier == null ? null : {
363 | cluster_identifier = var.provisioned_config_cluster_identifier
364 | auth_configuration = var.provisioned_auth_configuration != null ? {
365 | type = var.provisioned_auth_configuration.type
366 | database_user = var.provisioned_auth_configuration.database_user
367 | username_password_secret_arn = var.provisioned_auth_configuration.username_password_secret_arn
368 | } : null
369 | }
370 | type = var.redshift_query_engine_type
371 | }
372 | query_generation_configuration = var.query_generation_configuration
373 | storage_configurations = var.redshift_storage_configuration
374 | }
375 | }
376 | }
377 | }
378 |
--------------------------------------------------------------------------------
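
A minimal sketch of the existing-vector-store path above, reusing a pre-provisioned
Pinecone index instead of the default OpenSearch Serverless collection (the source
address, endpoint, and secret ARN are illustrative placeholders):

module "bedrock_kb" {
  source = "aws-ia/bedrock/aws" # assumed registry address

  create_default_kb      = false
  create_pinecone_config = true
  kb_name                = "pinecone-kb"

  connection_string      = "https://example-index.svc.pinecone.io"                                 # illustrative
  credentials_secret_arn = "arn:aws:secretsmanager:us-east-1:111122223333:secret:pinecone-api-key" # illustrative
  metadata_field         = "metadata"
  text_field             = "text"
}

The same pattern applies to the MongoDB Atlas, managed OpenSearch, RDS, Kendra, and
Redshift SQL variants: each create_*_config flag selects which
awscc_bedrock_knowledge_base resource is instantiated.
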
/iam.tf:
--------------------------------------------------------------------------------
1 | # – IAM –
2 | locals {
3 | create_kb_role = var.kb_role_arn == null && local.create_kb
4 | kendra_index_id = var.create_kendra_config == true ? (var.kendra_index_id != null ? var.kendra_index_id : awscc_kendra_index.genai_kendra_index[0].id) : null
5 | kendra_data_source_bucket_arn = var.create_kendra_s3_data_source ? (var.kb_s3_data_source != null ? var.kb_s3_data_source : awscc_s3_bucket.s3_data_source[0].arn) : null
6 | action_group_names = concat(var.action_group_lambda_names_list, [var.lambda_action_group_executor])
7 | agent_role_name = var.agent_resource_role_arn != null ? split("/", var.agent_resource_role_arn)[1] : ((var.create_agent || var.create_supervisor) ? aws_iam_role.agent_role[0].name : null)
8 | kb_embedding_model_arn = replace(replace(var.kb_embedding_model_arn, "arn:aws", "arn:${local.partition}"), "us-east-1", local.region)
9 | }
10 |
11 | resource "aws_iam_role" "agent_role" {
12 | count = var.agent_resource_role_arn == null && (var.create_agent || var.create_supervisor) ? 1 : 0
13 | assume_role_policy = data.aws_iam_policy_document.agent_trust[0].json
14 | name_prefix = var.name_prefix
15 | permissions_boundary = var.permissions_boundary_arn
16 | }
17 |
18 | resource "aws_iam_role_policy" "agent_policy" {
19 | count = var.agent_resource_role_arn == null && (var.create_agent || var.create_supervisor) ? 1 : 0
20 | policy = data.aws_iam_policy_document.agent_permissions[0].json
21 | role = local.agent_role_name
22 | }
23 |
24 | resource "aws_iam_role_policy" "agent_alias_policy" {
25 | count = var.agent_resource_role_arn == null && (var.create_agent_alias || var.create_supervisor) ? 1 : 0
26 | policy = data.aws_iam_policy_document.agent_alias_permissions[0].json
27 | role = local.agent_role_name
28 | }
29 |
30 | resource "aws_iam_role_policy" "kb_policy" {
31 | count = var.agent_resource_role_arn == null && local.create_kb && var.create_agent ? 1 : 0
32 | policy = data.aws_iam_policy_document.knowledge_base_permissions[0].json
33 | role = local.agent_role_name
34 | }
35 |
36 | resource "aws_iam_role_policy" "app_inference_profile_policy" {
37 | count = var.create_app_inference_profile ? 1 : 0
38 | policy = data.aws_iam_policy_document.app_inference_profile_permission[0].json
39 | role = local.agent_role_name != null ? local.agent_role_name : aws_iam_role.application_inference_profile_role[0].id
40 | }
41 |
42 | # Define the IAM role for Amazon Bedrock Knowledge Base
43 | resource "aws_iam_role" "bedrock_knowledge_base_role" {
44 | count = var.kb_role_arn != null || (local.create_kb == false && var.create_sql_config == false) ? 0 : 1
45 | name = "AmazonBedrockExecutionRoleForKnowledgeBase-${random_string.solution_prefix.result}"
46 |
47 | assume_role_policy = jsonencode({
48 | "Version" : "2012-10-17",
49 | "Statement" : [
50 | {
51 | "Effect" : "Allow",
52 | "Principal" : {
53 | "Service" : "bedrock.amazonaws.com"
54 | },
55 | "Action" : "sts:AssumeRole"
56 | }
57 | ]
58 | })
59 | permissions_boundary = var.permissions_boundary_arn
60 | }
61 |
62 | # Attach a policy to allow necessary permissions for the Bedrock Knowledge Base
63 | resource "aws_iam_policy" "bedrock_knowledge_base_policy" {
64 | count = var.kb_role_arn != null || var.create_default_kb == false || var.create_kendra_config == true || var.create_opensearch_managed_config == true ? 0 : 1
65 | name = "AmazonBedrockKnowledgeBasePolicy-${random_string.solution_prefix.result}"
66 |
67 | policy = jsonencode({
68 | "Version" : "2012-10-17",
69 | "Statement" : [
70 | {
71 | "Effect" : "Allow",
72 | "Action" : [
73 | "aoss:APIAccessAll"
74 | ],
75 | "Resource" : module.oss_knowledgebase[0].opensearch_serverless_collection.arn
76 | },
77 | {
78 | "Effect" : "Allow",
79 | "Action" : [
80 | "bedrock:InvokeModel",
81 | ],
82 | "Resource" : local.kb_embedding_model_arn
83 | },
84 | {
85 | "Effect" : "Allow",
86 | "Action" : [
87 | "bedrock:ListFoundationModels",
88 | "bedrock:ListCustomModels"
89 | ],
90 | "Resource" : "*"
91 | },
92 | ]
93 | })
94 | }
95 |
96 | resource "aws_iam_policy" "bedrock_knowledge_base_policy_s3" {
97 | count = var.kb_role_arn != null || local.create_kb == false || var.create_s3_data_source == false ? 0 : 1
98 | name = "AmazonBedrockKnowledgeBasePolicyS3DataSource-${random_string.solution_prefix.result}"
99 |
100 | policy = jsonencode({
101 | "Version" : "2012-10-17",
102 | "Statement" : [
103 | {
104 | "Effect" : "Allow",
105 | "Action" : [
106 | "s3:ListBucket",
107 | ],
108 | "Resource" : var.kb_s3_data_source == null ? awscc_s3_bucket.s3_data_source[0].arn : var.kb_s3_data_source
109 | },
110 | {
111 | "Effect" : "Allow",
112 | "Action" : [
113 | "s3:GetObject",
114 | ],
115 | "Resource" : var.kb_s3_data_source == null ? "${awscc_s3_bucket.s3_data_source[0].arn}/*" : "${var.kb_s3_data_source}/*"
116 | }
117 | ]
118 | })
119 | }
120 |
121 | resource "aws_iam_policy" "bedrock_kb_s3_decryption_policy" {
122 | count = local.create_kb_role && var.kb_s3_data_source_kms_arn != null && var.create_s3_data_source ? 1 : 0
123 | name = "AmazonBedrockS3KMSPolicyForKnowledgeBase_${random_string.solution_prefix.result}"
124 |
125 | policy = jsonencode({
126 | "Version" : "2012-10-17",
127 | "Statement" : [
128 | {
129 | "Effect" : "Allow",
130 | "Action" : "kms:Decrypt",
131 |         "Resource" : var.kb_s3_data_source_kms_arn,
132 | "Condition" : {
133 | "StringEquals" : {
134 | "kms:ViaService" : ["s3.${data.aws_region.current.region}.amazonaws.com"]
135 | }
136 | }
137 | }
138 | ]
139 | })
140 | }
141 |
142 | # Attach the policies to the role
143 | resource "aws_iam_role_policy_attachment" "bedrock_knowledge_base_policy_attachment" {
144 | count = var.kb_role_arn != null || local.create_kb == false || var.create_kendra_config == true || var.create_opensearch_managed_config == true ? 0 : 1
145 | role = aws_iam_role.bedrock_knowledge_base_role[0].name
146 | policy_arn = aws_iam_policy.bedrock_knowledge_base_policy[0].arn
147 | }
148 |
149 | resource "aws_iam_role_policy_attachment" "bedrock_knowledge_base_kendra_policy_attachment" {
150 | count = var.kb_role_arn != null || var.create_kendra_config == false ? 0 : 1
151 | role = aws_iam_role.bedrock_knowledge_base_role[0].name
152 | policy_arn = aws_iam_policy.bedrock_kb_kendra[0].arn
153 | }
154 |
155 | resource "aws_iam_role_policy_attachment" "bedrock_knowledge_base_sql_policy_attachment" {
156 | count = var.kb_role_arn != null || var.create_sql_config == false ? 0 : 1
157 | role = aws_iam_role.bedrock_knowledge_base_role[0].name
158 | policy_arn = aws_iam_policy.bedrock_kb_sql[0].arn
159 | }
160 |
161 | resource "aws_iam_role_policy_attachment" "bedrock_knowledge_base_sql_serverless_policy_attachment" {
162 | count = var.kb_role_arn != null || var.create_sql_config == false || var.redshift_query_engine_type != "SERVERLESS" ? 0 : 1
163 | role = aws_iam_role.bedrock_knowledge_base_role[0].name
164 | policy_arn = aws_iam_policy.bedrock_kb_sql_serverless[0].arn
165 | }
166 |
167 | resource "aws_iam_role_policy_attachment" "bedrock_knowledge_base_sql_provision_policy_attachment" {
168 | count = var.kb_role_arn != null || var.create_sql_config == false || var.redshift_query_engine_type != "PROVISIONED" ? 0 : 1
169 | role = aws_iam_role.bedrock_knowledge_base_role[0].name
170 | policy_arn = aws_iam_policy.bedrock_kb_sql_provisioned[0].arn
171 | }
172 |
173 | resource "aws_iam_role_policy_attachment" "bedrock_kb_s3_decryption_policy_attachment" {
174 | count = local.create_kb_role && var.kb_s3_data_source_kms_arn != null && var.create_s3_data_source ? 1 : 0
175 | role = aws_iam_role.bedrock_knowledge_base_role[0].name
176 | policy_arn = aws_iam_policy.bedrock_kb_s3_decryption_policy[0].arn
177 | }
178 |
179 | resource "aws_iam_role_policy_attachment" "bedrock_knowledge_base_policy_s3_attachment" {
180 | count = var.kb_role_arn != null || local.create_kb == false || var.create_s3_data_source == false ? 0 : 1
181 | role = aws_iam_role.bedrock_knowledge_base_role[0].name
182 | policy_arn = aws_iam_policy.bedrock_knowledge_base_policy_s3[0].arn
183 | }
184 |
185 | resource "aws_iam_role_policy_attachment" "bedrock_knowledge_base_opensearch_managed_policy_attachment" {
186 | count = var.kb_role_arn != null || var.create_opensearch_managed_config == false ? 0 : 1
187 | role = aws_iam_role.bedrock_knowledge_base_role[0].name
188 | policy_arn = aws_iam_policy.bedrock_kb_opensearch_managed[0].arn
189 | }
190 |
191 | resource "aws_iam_role_policy" "bedrock_kb_oss" {
192 | count = var.kb_role_arn != null || var.create_default_kb == false ? 0 : 1
193 | name = "AmazonBedrockOSSPolicyForKnowledgeBase_${var.kb_name}"
194 | role = aws_iam_role.bedrock_knowledge_base_role[count.index].name
195 | policy = jsonencode({
196 | Version = "2012-10-17"
197 | Statement = [
198 | {
199 | Action = ["aoss:*"]
200 | Effect = "Allow"
201 | Resource = ["arn:${local.partition}:aoss:${local.region}:${local.account_id}:*/*"]
202 | }
203 | ]
204 | })
205 | }
206 |
207 | resource "aws_iam_policy" "bedrock_kb_opensearch_managed" {
208 | count = var.kb_role_arn != null || var.create_opensearch_managed_config == false ? 0 : 1
209 | name = "AmazonBedrockOpenSearchManagedPolicyForKnowledgeBase_${var.kb_name}"
210 |
211 | policy = jsonencode({
212 | Version = "2012-10-17"
213 | Statement = [
214 | {
215 | Action = [
216 | "es:ESHttpGet",
217 | "es:ESHttpPost",
218 | "es:ESHttpPut",
219 | "es:ESHttpDelete",
220 | "es:DescribeDomain"
221 | ]
222 | Effect = "Allow"
223 | Resource = [
224 | var.domain_arn,
225 | "${var.domain_arn}/*"
226 | ]
227 | }
228 | ]
229 | })
230 | }
231 |
232 | # Guardrails Policies
233 |
234 | resource "aws_iam_role_policy" "guardrail_policy" {
235 | count = var.create_guardrail && var.create_agent ? 1 : 0
236 | policy = jsonencode({
237 | Version = "2012-10-17"
238 | Statement = [
239 | {
240 | Effect = "Allow"
241 | Action = [
242 | "bedrock:ApplyGuardrail",
243 | ]
244 | Resource = awscc_bedrock_agent.bedrock_agent[0].guardrail_configuration.guardrail_identifier
245 | }
246 | ]
247 | })
248 | role = aws_iam_role.agent_role[0].id
249 | }
250 |
251 | resource "aws_iam_role_policy" "guardrail_policy_supervisor_agent" {
252 | count = var.create_collaborator && var.create_supervisor_guardrail ? 1 : 0
253 | policy = jsonencode({
254 | Version = "2012-10-17"
255 | Statement = [
256 | {
257 | Effect = "Allow"
258 | Action = [
259 | "bedrock:ApplyGuardrail",
260 | ]
261 | Resource = aws_bedrockagent_agent.agent_supervisor[0].guardrail_configuration[0].guardrail_identifier
262 | }
263 | ]
264 | })
265 | role = aws_iam_role.agent_role[0].id
266 | }
267 |
268 |
269 | # Action Group Policies
270 |
271 | resource "aws_lambda_permission" "allow_bedrock_agent" {
272 | count = var.create_ag ? length(local.action_group_names) : 0
273 | action = "lambda:InvokeFunction"
274 | function_name = local.action_group_names[count.index]
275 | principal = "bedrock.amazonaws.com"
276 | source_arn = awscc_bedrock_agent.bedrock_agent[0].agent_arn
277 | }
278 |
279 | resource "aws_iam_role_policy" "action_group_policy" {
280 | count = var.create_ag ? 1 : 0
281 | policy = jsonencode({
282 | Version = "2012-10-17"
283 | Statement = [
284 | {
285 | Effect = "Allow"
286 | Action = "lambda:InvokeModel"
287 | Resource = concat([var.lambda_action_group_executor], var.action_group_lambda_arns_list)
288 | }
289 | ]
290 | })
291 | role = aws_iam_role.agent_role[0].id
292 | }
293 |
294 | # Application Inference Profile Policies
295 |
296 | # Define the IAM role for Application Inference Profile
297 | resource "aws_iam_role" "application_inference_profile_role" {
298 | count = var.create_app_inference_profile || var.use_app_inference_profile ? 1 : 0
299 | name = "ApplicationInferenceProfile-${random_string.solution_prefix.result}"
300 |
301 | assume_role_policy = jsonencode({
302 | "Version" : "2012-10-17",
303 | "Statement" : [
304 | {
305 | "Effect" : "Allow",
306 | "Principal" : {
307 | "Service" : "bedrock.amazonaws.com"
308 | },
309 | "Action" : "sts:AssumeRole"
310 | }
311 | ]
312 | })
313 | permissions_boundary = var.permissions_boundary_arn
314 | }
315 |
316 | resource "aws_iam_role_policy" "app_inference_profile_role_policy" {
317 | count = var.create_app_inference_profile || var.use_app_inference_profile ? 1 : 0
318 | policy = jsonencode({
319 | "Version": "2012-10-17",
320 | "Statement": [
321 | {
322 | "Effect": "Allow",
323 | "Action": [
324 | "bedrock:InvokeModel*",
325 | "bedrock:CreateInferenceProfile"
326 | ],
327 | "Resource": [
328 | "arn:${local.partition}:bedrock:*::foundation-model/*",
329 | "arn:${local.partition}:bedrock:*:*:inference-profile/*",
330 | "arn:${local.partition}:bedrock:*:*:application-inference-profile/*"
331 | ]
332 | },
333 | {
334 | "Effect": "Allow",
335 | "Action": [
336 | "bedrock:GetInferenceProfile",
337 | "bedrock:ListInferenceProfiles",
338 | "bedrock:DeleteInferenceProfile",
339 | "bedrock:TagResource",
340 | "bedrock:UntagResource",
341 | "bedrock:ListTagsForResource"
342 | ],
343 | "Resource": [
344 | "arn:${local.partition}:bedrock:*:*:inference-profile/*",
345 | "arn:${local.partition}:bedrock:*:*:application-inference-profile/*"
346 | ]
347 | }
348 | ]
349 | })
350 | role = aws_iam_role.application_inference_profile_role[0].id
351 | }
352 |
353 | # Custom model
354 |
355 | resource "aws_iam_role" "custom_model_role" {
356 | count = var.create_custom_model ? 1 : 0
357 | assume_role_policy = data.aws_iam_policy_document.custom_model_trust[0].json
358 | permissions_boundary = var.permissions_boundary_arn
359 | name_prefix = "CustomModelRole"
360 | }
361 |
362 | resource "aws_iam_role_policy" "custom_model_policy" {
363 | count = var.create_custom_model ? 1 : 0
364 | policy = jsonencode({
365 | "Version": "2012-10-17",
366 | "Statement": [
367 | {
368 | "Effect": "Allow",
369 | "Action": [
370 | "s3:GetObject",
371 | "s3:PutObject",
372 | "s3:ListBucket",
373 | "kms:Decrypt"
374 | ],
375 | "Resource": [
376 | "arn:${local.partition}:s3:::${var.custom_model_training_uri}",
377 | "arn:${local.partition}:s3:::${var.custom_model_training_uri}/*",
378 | ],
379 | "Condition": {
380 | "StringEquals": {
381 | "aws:PrincipalAccount": local.account_id
382 | }
383 | }
384 | },
385 | {
386 | "Effect": "Allow",
387 | "Action": [
388 | "s3:GetObject",
389 | "s3:PutObject",
390 | "s3:ListBucket",
391 | "kms:Decrypt"
392 | ],
393 | "Resource": var.custom_model_output_uri == null ? "arn:${local.partition}:s3:::${awscc_s3_bucket.custom_model_output[0].id}/" : "arn:${local.partition}:s3:::${var.custom_model_output_uri}",
394 |
395 | "Condition": {
396 | "StringEquals": {
397 | "aws:PrincipalAccount": local.account_id
398 | }
399 | }
400 | },
401 | {
402 | "Effect": "Allow",
403 | "Action": [
404 | "s3:GetObject",
405 | "s3:PutObject",
406 | "s3:ListBucket",
407 | "kms:Decrypt"
408 | ],
409 | "Resource": var.custom_model_output_uri == null ? "arn:${local.partition}:s3:::${awscc_s3_bucket.custom_model_output[0].id}/*" : "arn:${local.partition}:s3:::${var.custom_model_output_uri}/*",
410 | "Condition": {
411 | "StringEquals": {
412 | "aws:PrincipalAccount": local.account_id
413 | }
414 | }
415 | },
416 | ]
417 | })
418 | role = aws_iam_role.custom_model_role[0].id
419 | }
420 |
421 | # Kendra IAM
422 | resource "aws_iam_policy" "bedrock_kb_kendra" {
423 | count = var.kb_role_arn != null || var.create_kendra_config == false ? 0 : 1
424 | name = "AmazonBedrockKnowledgeBaseKendraIndexAccessStatement_${var.kendra_index_name}"
425 |
426 | policy = jsonencode({
427 | "Version" = "2012-10-17"
428 | "Statement" = [
429 | {
430 | "Action" = [
431 | "kendra:Retrieve",
432 | "kendra:DescribeIndex"
433 | ]
434 | "Effect" = "Allow"
435 | "Resource" = ["arn:${local.partition}:kendra:${local.region}:${local.account_id}:index/${local.kendra_index_id}"]
436 | }
437 | ]
438 | })
439 | }
440 |
441 | resource "awscc_iam_role" "kendra_index_role" {
442 | count = var.create_kendra_config && var.kendra_index_arn == null ? 1 : 0
443 | role_name = "kendra_index_role_${random_string.solution_prefix.result}"
444 | description = "Role assigned to the Kendra index"
445 | assume_role_policy_document = jsonencode({
446 | Version = "2012-10-17"
447 | Statement = [
448 | {
449 | Action = "sts:AssumeRole"
450 | Effect = "Allow"
451 | Principal = {
452 | Service = "kendra.amazonaws.com"
453 | }
454 | }
455 | ]
456 | })
457 | }
458 |
459 | resource "awscc_iam_role_policy" "kendra_role_policy" {
460 | count = var.create_kendra_config && var.kendra_index_arn == null ? 1 : 0
461 | policy_name = "kendra_role_policy"
462 | role_name = awscc_iam_role.kendra_index_role[0].id
463 |
464 | policy_document = jsonencode({
465 | Version = "2012-10-17"
466 | Statement = [
467 | {
468 | Effect = "Allow"
469 | Action = "cloudwatch:PutMetricData"
470 | Resource = "*"
471 | Condition = {
472 | "StringEquals" : {
473 | "cloudwatch:namespace" : "AWS/Kendra"
474 | }
475 | }
476 | },
477 | {
478 | Effect = "Allow"
479 | Action = "logs:DescribeLogGroups"
480 | Resource = "*"
481 | },
482 | {
483 | Effect = "Allow"
484 | Action = "logs:CreateLogGroup",
485 | Resource = "arn:${local.partition}:logs:${local.region}:${data.aws_caller_identity.current.account_id}:log-group:/aws/kendra/*"
486 | },
487 | {
488 | Effect = "Allow"
489 | Action = [
490 | "logs:DescribeLogStreams",
491 | "logs:CreateLogStream",
492 | "logs:PutLogEvents"
493 | ],
494 | Resource = "arn:${local.partition}:logs:${local.region}:${data.aws_caller_identity.current.account_id}:log-group:/aws/kendra/*:log-stream:*"
495 | }
496 | ]
497 | })
498 | }
499 |
500 |
501 | # Create IAM role for Kendra Data Source
502 | resource "awscc_iam_role" "kendra_s3_datasource_role" {
503 | count = var.create_kendra_s3_data_source ? 1 : 0
504 | assume_role_policy_document = jsonencode({
505 | Version = "2012-10-17"
506 | Statement = [
507 | {
508 | Action = "sts:AssumeRole"
509 | Effect = "Allow"
510 | Principal = {
511 | Service = "kendra.amazonaws.com"
512 | }
513 | }
514 | ]
515 | })
516 | description = "IAM role for Kendra Data Source"
517 | path = "/"
518 | role_name = "kendra-datasource-role"
519 |
520 | policies = [
521 | {
522 | policy_name = "kendra-datasource-policy"
523 | policy_document = jsonencode({
524 | Version = "2012-10-17"
525 | Statement = [
526 | {
527 | Effect = "Allow"
528 | Action = [
529 | "s3:GetObject",
530 | "s3:ListBucket"
531 | ]
532 | Resource = [
533 | local.kendra_data_source_bucket_arn,
534 | "${local.kendra_data_source_bucket_arn}/*"
535 | ]
536 | },
537 | {
538 |             Effect = "Allow"
539 |             Action = [
540 |               "kendra:BatchPutDocument",
541 |               "kendra:BatchDeleteDocument"
542 |             ]
543 |             Resource = "arn:${local.partition}:kendra:${local.region}:${local.account_id}:index/${local.kendra_index_id}"
544 | }
545 | ]
546 | })
547 | }
548 | ]
549 | }
550 |
551 | # SQL Knowledge Base IAM
552 | resource "aws_iam_policy" "bedrock_kb_sql" {
553 | count = var.kb_role_arn != null || var.create_sql_config == false ? 0 : 1
554 | name = "AmazonBedrockKnowledgeBaseRedshiftStatement_${var.kb_name}"
555 |
556 | policy = jsonencode({
557 | "Version": "2012-10-17",
558 | "Statement": [
559 | {
560 | "Sid": "RedshiftDataAPIStatementPermissions",
561 | "Effect": "Allow",
562 | "Action": [
563 | "redshift-data:GetStatementResult",
564 | "redshift-data:DescribeStatement",
565 | "redshift-data:CancelStatement"
566 | ],
567 | "Resource": [
568 | "*"
569 | ],
570 | "Condition": {
571 | "StringEquals": {
572 | "redshift-data:statement-owner-iam-userid": "$${aws:userid}"
573 | }
574 | }
575 | },
576 | {
577 | "Sid": "SqlWorkbenchAccess",
578 | "Effect": "Allow",
579 | "Action": [
580 | "sqlworkbench:GetSqlRecommendations",
581 | "sqlworkbench:PutSqlGenerationContext",
582 | "sqlworkbench:GetSqlGenerationContext",
583 | "sqlworkbench:DeleteSqlGenerationContext"
584 | ],
585 | "Resource": "*"
586 | },
587 | {
588 | "Sid": "KbAccess",
589 | "Effect": "Allow",
590 | "Action": [
591 | "bedrock:GenerateQuery"
592 | ],
593 | "Resource": "*"
594 | }
595 | ]
596 | })
597 | }
598 |
599 |
600 | resource "aws_iam_policy" "bedrock_kb_sql_serverless" {
601 | count = var.kb_role_arn != null || var.create_sql_config == false || var.redshift_query_engine_type != "SERVERLESS" ? 0 : 1
602 | name = "AmazonBedrockKnowledgeBaseRedshiftServerlessStatement_${var.kb_name}"
603 |
604 | policy = jsonencode({
605 | "Version": "2012-10-17",
606 | "Statement": [
607 |
608 | {
609 | "Sid": "RedshiftDataAPIExecutePermissions",
610 | "Effect": "Allow",
611 | "Action": [
612 | "redshift-data:ExecuteStatement"
613 | ],
614 | "Resource": [
615 | "arn:${local.partition}:redshift-serverless:${local.region}:${local.account_id}:workgroup:${split("/", var.sql_kb_workgroup_arn)[1]}"
616 | ]
617 | },
618 | {
619 | "Sid": "RedshiftServerlessGetCredentials",
620 | "Effect": "Allow",
621 | "Action": "redshift-serverless:GetCredentials",
622 | "Resource": [
623 | "arn:${local.partition}:redshift-serverless:${local.region}:${local.account_id}:workgroup:${split("/", var.sql_kb_workgroup_arn)[1]}"
624 | ]
625 | }
626 | ]
627 | })
628 | }
629 |
630 |
631 | resource "aws_iam_policy" "bedrock_kb_sql_provisioned" {
632 | count = var.kb_role_arn != null || var.create_sql_config == false || var.redshift_query_engine_type != "PROVISIONED" ? 0 : 1
633 | name = "AmazonBedrockKnowledgeBaseRedshiftProvisionedStatement_${var.kb_name}"
634 |
635 | policy = jsonencode({
636 | "Version": "2012-10-17",
637 | "Statement": [
638 | {
639 | "Sid": "RedshiftDataAPIExecutePermissions",
640 | "Effect": "Allow",
641 | "Action": [
642 | "redshift-data:ExecuteStatement"
643 | ],
644 | "Resource": [
645 | "arn:${local.partition}:redshift:${local.region}:${local.account_id}:cluster:${var.provisioned_config_cluster_identifier}"
646 | ]
647 | },
648 | {
649 | "Sid": "GetCredentialsWithFederatedIAMCredentials",
650 | "Effect": "Allow",
651 | "Action": "redshift:GetClusterCredentialsWithIAM",
652 | "Resource": [
653 | "arn:${local.partition}:redshift:${local.region}:${local.account_id}:dbname:${var.provisioned_config_cluster_identifier}/*"
654 | ]
655 | }
656 | ]
657 | })
658 | }
659 |
--------------------------------------------------------------------------------
/.header.md:
--------------------------------------------------------------------------------
1 | # Terraform Bedrock Module
2 |
3 | Amazon Bedrock is a fully managed service that offers a choice of foundation models (FMs) along with a broad set of capabilities for building generative AI applications.
4 |
5 | This module includes resources to deploy Bedrock features.
6 |
7 | You can control which features to use with your input variables. The resources are created based on boolean logic. The default behavior is to deploy a Bedrock Agent; to disable this behavior, set `create_agent` to `false`. To deploy other features such as guardrails or knowledge bases, use the input variables to set their respective create booleans to `true` and then pass in the appropriate values.
8 |
9 | The main features of the Bedrock module include:
10 |
11 | - Bedrock Agents
12 | - Agent Action Groups
13 | - Agent Alias
14 | - Agent Collaborators
15 | - Knowledge Bases
16 | - Vector Knowledge Base (OpenSearch Serverless, Neptune Analytics, MongoDB Atlas, Pinecone, RDS)
17 | - Kendra Knowledge Base
18 | - SQL Knowledge Base
19 | - Guardrails
20 | - Prompt Management
21 | - Prompt Versions
22 | - Application Inference Profiles
23 | - Custom Models
24 | - Bedrock Data Automation
25 |
26 | ## Bedrock Agents
27 |
28 | Enable generative AI applications to execute multistep tasks across company systems and data sources.
29 |
30 | ### Create an Agent
31 |
32 | The following example creates an Agent, where you must define, at a minimum, the desired foundation model and the instruction for the agent.
33 |
34 | ```hcl
35 | module "bedrock" {
36 | source = "aws-ia/bedrock/aws"
37 | version = "0.0.31"
38 | foundation_model = "anthropic.claude-v2"
39 |   instruction = "You are an automotive assistant who can provide detailed information about cars to a customer."
40 | }
41 | ```
42 |
43 | See the additional input variables for deploying an Agent [here](https://github.com/aws-ia/terraform-aws-bedrock/blob/12b2681ce9a0ee5c7acd6d44289e5e1b98203a8a/variables.tf#L7)
44 |
45 | ### Action Groups
46 |
47 | An action group defines functions your agent can call. The functions are Lambda functions. The action group uses an OpenAPI schema to tell the agent what your functions do and how to call them. You can configure an action group by setting `create_ag` to `true` and passing in the appropriate input variables. You can see an example of an agent being deployed with an action group in [this samples repository](https://github.com/aws-samples/aws-generative-ai-terraform-samples/blob/main/samples/bedrock-agent/main.tf).
48 |
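For orientation, a minimal sketch of an agent with an action group might look like the following; apart from `create_ag`, the input names (`action_group_name`, `lambda_action_group_executor`, `api_schema_payload`) and the referenced Lambda function and schema file are illustrative assumptions, so confirm the exact variable names in variables.tf:

```hcl
module "bedrock" {
  source           = "aws-ia/bedrock/aws"
  version          = "0.0.31"
  foundation_model = "anthropic.claude-v2"
  instruction      = "You are an automotive assistant who can provide detailed information about cars to a customer."

  # Action group wiring (input names below are illustrative)
  create_ag                    = true
  action_group_name            = "car-inventory"
  lambda_action_group_executor = aws_lambda_function.inventory.arn # assumes an existing Lambda function
  api_schema_payload           = file("${path.module}/schemas/inventory-openapi.yaml")
}
```
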
49 | ### Prepare the Agent
50 |
51 | The Agent resource takes an optional parameter, `shouldPrepareAgent`, to indicate that the Agent should be prepared after any updates to the agent, a Knowledge Base association, or an action group. Preparing the agent may increase the time needed to create and update those resources. By default, this value is `true`.
52 |
53 | ### Prompt Overrides
54 |
55 | Bedrock Agents allows you to customize the prompts and LLM configuration for its different steps. You can disable steps or create a new prompt template. Prompt templates can be inserted from plain text files.
56 |
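As a sketch only (the `prompt_override_configuration` input and the attribute names inside it are assumptions for illustration, not confirmed module inputs; check variables.tf), loading a custom orchestration template from a plain text file could look like:

```hcl
module "bedrock" {
  source           = "aws-ia/bedrock/aws"
  version          = "0.0.31"
  foundation_model = "anthropic.claude-v2"
  instruction      = "You are an automotive assistant who can provide detailed information about cars to a customer."

  # Hypothetical override block; the input name and attributes are illustrative
  prompt_override_configuration = {
    prompt_configurations = [{
      prompt_type          = "ORCHESTRATION"
      prompt_state         = "ENABLED"
      prompt_creation_mode = "OVERRIDDEN"
      base_prompt_template = file("${path.module}/prompts/orchestration.txt")
    }]
  }
}
```
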
57 | ### Agent Alias
58 |
59 | After you have sufficiently iterated on your working draft and are satisfied with the behavior of your agent, you can set it up for deployment and integration into your application by creating aliases of your agent.
60 |
61 | To deploy your agent, you need to create an alias. During alias creation, Amazon Bedrock automatically creates a version of your agent. The alias points to this newly created version. You can point the alias to a previously created version if necessary. You then configure your application to make API calls to that alias.
62 |
63 | By default, the Agent resource does not create any aliases, and you can use the 'DRAFT' version.
64 |
65 | You can create an Agent Alias by setting `create_agent_alias` to `true`.
66 |
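For example, a minimal sketch (`agent_alias_name` is an assumed input name; verify it in variables.tf):

```hcl
module "bedrock" {
  source             = "aws-ia/bedrock/aws"
  version            = "0.0.31"
  foundation_model   = "anthropic.claude-v2"
  instruction        = "You are an automotive assistant who can provide detailed information about cars to a customer."
  create_agent_alias = true
  agent_alias_name   = "prod" # assumed input name
}
```
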
67 | See the additional input variables for deploying an Agent Alias [here](https://github.com/aws-ia/terraform-aws-bedrock/blob/12b2681ce9a0ee5c7acd6d44289e5e1b98203a8a/variables.tf#L183)
68 |
69 | ### Agent Collaborators
70 |
71 | Multi-agent collaboration in Amazon Bedrock enables you to create teams of specialized agents that work together to solve complex tasks. You can designate a supervisor agent to coordinate with collaborator agents, each optimized for specific functions.
72 |
73 | To set up agent collaboration, you'll need:
74 |
75 | - A supervisor agent that coordinates the team
76 | - One or more collaborator agents with specialized capabilities
77 | - Collaboration instructions that define when each agent should be used
78 |
79 | Example configuration with a supervisor agent and a collaborator agent:
80 |
81 | ```hcl
82 | module "bedrock" {
83 | source = "aws-ia/bedrock/aws"
84 | version = "0.0.31"
85 | create_agent_alias = true
86 | foundation_model = "anthropic.claude-3-5-sonnet-20241022-v2:0"
87 | instruction = "You are an agent. Do what the supervisor tells you to do"
88 |
89 | # Setting up the collaboration
90 | create_collaborator = true
91 | collaboration_instruction = "Tell the other agent what to do"
92 | supervisor_model = "anthropic.claude-3-5-sonnet-20241022-v2:0"
93 | supervisor_instruction = "You are a supervisor who can provide detailed information about cars to an agent."
94 | }
95 | ```
96 |
97 | See the additional input variables for deploying Agent Collaborators [here](https://github.com/aws-ia/terraform-aws-bedrock/blob/12b2681ce9a0ee5c7acd6d44289e5e1b98203a8a/variables.tf#L221)
98 |
99 | ## Knowledge Bases
100 |
101 | With Knowledge Bases for Amazon Bedrock, you can give FMs and agents contextual information from your company's private data sources for Retrieval Augmented Generation (RAG) to deliver more relevant, accurate, and customized responses.
102 |
103 | ### Create a Vector Knowledge Base
104 |
105 | A vector index on a vector store is required to create a vector Knowledge Base. This module supports multiple vector store options:
106 |
107 | - **Amazon OpenSearch Serverless**: Default option with automatic collection and index creation
108 | - **Amazon OpenSearch Managed Cluster**: For using existing OpenSearch domains
109 | - **Neptune Analytics**: For graph database integration
110 | - **MongoDB Atlas**: For MongoDB vector search
111 | - **Pinecone**: For Pinecone vector database
112 | - **Amazon RDS Aurora PostgreSQL**: For PostgreSQL with pgvector
113 |
114 | By default, this resource will create an OpenSearch Serverless vector collection and index for each Knowledge Base you create, but you can provide an existing collection for more control. For the other vector stores, you must have already created the store and stored its credentials in AWS Secrets Manager.
115 |
116 | The resource accepts an `instruction` input that is provided to any Bedrock Agent it is associated with so the agent can decide when to query the Knowledge Base.
117 |
118 | To create different types of knowledge bases, set the appropriate variable to `true`:
119 |
120 | - OpenSearch Serverless: `create_default_kb = true`
121 | - OpenSearch Managed Cluster: `create_opensearch_managed_config = true`
122 | - Neptune Analytics: `create_neptune_analytics_config = true`
123 | - MongoDB Atlas: `create_mongo_config = true`
124 | - Pinecone: `create_pinecone_config = true`
125 | - RDS: `create_rds_config = true`
126 |
127 | #### Advanced Vector Knowledge Base Features
128 |
129 | This module supports advanced vector knowledge base features:
130 |
131 | - **Embedding Model Configuration**: Fine-tune your embedding model settings with:
132 | - `embedding_model_dimensions`: Specify vector dimensions explicitly
133 | - `embedding_data_type`: Define the data type for vectors
134 |
135 | - **Supplemental Data Storage**: Store additional data alongside vector embeddings:
136 | - `create_supplemental_data_storage = true`
137 | - `supplemental_data_s3_uri`: S3 URI for supplemental data storage
138 |
139 | Example of the default OpenSearch Serverless Agent with a Knowledge Base:
140 |
141 | ```hcl
142 | provider "opensearch" {
143 | url = module.bedrock.default_collection.collection_endpoint
144 | healthcheck = false
145 | }
146 |
147 | module "bedrock" {
148 | source = "aws-ia/bedrock/aws"
149 | version = "0.0.31"
150 | create_default_kb = true
151 | create_s3_data_source = true
152 | foundation_model = "anthropic.claude-v2"
153 |   instruction = "You are an automotive assistant who can provide detailed information about cars to a customer."
154 | }
155 | ```
156 |
157 | Example using Neptune Analytics with advanced features:
158 |
159 | ```hcl
160 | module "bedrock" {
161 | source = "aws-ia/bedrock/aws"
162 | version = "0.0.31"
163 |
164 | # Create Neptune Analytics knowledge base
165 | create_neptune_analytics_config = true
166 | graph_arn = "arn:aws:neptune-graph:us-east-1:123456789012:graph/my-graph"
167 |
168 | # Advanced embedding model configuration
169 | embedding_model_dimensions = 1024
170 | embedding_data_type = "FLOAT32"
171 |
172 | # Supplemental data storage
173 | create_supplemental_data_storage = true
174 | supplemental_data_s3_uri = "s3://my-bucket/supplemental-data/"
175 |
176 | # Agent configuration
177 | foundation_model = "anthropic.claude-3-sonnet-20240229-v1:0"
178 | instruction = "You are a graph database expert who can analyze relationships in data."
179 | }
180 | ```
181 |
182 | See the additional input variables for deploying Knowledge Bases [here](https://github.com/aws-ia/terraform-aws-bedrock/blob/12b2681ce9a0ee5c7acd6d44289e5e1b98203a8a/variables.tf#L693)
183 |
184 | ### Vector Knowledge Base - Data Sources
185 |
186 | Data sources are the repositories or systems from which information is extracted and ingested into the knowledge base. These sources provide the raw content that will be processed, indexed, and made available for querying. Data sources can include document management systems, databases, file storage systems, and content management platforms. Supported data sources include Amazon S3 buckets, web crawlers, SharePoint sites, Salesforce instances, and Confluence spaces.
187 |
188 | - Amazon S3. You can either use an existing data source by passing its ARN to the input variable `kb_s3_data_source` or create a new one by setting `create_s3_data_source` to true.
189 |
190 | - Web Crawler. You can create a new web crawler data source by setting the `create_web_crawler` input variable to true and passing in the necessary variables for urls, scope, etc. (see the sketch after this list).
191 |
192 | - SharePoint. You can create a new SharePoint data source by setting the `create_sharepoint` input variable to true and passing in the necessary variables for site urls, filter patterns, etc.
193 |
194 | - Salesforce. You can create a new Salesforce data source by setting the `create_salesforce` input variable to true and passing in the necessary variables for site urls, filter patterns, etc.
195 |
196 | - Confluence. You can create a new Confluence data source by setting the `create_confluence` input variable to true and passing in the necessary variables for site urls, filter patterns, etc.
197 |
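A sketch of a web crawler data source alongside the default knowledge base; `create_default_kb` and `create_web_crawler` come from this document, while the URL and scope input names are illustrative, so confirm them in variables.tf:

```hcl
module "bedrock" {
  source            = "aws-ia/bedrock/aws"
  version           = "0.0.31"
  create_default_kb = true
  foundation_model  = "anthropic.claude-v2"
  instruction       = "You are an automotive assistant who can provide detailed information about cars to a customer."

  # Web crawler data source (URL/scope input names are illustrative)
  create_web_crawler = true
  seed_urls          = [{ url = "https://www.example.com" }]
  crawl_scope        = "HOST_ONLY"
}
```
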
198 | See the additional input variables for deploying Knowledge Base Data Sources [here](https://github.com/aws-ia/terraform-aws-bedrock/blob/12b2681ce9a0ee5c7acd6d44289e5e1b98203a8a/variables.tf#L423)
199 |
200 | ### Create a Kendra Knowledge Base
201 |
202 | With Amazon Bedrock Knowledge Bases, you can build a knowledge base from an Amazon Kendra GenAI index to create more sophisticated and accurate Retrieval Augmented Generation (RAG)-powered digital assistants. By combining an Amazon Kendra GenAI index with Amazon Bedrock Knowledge Bases, you can:
203 |
204 | - Reuse your indexed content across multiple Amazon Bedrock applications without rebuilding indexes or re-ingesting data.
205 | - Leverage the advanced GenAI capabilities of Amazon Bedrock while benefiting from the high-accuracy information retrieval of Amazon Kendra.
206 | - Customize your digital assistant's behavior using the tools of Amazon Bedrock while maintaining the semantic accuracy of an Amazon Kendra GenAI index.
207 |
208 | Example Kendra Knowledge Base:
209 |
210 | ```hcl
211 | module "bedrock" {
212 | source = "aws-ia/bedrock/aws"
213 | version = "0.0.31"
214 | create_kendra_config = true
215 | create_kendra_s3_data_source = true
216 | create_agent = false
217 | }
218 | ```
219 |
220 | See the additional input variables for deploying a Kendra Knowledge Base [here](https://github.com/aws-ia/terraform-aws-bedrock/blob/12b2681ce9a0ee5c7acd6d44289e5e1b98203a8a/variables.tf#L1199)
221 |
222 | ### Create a SQL Knowledge Base
223 |
224 | Amazon Bedrock Knowledge Bases provides direct integration with structured data stores, allowing natural language queries to be automatically converted into SQL queries for data retrieval. This feature enables you to query your structured data sources without the need for vector embeddings or data preprocessing. A configuration sketch follows the capability list below.
225 |
226 | - Amazon Bedrock Knowledge Bases analyzes:
227 | - Query patterns
228 | - Query history
229 | - Schema metadata
230 | - Converts natural language queries into SQL
231 | - Retrieves relevant information directly from supported data sources
232 |
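A minimal sketch of a Redshift Serverless configuration; `create_sql_config`, `kb_name`, `redshift_query_engine_type`, and `sql_kb_workgroup_arn` match the variables this module's IAM policies reference, while the workgroup ARN and any additional authentication or database inputs are illustrative, so check variables.tf for the full set:

```hcl
module "bedrock" {
  source       = "aws-ia/bedrock/aws"
  version      = "0.0.31"
  create_agent = false

  # SQL knowledge base backed by a Redshift Serverless workgroup
  create_sql_config          = true
  kb_name                    = "sql-kb"
  redshift_query_engine_type = "SERVERLESS"
  sql_kb_workgroup_arn       = "arn:aws:redshift-serverless:us-east-1:123456789012:workgroup/example-workgroup-id"
}
```
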
233 | See the additional input variables for deploying a SQL Knowledge Base [here](https://github.com/aws-ia/terraform-aws-bedrock/blob/12b2681ce9a0ee5c7acd6d44289e5e1b98203a8a/variables.tf#L1398)
234 |
235 | ### Using an Existing Knowledge Base
236 |
237 | If you already have an Amazon Bedrock Knowledge Base created and want to attach it to a Bedrock Agent using this module, you can configure the module to reference the existing resource instead of creating a new one.
238 |
239 | #### Configuration
240 |
241 | To use an existing Knowledge Base:
242 |
243 | ```hcl
244 | module "bedrock_agent" {
245 | source = "aws-ia/bedrock/aws"
246 | version = "0.0.31"
247 | # ID of the existing Knowledge Base
248 | existing_kb = "kb-abc123" # Required
249 | kb_state = "ENABLED"
250 | # ... other required variables
251 | }
252 | ```
253 |
254 | #### Notes
255 |
256 | - `existing_kb`: The Knowledge Base ID (e.g., `kb-abc123`) that you want to attach to the Bedrock Agent.
257 |
258 | - `kb_state`: Set this to the current state of the KB (typically `"ENABLED"`).
259 |
260 | ## Bedrock Guardrails
261 |
262 | Amazon Bedrock's Guardrails feature enables you to implement robust governance and control mechanisms for your generative AI applications, ensuring alignment with your specific use cases and responsible AI policies. Guardrails empowers you to create multiple tailored policy configurations, each designed to address the unique requirements and constraints of different use cases. These policy configurations can then be seamlessly applied across multiple foundation models (FMs) and Agents, ensuring a consistent user experience and standardizing safety, security, and privacy controls throughout your generative AI ecosystem.
263 |
264 | With Guardrails, you can define and enforce granular, customizable policies to precisely govern the behavior of your generative AI applications. You can configure the following policies in a guardrail to avoid undesirable and harmful content and remove sensitive information for privacy protection.
265 |
266 | Content filters – Adjust filter strengths to block input prompts or model responses containing harmful content.
267 |
268 | Denied topics – Define a set of topics that are undesirable in the context of your application. These topics will be blocked if detected in user queries or model responses.
269 |
270 | Word filters – Configure filters to block undesirable words, phrases, and profanity. Such words can include offensive terms, competitor names, etc.
271 |
272 | Sensitive information filters – Block or mask sensitive information such as personally identifiable information (PII) or custom regex in user inputs and model responses.
273 |
274 | You can create a Guardrail by setting `create_guardrail` to true and passing in the appropriate input variables:
275 |
276 | ```hcl
277 | module "bedrock" {
278 | source = "aws-ia/bedrock/aws"
279 | version = "0.0.31"
280 | create_guardrail = true
281 | blocked_input = "I can provide general info about services, but can't fully address your request here. For personalized help or detailed questions, please contact our customer service team directly. For security reasons, avoid sharing sensitive information through this channel. If you have a general product question, feel free to ask without including personal details."
282 | blocked_output = "I can provide general info about services, but can't fully address your request here. For personalized help or detailed questions, please contact our customer service team directly. For security reasons, avoid sharing sensitive information through this channel. If you have a general product question, feel free to ask without including personal details."
283 | filters_config = [
284 | {
285 | input_strength = "MEDIUM"
286 | output_strength = "MEDIUM"
287 | type = "HATE"
288 | },
289 | {
290 | input_strength = "HIGH"
291 | output_strength = "HIGH"
292 | type = "VIOLENCE"
293 | }
294 | ]
295 | pii_entities_config = [
296 | {
297 | action = "BLOCK"
298 | type = "NAME"
299 | },
300 | {
301 | action = "BLOCK"
302 | type = "DRIVER_ID"
303 | },
304 | {
305 | action = "ANONYMIZE"
306 | type = "USERNAME"
307 | },
308 | ]
309 | regexes_config = [{
310 | action = "BLOCK"
311 | description = "example regex"
312 | name = "regex_example"
313 | pattern = "^\\d{3}-\\d{2}-\\d{4}$"
314 | }]
315 | managed_word_lists_config = [{
316 | type = "PROFANITY"
317 | }]
318 | words_config = [{
319 | text = "HATE"
320 | }]
321 | topics_config = [{
322 | name = "investment_topic"
323 |     examples = ["Where should I invest my money?"]
324 |     type = "DENY"
325 |     definition = "Investment advice refers to inquiries, guidance, or recommendations regarding the management or allocation of funds or assets with the goal of generating returns."
326 | }]
327 | foundation_model = "anthropic.claude-v2"
328 |   instruction = "You are an automotive assistant who can provide detailed information about cars to a customer."
329 | }
330 | ```
331 |
332 | See the additional input variables for deploying guardrails [here](https://github.com/aws-ia/terraform-aws-bedrock/blob/12b2681ce9a0ee5c7acd6d44289e5e1b98203a8a/variables.tf#L317)
333 |
334 | ## Prompt Management
335 |
336 | Amazon Bedrock provides the ability to create and save prompts using Prompt management so that you can save time by applying the same prompt to different workflows. You can include variables in the prompt so that you can adjust it for different use cases. To create a prompt, set the `create_prompt` variable to `true` and pass in the appropriate values.
337 |
338 | ### Prompt Variants
339 |
340 | Prompt variants in the context of Amazon Bedrock refer to alternative configurations of a prompt, including its message or the model and inference configurations used. Prompt variants allow you to create different versions of a prompt, test them, and save the variant that works best for your use case. You can add prompt variants to a prompt by passing in the values for the `variants_list` variable:
341 |
342 | ```hcl
343 | variants_list = [
344 | {
345 | name = "variant-example"
346 | template_type = "TEXT"
347 | model_id = "amazon.titan-text-express-v1"
348 | inference_configuration = {
349 | text = {
350 | temperature = 1
351 | top_p = 0.9900000095367432
352 | max_tokens = 300
353 | stop_sequences = ["User:"]
354 | top_k = 250
355 | }
356 | }
357 | template_configuration = {
358 | text = {
359 |         input_variables = [
360 |           { name = "genre" },
361 |           { name = "number" }
362 |         ]
364 | text = "Make me a {{genre}} playlist consisting of the following number of songs: {{number}}."
365 | }
366 | }
367 | }
368 | ]
369 | ```
370 |
371 | ### Prompt Version
372 |
373 | A prompt version is a snapshot of a prompt at a specific point in time, created when you are satisfied with a set of configurations. Versions allow you to deploy your prompt, switch easily between configurations, and update your application with the most appropriate version for your use case.
374 |
375 | You can create a Prompt version by setting `create_prompt_version` to `true` and adding an optional `prompt_version_description` and optional `prompt_version_tags`.
376 |
377 | Creating a prompt with a prompt version would look like:
378 |
379 | ```hcl
380 | module "bedrock" {
381 | source = "aws-ia/bedrock/aws"
382 | version = "0.0.31"
383 | create_agent = false
384 |
385 | # Prompt Management
386 | prompt_name = "prompt"
387 | default_variant = "variant-example"
388 | create_prompt = true
389 | create_prompt_version = true
390 | prompt_version_description = "Example prompt version"
391 | variants_list = [
392 | {
393 | name = "variant-example"
394 | template_type = "TEXT"
395 | model_id = "amazon.titan-text-express-v1"
396 | inference_configuration = {
397 | text = {
398 | temperature = 1
399 | top_p = 0.9900000095367432
400 | max_tokens = 300
401 | stop_sequences = ["User:"]
402 | top_k = 250
403 | }
404 | }
405 | template_configuration = {
406 | text = {
407 |         input_variables = [
408 |           { name = "genre" },
409 |           { name = "number" }
410 |         ]
412 | text = "Make me a {{genre}} playlist consisting of the following number of songs: {{number}}."
413 | }
414 | }
415 | }
416 |
417 | ]
418 |
419 | }
420 | ```
421 |
422 | See the additional input variables for deploying prompt management [here](https://github.com/aws-ia/terraform-aws-bedrock/blob/12b2681ce9a0ee5c7acd6d44289e5e1b98203a8a/variables.tf#L971)
423 |
424 | ## Application Inference Profile
425 |
426 | You can create an application inference profile with one or more Regions to track usage and costs when invoking a model.
427 |
428 | To create an application inference profile for one Region, specify a foundation model. Usage and costs for requests made to that Region with that model will be tracked.
429 |
430 | To create an application inference profile for multiple Regions, specify a cross-region (system-defined) inference profile as the model source. Requests are routed to the Regions defined in the system-defined inference profile that you choose, and usage and costs for those Regions are tracked. You can find the system-defined inference profiles in the console (Amazon Bedrock -> Cross-region inference). A sketch for the multi-Region case follows the single-Region example below.
431 |
432 | ```hcl
433 | # Get current AWS account ID
434 | data "aws_caller_identity" "current" {}
435 |
436 | # Get current AWS region
437 | data "aws_region" "current" {}
438 |
439 | module "bedrock" {
440 | source = "aws-ia/bedrock/aws"
441 | version = "0.0.31"
442 | create_agent = false
443 |
444 | # Application Inference Profile
445 | create_app_inference_profile = true
446 | app_inference_profile_model_source = "arn:aws:bedrock:${data.aws_region.current.name}::foundation-model/anthropic.claude-3-sonnet-20240229-v1:0"
447 | }
448 | ```
449 |
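For the multi-Region case, the same inputs can point at a system-defined inference profile ARN instead of a foundation model ARN. A sketch, assuming the system-defined profile shown is available in your account (the profile ID is illustrative):

```hcl
module "bedrock" {
  source       = "aws-ia/bedrock/aws"
  version      = "0.0.31"
  create_agent = false

  # Cross-region tracking: reference a system-defined inference profile
  create_app_inference_profile       = true
  app_inference_profile_model_source = "arn:aws:bedrock:${data.aws_region.current.name}:${data.aws_caller_identity.current.account_id}:inference-profile/us.anthropic.claude-3-5-sonnet-20241022-v2:0"
}
```
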
450 | See the additional input variables for deploying application inference profiles [here](https://github.com/aws-ia/terraform-aws-bedrock/blob/12b2681ce9a0ee5c7acd6d44289e5e1b98203a8a/variables.tf#L1057)
451 |
452 | ## Custom Models
453 |
454 | Model customization is the process of providing training data to a base model to improve its performance for specific use cases. Custom models help improve performance on domain-specific tasks while maintaining the base capabilities of the foundation model. With custom models, you can run a continued pre-training or fine-tuning job, which starts when the Terraform resource is created.
455 |
456 | To create a custom model, set the `create_custom_model` variable to `true` and pass in the necessary values for custom models (a sketch follows this list):
457 |
458 | - `custom_model_id`
459 | - Defaults to `amazon.titan-text-express-v1`
460 | - `custom_model_name`
461 | - Defaults to `custom-model`
462 | - `custom_model_job_name`
463 | - Defaults to `custom-model-job`
464 | - `customization_type`
465 |   - Defaults to `FINE_TUNING`; the other valid value is `CONTINUED_PRE_TRAINING`
466 | - `custom_model_hyperparameters`
467 | - Defaults to:
468 | {
469 | "epochCount" = "2",
470 | "batchSize" = "1",
471 | "learningRate" = "0.00001",
472 | "learningRateWarmupSteps" = "10"
473 | }
474 | - `custom_model_training_uri`
475 |
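A minimal fine-tuning sketch using the inputs listed above. The bucket path is illustrative; note that the module's IAM policy treats `custom_model_training_uri` as an S3 bucket name (optionally with a prefix) rather than an `s3://` URI:

```hcl
module "bedrock" {
  source       = "aws-ia/bedrock/aws"
  version      = "0.0.31"
  create_agent = false

  # Fine-tune the default Titan model (bucket path is illustrative)
  create_custom_model       = true
  custom_model_id           = "amazon.titan-text-express-v1"
  custom_model_name         = "my-custom-model"
  custom_model_job_name     = "my-custom-model-job"
  customization_type        = "FINE_TUNING"
  custom_model_training_uri = "my-training-bucket/data"
}
```
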
476 | See the additional input variables for deploying custom models [here](https://github.com/aws-ia/terraform-aws-bedrock/blob/12b2681ce9a0ee5c7acd6d44289e5e1b98203a8a/variables.tf#L1127)
477 |
478 | ## Bedrock Data Automation (BDA)
479 |
480 | ### BDA Project
481 |
482 | Amazon Bedrock Data Automation (BDA) helps you extract information and insights from your documents, images, videos, and audio files using foundation models (FMs). BDA provides both standard output and custom output through blueprints.
483 |
484 | BDA supports different extraction capabilities for each file type:
485 |
486 | - Documents
487 | - Text extraction with different granularity levels (word, line, page)
488 | - Bounding box information
489 | - Custom output formats
490 | - Images
491 | - Object and scene detection
492 | - Text extraction
493 | - Bounding box information
494 | - Custom generative fields
495 | - Video
496 | - Object and action detection
497 | - Scene analysis
498 | - Bounding box tracking
499 | - Custom generative fields
500 | - Audio
501 | - Speaker identification
502 | - Sentiment analysis
503 | - Language detection
504 | - Transcription
505 | - Custom generative fields
506 |
507 | ### Standard Output
508 |
509 | Standard output is pre-defined extraction managed by Bedrock. It can extract information from documents, images, videos, and audio files, and you can configure what information to extract for each file type.
510 |
511 | ```hcl
512 | module "bedrock" {
513 | source = "aws-ia/bedrock/aws"
514 | version = "0.0.31"
515 | create_agent = false
516 | create_bda = true
517 |
518 | bda_standard_output_configuration = {
519 | document = {
520 | extraction = {
521 | bounding_box = {
522 | state = "ENABLED"
523 | }
524 | granularity = {
525 | types = ["WORD", "PAGE"]
526 | }
527 | }
528 | generative_field = {
529 | state = "ENABLED"
530 | }
531 | output_format = {
532 | additional_file_format = {
533 | state = "ENABLED"
534 | }
535 | text_format = {
536 | types = ["PLAIN_TEXT"]
537 | }
538 | }
539 | }
540 | }
541 | }
542 | ```
543 |
544 | ### Blueprints
545 |
546 | Blueprints allow you to define custom extraction schemas for your specific use cases. You can specify what information to extract and how to structure the output.
547 |
548 | ```hcl
549 | module "bedrock" {
550 | source = "aws-ia/bedrock/aws"
551 | version = "0.0.31"
552 | create_agent = false
553 |
554 | create_blueprint = true
555 | blueprint_name = "advertisement-analysis"
556 | blueprint_schema = jsonencode({
557 | "$schema" = "http://json-schema.org/draft-07/schema#"
558 | description = "Extract key information from advertisement images"
559 | class = "advertisement image"
560 | type = "object"
561 | properties = {
562 | image_sentiment = {
563 | type = "string"
564 | inferenceType = "explicit"
565 | instruction = "What is the overall sentiment of the image?"
566 | }
567 | # Additional properties as needed
568 | }
569 | })
570 | }
571 | ```
572 |
573 | See the additional input variables for deploying BDA projects and blueprints [here](https://github.com/aws-ia/terraform-aws-bedrock/blob/12b2681ce9a0ee5c7acd6d44289e5e1b98203a8a/variables.tf#L1530)
574 |
--------------------------------------------------------------------------------