├── .gitignore ├── LICENSE ├── README.md ├── backend.tf.example ├── main.tf ├── outputs.tf ├── terraform.tfvars.example └── variables.tf /.gitignore: -------------------------------------------------------------------------------- 1 | # Local .terraform directories 2 | **/.terraform/* 3 | 4 | # .tfstate files 5 | *.tfstate 6 | *.tfstate.* 7 | 8 | # Crash log files 9 | crash.log 10 | crash.*.log 11 | 12 | # Exclude all .tfvars files, which are likely to contain sensitive data, such as 13 | # password, private keys, and other secrets. These should not be part of version 14 | # control as they are data points which are potentially sensitive and subject 15 | # to change depending on the environment. 16 | *.tfvars 17 | *.tfvars.json 18 | 19 | # Ignore override files as they are usually used to override resources locally and so 20 | # are not checked in 21 | override.tf 22 | override.tf.json 23 | *_override.tf 24 | *_override.tf.json 25 | 26 | # Include override files you do wish to add to version control using negated pattern 27 | # !example_override.tf 28 | 29 | # Include tfplan files to ignore the plan output of command: terraform plan -out=tfplan 30 | # example: *tfplan* 31 | 32 | # Ignore CLI configuration files 33 | .terraformrc 34 | terraform.rc 35 | 36 | # local file 37 | .terraform.lock.hcl 38 | backend.tf 39 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 Sonoda Ryohei 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject 
to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # dify-aws-terraform 2 | 3 | Terraform template for Dify on AWS 4 | 5 | ## Premise and summary 6 | 7 | - VPC はすでに構築済みであるものとします 8 | - 公式では SSRF 対策の Forward Proxy として Squid を利用していますが、ここでは省略しています 9 | - ElastiCache Redis のクラスターモードは接続エラーになったため無効にしています 10 | - PostgreSQL の `pgvector` を Vector Storage として利用しています 11 | - Aurora PostgreSQL Serverless で構築していますが、通常のものでも可能です 12 | 13 | ## Prerequisites 14 | 15 | - Terraform 16 | 17 | ## Usage 18 | 19 | 1. Clone this repository 20 | 2. Edit `terraform.tfvars` to set your variables 21 | 3. Edit `backend.tf` to set your S3 bucket and DynamoDB table 22 | 4. Run `terraform init` 23 | 5. Run `terraform plan` 24 | 6. Run `terraform apply -target aws_rds_cluster_instance.dify` 25 | 7. Execute the following SQL in the RDS cluster 26 | 27 | ```sql 28 | CREATE ROLE dify WITH LOGIN PASSWORD 'your-password'; 29 | GRANT dify TO postgres; 30 | CREATE DATABASE dify WITH OWNER dify; 31 | \c dify 32 | CREATE EXTENSION vector; 33 | ``` 34 | 35 | 8. Run `terraform apply` 36 | 9. 
Run `terraform apply` again if the ECS tasks have not started 37 | 38 | 構築が完了し、ECS タスクがすべて起動したら Output の `dify_url` にアクセスしてください。 39 | -------------------------------------------------------------------------------- /backend.tf.example: -------------------------------------------------------------------------------- 1 | terraform { 2 | backend "s3" { 3 | region = "ap-northeast-1" 4 | bucket = "dify-terraform-state" 5 | key = "dify.tfstate" 6 | dynamodb_table = "dify-terraform-state-lock" 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /main.tf: -------------------------------------------------------------------------------- 1 | provider "aws" { 2 | region = var.aws_region 3 | 4 | default_tags { 5 | tags = var.default_tags 6 | } 7 | } 8 | 9 | data "aws_caller_identity" "current" {} 10 | 11 | # S3 Bucket for Dify Storage 12 | 13 | resource "aws_s3_bucket" "storage" { 14 | bucket = var.dify_storage_bucket 15 | } 16 | 17 | # VPC 18 | 19 | data "aws_vpc" "this" { 20 | id = var.vpc_id 21 | } 22 | 23 | # Redis 24 | 25 | resource "aws_security_group" "redis" { 26 | name = "dify-redis" 27 | description = "Redis for Dify" 28 | vpc_id = var.vpc_id 29 | tags = { Name = "dify-redis" } 30 | # API/Worker からの ingress を下の方で定義している 31 | } 32 | 33 | resource "aws_elasticache_subnet_group" "redis" { 34 | name = "dify-redis" 35 | description = "Redis for Dify" 36 | subnet_ids = var.private_subnet_ids 37 | } 38 | 39 | # MOVED エラーが発生するのでクラスターモードは使わない 40 | resource "aws_elasticache_replication_group" "redis" { 41 | replication_group_id = "dify" 42 | description = "Redis for Dify" 43 | 44 | engine = "redis" 45 | engine_version = "7.1" 46 | 47 | node_type = "cache.t4g.micro" 48 | 49 | subnet_group_name = aws_elasticache_subnet_group.redis.name 50 | security_group_ids = [aws_security_group.redis.id] 51 | 52 | auto_minor_version_upgrade = true 53 | at_rest_encryption_enabled = true 54 | transit_encryption_enabled = true 55 | 56 | auth_token_update_strategy 
= "SET" 57 | auth_token = var.redis_password 58 | 59 | # auth token を後から変更する場合(ROTATE して SET する) 60 | # REDIS_PASSWORD='put your redis password' 61 | # aws elasticache modify-replication-group \ 62 | # --replication-group-id dify \ 63 | # --auth-token ${REDIS_PASSWORD} \ 64 | # --auth-token-update-strategy ROTATE \ 65 | # --apply-immediately 66 | # aws elasticache modify-replication-group \ 67 | # --replication-group-id dify \ 68 | # --auth-token ${REDIS_PASSWORD} \ 69 | # --auth-token-update-strategy SET \ 70 | # --apply-immediately 71 | 72 | maintenance_window = "sat:18:00-sat:19:00" 73 | snapshot_window = "20:00-21:00" 74 | snapshot_retention_limit = 1 75 | 76 | parameter_group_name = "default.redis7" 77 | 78 | lifecycle { 79 | ignore_changes = [auth_token] 80 | } 81 | } 82 | 83 | # Database 84 | 85 | resource "aws_security_group" "database" { 86 | name = "dify-db" 87 | description = "PostgreSQL for Dify" 88 | vpc_id = var.vpc_id 89 | tags = { Name = "dify-db" } 90 | # API/Worker からの ingress を下の方で定義している 91 | } 92 | 93 | # S3 バックアップなどでインターネットへのアクセスが必要な場合は egress を追加する。 94 | # VPC Endpoint や Managed Prefix List を使ってインターネットへのアクセスを制限するのがベター。 95 | resource "aws_security_group_rule" "database_to_internet" { 96 | security_group_id = aws_security_group.database.id 97 | type = "egress" 98 | description = "Internet" 99 | protocol = "all" 100 | from_port = 0 101 | to_port = 0 102 | cidr_blocks = ["0.0.0.0/0"] 103 | } 104 | 105 | resource "aws_db_subnet_group" "dify" { 106 | name = "dify" 107 | description = "PostgreSQL for Dify" 108 | subnet_ids = var.private_subnet_ids 109 | } 110 | 111 | resource "aws_rds_cluster" "dify" { 112 | cluster_identifier = "dify" 113 | 114 | engine = "aurora-postgresql" 115 | engine_version = "15.4" 116 | port = 5432 117 | 118 | db_subnet_group_name = aws_db_subnet_group.dify.name 119 | db_cluster_parameter_group_name = "default.aurora-postgresql15" 120 | vpc_security_group_ids = [aws_security_group.database.id] 121 | 122 | master_username = 
"postgres" 123 | master_password = var.db_master_password 124 | 125 | # データベースは後から構築する 126 | # -- CREATE ROLE dify WITH LOGIN PASSWORD 'password'; 127 | # -- GRANT dify TO postgres; 128 | # -- CREATE DATABASE dify WITH OWNER dify; 129 | # -- \c dify 130 | # -- CREATE EXTENSION vector; 131 | 132 | # 上記 SQL をマネジメントコンソールのクエリエディタで実行する場合は HTTP エンドポイントを有効にする。 133 | # エンドポイントを有効にしない場合は踏み台インスタンスなどを用意して上記 SQL を実行する。 134 | enable_http_endpoint = true 135 | 136 | backup_retention_period = 7 137 | delete_automated_backups = true 138 | 139 | preferred_backup_window = "13:29-13:59" 140 | preferred_maintenance_window = "sat:18:00-sat:19:00" 141 | skip_final_snapshot = true 142 | storage_encrypted = true 143 | copy_tags_to_snapshot = true 144 | 145 | serverlessv2_scaling_configuration { 146 | min_capacity = 2 147 | max_capacity = 4 148 | } 149 | 150 | lifecycle { 151 | ignore_changes = [engine_version, master_password] 152 | } 153 | } 154 | 155 | resource "aws_rds_cluster_instance" "dify" { 156 | identifier = "dify-instance-1" 157 | 158 | cluster_identifier = aws_rds_cluster.dify.cluster_identifier 159 | engine = aws_rds_cluster.dify.engine 160 | engine_version = aws_rds_cluster.dify.engine_version 161 | instance_class = "db.serverless" 162 | 163 | auto_minor_version_upgrade = true 164 | promotion_tier = 1 165 | 166 | db_parameter_group_name = "default.aurora-postgresql15" 167 | db_subnet_group_name = aws_db_subnet_group.dify.name 168 | 169 | performance_insights_enabled = true 170 | performance_insights_retention_period = 7 171 | } 172 | 173 | # Execution Role 174 | 175 | data "aws_iam_policy_document" "ecs_task" { 176 | statement { 177 | actions = ["sts:AssumeRole"] 178 | principals { 179 | type = "Service" 180 | identifiers = ["ecs-tasks.amazonaws.com"] 181 | } 182 | } 183 | } 184 | 185 | data "aws_iam_policy_document" "get_secret" { 186 | statement { 187 | actions = ["ssm:GetParameter", "ssm:GetParameters"] 188 | resources = 
["arn:aws:ssm:*:${data.aws_caller_identity.current.account_id}:parameter/*"] 189 | } 190 | } 191 | 192 | resource "aws_iam_role" "exec" { 193 | name = "dify-task-execution-role" 194 | description = "AmazonECSTaskExecutionRole for Dify" 195 | assume_role_policy = data.aws_iam_policy_document.ecs_task.json 196 | } 197 | resource "aws_iam_role_policy_attachment" "AmazonECSTaskExecutionRolePolicy" { 198 | role = aws_iam_role.exec.id 199 | policy_arn = "arn:aws:iam::aws:policy/service-role/AmazonECSTaskExecutionRolePolicy" 200 | } 201 | resource "aws_iam_role_policy" "get_secret" { 202 | role = aws_iam_role.exec.id 203 | name = "get-secret" 204 | policy = data.aws_iam_policy_document.get_secret.json 205 | } 206 | 207 | # Task Role Basic Policy 208 | 209 | data "aws_iam_policy_document" "ecs_base" { 210 | statement { 211 | actions = [ 212 | "logs:CreateLogGroup", 213 | "logs:CreateLogStream", 214 | "logs:DescribeLogStreams", 215 | "logs:DescribeLogGroups", 216 | "logs:PutLogEvents", 217 | "xray:PutTelemetryRecords", 218 | "xray:PutTraceSegments", 219 | # ECS execute-command 220 | # https://dev.classmethod.jp/articles/ecs-exec/ 221 | # "ssmmessages:CreateControlChannel", 222 | # "ssmmessages:CreateDataChannel", 223 | # "ssmmessages:OpenControlChannel", 224 | # "ssmmessages:OpenDataChannel", 225 | ] 226 | resources = ["*"] 227 | } 228 | } 229 | 230 | resource "aws_iam_policy" "ecs_base" { 231 | name = "dify-task-base-policy" 232 | description = "Base policy for Dify ECS tasks" 233 | policy = data.aws_iam_policy_document.ecs_base.json 234 | } 235 | 236 | # Log Group 237 | 238 | # ロググループは全コンテナ共通にしているが、運用を考えるとコンテナごとに分けた方がいいと思う。 239 | resource "aws_cloudwatch_log_group" "dify" { 240 | name = "/dify/container-logs" 241 | retention_in_days = 30 # TODO: variable 242 | } 243 | 244 | # Dependencies for API + Sandbox and Worker task 245 | 246 | locals { 247 | ssm_parameter_prefix = "/dify" 248 | } 249 | 250 | # セキュアにするなら Credentials は Terraform で管理しない方がいいと思う。 251 | resource 
"random_password" "sandbox_key" { 252 | length = 42 253 | special = true 254 | override_special = "%&-_=+:/" 255 | } 256 | 257 | resource "aws_ssm_parameter" "sandbox_key" { 258 | type = "SecureString" 259 | name = "${local.ssm_parameter_prefix}/SANDBOX_API_KEY" 260 | value = random_password.sandbox_key.result 261 | } 262 | 263 | resource "random_password" "session_secret_key" { 264 | length = 42 265 | special = true 266 | override_special = "-_=+/" 267 | } 268 | 269 | resource "aws_ssm_parameter" "session_secret_key" { 270 | type = "SecureString" 271 | name = "${local.ssm_parameter_prefix}/SESSION_SECRET_KEY" 272 | value = random_password.session_secret_key.result 273 | lifecycle { 274 | ignore_changes = [value] 275 | } 276 | } 277 | 278 | resource "aws_ssm_parameter" "db_password" { 279 | type = "SecureString" 280 | name = "${local.ssm_parameter_prefix}/DB_PASSWORD" 281 | value = var.dify_db_password 282 | lifecycle { 283 | ignore_changes = [value] 284 | } 285 | } 286 | 287 | resource "aws_ssm_parameter" "redis_password" { 288 | type = "SecureString" 289 | name = "${local.ssm_parameter_prefix}/REDIS_PASSWORD" 290 | value = var.redis_password 291 | lifecycle { 292 | ignore_changes = [value] 293 | } 294 | } 295 | 296 | # Broker URL はパスワードを含むためシークレットにする 297 | resource "aws_ssm_parameter" "broker_url" { 298 | depends_on = [aws_elasticache_replication_group.redis] 299 | type = "SecureString" 300 | name = "${local.ssm_parameter_prefix}/CELERY_BROKER_URL" 301 | value = "rediss://:${var.redis_password}@${aws_elasticache_replication_group.redis.primary_endpoint_address}:6379/0" # ElastiCache Redis では db0 以外使えない 302 | lifecycle { 303 | # ignore_changes = [value] 304 | } 305 | } 306 | 307 | data "aws_iam_policy_document" "storage" { 308 | statement { 309 | actions = ["s3:ListBucket"] 310 | resources = [aws_s3_bucket.storage.arn] 311 | } 312 | statement { 313 | actions = ["s3:GetObject", "s3:PutObject", "s3:DeleteObject"] 314 | resources = ["${aws_s3_bucket.storage.arn}/*"] 
315 | } 316 | } 317 | 318 | data "aws_iam_policy_document" "bedrock" { 319 | statement { 320 | actions = ["bedrock:InvokeModel"] 321 | resources = ["arn:aws:bedrock:*::foundation-model/*"] 322 | } 323 | } 324 | 325 | resource "aws_iam_role" "app" { 326 | name = "dify-app-task-role" 327 | description = "Task Role for Dify API, Worker and Sandbox" 328 | assume_role_policy = data.aws_iam_policy_document.ecs_task.json 329 | } 330 | resource "aws_iam_role_policy_attachment" "ecs_base_app" { 331 | role = aws_iam_role.app.id 332 | policy_arn = aws_iam_policy.ecs_base.arn 333 | } 334 | resource "aws_iam_role_policy" "s3_storage" { 335 | role = aws_iam_role.app.id 336 | name = "s3-storage" 337 | policy = data.aws_iam_policy_document.storage.json 338 | } 339 | resource "aws_iam_role_policy" "bedrock" { 340 | role = aws_iam_role.app.id 341 | name = "invoke-bedrock-model" 342 | policy = data.aws_iam_policy_document.bedrock.json 343 | } 344 | 345 | # Dify API (with Sandbox) Task 346 | 347 | resource "aws_ecs_task_definition" "dify_api" { 348 | family = "dify-api" 349 | execution_role_arn = aws_iam_role.exec.arn 350 | task_role_arn = aws_iam_role.app.arn 351 | network_mode = "awsvpc" 352 | requires_compatibilities = ["FARGATE"] 353 | cpu = 1024 # TODO: variable 354 | memory = 2048 # TODO: variable 355 | 356 | volume { 357 | name = "dependencies" 358 | } 359 | 360 | container_definitions = jsonencode([ 361 | { 362 | name = "dify-api" 363 | image = "langgenius/dify-api:${var.dify_api_version}" 364 | essential = true 365 | portMappings = [ 366 | { 367 | hostPort = 5001 368 | protocol = "tcp" 369 | containerPort = 5001 370 | } 371 | ] 372 | environment = [ 373 | for name, value in { 374 | # Startup mode, 'api' starts the API server. 375 | MODE = "api" 376 | # The log level for the application. 
Supported values are `DEBUG`, `INFO`, `WARNING`, `ERROR`, `CRITICAL` 377 | LOG_LEVEL = "INFO" 378 | # enable DEBUG mode to output more logs 379 | # DEBUG = "true" 380 | # The base URL of console application web frontend, refers to the Console base URL of WEB service if console domain is 381 | # different from api or web app domain. 382 | # example: http://cloud.dify.ai 383 | CONSOLE_WEB_URL = "http://${aws_lb.dify.dns_name}" 384 | # The base URL of console application api server, refers to the Console base URL of WEB service if console domain is different from api or web app domain. 385 | # example: http://cloud.dify.ai 386 | CONSOLE_API_URL = "http://${aws_lb.dify.dns_name}" 387 | # The URL prefix for Service API endpoints, refers to the base URL of the current API service if api domain is different from console domain. 388 | # example: http://api.dify.ai 389 | SERVICE_API_URL = "http://${aws_lb.dify.dns_name}" 390 | # The URL prefix for Web APP frontend, refers to the Web App base URL of WEB service if web app domain is different from console or api domain. 391 | # example: http://udify.app 392 | APP_WEB_URL = "http://${aws_lb.dify.dns_name}" 393 | # When enabled, migrations will be executed prior to application startup and the application will start after the migrations have completed. 394 | MIGRATION_ENABLED = var.migration_enabled 395 | # The configurations of postgres database connection. 396 | # It is consistent with the configuration in the 'db' service below. 397 | DB_USERNAME = var.dify_db_username 398 | DB_HOST = aws_rds_cluster.dify.endpoint 399 | DB_PORT = aws_rds_cluster.dify.port 400 | DB_DATABASE = var.dify_db_name 401 | # The configurations of redis connection. 402 | # It is consistent with the configuration in the 'redis' service below. 
403 | REDIS_HOST = aws_elasticache_replication_group.redis.primary_endpoint_address 404 | REDIS_PORT = aws_elasticache_replication_group.redis.port 405 | REDIS_USE_SSL = true 406 | # use redis db 0 for redis cache 407 | REDIS_DB = 0 408 | # Specifies the allowed origins for cross-origin requests to the Web API, e.g. https://dify.app or * for all origins. 409 | WEB_API_CORS_ALLOW_ORIGINS = "*" 410 | # Specifies the allowed origins for cross-origin requests to the console API, e.g. https://cloud.dify.ai or * for all origins. 411 | CONSOLE_CORS_ALLOW_ORIGINS = "*" 412 | # CSRF Cookie settings 413 | # Controls whether a cookie is sent with cross-site requests, 414 | # providing some protection against cross-site request forgery attacks 415 | # 416 | # Default = `SameSite=Lax, Secure=false, HttpOnly=true` 417 | # This default configuration supports same-origin requests using either HTTP or HTTPS, 418 | # but does not support cross-origin requests. It is suitable for local debugging purposes. 419 | # 420 | # If you want to enable cross-origin support, 421 | # you must use the HTTPS protocol and set the configuration to `SameSite=None, Secure=true, HttpOnly=true`. 422 | # 423 | # The type of storage to use for storing user files. Supported values are `local` and `s3` and `azure-blob` and `google-storage`, Default = `local` 424 | STORAGE_TYPE = "s3" 425 | # The S3 storage configurations, only available when STORAGE_TYPE is `s3`. 426 | S3_USE_AWS_MANAGED_IAM = true 427 | S3_BUCKET_NAME = aws_s3_bucket.storage.bucket 428 | S3_REGION = var.aws_region 429 | # The type of vector store to use. Supported values are `weaviate`, `qdrant`, `milvus`, `relyt`, `pgvector`. 
430 | VECTOR_STORE = "pgvector" 431 | # pgvector configurations 432 | PGVECTOR_HOST = aws_rds_cluster.dify.endpoint 433 | PGVECTOR_PORT = aws_rds_cluster.dify.port 434 | PGVECTOR_USER = "dify" 435 | PGVECTOR_DATABASE = "dify" 436 | # # Mail configuration, support = resend, smtp 437 | # MAIL_TYPE = '' 438 | # # default send from email address, if not specified 439 | # MAIL_DEFAULT_SEND_FROM = 'YOUR EMAIL FROM (eg = no-reply )' 440 | # SMTP_SERVER = '' 441 | # SMTP_PORT = 587 442 | # SMTP_USERNAME = '' 443 | # SMTP_PASSWORD = '' 444 | # SMTP_USE_TLS = 'true' 445 | # The sandbox service endpoint. 446 | CODE_EXECUTION_ENDPOINT = "http://localhost:8194" # Fargate の task 内通信は localhost 宛 447 | CODE_MAX_NUMBER = "9223372036854775807" 448 | CODE_MIN_NUMBER = "-9223372036854775808" 449 | CODE_MAX_STRING_LENGTH = 80000 450 | TEMPLATE_TRANSFORM_MAX_LENGTH = 80000 451 | CODE_MAX_STRING_ARRAY_LENGTH = 30 452 | CODE_MAX_OBJECT_ARRAY_LENGTH = 30 453 | CODE_MAX_NUMBER_ARRAY_LENGTH = 1000 454 | # Indexing configuration 455 | INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH = 1000 456 | } : { name = name, value = tostring(value) } 457 | ] 458 | secrets = [ 459 | { 460 | name = "SECRET_KEY" 461 | valueFrom = aws_ssm_parameter.session_secret_key.name 462 | }, 463 | { 464 | name = "DB_PASSWORD" 465 | valueFrom = aws_ssm_parameter.db_password.name 466 | }, 467 | { 468 | name = "REDIS_PASSWORD" 469 | valueFrom = aws_ssm_parameter.redis_password.name 470 | }, 471 | # The configurations of celery broker. 472 | # Use redis as the broker, and redis db 0 for celery broker (ElastiCache only allows db 0). 
473 | { 474 | name = "CELERY_BROKER_URL" 475 | valueFrom = aws_ssm_parameter.broker_url.name 476 | }, 477 | { 478 | name = "PGVECTOR_PASSWORD" 479 | valueFrom = aws_ssm_parameter.db_password.name 480 | }, 481 | { 482 | name = "CODE_EXECUTION_API_KEY" 483 | valueFrom = aws_ssm_parameter.sandbox_key.name 484 | } 485 | ] 486 | logConfiguration = { 487 | logDriver = "awslogs" 488 | options = { 489 | "awslogs-group" = aws_cloudwatch_log_group.dify.name 490 | "awslogs-region" = var.aws_region 491 | "awslogs-stream-prefix" = "dify-api" 492 | } 493 | } 494 | healthCheck = { 495 | command = ["CMD-SHELL", "curl -f http://localhost:5001/health || exit 1"] 496 | interval = 10 497 | timeout = 5 498 | retries = 3 499 | startPeriod = 30 500 | } 501 | cpu = 0 502 | volumesFrom = [] 503 | mountPoints = [] 504 | }, 505 | // `dify-sandbox:0.2.6` では `/dependencies/python-requirements.txt` が存在しないと起動時エラーになる。 506 | // そのため、簡易的ではあるが volume を利用して sandbox から見れるファイルを作成する。 507 | { 508 | name = "dify-sandbox-dependencies" 509 | image = "busybox:latest" # dify-sandbox イメージより軽量ならなんでもいい 510 | essential = false 511 | cpu = 0 512 | mountPoints = [ 513 | { 514 | sourceVolume = "dependencies" 515 | containerPath = "/dependencies" 516 | } 517 | ] 518 | entryPoint = ["sh", "-c"] 519 | command = ["touch /dependencies/python-requirements.txt && chmod 755 /dependencies/python-requirements.txt"] 520 | }, 521 | { 522 | name = "dify-sandbox" 523 | image = "langgenius/dify-sandbox:${var.dify_sandbox_version}" 524 | essential = true 525 | mountPoints = [ 526 | { 527 | sourceVolume = "dependencies" 528 | containerPath = "/dependencies" 529 | } 530 | ] 531 | portMappings = [ 532 | { 533 | hostPort = 8194 534 | protocol = "tcp" 535 | containerPort = 8194 536 | } 537 | ] 538 | environment = [ 539 | for name, value in { 540 | GIN_MODE = "release" 541 | WORKER_TIMEOUT = 15 542 | ENABLE_NETWORK = true 543 | SANDBOX_PORT = 8194 544 | } : { name = name, value = tostring(value) } 545 | ] 546 | secrets = [ 547 | { 548 | 
name = "API_KEY" 549 | valueFrom = aws_ssm_parameter.sandbox_key.name 550 | } 551 | ] 552 | logConfiguration = { 553 | logDriver = "awslogs" 554 | options = { 555 | "awslogs-group" = aws_cloudwatch_log_group.dify.name 556 | "awslogs-region" = var.aws_region 557 | "awslogs-stream-prefix" = "dify-sandbox" 558 | } 559 | } 560 | cpu = 0 561 | volumesFrom = [] 562 | }, 563 | ]) 564 | 565 | runtime_platform { 566 | operating_system_family = "LINUX" 567 | cpu_architecture = "ARM64" 568 | } 569 | 570 | lifecycle { 571 | create_before_destroy = true 572 | } 573 | } 574 | 575 | resource "aws_security_group" "api" { 576 | name = "dify-api" 577 | description = "Dify API" 578 | vpc_id = var.vpc_id 579 | tags = { Name = "dify-api" } 580 | } 581 | 582 | # TODO: 公式では SSRF 対策のために Forward Proxy として squid をプロビジョニングしているが、 583 | # 本構成では SSRF 対策の Forward Proxy は省略している。必要な場合は squid のタスクを用意したり、Firewall Manager などを利用する。 584 | resource "aws_security_group_rule" "api_to_internet" { 585 | security_group_id = aws_security_group.api.id 586 | type = "egress" 587 | description = "Internet" 588 | protocol = "all" 589 | from_port = 0 590 | to_port = 0 591 | cidr_blocks = ["0.0.0.0/0"] 592 | } 593 | 594 | resource "aws_security_group_rule" "alb_to_api" { 595 | security_group_id = aws_security_group.api.id 596 | type = "ingress" 597 | description = "ALB to API" 598 | protocol = "tcp" 599 | from_port = 5001 600 | to_port = 5001 601 | source_security_group_id = aws_security_group.alb.id 602 | } 603 | 604 | resource "aws_security_group_rule" "api_to_database" { 605 | security_group_id = aws_security_group.database.id 606 | type = "ingress" 607 | description = "API to Database" 608 | protocol = "tcp" 609 | from_port = 5432 610 | to_port = 5432 611 | source_security_group_id = aws_security_group.api.id 612 | } 613 | 614 | resource "aws_security_group_rule" "api_to_redis" { 615 | security_group_id = aws_security_group.redis.id 616 | type = "ingress" 617 | description = "API to Redis" 618 | protocol = "tcp" 
619 | from_port = 6379 620 | to_port = 6379 621 | source_security_group_id = aws_security_group.api.id 622 | } 623 | 624 | 625 | # Dify Worker Task 626 | resource "aws_ecs_task_definition" "dify_worker" { 627 | family = "dify-worker" 628 | execution_role_arn = aws_iam_role.exec.arn 629 | task_role_arn = aws_iam_role.app.arn 630 | network_mode = "awsvpc" 631 | requires_compatibilities = ["FARGATE"] 632 | cpu = 1024 # TODO: variable 633 | memory = 2048 # TODO: variable 634 | 635 | container_definitions = jsonencode([ 636 | { 637 | name = "dify-worker" 638 | image = "langgenius/dify-api:${var.dify_api_version}" 639 | essential = true 640 | environment = [ 641 | for name, value in { 642 | # Startup mode, 'worker' starts the Celery worker for processing the queue. 643 | MODE = "worker" 644 | 645 | # --- All the configurations below are the same as those in the 'api' service. --- 646 | 647 | # The log level for the application. Supported values are `DEBUG`, `INFO`, `WARNING`, `ERROR`, `CRITICAL` 648 | LOG_LEVEL = "INFO" 649 | # The configurations of postgres database connection. 650 | # It is consistent with the configuration in the 'db' service below. 651 | DB_USERNAME = var.dify_db_username 652 | DB_HOST = aws_rds_cluster.dify.endpoint 653 | DB_PORT = aws_rds_cluster.dify.port 654 | DB_DATABASE = var.dify_db_name 655 | # The configurations of redis cache connection. 656 | REDIS_HOST = aws_elasticache_replication_group.redis.primary_endpoint_address 657 | REDIS_PORT = aws_elasticache_replication_group.redis.port 658 | REDIS_DB = "0" 659 | REDIS_USE_SSL = "true" 660 | # The type of storage to use for storing user files. Supported values are `local` and `s3` and `azure-blob` and `google-storage`, Default = `local` 661 | STORAGE_TYPE = "s3" 662 | # The S3 storage configurations, only available when STORAGE_TYPE is `s3`. 
663 | S3_USE_AWS_MANAGED_IAM = true 664 | S3_BUCKET_NAME = aws_s3_bucket.storage.bucket 665 | S3_REGION = var.aws_region 666 | # The type of vector store to use. Supported values are `weaviate`, `qdrant`, `milvus`, `relyt`, `pgvector`. 667 | VECTOR_STORE = "pgvector" 668 | # pgvector configurations 669 | PGVECTOR_HOST = aws_rds_cluster.dify.endpoint 670 | PGVECTOR_PORT = aws_rds_cluster.dify.port 671 | PGVECTOR_USER = "dify" 672 | PGVECTOR_DATABASE = "dify" 673 | # Mail configuration, support = resend 674 | # MAIL_TYPE = '' 675 | # # default send from email address, if not specified 676 | # MAIL_DEFAULT_SEND_FROM = 'YOUR EMAIL FROM (eg = no-reply )' 677 | # SMTP_SERVER = '' 678 | # SMTP_PORT = 587 679 | # SMTP_USERNAME = '' 680 | # SMTP_PASSWORD = '' 681 | # SMTP_USE_TLS = 'true' 682 | # Indexing configuration 683 | INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH = "1000" 684 | } : { name = name, value = tostring(value) } 685 | ] 686 | secrets = [ 687 | { 688 | name = "SECRET_KEY" 689 | valueFrom = aws_ssm_parameter.session_secret_key.name 690 | }, 691 | { 692 | name = "DB_PASSWORD" 693 | valueFrom = aws_ssm_parameter.db_password.name 694 | }, 695 | { 696 | name = "REDIS_PASSWORD" 697 | valueFrom = aws_ssm_parameter.redis_password.name 698 | }, 699 | # The configurations of celery broker. 700 | # Use redis as the broker, and redis db 0 for celery broker (ElastiCache only allows db 0). 
701 | { 702 | name = "CELERY_BROKER_URL" 703 | valueFrom = aws_ssm_parameter.broker_url.name 704 | }, 705 | { 706 | name = "PGVECTOR_PASSWORD" 707 | valueFrom = aws_ssm_parameter.db_password.name 708 | } 709 | ] 710 | logConfiguration = { 711 | logDriver = "awslogs" 712 | options = { 713 | "awslogs-group" = aws_cloudwatch_log_group.dify.name 714 | "awslogs-region" = var.aws_region 715 | "awslogs-stream-prefix" = "dify-worker" 716 | } 717 | } 718 | cpu = 0 719 | volumesFrom = [] 720 | mountPoints = [] 721 | }, 722 | ]) 723 | 724 | runtime_platform { 725 | operating_system_family = "LINUX" 726 | cpu_architecture = "ARM64" 727 | } 728 | 729 | lifecycle { 730 | create_before_destroy = true 731 | } 732 | } 733 | 734 | resource "aws_security_group" "worker" { 735 | name = "dify-worker" 736 | description = "Dify Worker" 737 | vpc_id = var.vpc_id 738 | tags = { Name = "dify-worker" } 739 | } 740 | 741 | resource "aws_security_group_rule" "worker_to_internet" { 742 | security_group_id = aws_security_group.worker.id 743 | type = "egress" 744 | description = "Internet" 745 | protocol = "all" 746 | from_port = 0 747 | to_port = 0 748 | cidr_blocks = ["0.0.0.0/0"] 749 | } 750 | 751 | resource "aws_security_group_rule" "worker_to_database" { 752 | security_group_id = aws_security_group.database.id 753 | type = "ingress" 754 | description = "Worker to Database" 755 | protocol = "tcp" 756 | from_port = 5432 757 | to_port = 5432 758 | source_security_group_id = aws_security_group.worker.id 759 | } 760 | 761 | resource "aws_security_group_rule" "worker_to_redis" { 762 | security_group_id = aws_security_group.redis.id 763 | type = "ingress" 764 | description = "Worker to Redis" 765 | protocol = "tcp" 766 | from_port = 6379 767 | to_port = 6379 768 | source_security_group_id = aws_security_group.worker.id 769 | } 770 | 771 | # Dify Web Task 772 | 773 | resource "aws_iam_role" "web" { 774 | name = "dify-web-task-role" 775 | description = "Task Role for Dify Web" 776 | 
assume_role_policy = data.aws_iam_policy_document.ecs_task.json 777 | } 778 | resource "aws_iam_role_policy_attachment" "ecs_base_web" { 779 | role = aws_iam_role.web.id 780 | policy_arn = aws_iam_policy.ecs_base.arn 781 | } 782 | 783 | resource "aws_ecs_task_definition" "dify_web" { 784 | family = "dify-web" 785 | execution_role_arn = aws_iam_role.exec.arn 786 | task_role_arn = aws_iam_role.web.arn 787 | network_mode = "awsvpc" 788 | requires_compatibilities = ["FARGATE"] 789 | cpu = 1024 # TODO: variable 790 | memory = 2048 # TODO: variable 791 | 792 | container_definitions = jsonencode([ 793 | { 794 | name = "dify-web" 795 | image = "langgenius/dify-web:${var.dify_web_version}" 796 | essential = true 797 | environment = [ 798 | for name, value in { 799 | # The base URL of console application api server, refers to the Console base URL of WEB service if console domain is 800 | # different from api or web app domain. 801 | # example: http://cloud.dify.ai 802 | CONSOLE_API_URL = "http://${aws_lb.dify.dns_name}" 803 | # # The URL for Web APP api server, refers to the Web App base URL of WEB service if web app domain is different from 804 | # # console or api domain. 
805 | # # example: http://udify.app 806 | APP_API_URL = "http://${aws_lb.dify.dns_name}" 807 | NEXT_TELEMETRY_DISABLED = "0" 808 | } : { name = name, value = tostring(value) } 809 | ] 810 | portMappings = [ 811 | { 812 | hostPort = 3000 813 | protocol = "tcp" 814 | containerPort = 3000 815 | } 816 | ] 817 | logConfiguration = { 818 | logDriver = "awslogs" 819 | options = { 820 | "awslogs-group" = aws_cloudwatch_log_group.dify.name 821 | "awslogs-region" = var.aws_region 822 | "awslogs-stream-prefix" = "dify-web" 823 | } 824 | } 825 | cpu = 0 826 | volumesFrom = [] 827 | mountPoints = [] 828 | }, 829 | ]) 830 | 831 | runtime_platform { 832 | operating_system_family = "LINUX" 833 | cpu_architecture = "ARM64" 834 | } 835 | 836 | lifecycle { 837 | create_before_destroy = true 838 | } 839 | } 840 | 841 | resource "aws_security_group" "web" { 842 | name = "dify-web" 843 | description = "Dify Web" 844 | vpc_id = var.vpc_id 845 | tags = { Name = "dify-web" } 846 | } 847 | 848 | # インターネットアクセスは不要だが、これがないと ECR からイメージのダウンロードに失敗して 849 | # タスクの起動がエラーになる。VPC エンドポイントを作成できるならそちらの方がベター。 850 | resource "aws_security_group_rule" "web_to_internet" { 851 | security_group_id = aws_security_group.web.id 852 | type = "egress" 853 | description = "Web to Internet" 854 | protocol = "all" 855 | from_port = 0 856 | to_port = 0 857 | cidr_blocks = ["0.0.0.0/0"] 858 | } 859 | 860 | resource "aws_security_group_rule" "alb_to_web" { 861 | security_group_id = aws_security_group.web.id 862 | type = "ingress" 863 | description = "ALB to Web" 864 | protocol = "tcp" 865 | from_port = 3000 866 | to_port = 3000 867 | source_security_group_id = aws_security_group.alb.id 868 | } 869 | 870 | # ALB 871 | 872 | resource "aws_security_group" "alb" { 873 | name = "dify-alb" 874 | description = "ALB (Reverse Proxy) for Dify" 875 | vpc_id = var.vpc_id 876 | tags = { Name = "dify-alb" } 877 | } 878 | 879 | resource "aws_security_group_rule" "alb_to_targetgroup" { 880 | security_group_id = aws_security_group.alb.id 
881 | type = "egress" 882 | description = "ALB to TargetGroup" 883 | protocol = "all" 884 | from_port = 0 885 | to_port = 0 886 | cidr_blocks = [data.aws_vpc.this.cidr_block] 887 | } 888 | 889 | resource "aws_security_group_rule" "http_from_internet" { 890 | security_group_id = aws_security_group.alb.id 891 | type = "ingress" 892 | description = "HTTP from Internet" 893 | protocol = "tcp" 894 | from_port = 80 895 | to_port = 80 896 | cidr_blocks = var.allowed_cidr_blocks 897 | } 898 | 899 | resource "aws_lb" "dify" { 900 | name = "dify-alb" 901 | load_balancer_type = "application" 902 | subnets = var.public_subnet_ids 903 | security_groups = [aws_security_group.alb.id] 904 | } 905 | 906 | # ALB Listener (HTTP) 907 | 908 | resource "aws_lb_target_group" "web" { 909 | name = "dify-web" 910 | vpc_id = var.vpc_id 911 | protocol = "HTTP" 912 | port = 3000 913 | target_type = "ip" 914 | 915 | slow_start = 0 916 | deregistration_delay = 65 917 | 918 | health_check { 919 | path = "/apps" # "/" responds with a 307 redirect, which would fail the health check 920 | interval = 10 921 | # timeout = 5 922 | # healthy_threshold = 3 923 | # unhealthy_threshold = 5 924 | } 925 | } 926 | 927 | resource "aws_lb_listener" "http" { 928 | load_balancer_arn = aws_lb.dify.arn 929 | port = 80 930 | protocol = "HTTP" 931 | 932 | default_action { 933 | type = "forward" 934 | target_group_arn = aws_lb_target_group.web.arn 935 | } 936 | } 937 | 938 | # ALB Listener Rule (API) 939 | # Routes requests to the API target group based on path patterns. 940 | 941 | locals { 942 | api_paths = ["/console/api", "/api", "/v1", "/files"] 943 | } 944 | 945 | resource "aws_lb_listener_rule" "api" { 946 | listener_arn = aws_lb_listener.http.arn 947 | priority = 10 948 | 949 | condition { 950 | path_pattern { 951 | values = local.api_paths 952 | } 953 | } 954 | 955 | action { 956 | type = "forward" 957 | target_group_arn = aws_lb_target_group.api.arn 958 | } 959 | } 960 | 961 | resource "aws_lb_listener_rule" "api_wildcard" { 962 | listener_arn = aws_lb_listener.http.arn 963 | priority = 11 964
| 965 | condition { 966 | path_pattern { 967 | values = [for path in local.api_paths : "${path}/*"] 968 | } 969 | } 970 | 971 | action { 972 | type = "forward" 973 | target_group_arn = aws_lb_target_group.api.arn 974 | } 975 | } 976 | 977 | resource "aws_lb_target_group" "api" { 978 | name = "dify-api" 979 | vpc_id = var.vpc_id 980 | protocol = "HTTP" 981 | port = 5001 982 | target_type = "ip" 983 | 984 | slow_start = 0 985 | deregistration_delay = 65 986 | 987 | health_check { 988 | path = "/health" 989 | interval = 10 990 | # timeout = 5 991 | # healthy_threshold = 3 992 | # unhealthy_threshold = 5 993 | } 994 | } 995 | 996 | # ECS Cluster 997 | 998 | resource "aws_ecs_cluster" "dify" { 999 | name = "dify-cluster" 1000 | setting { 1001 | name = "containerInsights" 1002 | value = "enabled" 1003 | } 1004 | } 1005 | // Registered up front so FARGATE_SPOT can be used later (e.g. with autoscaling). 1006 | resource "aws_ecs_cluster_capacity_providers" "this" { 1007 | cluster_name = aws_ecs_cluster.dify.name 1008 | capacity_providers = ["FARGATE", "FARGATE_SPOT"] 1009 | } 1010 | 1011 | # ECS Service 1012 | 1013 | resource "aws_ecs_service" "api" { 1014 | depends_on = [aws_lb_listener_rule.api] # creation fails unless the target group is already attached to the ALB 1015 | name = "dify-api" 1016 | cluster = aws_ecs_cluster.dify.name 1017 | desired_count = var.api_desired_count 1018 | task_definition = aws_ecs_task_definition.dify_api.arn 1019 | propagate_tags = "SERVICE" 1020 | launch_type = "FARGATE" 1021 | 1022 | network_configuration { 1023 | subnets = var.private_subnet_ids 1024 | security_groups = [aws_security_group.api.id] 1025 | } 1026 | 1027 | load_balancer { 1028 | target_group_arn = aws_lb_target_group.api.arn 1029 | container_name = "dify-api" 1030 | container_port = 5001 1031 | } 1032 | } 1033 | 1034 | resource "aws_ecs_service" "worker" { 1035 | name = "dify-worker" 1036 | cluster = aws_ecs_cluster.dify.name 1037 | desired_count = var.worker_desired_count 1038 | task_definition = aws_ecs_task_definition.dify_worker.arn 1039 |
propagate_tags = "SERVICE" 1040 | launch_type = "FARGATE" 1041 | 1042 | network_configuration { 1043 | subnets = var.private_subnet_ids 1044 | security_groups = [aws_security_group.worker.id] 1045 | } 1046 | } 1047 | 1048 | resource "aws_ecs_service" "web" { 1049 | name = "dify-web" 1050 | cluster = aws_ecs_cluster.dify.name 1051 | desired_count = var.web_desired_count 1052 | task_definition = aws_ecs_task_definition.dify_web.arn 1053 | propagate_tags = "SERVICE" 1054 | launch_type = "FARGATE" 1055 | 1056 | network_configuration { 1057 | subnets = var.private_subnet_ids 1058 | security_groups = [aws_security_group.web.id] 1059 | } 1060 | 1061 | load_balancer { 1062 | target_group_arn = aws_lb_target_group.web.arn 1063 | container_name = "dify-web" 1064 | container_port = 3000 1065 | } 1066 | } 1067 | -------------------------------------------------------------------------------- /outputs.tf: -------------------------------------------------------------------------------- 1 | # S3 Bucket for Dify Storage 2 | 3 | output "storage_bucket_arn" { 4 | value = aws_s3_bucket.storage.arn 5 | } 6 | 7 | # Redis 8 | 9 | output "redis_endpoint" { 10 | value = aws_elasticache_replication_group.redis.configuration_endpoint_address 11 | } 12 | 13 | output "redis_port" { 14 | value = aws_elasticache_replication_group.redis.port 15 | } 16 | 17 | # Database 18 | 19 | output "db_host" { 20 | value = aws_rds_cluster.dify.endpoint 21 | } 22 | 23 | output "db_port" { 24 | value = aws_rds_cluster.dify.port 25 | } 26 | 27 | # Endpoint 28 | 29 | output "dify_url" { 30 | value = "http://${aws_lb.dify.dns_name}" 31 | } 32 | -------------------------------------------------------------------------------- /terraform.tfvars.example: -------------------------------------------------------------------------------- 1 | # AWS Provider 2 | 3 | aws_region = "ap-northeast-1" 4 | 5 | default_tags = { 6 | Project = "sonodar/test" 7 | } 8 | 9 | # S3 Bucket for Dify Storage 10 | 11 | dify_storage_bucket = 
"dify-sonodar-test-1" 12 | 13 | # VPC 14 | 15 | vpc_id = "vpc-xxxxxxxxxxxxxxxxxx" 16 | private_subnet_ids = ["subnet-xxxxxxxxxxxxxxxxx", "subnet-xxxxxxxxxxxxxxxxx"] 17 | public_subnet_ids = ["subnet-xxxxxxxxxxxxxxxxx", "subnet-xxxxxxxxxxxxxxxxx"] 18 | 19 | redis_password = "openssl rand -base64 21" 20 | db_master_password = "openssl rand -base64 21" 21 | 22 | # Dify environment 23 | migration_enabled = "true" # only first time 24 | dify_db_password = "openssl rand -base64 21" 25 | -------------------------------------------------------------------------------- /variables.tf: -------------------------------------------------------------------------------- 1 | # AWS Provider 2 | 3 | variable "aws_region" {} 4 | 5 | variable "default_tags" { 6 | type = map(string) 7 | } 8 | 9 | # S3 Bucket for Dify Storage 10 | 11 | variable "dify_storage_bucket" { 12 | description = "s3 bucket name for dify storage" 13 | } 14 | 15 | # VPC 16 | 17 | variable "vpc_id" { 18 | } 19 | variable "private_subnet_ids" { 20 | type = list(string) 21 | } 22 | variable "public_subnet_ids" { 23 | type = list(string) 24 | } 25 | 26 | # Redis 27 | 28 | variable "redis_password" { 29 | default = "redis_dummy_auth_token" 30 | sensitive = true 31 | # Apply with the dummy token on the first run, then rotate it after the cluster exists with: 32 | # aws elasticache modify-replication-group \ 33 | # --replication-group-id replication-group-sample \ 34 | # --auth-token new-token \ 35 | # --auth-token-update-strategy SET \ 36 | # --apply-immediately 37 | } 38 | 39 | # Database 40 | 41 | variable "db_master_password" { 42 | default = "dummy" # supply the real value via TF_VAR_db_master_password=xxx on the first apply 43 | sensitive = true 44 | } 45 | 46 | # Dify environment 47 | 48 | variable "dify_api_version" { 49 | default = "0.7.3" 50 | } 51 | 52 | variable "dify_web_version" { 53 | default = "0.7.3" 54 | } 55 | 56 | variable "dify_sandbox_version" { 57 | default = "0.2.6" 58 | } 59 | 60 | variable "migration_enabled" { 61 | default = "true" 62 | } 63 | 64 | variable "dify_db_username" { 65
| default = "dify" 66 | } 67 | variable "dify_db_password" { 68 | default = "dummy" 69 | sensitive = true 70 | } 71 | variable "dify_db_name" { 72 | default = "dify" 73 | } 74 | 75 | # ALB 76 | 77 | # CIDR ranges allowed to reach the ALB over HTTP (port 80). Empty by default,
# which creates the ingress rule with no sources, i.e. no public access.
variable "allowed_cidr_blocks" { 78 | type = list(string) 79 | default = [] 80 | } 81 | 82 | # Service 83 | 84 | # Desired task counts for the three ECS services. 85 | variable "api_desired_count" { 86 | default = 1 87 | } 88 | 89 | variable "worker_desired_count" { 90 | default = 1 91 | } 92 | 93 | variable "web_desired_count" { 94 | default = 1 95 | } 96 | --------------------------------------------------------------------------------