├── .github ├── pull_request_template.md └── workflows │ ├── pull-request-lint.yml │ ├── claude-code-review.yml │ ├── claude.yml │ ├── upgrade-main.yml │ ├── build.yml │ └── release.yml ├── .prettierrc.yaml ├── src ├── index.ts ├── enum.ts ├── handler │ ├── handler.d.ts │ └── index.ts ├── iam-grant.ts ├── sql.ts ├── schema.ts ├── role.custom-resource.ts ├── database.ts └── role.ts ├── .prettierignore ├── .claude ├── settings.json └── hooks │ └── lint ├── .projen ├── files.json ├── deps.json └── tasks.json ├── .npmignore ├── lambda ├── engine.factory.ts ├── types.ts ├── engine.abstract.ts ├── engine.postgresql.test.ts ├── handler.test.ts ├── util.ts ├── engine.mysql.test.ts ├── engine.mysql.ts ├── engine.postgresql.ts ├── handler.postgresql.test.ts ├── handler.mysql.test.ts └── engine.dsql.ts ├── .mergify.yml ├── cdk.json ├── .gitattributes ├── tsconfig.json ├── tsconfig.dev.json ├── .conform.yaml ├── .gitignore ├── test ├── query-lambda.ts ├── vpc.ts ├── app.ts ├── instance1-stack.ts ├── dsql-stack.ts ├── instance2-stack.ts ├── __snapshots__ │ └── custom-resource-properties.test.ts.snap ├── serverlessv2-stack.ts └── instance-stack.test.ts ├── .pre-commit-config.yaml ├── .eslintrc.json ├── AGENTS.md ├── CLAUDE.md ├── package.json ├── .projenrc.ts └── LICENSE /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | Fixes # -------------------------------------------------------------------------------- /.prettierrc.yaml: -------------------------------------------------------------------------------- 1 | # .prettierrc or .prettierrc.yaml 2 | trailingComma: "es5" 3 | tabWidth: 2 4 | useTabs: false 5 | semi: false 6 | singleQuote: false 7 | arrowParens: "always" 8 | printWidth: 90 9 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | export * from "./enum" 2 | export * from "./provider" 3 | export * from "./sql" 4 | export * from "./schema" 5 | export * from "./role" 6 | export * from "./database" 7 | export * from "./iam-grant" 8 | -------------------------------------------------------------------------------- /src/enum.ts: -------------------------------------------------------------------------------- 1 | export enum RdsSqlResource { 2 | SCHEMA = "schema", 3 | ROLE = "role", 4 | SQL = "sql", 5 | DATABASE = "database", 6 | PARAMETER_PASSWORD = "parameter_password", 7 | IAM_GRANT = "iam_grant", 8 | } 9 | -------------------------------------------------------------------------------- /src/handler/handler.d.ts: -------------------------------------------------------------------------------- 1 | import { CloudFormationCustomResourceEvent } from "aws-lambda" 2 | import { CdkCustomResourceResponse } from "aws-lambda" 3 | 4 | export declare function handler( 5 | event: CloudFormationCustomResourceEvent 6 | ): Promise<CdkCustomResourceResponse> 7 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | build 3 | dist 4 | res 5 | coverage 6 | cdk.json 7 | jest.config.js 8 | tsconfig.json 9 | .github 10 | .projen/ 11 | .eslintrc.json 12 | API.md 13 | cdk.out 14 | .claude/ 15 | lib/ 16 | cdk.context.json 17 | src/handler/handler.js 18 | tsconfig.dev.json 19 | -------------------------------------------------------------------------------- /src/handler/index.ts:
-------------------------------------------------------------------------------- 1 | import { 2 | CloudFormationCustomResourceCreateEvent, 3 | CloudFormationCustomResourceUpdateEvent, 4 | CloudFormationCustomResourceDeleteEvent, 5 | } from "aws-lambda" 6 | import { handler as handlerImpl } from "./handler.js" 7 | 8 | export async function handler( 9 | event: 10 | | CloudFormationCustomResourceCreateEvent 11 | | CloudFormationCustomResourceUpdateEvent 12 | | CloudFormationCustomResourceDeleteEvent 13 | ): Promise { 14 | return handlerImpl(event) 15 | } 16 | -------------------------------------------------------------------------------- /.claude/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "hooks": { 3 | "Stop": [ 4 | { 5 | "hooks": [ 6 | { 7 | "type": "command", 8 | "command": "$CLAUDE_PROJECT_DIR/.claude/hooks/lint" 9 | } 10 | ] 11 | } 12 | ], 13 | "PostToolUse": [ 14 | { 15 | "matcher": "Edit|MultiEdit|Write", 16 | "hooks": [ 17 | { 18 | "type": "command", 19 | "command": "touch $CLAUDE_PROJECT_DIR/.claude/hooks/.edited" 20 | } 21 | ] 22 | } 23 | ] 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /.projen/files.json: -------------------------------------------------------------------------------- 1 | { 2 | "files": [ 3 | ".eslintrc.json", 4 | ".gitattributes", 5 | ".github/pull_request_template.md", 6 | ".github/workflows/build.yml", 7 | ".github/workflows/pull-request-lint.yml", 8 | ".github/workflows/release.yml", 9 | ".github/workflows/upgrade-main.yml", 10 | ".gitignore", 11 | ".mergify.yml", 12 | ".projen/deps.json", 13 | ".projen/files.json", 14 | ".projen/tasks.json", 15 | "LICENSE", 16 | "tsconfig.dev.json" 17 | ], 18 | "//": "~~ Generated by projen. To modify, edit .projenrc.ts and run \"npx projen\"." 19 | } 20 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | # ~~ Generated by projen. To modify, edit .projenrc.ts and run "npx projen". 
2 | cdk.context.json 3 | .idea/ 4 | cdk.out/ 5 | .envrc 6 | .env 7 | CONVENTIONS.md 8 | src/handler/handler.js 9 | *~ 10 | .claude/hooks/.edited 11 | /lambda/ 12 | /.projen/ 13 | /test-reports/ 14 | junit.xml 15 | /coverage/ 16 | permissions-backup.acl 17 | /dist/changelog.md 18 | /dist/version.txt 19 | /.mergify.yml 20 | /test/ 21 | /tsconfig.dev.json 22 | /src/ 23 | !/lib/ 24 | !/lib/**/*.js 25 | !/lib/**/*.d.ts 26 | dist 27 | /tsconfig.json 28 | /.github/ 29 | /.vscode/ 30 | /.idea/ 31 | /.projenrc.js 32 | tsconfig.tsbuildinfo 33 | /.eslintrc.json 34 | !.jsii 35 | /.gitattributes 36 | /.projenrc.ts 37 | /projenrc 38 | -------------------------------------------------------------------------------- /lambda/engine.factory.ts: -------------------------------------------------------------------------------- 1 | import { AbstractEngine } from "./engine.abstract" 2 | import { DsqlEngine } from "./engine.dsql" 3 | import { MysqlEngine } from "./engine.mysql" 4 | import { PostgresqlEngine } from "./engine.postgresql" 5 | 6 | export class EngineFactory { 7 | static createEngine(engine: string): AbstractEngine { 8 | switch (engine.toLowerCase()) { 9 | case "dsql": 10 | return new DsqlEngine() 11 | case "postgres": 12 | case "postgresql": 13 | case "aurora-postgresql": 14 | return new PostgresqlEngine() 15 | case "mysql": 16 | case "mariadb": 17 | case "aurora-mysql": 18 | return new MysqlEngine() 19 | default: 20 | throw new Error(`Unsupported database engine: ${engine}`) 21 | } 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /.mergify.yml: -------------------------------------------------------------------------------- 1 | # ~~ Generated by projen. To modify, edit .projenrc.ts and run "npx projen". 2 | 3 | queue_rules: 4 | - name: default 5 | update_method: merge 6 | queue_conditions: 7 | - "#approved-reviews-by>=1" 8 | - -label~=(do-not-merge) 9 | - status-success=build 10 | - status-success=package-js 11 | merge_method: squash 12 | commit_message_template: |- 13 | {{ title }} (#{{ number }}) 14 | 15 | {{ body }} 16 | pull_request_rules: 17 | - name: Automatic merge on approval and successful build 18 | actions: 19 | delete_head_branch: {} 20 | queue: 21 | name: default 22 | conditions: 23 | - "#approved-reviews-by>=1" 24 | - -label~=(do-not-merge) 25 | - status-success=build 26 | - status-success=package-js 27 | merge_queue: 28 | max_parallel_checks: 1 29 | -------------------------------------------------------------------------------- /cdk.json: -------------------------------------------------------------------------------- 1 | { 2 | "app": "npx ts-node --prefer-ts-exts test/app", 3 | "watch": { 4 | "include": ["**"], 5 | "exclude": [ 6 | "README.md", 7 | "cdk*.json", 8 | "**/*.d.ts", 9 | "**/*.js", 10 | "tsconfig.json", 11 | "package*.json", 12 | "yarn.lock", 13 | "node_modules", 14 | "test" 15 | ] 16 | }, 17 | "context": { 18 | "@aws-cdk/aws-apigateway:usagePlanKeyOrderInsensitiveId": true, 19 | "@aws-cdk/core:stackRelativeExports": true, 20 | "@aws-cdk/aws-rds:lowercaseDbIdentifier": true, 21 | "@aws-cdk/aws-lambda:recognizeVersionProps": true, 22 | "@aws-cdk/aws-cloudfront:defaultSecurityPolicyTLSv1.2_2021": true, 23 | "@aws-cdk-containers/ecs-service-extensions:enableDefaultLogDriver": true, 24 | "@aws-cdk/aws-ec2:uniqueImdsv2TemplateName": true, 25 | "@aws-cdk/core:target-partitions": ["aws", "aws-cn"] 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /.gitattributes: 
-------------------------------------------------------------------------------- 1 | # ~~ Generated by projen. To modify, edit .projenrc.ts and run "npx projen". 2 | 3 | * text=auto eol=lf 4 | *.snap linguist-generated 5 | /.eslintrc.json linguist-generated 6 | /.gitattributes linguist-generated 7 | /.github/pull_request_template.md linguist-generated 8 | /.github/workflows/build.yml linguist-generated 9 | /.github/workflows/pull-request-lint.yml linguist-generated 10 | /.github/workflows/release.yml linguist-generated 11 | /.github/workflows/upgrade-main.yml linguist-generated 12 | /.gitignore linguist-generated 13 | /.mergify.yml linguist-generated 14 | /.npmignore linguist-generated 15 | /.projen/** linguist-generated 16 | /.projen/deps.json linguist-generated 17 | /.projen/files.json linguist-generated 18 | /.projen/tasks.json linguist-generated 19 | /LICENSE linguist-generated 20 | /package-lock.json linguist-generated 21 | /package.json linguist-generated 22 | /tsconfig.dev.json linguist-generated -------------------------------------------------------------------------------- /.github/workflows/pull-request-lint.yml: -------------------------------------------------------------------------------- 1 | # ~~ Generated by projen. To modify, edit .projenrc.ts and run "npx projen". 2 | 3 | name: pull-request-lint 4 | on: 5 | pull_request_target: 6 | types: 7 | - labeled 8 | - opened 9 | - synchronize 10 | - reopened 11 | - ready_for_review 12 | - edited 13 | merge_group: {} 14 | jobs: 15 | validate: 16 | name: Validate PR title 17 | runs-on: ubuntu-latest 18 | permissions: 19 | pull-requests: write 20 | if: (github.event_name == 'pull_request' || github.event_name == 'pull_request_target') 21 | steps: 22 | - uses: amannn/action-semantic-pull-request@v6 23 | env: 24 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 25 | with: 26 | types: |- 27 | feat 28 | fix 29 | chore 30 | test 31 | vendor 32 | requireScope: false 33 | -------------------------------------------------------------------------------- /.claude/hooks/lint: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -o errexit -o pipefail -o nounset 4 | 5 | # Script claude should use for linting. 6 | 7 | # Make sure we're in the root 8 | if [ ! -z "${CLAUDE_PROJECT_DIR:-}" ] 9 | then 10 | cd $CLAUDE_PROJECT_DIR 11 | fi 12 | 13 | # No file edited, leave 14 | if [ ! -e ".claude/hooks/.edited" ] 15 | then 16 | exit 0 17 | fi 18 | 19 | # Read hook input from stdin and check for stop_hook_active flag 20 | if command -v jq >/dev/null 2>&1; then 21 | input=$(cat) 22 | stop_hook_active=$(echo "$input" | jq -r '.stop_hook_active // false') 23 | if [ "$stop_hook_active" = "true" ]; then 24 | exit 0 25 | fi 26 | fi 27 | 28 | format=$(npx projen eslint) 29 | 30 | # Capture stdout and check exit code for the main linting command 31 | if ! 
output=$(npx projen typecheck 2>&1); then 32 | echo "$output" >&2 33 | exit 2 34 | fi 35 | 36 | # We're done, remove .edited 37 | rm .claude/hooks/.edited 38 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "outDir": "lib", 4 | "rootDir": "src", 5 | "declarationMap": false, 6 | "inlineSourceMap": true, 7 | "inlineSources": true, 8 | "alwaysStrict": true, 9 | "declaration": true, 10 | "incremental": true, 11 | "lib": [ 12 | "es2020" 13 | ], 14 | "module": "commonjs", 15 | "noEmitOnError": true, 16 | "noFallthroughCasesInSwitch": true, 17 | "noImplicitAny": true, 18 | "noImplicitReturns": true, 19 | "noImplicitThis": true, 20 | "noUnusedLocals": true, 21 | "noUnusedParameters": true, 22 | "resolveJsonModule": true, 23 | "skipLibCheck": true, 24 | "strict": true, 25 | "strictNullChecks": true, 26 | "strictPropertyInitialization": true, 27 | "stripInternal": false, 28 | "target": "es2020", 29 | "composite": false, 30 | "tsBuildInfoFile": "lib/tsconfig.tsbuildinfo" 31 | }, 32 | "include": [ 33 | "src/**/*.ts" 34 | ], 35 | "exclude": [ 36 | "node_modules" 37 | ], 38 | "_generated_by_jsii_": "Generated by jsii - safe to delete, and ideally should be in .gitignore" 39 | } -------------------------------------------------------------------------------- /tsconfig.dev.json: -------------------------------------------------------------------------------- 1 | // ~~ Generated by projen. To modify, edit .projenrc.ts and run "npx projen". 2 | { 3 | "compilerOptions": { 4 | "alwaysStrict": true, 5 | "declaration": true, 6 | "esModuleInterop": true, 7 | "experimentalDecorators": true, 8 | "inlineSourceMap": true, 9 | "inlineSources": true, 10 | "lib": [ 11 | "es2020" 12 | ], 13 | "module": "CommonJS", 14 | "noEmitOnError": false, 15 | "noFallthroughCasesInSwitch": true, 16 | "noImplicitAny": true, 17 | "noImplicitReturns": true, 18 | "noImplicitThis": true, 19 | "noUnusedLocals": true, 20 | "noUnusedParameters": true, 21 | "resolveJsonModule": true, 22 | "strict": true, 23 | "strictNullChecks": true, 24 | "strictPropertyInitialization": true, 25 | "stripInternal": true, 26 | "target": "es2022", 27 | "noImplicitOverride": true, 28 | "noUncheckedIndexedAccess": true 29 | }, 30 | "include": [ 31 | "src/**/*.ts", 32 | "test/**/*.ts", 33 | "lambda/**/*.ts", 34 | ".projenrc.ts", 35 | "projenrc/**/*.ts" 36 | ], 37 | "exclude": [ 38 | "node_modules" 39 | ] 40 | } 41 | -------------------------------------------------------------------------------- /.conform.yaml: -------------------------------------------------------------------------------- 1 | policies: 2 | - type: commit 3 | spec: 4 | header: 5 | length: 140 6 | imperative: true 7 | case: lower 8 | invalidLastCharacters: . 
9 | body: 10 | required: false 11 | dco: false 12 | gpg: false 13 | spellcheck: 14 | locale: US_nz 15 | maximumOfOneCommit: false 16 | conventional: 17 | types: 18 | - "build" 19 | - "chore" 20 | - "ci" 21 | - "debug" 22 | - "docs" 23 | - "refactor" 24 | - "test" 25 | - "vendor" 26 | scopes: 27 | - "provider" 28 | - "database" 29 | - "schema" 30 | - "role" 31 | - "sql" 32 | - "iam-grant" 33 | - "dsql" 34 | - "postgres" 35 | - "mysql" 36 | descriptionLength: 160 37 | - type: license 38 | spec: 39 | skipPaths: 40 | - .git/ 41 | - .build*/ 42 | includeSuffixes: 43 | - .ext 44 | excludeSuffixes: 45 | - .exclude-ext-prefix.ext 46 | header: | 47 | This is the contents of a license header. 48 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # ~~ Generated by projen. To modify, edit .projenrc.ts and run "npx projen". 2 | !/.gitattributes 3 | !/.projen/tasks.json 4 | !/.projen/deps.json 5 | !/.projen/files.json 6 | !/.github/workflows/pull-request-lint.yml 7 | !/package.json 8 | !/LICENSE 9 | !/.npmignore 10 | logs 11 | *.log 12 | npm-debug.log* 13 | yarn-debug.log* 14 | yarn-error.log* 15 | lerna-debug.log* 16 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 17 | pids 18 | *.pid 19 | *.seed 20 | *.pid.lock 21 | lib-cov 22 | coverage 23 | *.lcov 24 | .nyc_output 25 | build/Release 26 | node_modules/ 27 | jspm_packages/ 28 | *.tsbuildinfo 29 | .eslintcache 30 | *.tgz 31 | .yarn-integrity 32 | .cache 33 | cdk.context.json 34 | .idea/ 35 | cdk.out/ 36 | .envrc 37 | .env 38 | CONVENTIONS.md 39 | src/handler/handler.js 40 | *~ 41 | .claude/hooks/.edited 42 | /test-reports/ 43 | junit.xml 44 | /coverage/ 45 | !/.github/workflows/build.yml 46 | /dist/changelog.md 47 | /dist/version.txt 48 | !/.github/workflows/release.yml 49 | !/.mergify.yml 50 | !/.github/workflows/upgrade-main.yml 51 | !/.github/pull_request_template.md 52 | !/test/ 53 | !/tsconfig.dev.json 54 | !/src/ 55 | /lib 56 | /dist/ 57 | !/.eslintrc.json 58 | .jsii 59 | tsconfig.json 60 | !/.projenrc.ts 61 | -------------------------------------------------------------------------------- /src/iam-grant.ts: -------------------------------------------------------------------------------- 1 | import { CustomResource } from "aws-cdk-lib" 2 | import { Construct } from "constructs" 3 | import { RdsSqlResource } from "./enum" 4 | import { IProvider, DatabaseEngine } from "./provider" 5 | 6 | export interface IamGrantProps { 7 | /** 8 | * Provider. 9 | */ 10 | readonly provider: IProvider 11 | 12 | /** 13 | * Database role name to grant IAM access to. 14 | */ 15 | readonly roleName: string 16 | 17 | /** 18 | * IAM resource ARN (role, user, or other IAM principal). 19 | */ 20 | readonly resourceArn: string 21 | } 22 | 23 | export class IamGrant extends CustomResource { 24 | constructor(scope: Construct, id: string, props: IamGrantProps) { 25 | // IAM grants are only supported on DSQL 26 | if (props.provider.engine !== DatabaseEngine.DSQL) { 27 | throw new Error( 28 | "IAM grants are only supported with DSQL clusters. Use regular database authentication for RDS/Aurora clusters." 
29 | ) 30 | } 31 | 32 | super(scope, id, { 33 | serviceToken: props.provider.serviceToken, 34 | resourceType: "Custom::DsqlIamGrant", 35 | properties: { 36 | Resource: RdsSqlResource.IAM_GRANT, 37 | RoleName: props.roleName, 38 | ResourceArn: props.resourceArn, 39 | }, 40 | }) 41 | this.node.addDependency(props.provider) 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /test/query-lambda.ts: -------------------------------------------------------------------------------- 1 | import { DsqlSigner } from "@aws-sdk/dsql-signer" 2 | import { Client } from "pg" 3 | 4 | export const handler = async (): Promise => { 5 | const host = process.env.PGHOST 6 | const dbName = process.env.PGDATABASE 7 | const dbUser = process.env.PGUSER 8 | 9 | if (!host) throw new Error("PGHOST must be set") 10 | if (!dbName) throw new Error("PGDATABASE must be set") 11 | if (!dbUser) throw new Error("PGUSER must be set") 12 | 13 | const region = process.env.AWS_REGION || process.env.AWS_DEFAULT_REGION || "us-west-2" 14 | 15 | // Generate DSQL auth token using AWS SDK 16 | const signer = new DsqlSigner({ 17 | hostname: host, 18 | region, 19 | }) 20 | 21 | const authToken = await signer.getDbConnectAuthToken() 22 | 23 | const client = new Client({ 24 | host, 25 | port: 5432, 26 | user: dbUser, 27 | database: dbName, 28 | password: authToken, 29 | ssl: { rejectUnauthorized: false }, 30 | }) 31 | 32 | await client.connect() 33 | 34 | const result = await client.query("SELECT COUNT(*) FROM myschema.test_table") 35 | const count = result.rows[0].count 36 | 37 | await client.end() 38 | 39 | return { 40 | statusCode: 200, 41 | body: JSON.stringify({ 42 | message: `Found ${count} rows in myschema.test_table`, 43 | count: count, 44 | }), 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /src/sql.ts: -------------------------------------------------------------------------------- 1 | import { CustomResource } from "aws-cdk-lib" 2 | import { Construct } from "constructs" 3 | import { IDatabase } from "./database" 4 | import { RdsSqlResource } from "./enum" 5 | import { IProvider } from "./provider" 6 | 7 | export interface SqlProps { 8 | /** 9 | * Provider. 10 | */ 11 | readonly provider: IProvider 12 | 13 | /** 14 | * Optional database. 15 | * 16 | * @default - use default database 17 | */ 18 | readonly database?: IDatabase 19 | 20 | /** 21 | * SQL. 22 | */ 23 | readonly statement?: string 24 | 25 | /** 26 | * Optional statement to be executed when the resource is deleted 27 | */ 28 | readonly rollback?: string 29 | } 30 | 31 | export class Sql extends CustomResource { 32 | constructor(scope: Construct, id: string, props: SqlProps) { 33 | super(scope, id, { 34 | serviceToken: props.provider.serviceToken, 35 | properties: { 36 | Resource: RdsSqlResource.SQL, 37 | ...(props.provider.secret ? { SecretArn: props.provider.secret.secretArn } : {}), 38 | DatabaseName: props.database ?
props.database.databaseName : undefined, 39 | Statement: props.statement, 40 | Rollback: props.rollback, 41 | }, 42 | }) 43 | this.node.addDependency(props.provider) 44 | if (props.database) this.node.addDependency(props.database) 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /test/vpc.ts: -------------------------------------------------------------------------------- 1 | import * as ec2 from "aws-cdk-lib/aws-ec2" 2 | import { Construct } from "constructs" 3 | 4 | export class Vpc extends Construct { 5 | vpc: ec2.IVpc 6 | 7 | constructor(scope: Construct, id: string) { 8 | super(scope, id) 9 | 10 | // Use an existing vpc if passed in, else create one 11 | const vpc_id = this.node.tryGetContext("vpc-id") 12 | if (vpc_id) { 13 | this.vpc = ec2.Vpc.fromLookup(this, "Vpc", { 14 | vpcId: vpc_id, 15 | }) 16 | } else { 17 | this.vpc = new ec2.Vpc(this, "Vpc", { 18 | vpcName: "cdk-rds-sql", 19 | ipAddresses: ec2.IpAddresses.cidr("192.168.249.0/24"), 20 | maxAzs: 2, 21 | natGateways: 0, 22 | createInternetGateway: false, 23 | subnetConfiguration: [ 24 | { 25 | cidrMask: 28, 26 | name: "rds", 27 | subnetType: ec2.SubnetType.PRIVATE_ISOLATED, 28 | }, 29 | ], 30 | }) 31 | 32 | // Add VPC endpoint for Secrets Manager to allow Lambda to 33 | // access secrets without internet access 34 | this.vpc.addInterfaceEndpoint("SecretsManagerEndpoint", { 35 | service: ec2.InterfaceVpcEndpointAwsService.SECRETS_MANAGER, 36 | }) 37 | // Add VPC endpoint for SSM to allow Lambda to 38 | // access secrets without internet access 39 | this.vpc.addInterfaceEndpoint("SSMEndpoint", { 40 | service: ec2.InterfaceVpcEndpointAwsService.SSM, 41 | }) 42 | } 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /src/schema.ts: -------------------------------------------------------------------------------- 1 | import { CustomResource } from "aws-cdk-lib" 2 | import { Construct } from "constructs" 3 | import { IDatabase } from "./database" 4 | import { RdsSqlResource } from "./enum" 5 | import { IProvider } from "./provider" 6 | import { Role } from "./role" 7 | 8 | export interface SchemaProps { 9 | /** 10 | * Provider. 11 | */ 12 | readonly provider: IProvider 13 | 14 | /** 15 | * Optional database. 16 | * 17 | * @default - use default database 18 | */ 19 | readonly database?: IDatabase 20 | 21 | /** 22 | * Schema name. 23 | */ 24 | readonly schemaName: string 25 | 26 | /** 27 | * Optional role which will be granted usage and create permissions 28 | * to this schema. This way the role can read its own tables, but 29 | * cannot see or access tables created by others. 30 | */ 31 | readonly role?: Role 32 | } 33 | 34 | export class Schema extends CustomResource { 35 | public readonly schemaName: string 36 | 37 | constructor(scope: Construct, id: string, props: SchemaProps) { 38 | super(scope, id, { 39 | serviceToken: props.provider.serviceToken, 40 | properties: { 41 | Resource: RdsSqlResource.SCHEMA, 42 | ResourceId: props.schemaName, 43 | ...(props.provider.secret ? { SecretArn: props.provider.secret.secretArn } : {}), 44 | DatabaseName: props.database ? props.database.databaseName : undefined, 45 | RoleName: props.role ? 
props.role.roleName : undefined, 46 | }, 47 | }) 48 | this.node.addDependency(props.provider) 49 | this.schemaName = props.schemaName 50 | if (props.database) this.node.addDependency(props.database) 51 | if (props.role) this.node.addDependency(props.role) 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /test/app.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | import "source-map-support/register" 3 | import * as cdk from "aws-cdk-lib" 4 | import * as rds from "aws-cdk-lib/aws-rds" 5 | import * as dsqlStack from "./dsql-stack" 6 | import * as serverlessv2 from "./serverlessv2-stack" 7 | 8 | const app = new cdk.App() 9 | 10 | // Determine engine based on context 11 | let engine: rds.IClusterEngine 12 | const engineType = app.node.tryGetContext("engine") 13 | 14 | switch (engineType?.toLowerCase()) { 15 | case "mysql": 16 | engine = rds.DatabaseClusterEngine.auroraMysql({ 17 | version: rds.AuroraMysqlEngineVersion.VER_3_04_0, 18 | }) 19 | console.log("Using MySQL engine") 20 | break 21 | case "postgresql": 22 | engine = rds.DatabaseClusterEngine.auroraPostgres({ 23 | version: rds.AuroraPostgresEngineVersion.VER_15_3, 24 | }) 25 | console.log("Using PostgreSQL engine") 26 | break 27 | default: 28 | // Default to PostgreSQL 29 | engine = rds.DatabaseClusterEngine.auroraPostgres({ 30 | version: rds.AuroraPostgresEngineVersion.VER_15_3, 31 | }) 32 | console.log("Using default PostgreSQL engine") 33 | } 34 | 35 | new serverlessv2.TestStack(app, "TestRdsSqlServerlessV2Stack", { 36 | env: { 37 | account: process.env.CDK_DEFAULT_ACCOUNT, 38 | region: process.env.CDK_DEFAULT_REGION, 39 | }, 40 | description: "Feel free to delete", 41 | logger: true, 42 | ssl: true, 43 | engine: engine, 44 | }) 45 | 46 | // Create DSQL test stack 47 | new dsqlStack.TestDsqlStack(app, "TestRdsSqlDsqlStack", { 48 | env: { 49 | account: process.env.CDK_DEFAULT_ACCOUNT, 50 | region: process.env.CDK_DEFAULT_REGION, 51 | }, 52 | description: "DSQL test stack - feel free to delete", 53 | logger: true, 54 | }) 55 | -------------------------------------------------------------------------------- /test/instance1-stack.ts: -------------------------------------------------------------------------------- 1 | import { Duration, RemovalPolicy, Stack, StackProps } from "aws-cdk-lib" 2 | import * as ec2 from "aws-cdk-lib/aws-ec2" 3 | import * as rds from "aws-cdk-lib/aws-rds" 4 | import { Construct } from "constructs" 5 | import { Vpc } from "./vpc" 6 | import { Provider, Database, Role, Schema, Sql } from "../src/index" 7 | 8 | export class TestInstanceStack extends Stack { 9 | constructor(scope: Construct, id: string, props: StackProps) { 10 | super(scope, id, props) 11 | 12 | const vpc = new Vpc(this, "Vpc") 13 | 14 | const instance = new rds.DatabaseInstance(this, "Instance", { 15 | vpc: vpc.vpc, 16 | vpcSubnets: { 17 | subnetType: ec2.SubnetType.PRIVATE_ISOLATED, 18 | }, 19 | engine: rds.DatabaseInstanceEngine.postgres({ 20 | version: rds.PostgresEngineVersion.VER_17_2, 21 | }), 22 | databaseName: "example", 23 | credentials: rds.Credentials.fromGeneratedSecret("pgroot"), 24 | instanceType: ec2.InstanceType.of( 25 | ec2.InstanceClass.BURSTABLE3, 26 | ec2.InstanceSize.MICRO 27 | ), 28 | removalPolicy: RemovalPolicy.DESTROY, 29 | }) 30 | 31 | const provider = new Provider(this, "Provider", { 32 | vpc: vpc.vpc, 33 | cluster: instance, 34 | secret: instance.secret!, 35 | timeout: Duration.seconds(10), 36 | functionProps: 
{ 37 | allowPublicSubnet: true, 38 | }, 39 | }) 40 | Database.fromDatabaseName(this, "DefaultDatabase", "example") 41 | 42 | new Schema(this, "Schema", { 43 | provider: provider, 44 | schemaName: "myschema", 45 | }) 46 | const role = new Role(this, "Role", { 47 | provider: provider, 48 | roleName: "myrole", 49 | databaseName: "mydb", 50 | }) 51 | const database = new Database(this, "Database", { 52 | provider: provider, 53 | databaseName: "mydb", 54 | owner: role, 55 | }) 56 | new Sql(this, "Sql", { 57 | provider: provider, 58 | database: database, 59 | statement: "create table t (i int)", 60 | rollback: "drop table t", 61 | }) 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /.github/workflows/claude-code-review.yml: -------------------------------------------------------------------------------- 1 | name: Claude Code Review 2 | 3 | on: 4 | pull_request: 5 | types: [opened, synchronize] 6 | # Optional: Only run on specific file changes 7 | # paths: 8 | # - "src/**/*.ts" 9 | # - "src/**/*.tsx" 10 | # - "src/**/*.js" 11 | # - "src/**/*.jsx" 12 | 13 | jobs: 14 | claude-review: 15 | # Optional: Filter by PR author 16 | # if: | 17 | # github.event.pull_request.user.login == 'external-contributor' || 18 | # github.event.pull_request.user.login == 'new-developer' || 19 | # github.event.pull_request.author_association == 'FIRST_TIME_CONTRIBUTOR' 20 | 21 | runs-on: ubuntu-latest 22 | permissions: 23 | contents: read 24 | pull-requests: read 25 | issues: read 26 | id-token: write 27 | 28 | steps: 29 | - name: Checkout repository 30 | uses: actions/checkout@v4 31 | with: 32 | fetch-depth: 1 33 | 34 | - name: Run Claude Code Review 35 | id: claude-review 36 | uses: anthropics/claude-code-action@v1 37 | with: 38 | claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} 39 | prompt: | 40 | Please review this pull request and provide feedback on: 41 | - Code quality and best practices 42 | - Potential bugs or issues 43 | - Performance considerations 44 | - Security concerns 45 | - Test coverage 46 | 47 | Use the repository's CLAUDE.md for guidance on style and conventions. Be constructive and helpful in your feedback. 48 | 49 | Use `gh pr comment` with your Bash tool to leave your review as a comment on the PR. 
50 | 51 | # See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md 52 | # or https://docs.anthropic.com/en/docs/claude-code/sdk#command-line for available options 53 | claude_args: '--allowed-tools "Bash(gh issue view:*),Bash(gh search:*),Bash(gh issue list:*),Bash(gh pr comment:*),Bash(gh pr diff:*),Bash(gh pr view:*),Bash(gh pr list:*)"' 54 | 55 | -------------------------------------------------------------------------------- /.github/workflows/claude.yml: -------------------------------------------------------------------------------- 1 | name: Claude Code 2 | 3 | on: 4 | issue_comment: 5 | types: [created] 6 | pull_request_review_comment: 7 | types: [created] 8 | issues: 9 | types: [opened, assigned] 10 | pull_request_review: 11 | types: [submitted] 12 | 13 | jobs: 14 | claude: 15 | if: | 16 | (github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) || 17 | (github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) || 18 | (github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) || 19 | (github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude'))) 20 | runs-on: ubuntu-latest 21 | permissions: 22 | contents: read 23 | pull-requests: read 24 | issues: read 25 | id-token: write 26 | actions: read # Required for Claude to read CI results on PRs 27 | steps: 28 | - name: Checkout repository 29 | uses: actions/checkout@v4 30 | with: 31 | fetch-depth: 1 32 | 33 | - name: Run Claude Code 34 | id: claude 35 | uses: anthropics/claude-code-action@v1 36 | with: 37 | claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} 38 | 39 | # This is an optional setting that allows Claude to read CI results on PRs 40 | additional_permissions: | 41 | actions: read 42 | 43 | # Optional: Give a custom prompt to Claude. If this is not specified, Claude will perform the instructions specified in the comment that tagged it. 44 | # prompt: 'Update the pull request description to include a summary of changes.' 45 | 46 | # Optional: Add claude_args to customize behavior and configuration 47 | # See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md 48 | # or https://docs.anthropic.com/en/docs/claude-code/sdk#command-line for available options 49 | # claude_args: '--model claude-opus-4-1-20250805 --allowed-tools Bash(gh pr:*)' 50 | 51 | -------------------------------------------------------------------------------- /src/role.custom-resource.ts: -------------------------------------------------------------------------------- 1 | import { CustomResource } from "aws-cdk-lib" 2 | import { Construct } from "constructs" 3 | import { IDatabase } from "./database" 4 | import { RdsSqlResource } from "./enum" 5 | import { IProvider } from "./provider" 6 | 7 | export interface RoleProps { 8 | /** 9 | * Provider. 10 | */ 11 | readonly provider: IProvider 12 | 13 | /** 14 | * SQL. 15 | */ 16 | readonly roleName: string 17 | 18 | /** 19 | * A new secret is created for this user. 20 | * 21 | * Optionally encrypt it with the given key. 22 | */ 23 | readonly passwordArn: string 24 | 25 | /** 26 | * Optional database this user is expected to use. 27 | * 28 | * Specify none of `database` or `databaseName` or only one of them. 29 | * 30 | * @default no connection to any database is granted 31 | */ 32 | readonly database?: IDatabase 33 | 34 | /** 35 | * Optional database name this user is expected to use. 
36 | * 37 | * Specify none of `database` or `databaseName` or only one of them. 38 | * 39 | * @default no connection to any database is granted 40 | */ 41 | readonly databaseName?: string 42 | 43 | /** 44 | * Enable IAM authentication for this role. 45 | * 46 | * @default false - use password authentication 47 | */ 48 | readonly enableIamAuth?: boolean 49 | } 50 | 51 | export class Role extends CustomResource { 52 | constructor(scope: Construct, id: string, props: RoleProps) { 53 | if (props.database && props.databaseName) { 54 | throw "Specify either database or databaseName" 55 | } 56 | super(scope, id, { 57 | serviceToken: props.provider.serviceToken, 58 | properties: { 59 | Resource: RdsSqlResource.ROLE, 60 | ResourceId: props.roleName, 61 | ...(props.provider.secret ? { SecretArn: props.provider.secret.secretArn } : {}), 62 | PasswordArn: props.passwordArn, 63 | DatabaseName: props.database ? props.database.databaseName : props.databaseName, 64 | EnableIamAuth: props.enableIamAuth, 65 | }, 66 | }) 67 | this.node.addDependency(props.provider) 68 | if (props.database) this.node.addDependency(props.database) 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /src/database.ts: -------------------------------------------------------------------------------- 1 | import { CustomResource } from "aws-cdk-lib" 2 | import { Construct } from "constructs" 3 | import { RdsSqlResource } from "./enum" 4 | import { IProvider, DatabaseEngine } from "./provider" 5 | import { Role } from "./role" 6 | 7 | interface DatabaseAttributes { 8 | /** 9 | * Name of database to create. 10 | */ 11 | readonly databaseName: string 12 | 13 | /** 14 | * Optional database owner. 15 | */ 16 | readonly owner?: Role 17 | } 18 | 19 | export interface DatabaseProps extends DatabaseAttributes { 20 | /** 21 | * Provider. 22 | */ 23 | readonly provider: IProvider 24 | } 25 | 26 | export interface IDatabase { 27 | readonly databaseName: string 28 | } 29 | 30 | class ImportedDatabase extends Construct implements IDatabase { 31 | public readonly databaseName: string 32 | 33 | constructor(scope: Construct, id: string, props: DatabaseAttributes) { 34 | super(scope, id) 35 | this.databaseName = props.databaseName 36 | } 37 | } 38 | 39 | export class Database extends CustomResource implements IDatabase { 40 | /** 41 | * Return a Database based upon name only. Use for importing existing databases. 42 | */ 43 | static fromDatabaseName(scope: Construct, id: string, databaseName: string): IDatabase { 44 | return new ImportedDatabase(scope, id, { 45 | databaseName: databaseName, 46 | }) 47 | } 48 | 49 | public readonly databaseName: string 50 | 51 | constructor(scope: Construct, id: string, props: DatabaseProps) { 52 | // Check if using DSQL provider and forbid database creation 53 | if (props.provider.engine === DatabaseEngine.DSQL) { 54 | throw new Error( 55 | "Database creation is not supported with DSQL. DSQL always uses 'postgres' database." 56 | ) 57 | } 58 | 59 | super(scope, id, { 60 | serviceToken: props.provider.serviceToken, 61 | properties: { 62 | Resource: RdsSqlResource.DATABASE, 63 | ResourceId: props.databaseName, 64 | ...(props.provider.secret ? 
{ SecretArn: props.provider.secret.secretArn } : {}), 65 | Owner: props.owner?.roleName, 66 | }, 67 | }) 68 | this.node.addDependency(props.provider) 69 | this.databaseName = props.databaseName 70 | if (props.owner) { 71 | this.node.addDependency(props.owner) 72 | } 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | default_stages: [pre-commit] 2 | default_language_version: 3 | node: system 4 | fail_fast: true 5 | repos: 6 | - repo: https://github.com/pre-commit/pre-commit-hooks 7 | rev: v4.6.0 8 | hooks: 9 | - id: check-yaml 10 | - id: end-of-file-fixer 11 | exclude: (tsconfig.json|API.md|.mergify.yml|.projen/|.github|tsconfig.dev.json|.gitignore|LICENSE|.gitattributes|.eslintrc.json) 12 | - id: trailing-whitespace 13 | - id: check-json 14 | exclude: (tsconfig.json|.eslintrc.json|tsconfig.dev.json) 15 | - id: detect-aws-credentials 16 | - id: detect-private-key 17 | - repo: https://github.com/talos-systems/conform 18 | rev: v0.1.0-alpha.26 19 | hooks: 20 | - id: conform 21 | stages: 22 | - commit-msg 23 | - repo: https://github.com/pre-commit/mirrors-prettier 24 | rev: "v2.7.1" 25 | hooks: 26 | - id: prettier 27 | exclude: (.mergify.yml|.projen/|.github|tsconfig.dev.json|.gitignore|LICENSE|.gitattributes|.eslintrc.json) 28 | stages: 29 | - pre-commit 30 | - repo: local 31 | hooks: 32 | - id: projen 33 | name: run-projen 34 | language: system 35 | entry: sh -c 'npx projen' 36 | files: \.ts$ 37 | pass_filenames: false 38 | - repo: local 39 | hooks: 40 | - id: build-typescript 41 | exclude: .projenrc.ts 42 | name: build-typescript 43 | language: system 44 | entry: sh -c 'npx tsc --noEmit' 45 | files: \.ts$ 46 | pass_filenames: false 47 | - repo: local 48 | hooks: 49 | - id: synth 50 | name: synth 51 | language: system 52 | entry: sh -c 'npx cdk synth --quiet --context vpc-id=$VPC_ID' 53 | files: src/.*\.ts$ 54 | pass_filenames: false 55 | - repo: local 56 | hooks: 57 | - id: compile-project 58 | exclude: .projenrc.ts 59 | name: compile-project 60 | language: system 61 | entry: sh -c 'npx projen compile' 62 | files: \.ts$ 63 | pass_filenames: false 64 | - repo: local 65 | hooks: 66 | - id: test 67 | exclude: .projenrc.ts 68 | name: test 69 | language: system 70 | entry: sh -c 'npx jest --coverage=false' 71 | files: \.ts$ 72 | pass_filenames: false 73 | -------------------------------------------------------------------------------- /lambda/types.ts: -------------------------------------------------------------------------------- 1 | import { CloudFormationCustomResourceResourcePropertiesCommon } from "aws-lambda" 2 | import { RdsSqlResource } from "../src/enum" 3 | 4 | // Engine-specific interfaces (business logic properties only) 5 | export interface EngineDatabaseProperties { 6 | readonly Owner?: string 7 | readonly MasterOwner?: string 8 | } 9 | 10 | export interface EngineRoleProperties { 11 | readonly PasswordArn?: string 12 | readonly DatabaseName?: string 13 | readonly EnableIamAuth?: boolean 14 | } 15 | 16 | export interface EngineSchemaProperties { 17 | readonly DatabaseName?: string 18 | readonly RoleName?: string 19 | } 20 | 21 | export interface EngineSqlProperties { 22 | readonly DatabaseName?: string 23 | readonly Statement?: string 24 | readonly Rollback?: string 25 | } 26 | 27 | export interface EngineIamGrantProperties { 28 | readonly RoleName: string 29 | readonly ResourceArn: string 30 | } 31 | 32 | // Common 
CloudFormation properties shared by all resources 33 | export interface CommonProperties 34 | extends CloudFormationCustomResourceResourcePropertiesCommon { 35 | readonly Resource: RdsSqlResource 36 | readonly ResourceId?: string // set if we know the exact physical resource id to return; not passed by sql or iam grant 37 | readonly SecretArn?: string // SecretArn is not set for DSQL 38 | } 39 | 40 | // CloudFormation-specific properties (engine properties + CloudFormation metadata) 41 | export interface DatabaseProperties extends CommonProperties, EngineDatabaseProperties { 42 | readonly Resource: RdsSqlResource.DATABASE 43 | } 44 | 45 | export interface RoleProperties extends CommonProperties { 46 | readonly Resource: RdsSqlResource.ROLE 47 | readonly PasswordArn?: string 48 | readonly DatabaseName?: string 49 | readonly EnableIamAuth?: string // CloudFormation passes boolean as string 50 | } 51 | 52 | export interface SchemaProperties extends CommonProperties, EngineSchemaProperties { 53 | readonly Resource: RdsSqlResource.SCHEMA 54 | } 55 | 56 | export interface SqlProperties extends CommonProperties, EngineSqlProperties { 57 | readonly Resource: RdsSqlResource.SQL 58 | } 59 | 60 | export interface IamGrantProperties extends CommonProperties, EngineIamGrantProperties { 61 | readonly Resource: RdsSqlResource.IAM_GRANT 62 | readonly ResourceArn: string 63 | } 64 | 65 | // Parameter password specific properties 66 | export interface ParameterPasswordProperties extends CommonProperties { 67 | readonly Resource: RdsSqlResource.PARAMETER_PASSWORD 68 | readonly PasswordArn: string 69 | readonly ParameterName: string 70 | } 71 | 72 | // Union type of all resource properties 73 | export type ResourceProperties = 74 | | DatabaseProperties 75 | | RoleProperties 76 | | SchemaProperties 77 | | SqlProperties 78 | | IamGrantProperties 79 | | ParameterPasswordProperties 80 | 81 | // Custom resource response interface 82 | export interface CustomResourceResponse { 83 | PhysicalResourceId?: string 84 | Data?: any 85 | NoEcho?: boolean 86 | } 87 | -------------------------------------------------------------------------------- /.github/workflows/upgrade-main.yml: -------------------------------------------------------------------------------- 1 | # ~~ Generated by projen. To modify, edit .projenrc.ts and run "npx projen". 2 | 3 | name: upgrade-main 4 | on: 5 | workflow_dispatch: {} 6 | schedule: 7 | - cron: 0 0 * * * 8 | jobs: 9 | upgrade: 10 | name: Upgrade 11 | runs-on: ubuntu-latest 12 | permissions: 13 | contents: write 14 | pull-requests: write 15 | outputs: 16 | patch_created: ${{ steps.create_patch.outputs.patch_created }} 17 | steps: 18 | - name: Checkout 19 | uses: actions/checkout@v5 20 | with: 21 | ref: main 22 | - name: Setup Node.js 23 | uses: actions/setup-node@v5 24 | with: 25 | node-version: 24.x 26 | - name: Install dependencies 27 | run: npm ci 28 | - name: Upgrade dependencies 29 | run: npx projen upgrade 30 | - name: Find mutations 31 | id: create_patch 32 | run: |- 33 | git add . 
34 | git diff --staged --patch --exit-code > repo.patch || echo "patch_created=true" >> $GITHUB_OUTPUT 35 | shell: bash 36 | working-directory: ./ 37 | - name: Upload patch 38 | if: steps.create_patch.outputs.patch_created 39 | uses: actions/upload-artifact@v4.6.2 40 | with: 41 | name: repo.patch 42 | path: repo.patch 43 | overwrite: true 44 | pr: 45 | name: Create Pull Request 46 | needs: upgrade 47 | runs-on: ubuntu-latest 48 | permissions: 49 | contents: write 50 | pull-requests: write 51 | if: ${{ needs.upgrade.outputs.patch_created }} 52 | steps: 53 | - name: Checkout 54 | uses: actions/checkout@v5 55 | with: 56 | ref: main 57 | - name: Download patch 58 | uses: actions/download-artifact@v5 59 | with: 60 | name: repo.patch 61 | path: ${{ runner.temp }} 62 | - name: Apply patch 63 | run: '[ -s ${{ runner.temp }}/repo.patch ] && git apply ${{ runner.temp }}/repo.patch || echo "Empty patch. Skipping."' 64 | - name: Set git identity 65 | run: |- 66 | git config user.name "github-actions[bot]" 67 | git config user.email "41898282+github-actions[bot]@users.noreply.github.com" 68 | - name: Create Pull Request 69 | id: create-pr 70 | uses: peter-evans/create-pull-request@v7 71 | with: 72 | token: ${{ secrets.GITHUB_TOKEN }} 73 | commit-message: |- 74 | chore(deps): upgrade dependencies 75 | 76 | Upgrades project dependencies. See details in [workflow run]. 77 | 78 | [Workflow Run]: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} 79 | 80 | ------ 81 | 82 | *Automatically created by projen via the "upgrade-main" workflow* 83 | branch: github-actions/upgrade-main 84 | title: "chore(deps): upgrade dependencies" 85 | body: |- 86 | Upgrades project dependencies. See details in [workflow run]. 87 | 88 | [Workflow Run]: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} 89 | 90 | ------ 91 | 92 | *Automatically created by projen via the "upgrade-main" workflow* 93 | author: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> 94 | committer: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> 95 | signoff: true 96 | -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | // ~~ Generated by projen. To modify, edit .projenrc.ts and run "npx projen". 
2 | { 3 | "env": { 4 | "jest": true, 5 | "node": true 6 | }, 7 | "root": true, 8 | "plugins": [ 9 | "@typescript-eslint", 10 | "import" 11 | ], 12 | "parser": "@typescript-eslint/parser", 13 | "parserOptions": { 14 | "ecmaVersion": 2018, 15 | "sourceType": "module", 16 | "project": "./tsconfig.dev.json" 17 | }, 18 | "extends": [ 19 | "plugin:import/typescript", 20 | "plugin:prettier/recommended" 21 | ], 22 | "settings": { 23 | "import/parsers": { 24 | "@typescript-eslint/parser": [ 25 | ".ts", 26 | ".tsx" 27 | ] 28 | }, 29 | "import/resolver": { 30 | "node": {}, 31 | "typescript": { 32 | "project": "./tsconfig.dev.json", 33 | "alwaysTryTypes": true 34 | } 35 | } 36 | }, 37 | "ignorePatterns": [ 38 | "*.js", 39 | "*.d.ts", 40 | "node_modules/", 41 | "*.generated.ts", 42 | "coverage", 43 | "!.projenrc.ts", 44 | "!projenrc/**/*.ts" 45 | ], 46 | "rules": { 47 | "curly": [ 48 | "error", 49 | "multi-line", 50 | "consistent" 51 | ], 52 | "@typescript-eslint/no-require-imports": "error", 53 | "import/no-extraneous-dependencies": [ 54 | "error", 55 | { 56 | "devDependencies": [ 57 | "**/test/**", 58 | "**/build-tools/**", 59 | ".projenrc.ts", 60 | "projenrc/**/*.ts" 61 | ], 62 | "optionalDependencies": false, 63 | "peerDependencies": true 64 | } 65 | ], 66 | "import/no-unresolved": [ 67 | "error" 68 | ], 69 | "import/order": [ 70 | "warn", 71 | { 72 | "groups": [ 73 | "builtin", 74 | "external" 75 | ], 76 | "alphabetize": { 77 | "order": "asc", 78 | "caseInsensitive": true 79 | } 80 | } 81 | ], 82 | "import/no-duplicates": [ 83 | "error" 84 | ], 85 | "no-shadow": [ 86 | "off" 87 | ], 88 | "@typescript-eslint/no-shadow": "error", 89 | "@typescript-eslint/no-floating-promises": "error", 90 | "no-return-await": [ 91 | "off" 92 | ], 93 | "@typescript-eslint/return-await": "error", 94 | "dot-notation": [ 95 | "error" 96 | ], 97 | "no-bitwise": [ 98 | "error" 99 | ], 100 | "@typescript-eslint/member-ordering": [ 101 | "error", 102 | { 103 | "default": [ 104 | "public-static-field", 105 | "public-static-method", 106 | "protected-static-field", 107 | "protected-static-method", 108 | "private-static-field", 109 | "private-static-method", 110 | "field", 111 | "constructor", 112 | "method" 113 | ] 114 | } 115 | ], 116 | "semi": [ 117 | "off" 118 | ], 119 | "quotes": [ 120 | "error", 121 | "double" 122 | ] 123 | }, 124 | "overrides": [ 125 | { 126 | "files": [ 127 | "lambda/*.ts" 128 | ], 129 | "rules": { 130 | "import/no-extraneous-dependencies": "off" 131 | } 132 | }, 133 | { 134 | "files": [ 135 | ".projenrc.ts" 136 | ], 137 | "rules": { 138 | "@typescript-eslint/no-require-imports": "off", 139 | "import/no-extraneous-dependencies": "off" 140 | } 141 | } 142 | ] 143 | } 144 | -------------------------------------------------------------------------------- /lambda/engine.abstract.ts: -------------------------------------------------------------------------------- 1 | import { ConnectionOptions } from "tls" 2 | import { 3 | SecretsManagerClient, 4 | GetSecretValueCommand, 5 | } from "@aws-sdk/client-secrets-manager" 6 | import { 7 | EngineDatabaseProperties, 8 | EngineRoleProperties, 9 | EngineSchemaProperties, 10 | EngineSqlProperties, 11 | } from "./types" 12 | 13 | export interface EngineConnectionConfig { 14 | host: string 15 | port: number 16 | user: string 17 | password: string 18 | database: string 19 | ssl?: boolean | ConnectionOptions 20 | } 21 | 22 | export abstract class AbstractEngine { 23 | protected log: (message?: any, ...optionalParams: any[]) => void 24 | 25 | constructor() { 26 | this.log = 
27 | process.env.LOGGER === "true" 28 | ? console.debug 29 | : (_message?: any, ..._optionalParams: any[]) => {} 30 | } 31 | 32 | abstract createDatabase( 33 | resourceId: string, 34 | props: EngineDatabaseProperties 35 | ): string | string[] 36 | abstract updateDatabase( 37 | resourceId: string, 38 | oldResourceId: string, 39 | props: EngineDatabaseProperties 40 | ): string | string[] 41 | abstract deleteDatabase(resourceId: string, masterUser: string): string | string[] 42 | 43 | abstract createRole( 44 | resourceId: string, 45 | props: EngineRoleProperties 46 | ): Promise 47 | abstract updateRole( 48 | resourceId: string, 49 | oldResourceId: string, 50 | props: EngineRoleProperties, 51 | oldProps: EngineRoleProperties 52 | ): Promise 53 | abstract deleteRole( 54 | resourceId: string, 55 | props: EngineRoleProperties 56 | ): Promise 57 | 58 | abstract createSchema( 59 | resourceId: string, 60 | props: EngineSchemaProperties 61 | ): string | string[] 62 | abstract updateSchema( 63 | resourceId: string, 64 | oldResourceId: string, 65 | props: EngineSchemaProperties 66 | ): string | string[] 67 | abstract deleteSchema( 68 | resourceId: string, 69 | props: EngineSchemaProperties 70 | ): string | string[] 71 | 72 | abstract createSql(resourceId: string, props: EngineSqlProperties): string | string[] 73 | abstract updateSql( 74 | resourceId: string, 75 | oldResourceId: string, 76 | props: EngineSqlProperties 77 | ): string | string[] 78 | abstract deleteSql(resourceId: string, props: EngineSqlProperties): string | string[] 79 | 80 | abstract createIamGrant(roleName: string, iamArn: string): string | string[] 81 | abstract updateIamGrant( 82 | roleName: string, 83 | oldRoleName: string, 84 | iamArn: string, 85 | oldIamArn: string 86 | ): string | string[] 87 | abstract deleteIamGrant(roleName: string, iamArn: string): string | string[] 88 | 89 | abstract executeSQL( 90 | sql: string | string[], 91 | config: EngineConnectionConfig 92 | ): Promise 93 | 94 | /** 95 | * Parse password field from secret. Returns void on error or if no password field exists. 96 | */ 97 | protected async getPassword(arn: string): Promise { 98 | if (!arn) return 99 | const secrets_client = new SecretsManagerClient({ 100 | requestHandler: { 101 | connectionTimeout: 5000, 102 | requestTimeout: 10000, 103 | }, 104 | }) 105 | const command = new GetSecretValueCommand({ 106 | SecretId: arn, 107 | }) 108 | const secret = await secrets_client.send(command) 109 | if (secret.SecretString) { 110 | const json = JSON.parse(secret.SecretString) 111 | return json.password 112 | } 113 | } 114 | } 115 | -------------------------------------------------------------------------------- /AGENTS.md: -------------------------------------------------------------------------------- 1 | # CLAUDE.md 2 | 3 | This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. 4 | 5 | ## Project Overview 6 | 7 | This is a projen managed project. It contains a CDK construct library for creating databases, schemas, and roles in Aurora Serverless v2, RDS Database Cluster, or Database Instance. It supports both PostgreSQL and MySQL databases and is designed for enterprise environments with isolated subnets. 
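A minimal usage sketch, trimmed from `test/instance1-stack.ts` (the stack class name and the reduced set of props are illustrative; a consumer would import from the published package rather than `../src/index`):

```typescript
import { Duration, RemovalPolicy, Stack, StackProps } from "aws-cdk-lib"
import * as ec2 from "aws-cdk-lib/aws-ec2"
import * as rds from "aws-cdk-lib/aws-rds"
import { Construct } from "constructs"
import { Provider, Database, Role, Schema, Sql } from "../src/index"

export class ExampleStack extends Stack {
  constructor(scope: Construct, id: string, props: StackProps) {
    super(scope, id, props)

    const vpc = new ec2.Vpc(this, "Vpc", { maxAzs: 2, natGateways: 0 })

    // Any RDS instance or Aurora cluster with a Secrets Manager secret works.
    const instance = new rds.DatabaseInstance(this, "Instance", {
      vpc,
      engine: rds.DatabaseInstanceEngine.postgres({
        version: rds.PostgresEngineVersion.VER_17_2,
      }),
      credentials: rds.Credentials.fromGeneratedSecret("pgroot"),
      instanceType: ec2.InstanceType.of(
        ec2.InstanceClass.BURSTABLE3,
        ec2.InstanceSize.MICRO
      ),
      removalPolicy: RemovalPolicy.DESTROY,
    })

    // The Provider hosts the Lambda handler that executes SQL against the target.
    const provider = new Provider(this, "Provider", {
      vpc,
      cluster: instance,
      secret: instance.secret!,
      timeout: Duration.seconds(10),
    })

    // Role, Database, Schema and Sql are CloudFormation custom resources
    // backed by that handler.
    const role = new Role(this, "Role", {
      provider,
      roleName: "myrole",
      databaseName: "mydb",
    })
    const database = new Database(this, "Database", {
      provider,
      databaseName: "mydb",
      owner: role,
    })
    new Schema(this, "Schema", { provider, schemaName: "myschema" })
    new Sql(this, "Sql", {
      provider,
      database,
      statement: "create table t (i int)",
      rollback: "drop table t",
    })
  }
}
```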
8 | 9 | ## Key Architecture 10 | 11 | ### Core Components 12 | 13 | - **Provider**: Main construct that creates a Lambda function handler to execute database operations 14 | - **Role**: Creates database users/roles with optional IAM authentication 15 | - **Database**: Creates databases with optional ownership assignment 16 | - **Schema**: Creates database schemas with role permissions 17 | - **Sql**: Executes arbitrary SQL statements 18 | 19 | ### Lambda Handler Architecture 20 | 21 | - **Engine Factory Pattern**: `lambda/engine.factory.ts` creates database-specific engines 22 | - **Abstract Engine**: `lambda/engine.abstract.ts` defines common interface 23 | - **Concrete Engines**: `lambda/engine.postgresql.ts` and `lambda/engine.mysql.ts` 24 | - **Main Handler**: `lambda/handler.ts` orchestrates CloudFormation custom resource operations 25 | 26 | The Lambda handler supports both PostgreSQL and MySQL databases, auto-detecting the engine type from the cluster/instance configuration. 27 | 28 | ## Development Commands 29 | 30 | ### Build and Test 31 | 32 | ```bash 33 | # After changing .projenrc.ts 34 | npx projen 35 | 36 | # Build the project 37 | npx projen build 38 | 39 | # Run all tests 40 | npx projen test 41 | 42 | # Run tests in watch mode 43 | npx projen test:watch 44 | 45 | # Run typecheck (instead of npx tsc --noEmit) 46 | npx projen typecheck 47 | 48 | # Run linting 49 | npx projen eslint 50 | 51 | # Build only the Lambda handler 52 | npx projen build:handler 53 | ``` 54 | 55 | ### Integration Testing 56 | 57 | ```bash 58 | # Deploy PostgreSQL serverless test stack 59 | npx projen integ:deploy:postgresql:serverless 60 | 61 | # Deploy MySQL serverless test stack 62 | npx projen integ:deploy:mysql:serverless 63 | 64 | # Destroy test stack 65 | npx projen integ:destroy:serverless 66 | ``` 67 | 68 | ## Testing Setup 69 | 70 | - **Framework**: Jest with TypeScript support 71 | - **Test Files**: Located in `test/` and `lambda/` directories 72 | - **Configuration**: See `jestOptions` in `.projenrc.ts`. 73 | - **Coverage**: Enabled with multiple reporters (json, lcov, clover, cobertura, text) 74 | 75 | ## Database Engine Support 76 | 77 | ### PostgreSQL 78 | 79 | - Uses `pg` library for connections 80 | - Supports schemas, roles, and databases 81 | - IAM authentication supported 82 | 83 | ### MySQL 84 | 85 | - Uses `mysql2` library for connections 86 | - Creates users with '%' host for VPC compatibility 87 | - IAM authentication supported 88 | 89 | ## Build Process 90 | 91 | The project uses projen for build management. Do not change files 92 | managed by projen, but instead change `.projenrc.ts`. After changing 93 | `.projenrc.ts` run `npx projen`. 94 | 95 | In particular: 96 | 97 | 1. Do not change `package.json`. 98 | 2. Do not change any of the github workflow files. 99 | 100 | Projen tasks: 101 | 102 | 1. `pre-compile`: Transpiles Lambda handler using esbuild 103 | 2. `compile`: TypeScript compilation 104 | 3. `post-compile`: Copies handler to lib directory 105 | 4. `eslint`: Runs linter 106 | 5. `typecheck`: Runs typecheck 107 | 108 | ## SSL/TLS Configuration 109 | 110 | - SSL is enabled by default 111 | - Global RDS certificate bundle is automatically downloaded during build 112 | - Can be disabled via `ssl: false` in Provider props 113 | 114 | ## Code style 115 | 116 | - Avoid any. 
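## Example: IAM-authenticated role

The engine notes above list IAM authentication as supported for both PostgreSQL and MySQL. A minimal sketch of opting a role into it, inside a stack wired up like the earlier example (the role and database names are placeholders):

```typescript
// Per the prop docs in src/role.custom-resource.ts, EnableIamAuth defaults
// to false (password authentication); setting it requests IAM database
// authentication for this role instead.
new Role(this, "IamRole", {
  provider,
  roleName: "app_iam_user",
  databaseName: "mydb",
  enableIamAuth: true,
})
```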
117 | -------------------------------------------------------------------------------- /CLAUDE.md: -------------------------------------------------------------------------------- 1 | # CLAUDE.md 2 | 3 | This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. 4 | 5 | ## Project Overview 6 | 7 | This is a projen managed project. It contains a CDK construct library for creating databases, schemas, and roles in Aurora Serverless v2, RDS Database Cluster, or Database Instance. It supports both PostgreSQL and MySQL databases and is designed for enterprise environments with isolated subnets. 8 | 9 | ## Key Architecture 10 | 11 | ### Core Components 12 | 13 | - **Provider**: Main construct that creates a Lambda function handler to execute database operations 14 | - **Role**: Creates database users/roles with optional IAM authentication 15 | - **Database**: Creates databases with optional ownership assignment 16 | - **Schema**: Creates database schemas with role permissions 17 | - **Sql**: Executes arbitrary SQL statements 18 | 19 | ### Lambda Handler Architecture 20 | 21 | - **Engine Factory Pattern**: `lambda/engine.factory.ts` creates database-specific engines 22 | - **Abstract Engine**: `lambda/engine.abstract.ts` defines common interface 23 | - **Concrete Engines**: `lambda/engine.postgresql.ts` and `lambda/engine.mysql.ts` 24 | - **Main Handler**: `lambda/handler.ts` orchestrates CloudFormation custom resource operations 25 | 26 | The Lambda handler supports both PostgreSQL and MySQL databases, auto-detecting the engine type from the cluster/instance configuration. 27 | 28 | ## Development Commands 29 | 30 | ### Build and Test 31 | 32 | ```bash 33 | # After changing .projenrc.ts 34 | npx projen 35 | 36 | # Build the project 37 | npx projen build 38 | 39 | # Run all tests 40 | npx projen test 41 | 42 | # Run tests in watch mode 43 | npx projen test:watch 44 | 45 | # Run typecheck (instead of npx tsc --noEmit) 46 | npx projen typecheck 47 | 48 | # Run linting 49 | npx projen eslint 50 | 51 | # Build only the Lambda handler 52 | npx projen build:handler 53 | ``` 54 | 55 | ### Integration Testing 56 | 57 | ```bash 58 | # Deploy PostgreSQL serverless test stack 59 | npx projen integ:deploy:postgresql:serverless 60 | 61 | # Deploy MySQL serverless test stack 62 | npx projen integ:deploy:mysql:serverless 63 | 64 | # Destroy test stack 65 | npx projen integ:destroy:serverless 66 | ``` 67 | 68 | ## Testing Setup 69 | 70 | - **Framework**: Jest with TypeScript support 71 | - **Test Files**: Located in `test/` and `lambda/` directories 72 | - **Configuration**: See `jestOptions` in `.projenrc.ts`. 73 | - **Coverage**: Enabled with multiple reporters (json, lcov, clover, cobertura, text) 74 | 75 | ## Database Engine Support 76 | 77 | ### PostgreSQL 78 | 79 | - Uses `pg` library for connections 80 | - Supports schemas, roles, and databases 81 | - IAM authentication supported 82 | 83 | ### MySQL 84 | 85 | - Uses `mysql2` library for connections 86 | - Creates users with '%' host for VPC compatibility 87 | - IAM authentication supported 88 | 89 | ## Build Process 90 | 91 | The project uses projen for build management. Do not change files 92 | managed by projen, but instead change `.projenrc.ts`. After changing 93 | `.projenrc.ts` run `npx projen`. 94 | 95 | In particular: 96 | 97 | 1. Do not change `package.json`. 98 | 2. Do not change any of the github workflow files. 99 | 100 | Projen tasks: 101 | 102 | 1. 
`pre-compile`: Transpiles Lambda handler using esbuild 103 | 2. `compile`: TypeScript compilation 104 | 3. `post-compile`: Copies handler to lib directory 105 | 4. `eslint`: Runs linter 106 | 5. `typecheck`: Runs typecheck 107 | 108 | ## SSL/TLS Configuration 109 | 110 | - SSL is enabled by default 111 | - Global RDS certificate bundle is automatically downloaded during build 112 | - Can be disabled via `ssl: false` in Provider props 113 | 114 | ## Code style 115 | 116 | - Avoid any. 117 | -------------------------------------------------------------------------------- /.projen/deps.json: -------------------------------------------------------------------------------- 1 | { 2 | "dependencies": [ 3 | { 4 | "name": "@aws-sdk/client-secrets-manager", 5 | "type": "build" 6 | }, 7 | { 8 | "name": "@aws-sdk/client-ssm", 9 | "type": "build" 10 | }, 11 | { 12 | "name": "@aws-sdk/dsql-signer", 13 | "type": "build" 14 | }, 15 | { 16 | "name": "@types/jest", 17 | "version": "~30", 18 | "type": "build" 19 | }, 20 | { 21 | "name": "@types/node", 22 | "version": "^22", 23 | "type": "build" 24 | }, 25 | { 26 | "name": "@types/pg", 27 | "version": "^8.11.11", 28 | "type": "build" 29 | }, 30 | { 31 | "name": "@typescript-eslint/eslint-plugin", 32 | "version": "^8", 33 | "type": "build" 34 | }, 35 | { 36 | "name": "@typescript-eslint/parser", 37 | "version": "^8", 38 | "type": "build" 39 | }, 40 | { 41 | "name": "commit-and-tag-version", 42 | "version": "^12", 43 | "type": "build" 44 | }, 45 | { 46 | "name": "esbuild", 47 | "type": "build" 48 | }, 49 | { 50 | "name": "eslint-config-prettier", 51 | "type": "build" 52 | }, 53 | { 54 | "name": "eslint-import-resolver-typescript", 55 | "type": "build" 56 | }, 57 | { 58 | "name": "eslint-plugin-import", 59 | "type": "build" 60 | }, 61 | { 62 | "name": "eslint-plugin-prettier", 63 | "type": "build" 64 | }, 65 | { 66 | "name": "eslint", 67 | "version": "^9", 68 | "type": "build" 69 | }, 70 | { 71 | "name": "exponential-backoff", 72 | "type": "build" 73 | }, 74 | { 75 | "name": "jest-junit", 76 | "version": "^16", 77 | "type": "build" 78 | }, 79 | { 80 | "name": "jest", 81 | "version": "~30", 82 | "type": "build" 83 | }, 84 | { 85 | "name": "jsii-diff", 86 | "type": "build" 87 | }, 88 | { 89 | "name": "jsii-pacmak", 90 | "type": "build" 91 | }, 92 | { 93 | "name": "jsii-rosetta", 94 | "version": "~5.9.0", 95 | "type": "build" 96 | }, 97 | { 98 | "name": "jsii", 99 | "version": "~5.9.0", 100 | "type": "build" 101 | }, 102 | { 103 | "name": "mysql2", 104 | "type": "build" 105 | }, 106 | { 107 | "name": "node-pg-format", 108 | "type": "build" 109 | }, 110 | { 111 | "name": "pg", 112 | "version": "^8.13.3", 113 | "type": "build" 114 | }, 115 | { 116 | "name": "prettier", 117 | "type": "build" 118 | }, 119 | { 120 | "name": "projen", 121 | "type": "build" 122 | }, 123 | { 124 | "name": "source-map-support", 125 | "type": "build" 126 | }, 127 | { 128 | "name": "testcontainers", 129 | "version": "11", 130 | "type": "build" 131 | }, 132 | { 133 | "name": "ts-jest", 134 | "version": "^29", 135 | "type": "build" 136 | }, 137 | { 138 | "name": "ts-node", 139 | "type": "build" 140 | }, 141 | { 142 | "name": "typescript", 143 | "version": "~5.9", 144 | "type": "build" 145 | }, 146 | { 147 | "name": "@types/aws-lambda", 148 | "type": "bundled" 149 | }, 150 | { 151 | "name": "aws-cdk-lib", 152 | "version": "^2.214.0", 153 | "type": "peer" 154 | }, 155 | { 156 | "name": "constructs", 157 | "version": "^10.4.2", 158 | "type": "peer" 159 | }, 160 | { 161 | "name": 
"@types/aws-lambda", 162 | "type": "runtime" 163 | } 164 | ], 165 | "//": "~~ Generated by projen. To modify, edit .projenrc.ts and run \"npx projen\"." 166 | } 167 | -------------------------------------------------------------------------------- /test/dsql-stack.ts: -------------------------------------------------------------------------------- 1 | import { Duration, Stack, StackProps } from "aws-cdk-lib" 2 | import * as dsql from "aws-cdk-lib/aws-dsql" 3 | import * as iam from "aws-cdk-lib/aws-iam" 4 | import { Runtime } from "aws-cdk-lib/aws-lambda" 5 | import * as lambda from "aws-cdk-lib/aws-lambda-nodejs" 6 | import { LogGroup, RetentionDays } from "aws-cdk-lib/aws-logs" 7 | import { Construct } from "constructs" 8 | import { Provider, Role, Schema, Sql, IamGrant } from "./../src/index" 9 | 10 | export interface TestDsqlStackProps extends StackProps { 11 | /** 12 | * Print SQL statements being executed. 13 | * 14 | * @default true 15 | */ 16 | logger?: boolean 17 | } 18 | 19 | export class TestDsqlStack extends Stack { 20 | constructor(scope: Construct, id: string, props: TestDsqlStackProps) { 21 | super(scope, id, props) 22 | 23 | // Create DSQL cluster - no VPC needed as it uses public endpoint 24 | const dsqlCluster = new dsql.CfnCluster(this, "DsqlCluster", { 25 | deletionProtectionEnabled: false, 26 | }) 27 | 28 | const provider = new Provider(this, "Provider", { 29 | cluster: dsqlCluster, 30 | functionProps: { 31 | logGroup: new LogGroup(this, "ProviderLogGroup", { 32 | retention: RetentionDays.ONE_WEEK, 33 | }), 34 | timeout: Duration.seconds(30), 35 | }, 36 | logger: props.logger, 37 | }) 38 | 39 | const role = new Role(this, "Role", { 40 | provider: provider, 41 | roleName: "testrole", 42 | }) 43 | 44 | const schema = new Schema(this, "Schema", { 45 | provider: provider, 46 | schemaName: "myschema", 47 | role, 48 | }) 49 | 50 | // DSQL doesn't support DDL and DML in the same transaction 51 | // Create table first (DDL) 52 | const createTableSql = new Sql(this, "CreateTable", { 53 | provider: provider, 54 | statement: ` 55 | create table if not exists myschema.test_table ( 56 | id uuid primary key default gen_random_uuid(), 57 | name varchar(100) not null, 58 | created_at timestamp default current_timestamp 59 | ); 60 | `, 61 | rollback: ` 62 | drop table if exists myschema.test_table; 63 | `, 64 | }) 65 | createTableSql.node.addDependency(schema) 66 | 67 | const grantPermissionsSql = new Sql(this, "GrantTablePermission", { 68 | provider: provider, 69 | statement: ` 70 | grant select on myschema.test_table to testrole; 71 | `, 72 | rollback: ` 73 | drop table if exists myschema.test_table; 74 | `, 75 | }) 76 | grantPermissionsSql.node.addDependency(createTableSql) 77 | 78 | // Then insert data (DML) - depends on table creation 79 | const insertDataSql = new Sql(this, "InsertData", { 80 | provider: provider, 81 | statement: ` 82 | insert into myschema.test_table (name) values ('test_data'); 83 | `, 84 | }) 85 | insertDataSql.node.addDependency(createTableSql) 86 | 87 | // Create a Lambda function that queries the DSQL table using our role 88 | const clusterId = dsqlCluster.attrIdentifier 89 | const region = Stack.of(this).region 90 | const dsqlEndpoint = `${clusterId}.dsql.${region}.on.aws` 91 | 92 | const queryLambda = new lambda.NodejsFunction(this, "QueryLambda", { 93 | runtime: Runtime.NODEJS_22_X, 94 | handler: "handler", 95 | entry: "test/query-lambda.ts", 96 | environment: { 97 | PGHOST: dsqlEndpoint, 98 | PGDATABASE: "postgres", 99 | PGUSER: role.roleName, 100 | }, 
101 | timeout: Duration.seconds(30), 102 | initialPolicy: [ 103 | new iam.PolicyStatement({ 104 | effect: iam.Effect.ALLOW, 105 | actions: ["dsql:DbConnect"], 106 | resources: [ 107 | `arn:aws:dsql:${region}:${Stack.of(this).account}:cluster/${clusterId}`, 108 | ], 109 | }), 110 | ], 111 | }) 112 | 113 | // Grant the Lambda's execution role access to the DSQL role 114 | new IamGrant(this, "LambdaIamGrant", { 115 | provider: provider, 116 | roleName: role.roleName, 117 | resourceArn: queryLambda.role!.roleArn, 118 | }) 119 | } 120 | } 121 | -------------------------------------------------------------------------------- /test/instance2-stack.ts: -------------------------------------------------------------------------------- 1 | import { Fn, RemovalPolicy, Stack, StackProps } from "aws-cdk-lib" 2 | import * as ec2 from "aws-cdk-lib/aws-ec2" 3 | import { LogGroup, RetentionDays } from "aws-cdk-lib/aws-logs" 4 | import * as rds from "aws-cdk-lib/aws-rds" 5 | import * as secrets from "aws-cdk-lib/aws-secretsmanager" 6 | import { Construct } from "constructs" 7 | import { Vpc } from "./vpc" 8 | import { Provider, Database, Role, Schema, Sql } from "../src/index" 9 | 10 | export class TestInstanceStack extends Stack { 11 | constructor(scope: Construct, id: string, props: StackProps) { 12 | super(scope, id, props) 13 | 14 | const vpc = new Vpc(this, "Vpc") 15 | 16 | const instance = new rds.DatabaseInstance(this, "Instance", { 17 | vpc: vpc.vpc, 18 | vpcSubnets: { 19 | subnetType: ec2.SubnetType.PRIVATE_ISOLATED, 20 | }, 21 | engine: rds.DatabaseInstanceEngine.postgres({ 22 | version: rds.PostgresEngineVersion.VER_13_4, 23 | }), 24 | databaseName: "example", 25 | credentials: rds.Credentials.fromGeneratedSecret("pgroot"), 26 | instanceType: ec2.InstanceType.of( 27 | ec2.InstanceClass.BURSTABLE3, 28 | ec2.InstanceSize.MICRO 29 | ), 30 | removalPolicy: RemovalPolicy.DESTROY, 31 | }) 32 | 33 | const provider = new Provider(this, "Provider", { 34 | vpc: vpc.vpc, 35 | cluster: instance, 36 | secret: instance.secret!, 37 | functionProps: { 38 | logGroup: new LogGroup(this, "loggroup", { 39 | retention: RetentionDays.ONE_WEEK, 40 | logGroupName: "/aws/lambda/provider", 41 | }), 42 | allowPublicSubnet: true, 43 | }, 44 | }) 45 | Database.fromDatabaseName(this, "DefaultDatabase", "example") 46 | 47 | new Schema(this, "Schema", { 48 | provider: provider, 49 | schemaName: "myschema", 50 | }) 51 | const role = new Role(this, "Role", { 52 | provider: provider, 53 | roleName: "myrole", 54 | databaseName: "mydb", 55 | }) 56 | const database = new Database(this, "Database", { 57 | provider: provider, 58 | databaseName: "mydb", 59 | owner: role, 60 | }) 61 | new Sql(this, "Sql", { 62 | provider: provider, 63 | database: database, 64 | statement: ` 65 | create table if not exists t (i int); 66 | grant select on t to myrole; 67 | `, 68 | rollback: ` 69 | DO $$BEGIN 70 | IF EXISTS (select from pg_database WHERE datname = 't') THEN 71 | IF EXISTS (select from pg_catalog.pg_roles WHERE rolname = 'myrole') THEN 72 | revoke select on database t from myrole; 73 | END IF; 74 | drop table t; 75 | END IF; 76 | END$$; 77 | `, 78 | }) 79 | } 80 | } 81 | 82 | export class ImportedInstanceStack extends Stack { 83 | constructor(scope: Construct, id: string, props: StackProps) { 84 | super(scope, id, props) 85 | 86 | const vpc = new Vpc(this, "Vpc") 87 | 88 | const secret = secrets.Secret.fromSecretCompleteArn( 89 | this, 90 | "Secret", 91 | Fn.importValue("secret-arn") 92 | ) 93 | 94 | const instance =
rds.DatabaseInstance.fromDatabaseInstanceAttributes( 95 | this, 96 | "DatabaseInstance", 97 | { 98 | instanceIdentifier: Fn.importValue("instance-identifier"), 99 | instanceEndpointAddress: Fn.importValue("instance-endpoint"), 100 | engine: rds.DatabaseInstanceEngine.postgres({ 101 | version: rds.PostgresEngineVersion.VER_13_4, 102 | }), 103 | port: 5432, // absence of port in import causes an exception 104 | securityGroups: [ 105 | ec2.SecurityGroup.fromSecurityGroupId( 106 | this, 107 | "RdsSecurityGroup", 108 | "sg-00bbd66b014133c45" 109 | ), 110 | ], 111 | } 112 | ) 113 | 114 | const provider = new Provider(this, "Provider", { 115 | vpc: vpc.vpc, 116 | cluster: instance, 117 | secret: secret, 118 | }) 119 | Database.fromDatabaseName(this, "DefaultDatabase", "example") 120 | 121 | new Role(this, "Role", { 122 | provider: provider, 123 | roleName: "myrole", 124 | databaseName: "mydb", 125 | }) 126 | } 127 | } 128 | -------------------------------------------------------------------------------- /test/__snapshots__/custom-resource-properties.test.ts.snap: -------------------------------------------------------------------------------- 1 | // Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing 2 | 3 | exports[`Custom Resource Properties Database resource properties database properties remain consistent across builds 1`] = ` 4 | { 5 | "Resource": "database", 6 | "ResourceId": "test_db", 7 | "SecretArn": { 8 | "Ref": "ClusterSecretAttachment769E6258", 9 | }, 10 | "ServiceToken": { 11 | "Fn::GetAtt": [ 12 | "ProviderRdsSqlframeworkonEventD3D137B7", 13 | "Arn", 14 | ], 15 | }, 16 | } 17 | `; 18 | 19 | exports[`Custom Resource Properties Property validation across all resource types comprehensive property validation 1`] = ` 20 | { 21 | "database": [ 22 | { 23 | "key": "TestDatabase", 24 | "properties": { 25 | "Resource": "database", 26 | "ResourceId": "test_db", 27 | "SecretArn": { 28 | "Ref": "ClusterSecretAttachment769E6258", 29 | }, 30 | "ServiceToken": { 31 | "Fn::GetAtt": [ 32 | "ProviderRdsSqlframeworkonEventD3D137B7", 33 | "Arn", 34 | ], 35 | }, 36 | }, 37 | }, 38 | ], 39 | "role": [ 40 | { 41 | "key": "TestRolePostgresRole3AD07047", 42 | "properties": { 43 | "DatabaseName": "test_db", 44 | "EnableIamAuth": false, 45 | "PasswordArn": { 46 | "Ref": "TestRoleSecret0CBCDF24", 47 | }, 48 | "Resource": "role", 49 | "ResourceId": "test_role", 50 | "SecretArn": { 51 | "Ref": "ClusterSecretAttachment769E6258", 52 | }, 53 | "ServiceToken": { 54 | "Fn::GetAtt": [ 55 | "ProviderRdsSqlframeworkonEventD3D137B7", 56 | "Arn", 57 | ], 58 | }, 59 | }, 60 | }, 61 | ], 62 | "schema": [ 63 | { 64 | "key": "TestSchema", 65 | "properties": { 66 | "DatabaseName": "test_db", 67 | "Resource": "schema", 68 | "ResourceId": "test_schema", 69 | "RoleName": "test_role", 70 | "SecretArn": { 71 | "Ref": "ClusterSecretAttachment769E6258", 72 | }, 73 | "ServiceToken": { 74 | "Fn::GetAtt": [ 75 | "ProviderRdsSqlframeworkonEventD3D137B7", 76 | "Arn", 77 | ], 78 | }, 79 | }, 80 | }, 81 | ], 82 | "sql": [ 83 | { 84 | "key": "TestSql", 85 | "properties": { 86 | "DatabaseName": "test_db", 87 | "Resource": "sql", 88 | "Rollback": "DROP TABLE test;", 89 | "SecretArn": { 90 | "Ref": "ClusterSecretAttachment769E6258", 91 | }, 92 | "ServiceToken": { 93 | "Fn::GetAtt": [ 94 | "ProviderRdsSqlframeworkonEventD3D137B7", 95 | "Arn", 96 | ], 97 | }, 98 | "Statement": "CREATE TABLE test (id INTEGER);", 99 | }, 100 | }, 101 | ], 102 | } 103 | `; 104 | 105 | exports[`Custom Resource Properties Role resource properties role properties 
remain consistent across builds 1`] = ` 106 | { 107 | "DatabaseName": "test_db", 108 | "EnableIamAuth": false, 109 | "PasswordArn": { 110 | "Ref": "TestRoleSecret0CBCDF24", 111 | }, 112 | "Resource": "role", 113 | "ResourceId": "test_role", 114 | "SecretArn": { 115 | "Ref": "ClusterSecretAttachment769E6258", 116 | }, 117 | "ServiceToken": { 118 | "Fn::GetAtt": [ 119 | "ProviderRdsSqlframeworkonEventD3D137B7", 120 | "Arn", 121 | ], 122 | }, 123 | } 124 | `; 125 | 126 | exports[`Custom Resource Properties SQL resource properties sql properties remain consistent across builds 1`] = ` 127 | { 128 | "Resource": "sql", 129 | "SecretArn": { 130 | "Ref": "ClusterSecretAttachment769E6258", 131 | }, 132 | "ServiceToken": { 133 | "Fn::GetAtt": [ 134 | "ProviderRdsSqlframeworkonEventD3D137B7", 135 | "Arn", 136 | ], 137 | }, 138 | "Statement": "CREATE TABLE test (id INTEGER);", 139 | } 140 | `; 141 | 142 | exports[`Custom Resource Properties Schema resource properties schema properties remain consistent across builds 1`] = ` 143 | { 144 | "Resource": "schema", 145 | "ResourceId": "test_schema", 146 | "SecretArn": { 147 | "Ref": "ClusterSecretAttachment769E6258", 148 | }, 149 | "ServiceToken": { 150 | "Fn::GetAtt": [ 151 | "ProviderRdsSqlframeworkonEventD3D137B7", 152 | "Arn", 153 | ], 154 | }, 155 | } 156 | `; 157 | -------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | # ~~ Generated by projen. To modify, edit .projenrc.ts and run "npx projen". 2 | 3 | name: build 4 | on: 5 | pull_request: {} 6 | workflow_dispatch: {} 7 | jobs: 8 | build: 9 | runs-on: ubuntu-latest 10 | permissions: 11 | contents: write 12 | outputs: 13 | self_mutation_happened: ${{ steps.self_mutation.outputs.self_mutation_happened }} 14 | env: 15 | CI: "true" 16 | steps: 17 | - name: Checkout 18 | uses: actions/checkout@v5 19 | with: 20 | ref: ${{ github.event.pull_request.head.ref }} 21 | repository: ${{ github.event.pull_request.head.repo.full_name }} 22 | - name: Setup Node.js 23 | uses: actions/setup-node@v5 24 | with: 25 | node-version: 24.x 26 | - name: Install dependencies 27 | run: npm install 28 | - name: build 29 | run: npx projen build 30 | - name: Find mutations 31 | id: self_mutation 32 | run: |- 33 | git add . 34 | git diff --staged --patch --exit-code > repo.patch || echo "self_mutation_happened=true" >> $GITHUB_OUTPUT 35 | shell: bash 36 | working-directory: ./ 37 | - name: Upload patch 38 | if: steps.self_mutation.outputs.self_mutation_happened 39 | uses: actions/upload-artifact@v4.6.2 40 | with: 41 | name: repo.patch 42 | path: repo.patch 43 | overwrite: true 44 | - name: Fail build on mutation 45 | if: steps.self_mutation.outputs.self_mutation_happened 46 | run: |- 47 | echo "::error::Files were changed during build (see build log). If this was triggered from a fork, you will need to update your branch." 48 | cat repo.patch 49 | exit 1 50 | - name: Backup artifact permissions 51 | run: cd dist && getfacl -R . 
> permissions-backup.acl 52 | continue-on-error: true 53 | - name: Upload artifact 54 | uses: actions/upload-artifact@v4.6.2 55 | with: 56 | name: build-artifact 57 | path: dist 58 | overwrite: true 59 | self-mutation: 60 | needs: build 61 | runs-on: ubuntu-latest 62 | permissions: 63 | contents: write 64 | if: always() && needs.build.outputs.self_mutation_happened && !(github.event.pull_request.head.repo.full_name != github.repository) 65 | steps: 66 | - name: Checkout 67 | uses: actions/checkout@v5 68 | with: 69 | token: ${{ secrets.PROJEN_GITHUB_TOKEN }} 70 | ref: ${{ github.event.pull_request.head.ref }} 71 | repository: ${{ github.event.pull_request.head.repo.full_name }} 72 | - name: Download patch 73 | uses: actions/download-artifact@v5 74 | with: 75 | name: repo.patch 76 | path: ${{ runner.temp }} 77 | - name: Apply patch 78 | run: '[ -s ${{ runner.temp }}/repo.patch ] && git apply ${{ runner.temp }}/repo.patch || echo "Empty patch. Skipping."' 79 | - name: Set git identity 80 | run: |- 81 | git config user.name "github-actions[bot]" 82 | git config user.email "41898282+github-actions[bot]@users.noreply.github.com" 83 | - name: Push changes 84 | env: 85 | PULL_REQUEST_REF: ${{ github.event.pull_request.head.ref }} 86 | run: |- 87 | git add . 88 | git commit -s -m "chore: self mutation" 89 | git push origin "HEAD:$PULL_REQUEST_REF" 90 | package-js: 91 | needs: build 92 | runs-on: ubuntu-latest 93 | permissions: 94 | contents: read 95 | if: ${{ !needs.build.outputs.self_mutation_happened }} 96 | steps: 97 | - uses: actions/setup-node@v5 98 | with: 99 | node-version: 24.x 100 | - name: Download build artifacts 101 | uses: actions/download-artifact@v5 102 | with: 103 | name: build-artifact 104 | path: dist 105 | - name: Restore build artifact permissions 106 | run: cd dist && setfacl --restore=permissions-backup.acl 107 | continue-on-error: true 108 | - name: Checkout 109 | uses: actions/checkout@v5 110 | with: 111 | ref: ${{ github.event.pull_request.head.ref }} 112 | repository: ${{ github.event.pull_request.head.repo.full_name }} 113 | path: .repo 114 | - name: Install Dependencies 115 | run: cd .repo && npm ci 116 | - name: Extract build artifact 117 | run: tar --strip-components=1 -xzvf dist/js/*.tgz -C .repo 118 | - name: Move build artifact out of the way 119 | run: mv dist dist.old 120 | - name: Create js artifact 121 | run: cd .repo && npx projen package:js 122 | - name: Collect js artifact 123 | run: mv .repo/dist dist 124 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | # ~~ Generated by projen. To modify, edit .projenrc.ts and run "npx projen". 
2 | 3 | name: release 4 | on: 5 | push: 6 | branches: 7 | - main 8 | workflow_dispatch: {} 9 | concurrency: 10 | group: ${{ github.workflow }} 11 | cancel-in-progress: false 12 | jobs: 13 | release: 14 | runs-on: ubuntu-latest 15 | permissions: 16 | contents: write 17 | outputs: 18 | latest_commit: ${{ steps.git_remote.outputs.latest_commit }} 19 | tag_exists: ${{ steps.check_tag_exists.outputs.exists }} 20 | env: 21 | CI: "true" 22 | steps: 23 | - name: Checkout 24 | uses: actions/checkout@v5 25 | with: 26 | fetch-depth: 0 27 | - name: Set git identity 28 | run: |- 29 | git config user.name "github-actions[bot]" 30 | git config user.email "41898282+github-actions[bot]@users.noreply.github.com" 31 | - name: Setup Node.js 32 | uses: actions/setup-node@v5 33 | with: 34 | node-version: 24.x 35 | - name: Install dependencies 36 | run: npm ci 37 | - name: release 38 | run: npx projen release 39 | - name: Check if version has already been tagged 40 | id: check_tag_exists 41 | run: |- 42 | TAG=$(cat dist/releasetag.txt) 43 | ([ ! -z "$TAG" ] && git ls-remote -q --exit-code --tags origin $TAG && (echo "exists=true" >> $GITHUB_OUTPUT)) || (echo "exists=false" >> $GITHUB_OUTPUT) 44 | cat $GITHUB_OUTPUT 45 | - name: Check for new commits 46 | id: git_remote 47 | run: |- 48 | echo "latest_commit=$(git ls-remote origin -h ${{ github.ref }} | cut -f1)" >> $GITHUB_OUTPUT 49 | cat $GITHUB_OUTPUT 50 | shell: bash 51 | - name: Backup artifact permissions 52 | if: ${{ steps.git_remote.outputs.latest_commit == github.sha }} 53 | run: cd dist && getfacl -R . > permissions-backup.acl 54 | continue-on-error: true 55 | - name: Upload artifact 56 | if: ${{ steps.git_remote.outputs.latest_commit == github.sha }} 57 | uses: actions/upload-artifact@v4.6.2 58 | with: 59 | name: build-artifact 60 | path: dist 61 | overwrite: true 62 | release_github: 63 | name: Publish to GitHub Releases 64 | needs: 65 | - release 66 | - release_npm 67 | runs-on: ubuntu-latest 68 | permissions: 69 | contents: write 70 | if: needs.release.outputs.tag_exists != 'true' && needs.release.outputs.latest_commit == github.sha 71 | steps: 72 | - uses: actions/setup-node@v5 73 | with: 74 | node-version: 24.x 75 | - name: Download build artifacts 76 | uses: actions/download-artifact@v5 77 | with: 78 | name: build-artifact 79 | path: dist 80 | - name: Restore build artifact permissions 81 | run: cd dist && setfacl --restore=permissions-backup.acl 82 | continue-on-error: true 83 | - name: Release 84 | env: 85 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 86 | run: errout=$(mktemp); gh release create $(cat dist/releasetag.txt) -R $GITHUB_REPOSITORY -F dist/changelog.md -t $(cat dist/releasetag.txt) --target $GITHUB_SHA 2> $errout && true; exitcode=$?; if [ $exitcode -ne 0 ] && ! 
grep -q "Release.tag_name already exists" $errout; then cat $errout; exit $exitcode; fi 87 | release_npm: 88 | name: Publish to npm 89 | needs: release 90 | runs-on: ubuntu-latest 91 | permissions: 92 | id-token: write 93 | contents: read 94 | if: needs.release.outputs.tag_exists != 'true' && needs.release.outputs.latest_commit == github.sha 95 | steps: 96 | - uses: actions/setup-node@v5 97 | with: 98 | node-version: 24.x 99 | - name: Download build artifacts 100 | uses: actions/download-artifact@v5 101 | with: 102 | name: build-artifact 103 | path: dist 104 | - name: Restore build artifact permissions 105 | run: cd dist && setfacl --restore=permissions-backup.acl 106 | continue-on-error: true 107 | - name: Checkout 108 | uses: actions/checkout@v5 109 | with: 110 | path: .repo 111 | - name: Install Dependencies 112 | run: cd .repo && npm ci 113 | - name: Extract build artifact 114 | run: tar --strip-components=1 -xzvf dist/js/*.tgz -C .repo 115 | - name: Move build artifact out of the way 116 | run: mv dist dist.old 117 | - name: Create js artifact 118 | run: cd .repo && npx projen package:js 119 | - name: Collect js artifact 120 | run: mv .repo/dist dist 121 | - name: Release 122 | env: 123 | NPM_DIST_TAG: latest 124 | NPM_REGISTRY: registry.npmjs.org 125 | NPM_CONFIG_PROVENANCE: "true" 126 | NPM_TRUSTED_PUBLISHER: "true" 127 | run: npx -p publib@latest publib-npm 128 | -------------------------------------------------------------------------------- /lambda/engine.postgresql.test.ts: -------------------------------------------------------------------------------- 1 | import { PostgresqlEngine } from "./engine.postgresql" 2 | 3 | describe("PostgreSQL Engine", () => { 4 | let engine: PostgresqlEngine 5 | 6 | beforeEach(() => { 7 | engine = new PostgresqlEngine() 8 | }) 9 | 10 | describe("Role", () => { 11 | it("should generate SQL to revoke privileges on old database when database is changed", async () => { 12 | // Mock getPassword implementation 13 | jest.spyOn(engine as any, "getPassword").mockResolvedValue("test-password") 14 | 15 | const oldProps = { 16 | DatabaseName: "olddb", 17 | PasswordArn: "arn:aws:secretsmanager:region:account:secret:name", 18 | } 19 | 20 | const newProps = { 21 | DatabaseName: "newdb", 22 | PasswordArn: "arn:aws:secretsmanager:region:account:secret:name", 23 | } 24 | 25 | const sql = await engine.updateRole("testrole", "testrole", newProps, oldProps) 26 | 27 | expect(Array.isArray(sql)).toBe(true) 28 | 29 | // Check for transaction start 30 | expect(sql[0]).toBe("start transaction") 31 | 32 | // Find the revoke statement for the old database 33 | const revokeStatement = sql.find( 34 | (statement) => 35 | statement.includes("revoke connect on database") && statement.includes("olddb") 36 | ) 37 | expect(revokeStatement).toBeDefined() 38 | 39 | // Check for grant statement for the new database 40 | const grantStatement = sql.find( 41 | (statement) => 42 | statement.includes("grant connect on database") && statement.includes("newdb") 43 | ) 44 | expect(grantStatement).toBeDefined() 45 | 46 | // Check for transaction commit 47 | expect(sql[sql.length - 1]).toBe("commit") 48 | }) 49 | 50 | describe("IAM Authentication", () => { 51 | it("should create role with IAM authentication", async () => { 52 | const props = { 53 | EnableIamAuth: true, 54 | DatabaseName: "testdb", 55 | } 56 | 57 | const sql = await engine.createRole("iamrole", props) 58 | 59 | expect(Array.isArray(sql)).toBe(true) 60 | expect(sql[0]).toBe("start transaction") 61 | expect(sql).toContain("create 
role iamrole with login") 62 | expect(sql).toContain("grant rds_iam to iamrole") 63 | expect(sql[sql.length - 1]).toBe("commit") 64 | }) 65 | 66 | it("should create role without IAM authentication", async () => { 67 | jest.spyOn(engine as any, "getPassword").mockResolvedValue("test-password") 68 | 69 | const props = { 70 | EnableIamAuth: false, 71 | PasswordArn: "arn:aws:secretsmanager:region:account:secret:name", 72 | DatabaseName: "testdb", 73 | } 74 | 75 | const sql = await engine.createRole("passwordrole", props) 76 | 77 | expect(Array.isArray(sql)).toBe(true) 78 | expect(sql[0]).toBe("start transaction") 79 | expect(sql).toContain( 80 | "create role passwordrole with login password 'test-password'" 81 | ) 82 | expect(sql[sql.length - 1]).toBe("commit") 83 | }) 84 | 85 | it("should switch from password to IAM authentication", async () => { 86 | const oldProps = { 87 | EnableIamAuth: false, 88 | PasswordArn: "arn:aws:secretsmanager:region:account:secret:name", 89 | } 90 | 91 | const newProps = { 92 | EnableIamAuth: true, 93 | } 94 | 95 | const sql = await engine.updateRole( 96 | "switchrole", 97 | "switchrole", 98 | newProps, 99 | oldProps 100 | ) 101 | 102 | expect(Array.isArray(sql)).toBe(true) 103 | expect(sql[0]).toBe("start transaction") 104 | expect(sql).toContain("grant rds_iam to switchrole") 105 | expect(sql[sql.length - 1]).toBe("commit") 106 | }) 107 | 108 | it("should switch from IAM to password authentication", async () => { 109 | jest.spyOn(engine as any, "getPassword").mockResolvedValue("new-password") 110 | 111 | const oldProps = { 112 | EnableIamAuth: true, 113 | } 114 | 115 | const newProps = { 116 | EnableIamAuth: false, 117 | PasswordArn: "arn:aws:secretsmanager:region:account:secret:name", 118 | } 119 | 120 | const sql = await engine.updateRole( 121 | "switchrole", 122 | "switchrole", 123 | newProps, 124 | oldProps 125 | ) 126 | 127 | expect(Array.isArray(sql)).toBe(true) 128 | expect(sql[0]).toBe("start transaction") 129 | expect(sql).toContain("revoke rds_iam from switchrole") 130 | expect(sql).toContain("alter role switchrole with password 'new-password'") 131 | expect(sql[sql.length - 1]).toBe("commit") 132 | }) 133 | }) 134 | }) 135 | 136 | // Add basic tests for other PostgreSQL functionality 137 | describe("Database", () => { 138 | it("should generate correct SQL for creating a database", () => { 139 | const sql = engine.createDatabase("testdb", {}) 140 | expect(sql).toContain("create database testdb") 141 | }) 142 | }) 143 | 144 | describe("SQL", () => { 145 | it("should pass through SQL statements", () => { 146 | const statement = "SELECT * FROM users" 147 | const sql = engine.createSql("test", { Statement: statement }) 148 | expect(sql).toBe(statement) 149 | }) 150 | }) 151 | }) 152 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cdk-rds-sql", 3 | "description": "A CDK construct that allows creating roles or users and databases on Aurora Serverless PostgreSQL or MySQL/MariaDB clusters, as well as AWS DSQL clusters.", 4 | "repository": { 5 | "type": "git", 6 | "url": "https://github.com/berenddeboer/cdk-rds-sql.git" 7 | }, 8 | "scripts": { 9 | "build": "npx projen build", 10 | "build:handler": "npx projen build:handler", 11 | "bump": "npx projen bump", 12 | "clobber": "npx projen clobber", 13 | "compat": "npx projen compat", 14 | "compile": "npx projen compile", 15 | "copy:handler": "npx projen copy:handler", 
16 | "default": "npx projen default", 17 | "eject": "npx projen eject", 18 | "eslint": "npx projen eslint", 19 | "integ:deploy:dsql": "npx projen integ:deploy:dsql", 20 | "integ:deploy:mysql:serverless": "npx projen integ:deploy:mysql:serverless", 21 | "integ:deploy:postgresql:serverless": "npx projen integ:deploy:postgresql:serverless", 22 | "integ:destroy:dsql": "npx projen integ:destroy:dsql", 23 | "integ:destroy:serverless": "npx projen integ:destroy:serverless", 24 | "package": "npx projen package", 25 | "package-all": "npx projen package-all", 26 | "package:js": "npx projen package:js", 27 | "post-compile": "npx projen post-compile", 28 | "post-upgrade": "npx projen post-upgrade", 29 | "pre-compile": "npx projen pre-compile", 30 | "release": "npx projen release", 31 | "test": "npx projen test", 32 | "test:watch": "npx projen test:watch", 33 | "typecheck": "npx projen typecheck", 34 | "unbump": "npx projen unbump", 35 | "upgrade": "npx projen upgrade", 36 | "watch": "npx projen watch", 37 | "projen": "npx projen" 38 | }, 39 | "author": { 40 | "name": "Berend de Boer", 41 | "email": "berend@pobox.com", 42 | "organization": false 43 | }, 44 | "devDependencies": { 45 | "@aws-sdk/client-secrets-manager": "^3.953.0", 46 | "@aws-sdk/client-ssm": "^3.953.0", 47 | "@aws-sdk/dsql-signer": "^3.953.0", 48 | "@types/jest": "~30", 49 | "@types/node": "^22", 50 | "@types/pg": "^8.11.11", 51 | "@typescript-eslint/eslint-plugin": "^8", 52 | "@typescript-eslint/parser": "^8", 53 | "aws-cdk-lib": "2.214.0", 54 | "commit-and-tag-version": "^12", 55 | "constructs": "10.4.2", 56 | "esbuild": "0.27.2", 57 | "eslint": "^9", 58 | "eslint-config-prettier": "^8.10.2", 59 | "eslint-import-resolver-typescript": "^3.10.1", 60 | "eslint-plugin-import": "^2.32.0", 61 | "eslint-plugin-prettier": "^4.2.5", 62 | "exponential-backoff": "^3.1.3", 63 | "jest": "~30", 64 | "jest-junit": "^16", 65 | "jsii": "~5.9.0", 66 | "jsii-diff": "^1.121.0", 67 | "jsii-pacmak": "^1.121.0", 68 | "jsii-rosetta": "~5.9.0", 69 | "mysql2": "^3.16.0", 70 | "node-pg-format": "^1.3.5", 71 | "pg": "^8.13.3", 72 | "prettier": "^2.8.8", 73 | "projen": "^0.98.30", 74 | "source-map-support": "^0.5.21", 75 | "testcontainers": "11", 76 | "ts-jest": "^29", 77 | "ts-node": "^10.9.2", 78 | "typescript": "~5.9" 79 | }, 80 | "peerDependencies": { 81 | "aws-cdk-lib": "^2.214.0", 82 | "constructs": "^10.4.2" 83 | }, 84 | "dependencies": { 85 | "@types/aws-lambda": "^8.10.159" 86 | }, 87 | "bundledDependencies": [ 88 | "@types/aws-lambda" 89 | ], 90 | "keywords": [ 91 | "aurora", 92 | "aws", 93 | "aws-cdk", 94 | "cdk", 95 | "dsql", 96 | "mysql", 97 | "postgres", 98 | "rds" 99 | ], 100 | "engines": { 101 | "node": ">= 22.14" 102 | }, 103 | "main": "lib/index.js", 104 | "license": "Apache-2.0", 105 | "publishConfig": { 106 | "access": "public" 107 | }, 108 | "version": "0.0.0", 109 | "jest": { 110 | "coverageProvider": "v8", 111 | "testMatch": [ 112 | "/@(src|test|lambda)/**/*(*.)@(spec|test).ts", 113 | "/@(src|test)/**/*(*.)@(spec|test).ts?(x)", 114 | "/@(src|test)/**/__tests__/**/*.ts?(x)", 115 | "/@(projenrc)/**/*(*.)@(spec|test).ts?(x)", 116 | "/@(projenrc)/**/__tests__/**/*.ts?(x)" 117 | ], 118 | "testPathIgnorePatterns": [ 119 | "/node_modules/", 120 | "/cdk.out/", 121 | "/.github/", 122 | "/dist/" 123 | ], 124 | "maxConcurrency": 4, 125 | "maxWorkers": "75%", 126 | "clearMocks": true, 127 | "collectCoverage": true, 128 | "coverageReporters": [ 129 | "json", 130 | "lcov", 131 | "clover", 132 | "cobertura", 133 | "text" 134 | ], 135 | 
"coverageDirectory": "coverage", 136 | "coveragePathIgnorePatterns": [ 137 | "/node_modules/", 138 | "/cdk.out/", 139 | "/.github/", 140 | "/dist/" 141 | ], 142 | "watchPathIgnorePatterns": [ 143 | "/node_modules/" 144 | ], 145 | "reporters": [ 146 | "default", 147 | [ 148 | "jest-junit", 149 | { 150 | "outputDirectory": "test-reports" 151 | } 152 | ] 153 | ], 154 | "transform": { 155 | "^.+\\.[t]sx?$": [ 156 | "ts-jest", 157 | { 158 | "tsconfig": "tsconfig.dev.json" 159 | } 160 | ] 161 | } 162 | }, 163 | "types": "lib/index.d.ts", 164 | "stability": "stable", 165 | "jsii": { 166 | "outdir": "dist", 167 | "targets": {}, 168 | "tsc": { 169 | "outDir": "lib", 170 | "rootDir": "src" 171 | } 172 | }, 173 | "//": "~~ Generated by projen. To modify, edit .projenrc.ts and run \"npx projen\"." 174 | } 175 | -------------------------------------------------------------------------------- /.projenrc.ts: -------------------------------------------------------------------------------- 1 | import { awscdk, github } from "projen" 2 | import { NodePackageManager } from "projen/lib/javascript" 3 | 4 | const tmpDirectories = [ 5 | "cdk.context.json", 6 | ".idea/", 7 | "cdk.out/", 8 | ".envrc", 9 | ".env", 10 | "CONVENTIONS.md", 11 | "src/handler/handler.js", 12 | "*~", 13 | ".claude/hooks/.edited", 14 | ] 15 | 16 | const project = new awscdk.AwsCdkConstructLibrary({ 17 | author: "Berend de Boer", 18 | authorAddress: "berend@pobox.com", 19 | authorEmail: "berend@pobox.com", 20 | name: "cdk-rds-sql", 21 | description: 22 | "A CDK construct that allows creating roles or users and databases on Aurora Serverless PostgreSQL or MySQL/MariaDB clusters, as well as AWS DSQL clusters.", 23 | defaultReleaseBranch: "main", 24 | repositoryUrl: "https://github.com/berenddeboer/cdk-rds-sql.git", 25 | projenrcTs: true, 26 | packageManager: NodePackageManager.NPM, 27 | depsUpgrade: true, 28 | depsUpgradeOptions: { 29 | workflow: true, 30 | workflowOptions: { 31 | projenCredentials: github.GithubCredentials.fromPersonalAccessToken({ 32 | secret: "GITHUB_TOKEN", 33 | }), 34 | permissions: { 35 | contents: github.workflows.JobPermission.WRITE, 36 | pullRequests: github.workflows.JobPermission.WRITE, 37 | }, 38 | }, 39 | }, 40 | minNodeVersion: "22.14", 41 | githubOptions: { 42 | pullRequestLintOptions: { 43 | semanticTitleOptions: { 44 | types: ["feat", "fix", "chore", "test", "vendor"], 45 | }, 46 | }, 47 | }, 48 | jestOptions: { 49 | jestVersion: "~30", 50 | jestConfig: { 51 | testMatch: ["/@(src|test|lambda)/**/*(*.)@(spec|test).ts"], 52 | testPathIgnorePatterns: ["/node_modules/", "/cdk.out/", "/.github/", "/dist/"], 53 | maxConcurrency: 4, 54 | maxWorkers: "75%", 55 | }, 56 | }, 57 | typescriptVersion: "~5.9", 58 | constructsVersion: "10.4.2", 59 | cdkVersion: "2.214.0", 60 | jsiiVersion: "~5.9.0", 61 | tsconfigDev: { 62 | compilerOptions: { 63 | esModuleInterop: true, 64 | noUnusedLocals: true, 65 | target: "es2022", 66 | noImplicitOverride: true, 67 | noUncheckedIndexedAccess: true, 68 | }, 69 | include: ["lambda/**/*.ts"], 70 | }, 71 | eslint: true, 72 | eslintOptions: { 73 | dirs: ["src", "lambda"], 74 | prettier: true, 75 | }, 76 | gitignore: tmpDirectories, 77 | npmignore: [...tmpDirectories, "/lambda/"], 78 | docgen: false, 79 | workflowNodeVersion: "24.x", 80 | deps: ["@types/aws-lambda"], 81 | bundledDeps: ["@types/aws-lambda"], 82 | devDeps: [ 83 | "@aws-sdk/client-secrets-manager", 84 | "@aws-sdk/client-ssm", 85 | "@aws-sdk/dsql-signer", 86 | "@types/pg@^8.11.11", 87 | "esbuild", 88 | "exponential-backoff", 89 
| "mysql2", 90 | "node-pg-format", 91 | "pg@^8.13.3", 92 | "source-map-support", 93 | "testcontainers@11", 94 | ], 95 | keywords: ["aws", "aws-cdk", "rds", "aurora", "postgres", "mysql", "dsql"], 96 | minMajorVersion: 1, 97 | }) 98 | if (project.eslint) { 99 | project.eslint.addRules({ 100 | semi: ["off"], 101 | quotes: ["error", "double"], 102 | }) 103 | 104 | // Add an override for lambda directory to disable import/no-extraneous-dependencies 105 | project.eslint.addOverride({ 106 | files: ["lambda/*.ts"], 107 | rules: { 108 | "import/no-extraneous-dependencies": "off", 109 | }, 110 | }) 111 | } 112 | 113 | project.addTask("integ:deploy:postgresql:serverless", { 114 | description: "Deploy the Aurora Serverless V2 integration test stack", 115 | exec: "npx cdk deploy TestRdsSqlServerlessV2Stack --require-approval never", 116 | }) 117 | 118 | project.addTask("integ:deploy:mysql:serverless", { 119 | description: "Deploy the Aurora Serverless V2 integration test stack", 120 | exec: "npx cdk deploy TestRdsSqlServerlessV2Stack --context engine=mysql --require-approval never", 121 | }) 122 | 123 | project.addTask("integ:destroy:serverless", { 124 | description: "Destroy the Aurora Serverless V2 integration test stack", 125 | exec: "npx cdk destroy TestRdsSqlServerlessV2Stack --force", 126 | }) 127 | 128 | project.addTask("integ:deploy:dsql", { 129 | description: "Deploy the DSQL integration test stack", 130 | exec: "npx cdk deploy TestRdsSqlDsqlStack --require-approval never", 131 | }) 132 | 133 | project.addTask("integ:destroy:dsql", { 134 | description: "Destroy the DSQL integration test stack", 135 | exec: "npx cdk destroy TestRdsSqlDsqlStack --force", 136 | }) 137 | 138 | // Add build tasks for transpiling the Lambda handler 139 | project.addTask("build:handler", { 140 | description: "Transpile the Lambda handler to JavaScript", 141 | exec: "esbuild lambda/handler.ts --bundle --platform=node --target=node20 --external:aws-sdk --external:@aws-sdk/* --outfile=src/handler/handler.js", 142 | }) 143 | 144 | project.addTask("copy:handler", { 145 | description: "Copy transpiled handler into lib", 146 | exec: "cp src/handler/handler.js lib/handler/handler.js", 147 | }) 148 | 149 | project.addTask("typecheck", { 150 | description: "Typecheck typescript", 151 | exec: "npx tsc --project tsconfig.dev.json --noEmit", 152 | }) 153 | 154 | // Hook these tasks into the build process 155 | project.tasks.tryFind("pre-compile")?.spawn(project.tasks.tryFind("build:handler")!) 156 | project.tasks.tryFind("compile")?.spawn(project.tasks.tryFind("copy:handler")!) 
157 | 158 | // Override release workflow to remove NPM_TOKEN and add NPM_TRUSTED_PUBLISHER 159 | const releaseWorkflow = project.github?.tryFindWorkflow("release") 160 | if (releaseWorkflow?.file) { 161 | // Target the Release step's environment variables specifically 162 | releaseWorkflow.file.addOverride("jobs.release_npm.steps.9.env", { 163 | NPM_TRUSTED_PUBLISHER: "true", 164 | NPM_TOKEN: undefined, 165 | }) 166 | } 167 | 168 | project.synth() 169 | -------------------------------------------------------------------------------- /test/serverlessv2-stack.ts: -------------------------------------------------------------------------------- 1 | import { Duration, Fn, RemovalPolicy, Stack, StackProps } from "aws-cdk-lib" 2 | import * as ec2 from "aws-cdk-lib/aws-ec2" 3 | import { LogGroup, RetentionDays } from "aws-cdk-lib/aws-logs" 4 | import * as rds from "aws-cdk-lib/aws-rds" 5 | import * as secrets from "aws-cdk-lib/aws-secretsmanager" 6 | import { Construct } from "constructs" 7 | import { Provider, Database, Role, Schema, Sql } from "./../src/index" 8 | import { Vpc } from "./vpc" 9 | 10 | export interface TestStackProps extends StackProps { 11 | /** 12 | * Print SQL statements being executed. 13 | * 14 | * @default true 15 | */ 16 | logger?: boolean 17 | 18 | /** 19 | * @default true 20 | */ 21 | ssl?: boolean 22 | 23 | /** 24 | * Database engine to use 25 | * 26 | * @default Aurora PostgreSQL 14.9 27 | */ 28 | engine?: rds.IClusterEngine 29 | } 30 | 31 | export class TestStack extends Stack { 32 | constructor(scope: Construct, id: string, props: TestStackProps) { 33 | super(scope, id, props) 34 | 35 | const vpc = new Vpc(this, "Vpc") 36 | 37 | // Use provided engine or default to PostgreSQL 14.9 38 | const engine = 39 | props.engine || 40 | rds.DatabaseClusterEngine.auroraPostgres({ 41 | version: rds.AuroraPostgresEngineVersion.VER_14_9, 42 | }) 43 | 44 | // Create a custom parameter group based on engine type 45 | const isMySql = engine.engineFamily === "MYSQL" 46 | const parameterGroup = new rds.ParameterGroup(this, "ClusterParameterGroup", { 47 | engine: engine, 48 | parameters: 49 | props.ssl !== false 50 | ? { 51 | // Set SSL enforcement parameter based on engine type 52 | ...(isMySql 53 | ? 
{ require_secure_transport: "ON" } 54 | : { 55 | "rds.force_ssl": "1", 56 | }), 57 | } 58 | : {}, 59 | description: "Parameter group to enforce SSL connections", 60 | }) 61 | 62 | const cluster = new rds.DatabaseCluster(this, "Cluster2", { 63 | engine: engine, 64 | removalPolicy: RemovalPolicy.DESTROY, 65 | defaultDatabaseName: "example", 66 | writer: rds.ClusterInstance.serverlessV2("writer", { 67 | instanceIdentifier: "writer", 68 | publiclyAccessible: false, 69 | enablePerformanceInsights: false, 70 | }), 71 | serverlessV2MinCapacity: 0.5, 72 | serverlessV2MaxCapacity: 1, 73 | vpc: vpc.vpc, 74 | vpcSubnets: { 75 | subnetType: ec2.SubnetType.PRIVATE_ISOLATED, 76 | }, 77 | parameterGroup: parameterGroup, // Use our custom parameter group 78 | }) 79 | 80 | const provider = new Provider(this, "Provider", { 81 | vpc: vpc.vpc, 82 | cluster: cluster, 83 | secret: cluster.secret!, 84 | functionProps: { 85 | logGroup: new LogGroup(this, "ProviderLogGroup", { 86 | retention: RetentionDays.ONE_WEEK, 87 | }), 88 | timeout: Duration.seconds(30), 89 | }, 90 | logger: props.logger, 91 | ssl: props.ssl, 92 | }) 93 | 94 | Database.fromDatabaseName(this, "DefaultDatabase", "example") 95 | 96 | const is_postgress = !props.engine || props.engine.engineFamily !== "MYSQL" 97 | 98 | if (is_postgress) { 99 | new Schema(this, "Schema", { 100 | provider: provider, 101 | schemaName: "myschema", 102 | }) 103 | } 104 | 105 | const role = new Role(this, "Role", { 106 | provider: provider, 107 | roleName: "myrole", 108 | databaseName: "mydb", 109 | }) 110 | const database = new Database(this, "Database", { 111 | provider: provider, 112 | databaseName: "mydb", 113 | owner: role, 114 | }) 115 | const statement = is_postgress 116 | ? ` 117 | create table t (i int); 118 | grant select on t to myrole; 119 | ` 120 | : ` 121 | create table if not exists t (i int); 122 | grant select on mydb.t to 'myrole'@'%'; 123 | ` 124 | const rollback = is_postgress 125 | ? 
` 126 | DO $$BEGIN 127 | IF EXISTS (select from pg_database WHERE datname = 't') THEN 128 | IF EXISTS (select from pg_catalog.pg_roles WHERE rolname = 'myrole') THEN 129 | revoke select on database t from myrole; 130 | END IF; 131 | drop table t; 132 | END IF; 133 | END$$; 134 | ` 135 | : ` 136 | revoke select on table t from myrole; 137 | drop table t; 138 | ` 139 | new Sql(this, "Sql", { 140 | provider: provider, 141 | database: database, 142 | statement, 143 | rollback, 144 | }) 145 | } 146 | } 147 | 148 | export class ImportedClusterStack extends Stack { 149 | constructor(scope: Construct, id: string, props: StackProps) { 150 | super(scope, id, props) 151 | 152 | const vpc = new Vpc(this, "Vpc") 153 | 154 | const secret = secrets.Secret.fromSecretCompleteArn( 155 | this, 156 | "Secret", 157 | Fn.importValue("secret-arn") 158 | ) 159 | 160 | const cluster = rds.DatabaseCluster.fromDatabaseClusterAttributes( 161 | this, 162 | "DatabaseCluster", 163 | { 164 | clusterIdentifier: Fn.importValue("cluster-identifier"), 165 | clusterEndpointAddress: Fn.importValue("cluster-endpoint"), 166 | engine: rds.DatabaseClusterEngine.auroraPostgres({ 167 | version: rds.AuroraPostgresEngineVersion.VER_14_6, 168 | }), 169 | port: 5432, // absence of port in import causes an exception 170 | securityGroups: [ 171 | ec2.SecurityGroup.fromSecurityGroupId( 172 | this, 173 | "RdsSecurityGroup", 174 | "sg-00bbd66b014133c45" 175 | ), 176 | ], 177 | } 178 | ) 179 | 180 | const provider = new Provider(this, "Provider", { 181 | vpc: vpc.vpc, 182 | cluster: cluster, 183 | secret: secret, 184 | }) 185 | Database.fromDatabaseName(this, "DefaultDatabase", "example") 186 | 187 | new Role(this, "Role", { 188 | provider: provider, 189 | roleName: "myrole", 190 | databaseName: "mydb", 191 | }) 192 | } 193 | } 194 | -------------------------------------------------------------------------------- /lambda/handler.test.ts: -------------------------------------------------------------------------------- 1 | import { SecretsManagerClient } from "@aws-sdk/client-secrets-manager" 2 | import { MysqlEngine } from "./engine.mysql" 3 | import { PostgresqlEngine } from "./engine.postgresql" 4 | import { handler } from "./handler" 5 | import { createRequest } from "./util" 6 | 7 | // Mock the secrets manager client 8 | jest.mock("@aws-sdk/client-secrets-manager") 9 | const SecretsManagerClientMock = SecretsManagerClient as jest.MockedClass< 10 | typeof SecretsManagerClient 11 | > 12 | 13 | // Mock the database engines 14 | jest.mock("./engine.mysql") 15 | jest.mock("./engine.postgresql") 16 | 17 | const MockedMysqlEngine = MysqlEngine as jest.MockedClass<typeof MysqlEngine> 18 | const MockedPostgresqlEngine = PostgresqlEngine as jest.MockedClass< 19 | typeof PostgresqlEngine 20 | > 21 | 22 | describe("Handler CloudFormation Property Conversion", () => { 23 | let mockMysqlEngine: jest.Mocked<MysqlEngine> 24 | let mockPostgresqlEngine: jest.Mocked<PostgresqlEngine> 25 | 26 | beforeEach(() => { 27 | jest.clearAllMocks() 28 | 29 | // Mock the engine instances 30 | mockMysqlEngine = { 31 | createRole: jest.fn(), 32 | executeSQL: jest.fn(), 33 | } as any 34 | 35 | mockPostgresqlEngine = { 36 | createRole: jest.fn(), 37 | executeSQL: jest.fn(), 38 | } as any 39 | 40 | MockedMysqlEngine.mockImplementation(() => mockMysqlEngine) 41 | MockedPostgresqlEngine.mockImplementation(() => mockPostgresqlEngine) 42 | 43 | // Mock secrets manager to return MySQL engine 44 | SecretsManagerClientMock.prototype.send.mockImplementation(() => { 45 | return Promise.resolve({ 46 | SecretString: JSON.stringify({ 47
| host: "localhost", 48 | port: 3306, 49 | username: "root", 50 | password: "password", 51 | dbname: "test", 52 | engine: "mysql", 53 | }), 54 | }) 55 | }) 56 | }) 57 | 58 | describe("EnableIamAuth property conversion", () => { 59 | it("should convert string 'false' to boolean false", async () => { 60 | mockMysqlEngine.createRole.mockResolvedValue(["CREATE USER test"]) 61 | mockMysqlEngine.executeSQL.mockResolvedValue({}) 62 | 63 | // Create a request with EnableIamAuth as string 'false' (simulating CloudFormation) 64 | const request = createRequest("role", "testuser", { 65 | EnableIamAuth: "false", 66 | PasswordArn: "arn:aws:secretsmanager:us-east-1:123456789:secret:dummy", 67 | }) 68 | 69 | await handler(request) 70 | 71 | // Verify that createRole was called with boolean false, not string 'false' 72 | expect(mockMysqlEngine.createRole).toHaveBeenCalledWith( 73 | "testuser", 74 | expect.objectContaining({ 75 | EnableIamAuth: false, // Should be converted to boolean false 76 | PasswordArn: "arn:aws:secretsmanager:us-east-1:123456789:secret:dummy", 77 | }) 78 | ) 79 | }) 80 | 81 | it("should convert string 'true' to boolean true", async () => { 82 | mockMysqlEngine.createRole.mockResolvedValue(["CREATE USER test"]) 83 | mockMysqlEngine.executeSQL.mockResolvedValue({}) 84 | 85 | // Create a request with EnableIamAuth as string 'true' (simulating CloudFormation) 86 | const request = createRequest("role", "testuser", { 87 | EnableIamAuth: "true", // CloudFormation passes boolean as string 88 | }) 89 | 90 | await handler(request) 91 | 92 | // Verify that createRole was called with boolean true, not string 'true' 93 | expect(mockMysqlEngine.createRole).toHaveBeenCalledWith( 94 | "testuser", 95 | expect.objectContaining({ 96 | EnableIamAuth: true, // Should be converted to boolean true 97 | }) 98 | ) 99 | }) 100 | 101 | it("should handle number 0 correctly", async () => { 102 | mockMysqlEngine.createRole.mockResolvedValue(["CREATE USER test"]) 103 | mockMysqlEngine.executeSQL.mockResolvedValue({}) 104 | 105 | // Create a request with EnableIamAuth as actual boolean false 106 | const request = createRequest("role", "testuser", { 107 | EnableIamAuth: "0", 108 | PasswordArn: "arn:aws:secretsmanager:us-east-1:123456789:secret:dummy", 109 | }) 110 | 111 | await handler(request) 112 | 113 | // Verify that createRole was called with boolean false 114 | expect(mockMysqlEngine.createRole).toHaveBeenCalledWith( 115 | "testuser", 116 | expect.objectContaining({ 117 | EnableIamAuth: false, 118 | PasswordArn: "arn:aws:secretsmanager:us-east-1:123456789:secret:dummy", 119 | }) 120 | ) 121 | }) 122 | 123 | it("should handle number 1 correctly", async () => { 124 | mockMysqlEngine.createRole.mockResolvedValue(["CREATE USER test"]) 125 | mockMysqlEngine.executeSQL.mockResolvedValue({}) 126 | 127 | // Create a request with EnableIamAuth as actual boolean true 128 | const request = createRequest("role", "testuser", { 129 | EnableIamAuth: "1", 130 | }) 131 | 132 | await handler(request) 133 | 134 | // Verify that createRole was called with boolean true 135 | expect(mockMysqlEngine.createRole).toHaveBeenCalledWith( 136 | "testuser", 137 | expect.objectContaining({ 138 | EnableIamAuth: true, 139 | }) 140 | ) 141 | }) 142 | 143 | it("should handle undefined EnableIamAuth correctly", async () => { 144 | mockMysqlEngine.createRole.mockResolvedValue(["CREATE USER test"]) 145 | mockMysqlEngine.executeSQL.mockResolvedValue({}) 146 | 147 | // Create a request without EnableIamAuth property 148 | const request = 
createRequest("role", "testuser", { 149 | PasswordArn: "arn:aws:secretsmanager:us-east-1:123456789:secret:dummy", 150 | }) 151 | 152 | await handler(request) 153 | 154 | // Verify that createRole was called without EnableIamAuth 155 | expect(mockMysqlEngine.createRole).toHaveBeenCalledWith( 156 | "testuser", 157 | expect.objectContaining({ 158 | PasswordArn: "arn:aws:secretsmanager:us-east-1:123456789:secret:dummy", 159 | EnableIamAuth: false, 160 | }) 161 | ) 162 | }) 163 | }) 164 | }) 165 | -------------------------------------------------------------------------------- /lambda/util.ts: -------------------------------------------------------------------------------- 1 | import { 2 | CloudFormationCustomResourceCreateEvent, 3 | CloudFormationCustomResourceUpdateEvent, 4 | CloudFormationCustomResourceDeleteEvent, 5 | } from "aws-lambda" 6 | import { Client } from "pg" 7 | import { ResourceProperties } from "./types" 8 | 9 | // Helper interface to simplify test request creation 10 | export interface ResourcePropertiesInput { 11 | readonly SecretArn?: string 12 | readonly PasswordArn?: string 13 | readonly DatabaseName?: string 14 | readonly Owner?: string 15 | readonly Statement?: string 16 | readonly Rollback?: string 17 | readonly RoleName?: string 18 | readonly ParameterName?: string 19 | [key: string]: any 20 | } 21 | 22 | /** 23 | * Helpers to create CloudFormation requests. 24 | */ 25 | export const createRequest = ( 26 | resource: string, 27 | resourceId: string, 28 | props?: Partial<ResourcePropertiesInput> 29 | ): CloudFormationCustomResourceCreateEvent => { 30 | return { 31 | ServiceToken: "", 32 | ResponseURL: "", 33 | StackId: "", 34 | RequestId: "", 35 | LogicalResourceId: "", 36 | ResourceType: "", 37 | ResourceProperties: { 38 | ServiceToken: "", 39 | Resource: resource as any, // Needed for backward compatibility with tests 40 | ResourceId: resourceId, 41 | SecretArn: 42 | props?.SecretArn || "arn:aws:secretsmanager:us-east-1:123456789:secret:dummy", 43 | ...props, 44 | }, 45 | RequestType: "Create", 46 | } 47 | } 48 | 49 | export const updateRequest = ( 50 | resource: string, 51 | oldResourceId: string, 52 | newResourceId: string, 53 | props?: ResourcePropertiesInput 54 | ): CloudFormationCustomResourceUpdateEvent => { 55 | return { 56 | ServiceToken: "", 57 | ResponseURL: "", 58 | StackId: "", 59 | RequestId: "", 60 | LogicalResourceId: "", 61 | ResourceType: "", 62 | ResourceProperties: { 63 | ServiceToken: "", 64 | Resource: resource as any, // Needed for backward compatibility with tests 65 | ResourceId: newResourceId, 66 | SecretArn: 67 | props?.SecretArn || "arn:aws:secretsmanager:us-east-1:123456789:secret:dummy", 68 | ...props, 69 | } as unknown as ResourceProperties, 70 | RequestType: "Update", 71 | PhysicalResourceId: oldResourceId, 72 | OldResourceProperties: { 73 | ServiceToken: "", 74 | Resource: resource as any, 75 | ResourceId: oldResourceId, 76 | SecretArn: 77 | props?.SecretArn || "arn:aws:secretsmanager:us-east-1:123456789:secret:dummy", 78 | } as unknown as ResourceProperties, 79 | } 80 | } 81 | 82 | export const deleteRequest = ( 83 | resource: string, 84 | resourceId: string, 85 | props?: ResourcePropertiesInput 86 | ): CloudFormationCustomResourceDeleteEvent => { 87 | return { 88 | ServiceToken: "", 89 | ResponseURL: "", 90 | StackId: "", 91 | RequestId: "", 92 | LogicalResourceId: "", 93 | ResourceType: "", 94 | ResourceProperties: { 95 | ServiceToken: "", 96 | Resource: resource as any, // Needed for backward compatibility with tests 97 | ResourceId: resourceId, 98 |
SecretArn: 99 | props?.SecretArn || "arn:aws:secretsmanager:us-east-1:123456789:secret:dummy", 100 | ...props, 101 | } as unknown as ResourceProperties, 102 | RequestType: "Delete", 103 | PhysicalResourceId: resourceId, 104 | } 105 | } 106 | 107 | /** 108 | * Database helpers. 109 | */ 110 | export const schemaExists = async (client: Client, schema: string): Promise<boolean> => { 111 | const schemas = await getSchemas(client) 112 | return schemas.find((s) => s === schema) !== undefined 113 | } 114 | 115 | export const roleGrantedForSchema = async ( 116 | client: Client, 117 | schema: string, 118 | role: string 119 | ): Promise<boolean> => { 120 | const sql = `select nspname as schema_name, r.rolname as role_name,\ 121 | pg_catalog.has_schema_privilege(r.rolname, nspname, 'CREATE') as create_grant,\ 122 | pg_catalog.has_schema_privilege(r.rolname, nspname, 'USAGE') as usage_grant\ 123 | from pg_namespace pn,pg_catalog.pg_roles r \ 124 | where array_to_string(nspacl,',') like '%'||r.rolname||'%' and nspowner > 1 \ 125 | and nspname = '${schema}' and r.rolname = '${role}'` 126 | const { rows } = await client.query(sql) 127 | return ( 128 | rows.length === 1 && rows[0].create_grant === true && rows[0].usage_grant === true 129 | ) 130 | } 131 | 132 | const getSchemas = async (client: Client): Promise<string[]> => { 133 | const { rows } = await client.query( 134 | "select schema_name from information_schema.schemata" 135 | ) 136 | return rows.map((r) => r.schema_name) 137 | } 138 | 139 | export const roleExists = async (client: Client, role: string): Promise<boolean> => { 140 | const roles = await getRoles(client) 141 | return roles.find((r) => r === role) !== undefined 142 | } 143 | 144 | const getRoles = async (client: Client): Promise<string[]> => { 145 | const { rows } = await client.query("select rolname from pg_roles") 146 | return rows.map((r) => r.rolname) 147 | } 148 | 149 | export const databaseExists = async ( 150 | client: Client, 151 | database: string 152 | ): Promise<boolean> => { 153 | const databases = await getDatabases(client) 154 | return databases.find((r) => r === database) !== undefined 155 | } 156 | 157 | const getDatabases = async (client: Client): Promise<string[]> => { 158 | const { rows } = await client.query( 159 | "select datname from pg_database where datistemplate = false" 160 | ) 161 | return rows.map((r) => r.datname) 162 | } 163 | 164 | export const databaseOwnerIs = async ( 165 | client: Client, 166 | database: string, 167 | user_name: string 168 | ): Promise<boolean> => { 169 | const databases = await getDatabasesWithOwner(client, user_name) 170 | return databases.find((r) => r === database) !== undefined 171 | } 172 | 173 | const getDatabasesWithOwner = async ( 174 | client: Client, 175 | user_name: string 176 | ): Promise<string[]> => { 177 | const { rows } = await client.query( 178 | `select datname from pg_database where datistemplate = false and pg_catalog.pg_get_userbyid(datdba) = '${user_name}'` 179 | ) 180 | return rows.map((r) => r.datname) 181 | } 182 | 183 | export const rowCount = async (client: Client, table_name: string): Promise<number> => { 184 | const { rows } = await client.query(`select count(*) from ${table_name}`) 185 | return parseInt(rows[0].count) 186 | } 187 | -------------------------------------------------------------------------------- /test/instance-stack.test.ts: -------------------------------------------------------------------------------- 1 | import * as cdk from "aws-cdk-lib" 2 | import { Match, Template } from "aws-cdk-lib/assertions" 3 | import * as ec2 from "aws-cdk-lib/aws-ec2" 4 | import * as rds from
"aws-cdk-lib/aws-rds" 5 | import * as serverlessInstancev1 from "./instance1-stack" 6 | import * as serverlessInstancev2 from "./instance2-stack" 7 | import { Provider } from "../src/provider" 8 | import { Role } from "../src/role" 9 | 10 | test("serverless instance v1", () => { 11 | const app = new cdk.App() 12 | const stack = new serverlessInstancev1.TestInstanceStack(app, "TestInstanceStack", { 13 | env: { 14 | account: "123456789", 15 | region: "us-east-1", 16 | }, 17 | }) 18 | let template = Template.fromStack(stack) 19 | template.hasResourceProperties("AWS::CloudFormation::CustomResource", { 20 | Resource: "role", 21 | }) 22 | 23 | // Check for engine property 24 | template.hasResourceProperties("AWS::SecretsManager::Secret", { 25 | GenerateSecretString: { 26 | SecretStringTemplate: { 27 | "Fn::Join": [ 28 | "", // First element is an empty string (the separator) 29 | // eslint-disable-next-line quotes 30 | Match.arrayWith([Match.stringLikeRegexp('"engine":\\s*"postgres"')]), // Second element is the array of strings to join 31 | ], 32 | }, 33 | }, 34 | }) 35 | }) 36 | 37 | test("instance role without database", () => { 38 | const app = new cdk.App() 39 | const stack = new cdk.Stack(app, "TestInstanceStack", { 40 | env: { 41 | account: "123456789", 42 | region: "us-east-1", 43 | }, 44 | }) 45 | const vpc = new ec2.Vpc(stack, "Vpc", { 46 | subnetConfiguration: [ 47 | { 48 | cidrMask: 28, 49 | name: "rds", 50 | subnetType: ec2.SubnetType.PRIVATE_ISOLATED, 51 | }, 52 | ], 53 | }) 54 | 55 | const instance = new rds.DatabaseInstance(stack, "Instance", { 56 | vpc: vpc, 57 | vpcSubnets: { 58 | subnetType: ec2.SubnetType.PRIVATE_ISOLATED, 59 | }, 60 | engine: rds.DatabaseInstanceEngine.postgres({ 61 | version: rds.PostgresEngineVersion.VER_15, 62 | }), 63 | }) 64 | 65 | const provider = new Provider(stack, "Provider", { 66 | vpc: vpc, 67 | cluster: instance, 68 | secret: instance.secret!, 69 | }) 70 | 71 | expect(() => { 72 | new Role(stack, "Role", { 73 | provider: provider, 74 | roleName: "role", 75 | }) 76 | }).toThrow() 77 | }) 78 | 79 | test("serverless instance v2", () => { 80 | const app = new cdk.App() 81 | const stack = new serverlessInstancev2.TestInstanceStack(app, "TestInstanceStack", { 82 | env: { 83 | account: "123456789", 84 | region: "us-east-1", 85 | }, 86 | }) 87 | let template = Template.fromStack(stack) 88 | template.hasResourceProperties("AWS::CloudFormation::CustomResource", { 89 | Resource: "role", 90 | }) 91 | /* 92 | template.hasResourceProperties("AWS::SecretsManager::Secret", { 93 | GenerateSecretString: { 94 | SecretStringTemplate: "{\"username\":\"myrole\"}", 95 | }, 96 | }) 97 | */ 98 | template.hasResourceProperties("AWS::EC2::SecurityGroupIngress", { 99 | FromPort: { 100 | "Fn::GetAtt": ["InstanceC1063A87", "Endpoint.Port"], 101 | }, 102 | IpProtocol: "tcp", 103 | SourceSecurityGroupId: { 104 | "Fn::GetAtt": [ 105 | "RdsSql28b9e791af604a33bca8ffb6f30ef8c5SecurityGroup60F64508", 106 | "GroupId", 107 | ], 108 | }, 109 | }) 110 | }) 111 | 112 | test("absence of security group is detected", () => { 113 | const app = new cdk.App() 114 | const stack = new serverlessInstancev2.ImportedInstanceStack(app, "TestInstanceStack", { 115 | env: { 116 | account: "123456789", 117 | region: "us-east-1", 118 | }, 119 | }) 120 | let template = Template.fromStack(stack) 121 | template.hasResourceProperties("AWS::CloudFormation::CustomResource", { 122 | Resource: "role", 123 | }) 124 | template.hasResourceProperties("AWS::EC2::SecurityGroupIngress", { 125 | FromPort: 5432, 126 | 
IpProtocol: "tcp", 127 | SourceSecurityGroupId: { 128 | "Fn::GetAtt": [ 129 | "RdsSql28b9e791af604a33bca8ffb6f30ef8c5SecurityGroup60F64508", 130 | "GroupId", 131 | ], 132 | }, 133 | }) 134 | }) 135 | 136 | test("vpcSubnet selection can be specified", () => { 137 | const app = new cdk.App() 138 | const stack = new cdk.Stack(app, "TestInstanceStack", { 139 | env: { 140 | account: "123456789", 141 | region: "us-east-1", 142 | }, 143 | }) 144 | const vpc = new ec2.Vpc(stack, "Vpc", { 145 | subnetConfiguration: [ 146 | { 147 | cidrMask: 28, 148 | name: "rds", 149 | subnetType: ec2.SubnetType.PRIVATE_WITH_EGRESS, 150 | }, 151 | { 152 | cidrMask: 28, 153 | name: "nat", 154 | subnetType: ec2.SubnetType.PUBLIC, 155 | }, 156 | ], 157 | }) 158 | 159 | const instance = new rds.DatabaseInstance(stack, "Instance", { 160 | vpc: vpc, 161 | // vpcSubnets: { 162 | // subnetType: ec2.SubnetType.PRIVATE_ISOLATED, 163 | // }, 164 | engine: rds.DatabaseInstanceEngine.postgres({ 165 | version: rds.PostgresEngineVersion.VER_15, 166 | }), 167 | }) 168 | 169 | const provider = new Provider(stack, "Provider", { 170 | vpc: vpc, 171 | vpcSubnets: { 172 | subnetType: ec2.SubnetType.PRIVATE_WITH_EGRESS, 173 | }, 174 | cluster: instance, 175 | secret: instance.secret!, 176 | }) 177 | 178 | expect(() => { 179 | new Role(stack, "Role", { 180 | provider: provider, 181 | roleName: "role", 182 | }) 183 | }).toThrow() 184 | }) 185 | 186 | test("mysql database instance engine is set in secret", () => { 187 | const app = new cdk.App() 188 | const stack = new cdk.Stack(app, "TestStack", { 189 | env: { 190 | account: "123456789", 191 | region: "us-east-1", 192 | }, 193 | }) 194 | const vpc = new ec2.Vpc(stack, "Vpc", { 195 | subnetConfiguration: [ 196 | { 197 | cidrMask: 28, 198 | name: "rds", 199 | subnetType: ec2.SubnetType.PRIVATE_ISOLATED, 200 | }, 201 | { 202 | cidrMask: 28, 203 | name: "nat", 204 | subnetType: ec2.SubnetType.PUBLIC, 205 | }, 206 | ], 207 | }) 208 | 209 | const instance = new rds.DatabaseInstance(stack, "Instance", { 210 | vpc, 211 | vpcSubnets: { 212 | subnetType: ec2.SubnetType.PRIVATE_ISOLATED, 213 | }, 214 | engine: rds.DatabaseInstanceEngine.mariaDb({ 215 | version: rds.MariaDbEngineVersion.VER_11_4_3, 216 | }), 217 | databaseName: "example", 218 | credentials: rds.Credentials.fromGeneratedSecret("admin"), 219 | instanceType: ec2.InstanceType.of( 220 | ec2.InstanceClass.BURSTABLE3, 221 | ec2.InstanceSize.MICRO 222 | ), 223 | }) 224 | 225 | const provider = new Provider(stack, "Provider", { 226 | vpc: vpc, 227 | vpcSubnets: { 228 | subnetType: ec2.SubnetType.PRIVATE_ISOLATED, 229 | }, 230 | cluster: instance, 231 | secret: instance.secret!, 232 | }) 233 | 234 | new Role(stack, "Role", { 235 | provider: provider, 236 | roleName: "myrole", 237 | databaseName: "mydb", 238 | }) 239 | 240 | const template = Template.fromStack(stack) 241 | 242 | // Check for engine property 243 | template.hasResourceProperties("AWS::SecretsManager::Secret", { 244 | GenerateSecretString: { 245 | SecretStringTemplate: { 246 | "Fn::Join": [ 247 | "", // First element is an empty string (the separator) 248 | // eslint-disable-next-line quotes 249 | Match.arrayWith([Match.stringLikeRegexp('"engine":\\s*"mysql"')]), // Second element is the array of strings to join 250 | ], 251 | }, 252 | }, 253 | }) 254 | }) 255 | -------------------------------------------------------------------------------- /lambda/engine.mysql.test.ts: -------------------------------------------------------------------------------- 1 | import { MysqlEngine } 
from "./engine.mysql" 2 | 3 | describe("MySQL Engine", () => { 4 | let engine: MysqlEngine 5 | 6 | beforeEach(() => { 7 | engine = new MysqlEngine() 8 | }) 9 | 10 | describe("Database", () => { 11 | it("should generate correct SQL for creating a database", () => { 12 | const sql = engine.createDatabase("testdb", {}) 13 | expect(Array.isArray(sql)).toBe(true) 14 | expect(sql[0]).toContain("CREATE DATABASE IF NOT EXISTS") 15 | }) 16 | 17 | it("should generate correct SQL for creating a database with an owner", () => { 18 | const sql = engine.createDatabase("testdb", { Owner: "testuser" }) 19 | expect(Array.isArray(sql)).toBe(true) 20 | expect(sql[0]).toContain("CREATE DATABASE IF NOT EXISTS") 21 | expect(sql[1]).toContain("GRANT ALL PRIVILEGES") 22 | }) 23 | 24 | it("should generate correct SQL for deleting a database", () => { 25 | const sql = engine.deleteDatabase("testdb", "masteruser") 26 | expect(Array.isArray(sql)).toBe(true) 27 | expect(sql[0]).toContain("DROP DATABASE IF EXISTS") 28 | }) 29 | }) 30 | 31 | describe("Role", () => { 32 | it("should throw error when creating a role without password ARN", async () => { 33 | await expect(engine.createRole("testrole", {})).rejects.toThrow( 34 | "No PasswordArn provided" 35 | ) 36 | }) 37 | 38 | it("should generate correct SQL for renaming a role", async () => { 39 | // Mock getPassword implementation 40 | jest.spyOn(engine as any, "getPassword").mockResolvedValue("test-password") 41 | 42 | const sql = await engine.updateRole( 43 | "newrole", 44 | "oldrole", 45 | { 46 | PasswordArn: "arn:aws:secretsmanager:region:account:secret:name", 47 | }, 48 | {} 49 | ) 50 | 51 | expect(Array.isArray(sql)).toBe(true) 52 | expect(sql[0]).toContain("CREATE USER IF NOT EXISTS") 53 | expect(sql[1]).toContain("DROP USER IF EXISTS") 54 | expect(sql[2]).toContain("FLUSH PRIVILEGES") 55 | }) 56 | 57 | it("should generate correct SQL for deleting a role", async () => { 58 | const sql = await engine.deleteRole("testrole", { DatabaseName: "testdb" }) 59 | expect(Array.isArray(sql)).toBe(true) 60 | expect(sql[0]).toContain("REVOKE ALL PRIVILEGES") 61 | expect(sql[1]).toContain("DROP USER IF EXISTS") 62 | }) 63 | 64 | it("should generate SQL to revoke privileges on old database when database is changed", async () => { 65 | // Mock getPassword implementation 66 | jest.spyOn(engine as any, "getPassword").mockResolvedValue("test-password") 67 | 68 | const oldProps = { 69 | DatabaseName: "olddb", 70 | PasswordArn: "arn:aws:secretsmanager:region:account:secret:name", 71 | } 72 | 73 | const newProps = { 74 | DatabaseName: "newdb", 75 | PasswordArn: "arn:aws:secretsmanager:region:account:secret:name", 76 | } 77 | 78 | const sql = await engine.updateRole("testrole", "testrole", newProps, oldProps) 79 | 80 | expect(Array.isArray(sql)).toBe(true) 81 | 82 | // Check for revoke statement for the old database 83 | const revokeStatement = sql.find( 84 | (statement) => 85 | statement.includes("REVOKE ALL PRIVILEGES") && statement.includes("`olddb`") 86 | ) 87 | expect(revokeStatement).toBeDefined() 88 | 89 | // Check for grant statement for the new database 90 | const grantStatement = sql.find( 91 | (statement) => 92 | statement.includes("GRANT ALL PRIVILEGES") && statement.includes("`newdb`") 93 | ) 94 | expect(grantStatement).toBeDefined() 95 | }) 96 | 97 | describe("IAM Authentication", () => { 98 | it("should create user with IAM authentication", async () => { 99 | const props = { 100 | EnableIamAuth: true, 101 | DatabaseName: "testdb", 102 | } 103 | 104 | const sql = await 
engine.createRole("iamuser", props) 105 | 106 | expect(Array.isArray(sql)).toBe(true) 107 | expect(sql[0]).toContain( 108 | "CREATE USER IF NOT EXISTS 'iamuser'@'%' IDENTIFIED WITH AWSAuthenticationPlugin as 'RDS'" 109 | ) 110 | expect(sql[1]).toContain("GRANT ALL PRIVILEGES ON `testdb`.* TO 'iamuser'@'%'") 111 | expect(sql[2]).toBe("FLUSH PRIVILEGES") 112 | }) 113 | 114 | it("should create user without IAM authentication", async () => { 115 | jest.spyOn(engine as any, "getPassword").mockResolvedValue("test-password") 116 | 117 | const props = { 118 | EnableIamAuth: false, 119 | PasswordArn: "arn:aws:secretsmanager:region:account:secret:name", 120 | DatabaseName: "testdb", 121 | } 122 | 123 | const sql = await engine.createRole("passworduser", props) 124 | 125 | expect(Array.isArray(sql)).toBe(true) 126 | expect(sql[0]).toContain( 127 | "CREATE USER IF NOT EXISTS 'passworduser'@'%' IDENTIFIED BY 'test-password'" 128 | ) 129 | expect(sql[1]).toContain( 130 | "GRANT ALL PRIVILEGES ON `testdb`.* TO 'passworduser'@'%'" 131 | ) 132 | expect(sql[2]).toBe("FLUSH PRIVILEGES") 133 | }) 134 | 135 | it("should switch from password to IAM authentication", async () => { 136 | const oldProps = { 137 | EnableIamAuth: false, 138 | PasswordArn: "arn:aws:secretsmanager:region:account:secret:name", 139 | } 140 | 141 | const newProps = { 142 | EnableIamAuth: true, 143 | DatabaseName: "testdb", 144 | } 145 | 146 | const sql = await engine.updateRole( 147 | "switchuser", 148 | "switchuser", 149 | newProps, 150 | oldProps 151 | ) 152 | 153 | expect(Array.isArray(sql)).toBe(true) 154 | expect(sql[0]).toContain("DROP USER IF EXISTS 'switchuser'@'%'") 155 | expect(sql[1]).toContain( 156 | "CREATE USER 'switchuser'@'%' IDENTIFIED WITH AWSAuthenticationPlugin as 'RDS'" 157 | ) 158 | expect(sql[2]).toContain("GRANT ALL PRIVILEGES ON `testdb`.* TO 'switchuser'@'%'") 159 | expect(sql[3]).toBe("FLUSH PRIVILEGES") 160 | }) 161 | 162 | it("should switch from IAM to password authentication", async () => { 163 | jest.spyOn(engine as any, "getPassword").mockResolvedValue("new-password") 164 | 165 | const oldProps = { 166 | EnableIamAuth: true, 167 | } 168 | 169 | const newProps = { 170 | EnableIamAuth: false, 171 | PasswordArn: "arn:aws:secretsmanager:region:account:secret:name", 172 | DatabaseName: "testdb", 173 | } 174 | 175 | const sql = await engine.updateRole( 176 | "switchuser", 177 | "switchuser", 178 | newProps, 179 | oldProps 180 | ) 181 | 182 | expect(Array.isArray(sql)).toBe(true) 183 | expect(sql[0]).toContain("DROP USER IF EXISTS 'switchuser'@'%'") 184 | expect(sql[1]).toContain( 185 | "CREATE USER 'switchuser'@'%' IDENTIFIED BY 'new-password'" 186 | ) 187 | expect(sql[2]).toContain("GRANT ALL PRIVILEGES ON `testdb`.* TO 'switchuser'@'%'") 188 | expect(sql[3]).toBe("FLUSH PRIVILEGES") 189 | }) 190 | }) 191 | }) 192 | 193 | describe("Schema", () => { 194 | it("should throw an error when trying to create a schema", () => { 195 | expect(() => engine.createSchema("testschema", {})).toThrow("not supported") 196 | }) 197 | 198 | it("should throw an error when trying to update a schema", () => { 199 | expect(() => engine.updateSchema("newschema", "oldschema", {})).toThrow( 200 | "not supported" 201 | ) 202 | }) 203 | 204 | it("should throw an error when trying to delete a schema", () => { 205 | expect(() => engine.deleteSchema("testschema", {})).toThrow("not supported") 206 | }) 207 | }) 208 | 209 | describe("SQL", () => { 210 | it("should pass through SQL statements for create", () => { 211 | const statement = "SELECT 
* FROM users" 212 | const sql = engine.createSql("test", { Statement: statement }) 213 | expect(sql).toBe(statement) 214 | }) 215 | 216 | it("should pass through SQL statements for update", () => { 217 | const statement = "UPDATE users SET name = 'test'" 218 | const sql = engine.updateSql("test", "old", { Statement: statement }) 219 | expect(sql).toBe(statement) 220 | }) 221 | 222 | it("should pass through rollback SQL for delete", () => { 223 | const rollback = "DROP TABLE users" 224 | const sql = engine.deleteSql("test", { Rollback: rollback }) 225 | expect(sql).toBe(rollback) 226 | }) 227 | }) 228 | }) 229 | -------------------------------------------------------------------------------- /lambda/engine.mysql.ts: -------------------------------------------------------------------------------- 1 | import * as fs from "fs" 2 | import { AbstractEngine, EngineConnectionConfig } from "./engine.abstract" 3 | import { 4 | EngineDatabaseProperties, 5 | EngineRoleProperties, 6 | EngineSchemaProperties, 7 | EngineSqlProperties, 8 | } from "./types" 9 | 10 | export class MysqlEngine extends AbstractEngine { 11 | createDatabase(resourceId: string, props: EngineDatabaseProperties): string[] { 12 | const sql = [`CREATE DATABASE IF NOT EXISTS \`${resourceId}\``] 13 | 14 | if (props.Owner) { 15 | sql.push(`GRANT ALL PRIVILEGES ON \`${resourceId}\`.* TO '${props.Owner}'@'%'`) 16 | sql.push("FLUSH PRIVILEGES") 17 | } 18 | 19 | return sql 20 | } 21 | 22 | updateDatabase(): string[] { 23 | throw new Error("Renaming database is not supported in MySQL.") 24 | } 25 | 26 | deleteDatabase(resourceId: string, _masterUser: string): string[] { 27 | return [`DROP DATABASE IF EXISTS \`${resourceId}\``] 28 | } 29 | 30 | async createRole(resourceId: string, props: EngineRoleProperties): Promise { 31 | const sql: string[] = [] 32 | 33 | if (props.EnableIamAuth) { 34 | // Create user for IAM authentication 35 | sql.push( 36 | `CREATE USER IF NOT EXISTS '${resourceId}'@'%' IDENTIFIED WITH AWSAuthenticationPlugin as 'RDS'` 37 | ) 38 | } else { 39 | // Create user with password authentication 40 | if (!props.PasswordArn) throw new Error("No PasswordArn provided") 41 | const password = await this.getPassword(props.PasswordArn) 42 | if (!password) throw `Cannot parse password from ${props.PasswordArn}` 43 | 44 | sql.push( 45 | `CREATE USER IF NOT EXISTS '${resourceId}'@'%' IDENTIFIED BY '${password}'` 46 | ) 47 | } 48 | 49 | if (props.DatabaseName) { 50 | sql.push( 51 | `GRANT ALL PRIVILEGES ON \`${props.DatabaseName}\`.* TO '${resourceId}'@'%'` 52 | ) 53 | } 54 | 55 | sql.push("FLUSH PRIVILEGES") 56 | return sql 57 | } 58 | 59 | async updateRole( 60 | resourceId: string, 61 | oldResourceId: string, 62 | props: EngineRoleProperties, 63 | oldProps: EngineRoleProperties 64 | ): Promise { 65 | const sql: string[] = [] 66 | 67 | if (oldResourceId !== resourceId) { 68 | // MySQL doesn't allow renaming users directly, we need to create a new one and drop the old one 69 | if (props.EnableIamAuth) { 70 | // Create new user with IAM auth 71 | sql.push( 72 | `CREATE USER IF NOT EXISTS '${resourceId}'@'%' IDENTIFIED WITH AWSAuthenticationPlugin as 'RDS'` 73 | ) 74 | } else if (props?.PasswordArn) { 75 | // Create new user with password auth 76 | const password = await this.getPassword(props.PasswordArn) 77 | if (!password) throw `Cannot parse password from ${props.PasswordArn}` 78 | 79 | sql.push( 80 | `CREATE USER IF NOT EXISTS '${resourceId}'@'%' IDENTIFIED BY '${password}'` 81 | ) 82 | } else { 83 | // If no password is provided, 
create user with a random password then expire it 84 | sql.push(`CREATE USER IF NOT EXISTS '${resourceId}'@'%' IDENTIFIED BY UUID()`) 85 | sql.push(`ALTER USER '${resourceId}'@'%' PASSWORD EXPIRE`) 86 | } 87 | 88 | // Drop the old user 89 | sql.push(`DROP USER IF EXISTS '${oldResourceId}'@'%'`) 90 | } else { 91 | // Handle authentication method changes for existing user 92 | if (props?.EnableIamAuth && !oldProps?.EnableIamAuth) { 93 | // Switching from password to IAM auth - need to recreate user 94 | sql.push(`DROP USER IF EXISTS '${resourceId}'@'%'`) 95 | sql.push( 96 | `CREATE USER '${resourceId}'@'%' IDENTIFIED WITH AWSAuthenticationPlugin as 'RDS'` 97 | ) 98 | } else if (!props?.EnableIamAuth && oldProps?.EnableIamAuth) { 99 | // Switching from IAM to password auth - need to recreate user 100 | sql.push(`DROP USER IF EXISTS '${resourceId}'@'%'`) 101 | if (props?.PasswordArn) { 102 | const password = await this.getPassword(props.PasswordArn) 103 | if (!password) throw `Cannot parse password from ${props.PasswordArn}` 104 | sql.push(`CREATE USER '${resourceId}'@'%' IDENTIFIED BY '${password}'`) 105 | } 106 | } else if (!props?.EnableIamAuth && props?.PasswordArn) { 107 | // Just update the password for password auth 108 | const password = await this.getPassword(props.PasswordArn) 109 | if (!password) throw `Cannot parse password from ${props.PasswordArn}` 110 | sql.push(`ALTER USER '${resourceId}'@'%' IDENTIFIED BY '${password}'`) 111 | } 112 | } 113 | 114 | // Check if database name has changed 115 | if ( 116 | oldProps?.DatabaseName && 117 | props?.DatabaseName && 118 | oldProps.DatabaseName !== props.DatabaseName 119 | ) { 120 | // Revoke from old database 121 | sql.push( 122 | `REVOKE ALL PRIVILEGES ON \`${oldProps.DatabaseName}\`.* FROM '${resourceId}'@'%'` 123 | ) 124 | } 125 | 126 | if (props?.DatabaseName) { 127 | sql.push( 128 | `GRANT ALL PRIVILEGES ON \`${props.DatabaseName}\`.* TO '${resourceId}'@'%'` 129 | ) 130 | } 131 | 132 | if (sql.length > 0) { 133 | sql.push("FLUSH PRIVILEGES") 134 | } 135 | 136 | return sql 137 | } 138 | 139 | async deleteRole(resourceId: string, props: EngineRoleProperties): Promise { 140 | const sql: string[] = [] 141 | 142 | if (props?.DatabaseName) { 143 | sql.push( 144 | `REVOKE ALL PRIVILEGES ON \`${props.DatabaseName}\`.* FROM '${resourceId}'@'%'` 145 | ) 146 | } 147 | 148 | sql.push(`DROP USER IF EXISTS '${resourceId}'@'%'`) 149 | sql.push("FLUSH PRIVILEGES") 150 | 151 | return sql 152 | } 153 | 154 | createSchema(_resourceId: string, _props: EngineSchemaProperties): string[] { 155 | throw new Error("Schemas are not supported in MySQL/MariaDB") 156 | } 157 | 158 | updateSchema( 159 | _resourceId: string, 160 | _oldResourceId: string, 161 | _props: EngineSchemaProperties 162 | ): string[] { 163 | throw new Error("Schemas are not supported in MySQL/MariaDB") 164 | } 165 | 166 | deleteSchema(_resourceId: string, _props: EngineSchemaProperties): string[] { 167 | throw new Error("Schemas are not supported in MySQL/MariaDB") 168 | } 169 | 170 | createSql(_resourceId: string, props: EngineSqlProperties): string { 171 | return props?.Statement || "" 172 | } 173 | 174 | updateSql( 175 | _resourceId: string, 176 | _oldResourceId: string, 177 | props: EngineSqlProperties 178 | ): string { 179 | return props?.Statement || "" 180 | } 181 | 182 | deleteSql(_resourceId: string, props: EngineSqlProperties): string { 183 | return props?.Rollback || "" 184 | } 185 | 186 | createIamGrant(_roleName: string, _iamArn: string): string | string[] { 187 | throw new 
Error("IAM grants are only supported with DSQL clusters") 188 | } 189 | 190 | updateIamGrant( 191 | _roleName: string, 192 | _oldRoleName: string, 193 | _iamArn: string, 194 | _oldIamArn: string 195 | ): string | string[] { 196 | throw new Error("IAM grants are only supported with DSQL clusters") 197 | } 198 | 199 | deleteIamGrant(_roleName: string, _iamArn: string): string | string[] { 200 | throw new Error("IAM grants are only supported with DSQL clusters") 201 | } 202 | 203 | async executeSQL(sql: string | string[], config: EngineConnectionConfig): Promise { 204 | // Dynamic import to avoid bundling issues 205 | const { createConnection } = await import("mysql2/promise") 206 | 207 | const isSslEnabled = process.env.SSL ? JSON.parse(process.env.SSL) : true 208 | 209 | const sslOptions = isSslEnabled 210 | ? { 211 | ssl: { 212 | ca: fs.readFileSync(`${process.env.LAMBDA_TASK_ROOT}/global-bundle.pem`), 213 | rejectUnauthorized: true, 214 | }, 215 | } 216 | : {} 217 | 218 | const connectionConfig = { 219 | host: config.host, 220 | port: config.port, 221 | user: config.user, 222 | password: config.password, 223 | database: config.database, 224 | connectTimeout: 30000, 225 | multipleStatements: true, 226 | ...sslOptions, 227 | } 228 | 229 | this.log( 230 | `Connecting to MySQL/MariaDB host ${connectionConfig.host}:${ 231 | connectionConfig.port 232 | }${isSslEnabled ? " using a secure connection" : ""}, database ${ 233 | connectionConfig.database 234 | } as ${connectionConfig.user}` 235 | ) 236 | this.log("Executing SQL", sql) 237 | 238 | const connection = await createConnection(connectionConfig) 239 | try { 240 | if (typeof sql === "string") { 241 | return await connection.query(sql) 242 | } else if (sql) { 243 | return await Promise.all(sql.map((statement) => connection.query(statement))) 244 | } 245 | } finally { 246 | await connection.end() 247 | } 248 | } 249 | } 250 | -------------------------------------------------------------------------------- /src/role.ts: -------------------------------------------------------------------------------- 1 | import { CustomResource, RemovalPolicy, Stack } from "aws-cdk-lib" 2 | import * as iam from "aws-cdk-lib/aws-iam" 3 | import * as kms from "aws-cdk-lib/aws-kms" 4 | import { IDatabaseCluster, IDatabaseInstance } from "aws-cdk-lib/aws-rds" 5 | import { ISecret, Secret } from "aws-cdk-lib/aws-secretsmanager" 6 | import * as ssm from "aws-cdk-lib/aws-ssm" 7 | import { Construct } from "constructs" 8 | import { IDatabase } from "./database" 9 | import { RdsSqlResource } from "./enum" 10 | import { IProvider, DatabaseEngine } from "./provider" 11 | import { Role as CustomResourceRole } from "./role.custom-resource" 12 | 13 | export interface RoleProps { 14 | /** 15 | * Provider. 16 | */ 17 | readonly provider: IProvider 18 | 19 | /** 20 | * SQL. 21 | */ 22 | readonly roleName: string 23 | 24 | /** 25 | * Optional database this user is expected to use. 26 | * 27 | * If the database exists, connect privileges are granted. 28 | * 29 | * Specify one of `database` or `databaseName`. This is the name 30 | * that will be stored in the role's secret as the database name to 31 | * use. 32 | */ 33 | readonly database?: IDatabase 34 | 35 | /** 36 | * Optional database name this user is expected to use. 37 | * 38 | * If the database exists, connect privileges are granted. 39 | * 40 | * Specify one of `database` or `databaseName`. This is the name 41 | * that will be stored in the role's secret as the database name to 42 | * use. 
43 | */ 44 | readonly databaseName?: string 45 | 46 | /** 47 | * A new secret is created for this user. 48 | * 49 | * Optionally encrypt it with the given key. 50 | */ 51 | readonly encryptionKey?: kms.IKey 52 | 53 | /** 54 | * A new secret is created for this user. 55 | * 56 | * Optionally add secret name to the secret. 57 | */ 58 | readonly secretName?: string 59 | 60 | /** 61 | * Prefix for SSM parameters to store credentials in Parameter Store. 62 | * When defined, credentials will also be stored as parameters. 63 | * 64 | * The parameter names such as "password" is simply appended to 65 | * `parameterPrefix`, so make sure the prefix ends with a slash if 66 | * you have your parameter names slash separated. 67 | * 68 | * Note that the password from the secret is copied just once, they 69 | * are not kept in sync. 70 | * 71 | * @default - credentials are only stored in Secrets Manager 72 | */ 73 | readonly parameterPrefix?: string 74 | 75 | /** 76 | * Enable IAM authentication for this role. 77 | * 78 | * When enabled, the role will be created without a password and 79 | * configured for AWS IAM database authentication. No secret will 80 | * be created for this role. 81 | * 82 | * Note: For DSQL clusters, this property is ignored as DSQL always 83 | * uses IAM authentication. 84 | * 85 | * @default false - use password authentication 86 | */ 87 | readonly enableIamAuth?: boolean 88 | } 89 | 90 | // Private Parameters construct (not exported) 91 | class Parameters extends Construct { 92 | constructor( 93 | scope: Construct, 94 | id: string, 95 | props: { 96 | provider: IProvider 97 | secretArn: string 98 | parameterPrefix: string 99 | passwordArn?: string 100 | providerServiceToken: string 101 | paramData: Record 102 | } 103 | ) { 104 | super(scope, id) 105 | 106 | // Create parameters for each key-value pair 107 | Object.entries(props.paramData).forEach(([key, value]) => { 108 | if (value !== undefined) { 109 | new ssm.StringParameter(this, `Parameter-${key}`, { 110 | parameterName: `${props.parameterPrefix}${key}`, 111 | stringValue: value.toString(), 112 | }) 113 | } 114 | }) 115 | 116 | // For password, use the existing provider to store it in SSM 117 | // Skip password parameter for IAM auth roles (no passwordArn) 118 | if (props.passwordArn) { 119 | const passwordParameterName = `${props.parameterPrefix}password` 120 | const password_parameter = new CustomResource(this, "PasswordParameter", { 121 | serviceToken: props.providerServiceToken, 122 | properties: { 123 | SecretArn: props.secretArn, 124 | Resource: RdsSqlResource.PARAMETER_PASSWORD, 125 | PasswordArn: props.passwordArn, 126 | ParameterName: passwordParameterName, 127 | }, 128 | }) 129 | password_parameter.node.addDependency(props.provider) 130 | 131 | const paramArn = `arn:aws:ssm:${Stack.of(this).region}:${ 132 | Stack.of(this).account 133 | }:parameter${ 134 | passwordParameterName.startsWith("/") ? "" : "/" 135 | }${passwordParameterName}` 136 | 137 | props.provider.handler.addToRolePolicy( 138 | new iam.PolicyStatement({ 139 | actions: ["ssm:PutParameter", "ssm:AddTagsToResource", "ssm:GetParameters"], 140 | resources: [paramArn], 141 | }) 142 | ) 143 | } 144 | } 145 | } 146 | 147 | export class Role extends Construct { 148 | /** 149 | * The role name. 150 | */ 151 | public readonly roleName: string 152 | 153 | /** 154 | * The generated secret containing connection information and password. 
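   * As a hypothetical usage sketch (a Lambda function `fn` is assumed),
   * the secret can be granted to consumers in the usual CDK way, e.g.
   * `role.secret?.grantRead(fn)`.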
155 | * 156 | * This is only available when: 157 | * - The provider is not a DSQL cluster (DSQL uses IAM authentication) 158 | * - `enableIamAuth` is not set to `true` 159 | * 160 | * When using IAM authentication, no secret is created as the password 161 | * is generated dynamically using IAM credentials. 162 | */ 163 | public readonly secret?: ISecret 164 | 165 | constructor(scope: Construct, id: string, props: RoleProps) { 166 | if (props.provider.engine !== DatabaseEngine.DSQL) { 167 | if (props.database && props.databaseName) { 168 | throw "Specify either database or databaseName" 169 | } 170 | if (!props.database && !props.databaseName) { 171 | // If neither is specified, we might need a default or throw an error depending on desired behavior. 172 | // For now, let's assume it's allowed but the secret won't have a dbname. 173 | // If it should be required, uncomment the line below: 174 | throw "Specify either database or databaseName" 175 | } 176 | } 177 | super(scope, id) 178 | 179 | // Skip secret creation for DSQL (always uses IAM auth) or when enableIamAuth is true 180 | const useIamAuth = 181 | props.enableIamAuth || props.provider.engine === DatabaseEngine.DSQL 182 | 183 | // For non-DSQL providers, we need cluster info for secrets and/or parameters 184 | if (props.provider.engine !== DatabaseEngine.DSQL) { 185 | // For imported providers without cluster details, provide helpful error message 186 | if (!props.provider.cluster) { 187 | throw new Error( 188 | "Role creation requires cluster information. When importing a provider with " + 189 | "Provider.fromProviderAttributes(), include the 'cluster' property if you plan " + 190 | "to create new roles. Alternatively, use existing roles created with the original provider." 191 | ) 192 | } 193 | 194 | // For RDS/Aurora clusters and instances, get endpoint details 195 | const host = (props.provider.cluster as IDatabaseCluster).clusterEndpoint 196 | ? (props.provider.cluster as IDatabaseCluster).clusterEndpoint.hostname 197 | : (props.provider.cluster as IDatabaseInstance).instanceEndpoint.hostname 198 | 199 | const port = (props.provider.cluster as IDatabaseCluster).clusterEndpoint 200 | ? (props.provider.cluster as IDatabaseCluster).clusterEndpoint.port 201 | : (props.provider.cluster as IDatabaseInstance).instanceEndpoint.port 202 | 203 | const identifier = (props.provider.cluster as IDatabaseCluster).clusterIdentifier 204 | ? (props.provider.cluster as IDatabaseCluster).clusterIdentifier 205 | : (props.provider.cluster as IDatabaseInstance).instanceIdentifier 206 | 207 | const databaseName = props.database 208 | ? 
props.database.databaseName 209 | : props.databaseName 210 | 211 | // Create secret only for password auth (not IAM auth) 212 | if (!useIamAuth) { 213 | const secretTemplate = { 214 | dbClusterIdentifier: identifier, 215 | engine: props.provider.engine, 216 | host: host, 217 | port: port, 218 | username: props.roleName, 219 | dbname: databaseName, 220 | } 221 | 222 | this.secret = new Secret(this, "Secret", { 223 | secretName: props.secretName, 224 | encryptionKey: props.encryptionKey, 225 | description: `Generated secret for ${props.provider.engine} role ${props.roleName}`, 226 | generateSecretString: { 227 | passwordLength: 30, // Oracle password cannot have more than 30 characters 228 | secretStringTemplate: JSON.stringify(secretTemplate), 229 | generateStringKey: "password", 230 | excludeCharacters: " %+~`#$&*()|[]{}:;<>?!'/@\"\\", 231 | }, 232 | removalPolicy: RemovalPolicy.DESTROY, 233 | }) 234 | } 235 | 236 | // Create Parameters if parameterPrefix is provided (for both password and IAM auth) 237 | if (props.parameterPrefix) { 238 | const paramData: Record = { 239 | dbClusterIdentifier: identifier, 240 | engine: props.provider.engine, 241 | host: host, 242 | port: port, 243 | username: props.roleName, 244 | } 245 | if (databaseName) { 246 | paramData.dbname = databaseName 247 | } 248 | 249 | new Parameters(this, "Parameters", { 250 | secretArn: props.provider.secret?.secretArn || "", 251 | parameterPrefix: props.parameterPrefix, 252 | passwordArn: this.secret?.secretArn, // undefined for IAM auth - skips password param 253 | providerServiceToken: props.provider.serviceToken, 254 | provider: props.provider, 255 | paramData, 256 | }) 257 | } 258 | } 259 | 260 | const role = new CustomResourceRole(this, "PostgresRole", { 261 | provider: props.provider, 262 | roleName: props.roleName, 263 | passwordArn: useIamAuth ? 
"" : this.secret!.secretArn, 264 | database: props.database, 265 | databaseName: props.databaseName, 266 | enableIamAuth: useIamAuth, 267 | }) 268 | 269 | if (this.secret) { 270 | role.node.addDependency(this.secret) 271 | this.secret.grantRead(props.provider.handler) 272 | if (this.secret.encryptionKey) { 273 | // It seems we need to grant explicit permission 274 | this.secret.encryptionKey.grantDecrypt(props.provider.handler) 275 | } 276 | } 277 | 278 | this.roleName = props.roleName 279 | } 280 | } 281 | -------------------------------------------------------------------------------- /lambda/engine.postgresql.ts: -------------------------------------------------------------------------------- 1 | import * as fs from "fs" 2 | import { ConnectionOptions } from "tls" 3 | import { format as pgFormat } from "node-pg-format" 4 | import { Client, ClientConfig } from "pg" 5 | import { AbstractEngine, EngineConnectionConfig } from "./engine.abstract" 6 | import { 7 | EngineDatabaseProperties, 8 | EngineRoleProperties, 9 | EngineSchemaProperties, 10 | EngineSqlProperties, 11 | } from "./types" 12 | 13 | export class PostgresqlEngine extends AbstractEngine { 14 | createDatabase(resourceId: string, props: EngineDatabaseProperties): string | string[] { 15 | const owner = props?.Owner 16 | if (owner) { 17 | return [ 18 | pgFormat("create database %I", resourceId), 19 | pgFormat("alter database %I owner to %I", resourceId, owner), 20 | ] 21 | } else { 22 | return pgFormat("create database %I", resourceId) 23 | } 24 | } 25 | 26 | updateDatabase( 27 | resourceId: string, 28 | oldResourceId: string, 29 | props: EngineDatabaseProperties 30 | ): string[] { 31 | const statements: string[] = [] 32 | if (resourceId !== oldResourceId) { 33 | if (props?.MasterOwner) { 34 | statements.push( 35 | pgFormat("alter database %I owner to %I", oldResourceId, props.MasterOwner) 36 | ) 37 | } 38 | statements.push( 39 | pgFormat("alter database %I rename to %I", oldResourceId, resourceId) 40 | ) 41 | } 42 | const owner = props?.Owner 43 | if (owner) { 44 | statements.push(pgFormat("alter database %I owner to %I", resourceId, props.Owner)) 45 | } 46 | return statements 47 | } 48 | 49 | deleteDatabase(resourceId: string, masterUser: string): string[] { 50 | return [ 51 | pgFormat( 52 | "select pg_terminate_backend(pg_stat_activity.pid) from pg_stat_activity where datname = %L", 53 | resourceId 54 | ), 55 | pgFormat( 56 | "DO $$BEGIN\nIF EXISTS (select from pg_database WHERE datname = '%s') THEN alter database %I owner to %I; END IF;\nEND$$;", 57 | resourceId, 58 | resourceId, 59 | masterUser 60 | ), 61 | pgFormat("drop database if exists %I", resourceId), 62 | ] 63 | } 64 | 65 | async createRole(resourceId: string, props: EngineRoleProperties): Promise { 66 | const sql = ["start transaction"] 67 | 68 | if (props?.EnableIamAuth) { 69 | // Create role for IAM authentication 70 | sql.push(pgFormat("create role %I with login", resourceId)) 71 | sql.push(pgFormat("grant rds_iam to %I", resourceId)) 72 | } else { 73 | // Create role with password authentication 74 | if (!props?.PasswordArn) throw "No PasswordArn provided" 75 | const password = await this.getPassword(props.PasswordArn) 76 | if (!password) { 77 | throw `Cannot parse password from ${props.PasswordArn}` 78 | } 79 | sql.push(pgFormat("create role %I with login password %L", resourceId, password)) 80 | } 81 | 82 | if (props?.DatabaseName) { 83 | sql.push( 84 | pgFormat( 85 | `DO $$ 86 | BEGIN 87 | IF EXISTS (select from pg_database where datname = '%s' and 
datistemplate = false) THEN 88 | grant connect on database %I to %I; 89 | END IF; 90 | END$$;`, 91 | props.DatabaseName, 92 | props.DatabaseName, 93 | resourceId 94 | ) 95 | ) 96 | } 97 | 98 | sql.push("commit") 99 | return sql 100 | } 101 | 102 | async updateRole( 103 | resourceId: string, 104 | oldResourceId: string, 105 | props: EngineRoleProperties, 106 | oldProps: EngineRoleProperties 107 | ): Promise { 108 | const sql = ["start transaction"] 109 | 110 | if (oldResourceId !== resourceId) { 111 | sql.push(pgFormat("alter role %I rename to %I", oldResourceId, resourceId)) 112 | } 113 | 114 | // Handle authentication method changes 115 | if (props?.EnableIamAuth && !oldProps?.EnableIamAuth) { 116 | // Switching from password to IAM auth 117 | sql.push(pgFormat("grant rds_iam to %I", resourceId)) 118 | } else if (!props?.EnableIamAuth && oldProps?.EnableIamAuth) { 119 | // Switching from IAM to password auth 120 | sql.push(pgFormat("revoke rds_iam from %I", resourceId)) 121 | if (props?.PasswordArn) { 122 | const password = await this.getPassword(props.PasswordArn) 123 | if (!password) { 124 | throw `Cannot parse password from ${props.PasswordArn}` 125 | } 126 | sql.push(pgFormat("alter role %I with password %L", resourceId, password)) 127 | } 128 | } else if (!props?.EnableIamAuth && props?.PasswordArn) { 129 | // Update password for password auth 130 | const password = await this.getPassword(props.PasswordArn) 131 | if (!password) { 132 | throw `Cannot parse password from ${props.PasswordArn}` 133 | } 134 | sql.push(pgFormat("alter role %I with password %L", resourceId, password)) 135 | } 136 | 137 | // Check if database name has changed 138 | if ( 139 | oldProps?.DatabaseName && 140 | props?.DatabaseName && 141 | oldProps.DatabaseName !== props.DatabaseName 142 | ) { 143 | // Revoke from old database 144 | sql.push( 145 | pgFormat( 146 | `DO $$ 147 | BEGIN 148 | IF EXISTS (select from pg_database where datname = '%s' and datistemplate = false) THEN 149 | revoke connect on database %I from %I; 150 | END IF; 151 | END$$;`, 152 | oldProps.DatabaseName, 153 | oldProps.DatabaseName, 154 | resourceId 155 | ) 156 | ) 157 | } 158 | 159 | if (props?.DatabaseName) { 160 | sql.push( 161 | pgFormat( 162 | `DO $$ 163 | BEGIN 164 | IF EXISTS (select from pg_database where datname = '%s' and datistemplate = false) THEN 165 | grant connect on database %I to %I; 166 | END IF; 167 | END$$;`, 168 | props.DatabaseName, 169 | props.DatabaseName, 170 | resourceId 171 | ) 172 | ) 173 | } 174 | 175 | sql.push("commit") 176 | return sql 177 | } 178 | 179 | async deleteRole(resourceId: string, props: EngineRoleProperties): Promise { 180 | return [ 181 | "start transaction", 182 | pgFormat( 183 | `DO $$ 184 | BEGIN 185 | IF EXISTS (select from pg_catalog.pg_roles WHERE rolname = '%s') AND EXISTS (select from pg_database WHERE datname = '%s') THEN 186 | revoke all privileges on database %I from %I; 187 | END IF; 188 | END$$;`, 189 | resourceId, 190 | props?.DatabaseName, 191 | props?.DatabaseName, 192 | resourceId 193 | ), 194 | pgFormat("drop role if exists %I", resourceId), 195 | "commit", 196 | ] 197 | } 198 | 199 | createSchema(resourceId: string, props: EngineSchemaProperties): string[] { 200 | const sql: string[] = [pgFormat("create schema if not exists %I", resourceId)] 201 | if (props?.RoleName) { 202 | this.grantRoleForSchema(resourceId, props.RoleName).forEach((stmt) => 203 | sql.push(stmt) 204 | ) 205 | } 206 | return sql 207 | } 208 | 209 | updateSchema( 210 | resourceId: string, 211 | oldResourceId: 
string, 212 | props: EngineSchemaProperties 213 | ): string[] { 214 | const sql: string[] = [] 215 | if (props?.RoleName) { 216 | this.revokeRoleFromSchema(oldResourceId, props.RoleName).forEach((stmt) => 217 | sql.push(stmt) 218 | ) 219 | } 220 | sql.push(pgFormat("alter schema %I rename to %I", oldResourceId, resourceId)) 221 | if (props?.RoleName) { 222 | this.grantRoleForSchema(resourceId, props.RoleName).forEach((stmt) => 223 | sql.push(stmt) 224 | ) 225 | } 226 | return sql 227 | } 228 | 229 | deleteSchema(resourceId: string, props: EngineSchemaProperties): string[] { 230 | const sql: string[] = [] 231 | if (props?.RoleName) { 232 | this.revokeRoleFromSchema(resourceId, props.RoleName).forEach((stmt) => 233 | sql.push(stmt) 234 | ) 235 | } 236 | sql.push(pgFormat("drop schema if exists %I cascade", resourceId)) 237 | return sql 238 | } 239 | 240 | createSql(_resourceId: string, props: EngineSqlProperties): string { 241 | return props?.Statement || "" 242 | } 243 | 244 | updateSql( 245 | _resourceId: string, 246 | _oldResourceId: string, 247 | props: EngineSqlProperties 248 | ): string { 249 | return props?.Statement || "" 250 | } 251 | 252 | deleteSql(_resourceId: string, props: EngineSqlProperties): string { 253 | return props?.Rollback || "" 254 | } 255 | 256 | private grantRoleForSchema(schema: string, roleName: string): string[] { 257 | return [ 258 | pgFormat("GRANT USAGE ON SCHEMA %I TO %I", schema, roleName), 259 | pgFormat("GRANT CREATE ON SCHEMA %I TO %I", schema, roleName), 260 | ] 261 | } 262 | 263 | private revokeRoleFromSchema(schema: string, roleName: string): string[] { 264 | return [ 265 | pgFormat("REVOKE CREATE ON SCHEMA %I FROM %I", schema, roleName), 266 | pgFormat("REVOKE ALL ON SCHEMA %I FROM %I", schema, roleName), 267 | ] 268 | } 269 | 270 | createIamGrant(_roleName: string, _iamArn: string): string | string[] { 271 | throw new Error("IAM grants are only supported with DSQL clusters") 272 | } 273 | 274 | updateIamGrant( 275 | _roleName: string, 276 | _oldRoleName: string, 277 | _iamArn: string, 278 | _oldIamArn: string 279 | ): string | string[] { 280 | throw new Error("IAM grants are only supported with DSQL clusters") 281 | } 282 | 283 | deleteIamGrant(_roleName: string, _iamArn: string): string | string[] { 284 | throw new Error("IAM grants are only supported with DSQL clusters") 285 | } 286 | 287 | async executeSQL(sql: string | string[], config: EngineConnectionConfig): Promise { 288 | const isSslEnabled = process.env.SSL ? JSON.parse(process.env.SSL) : true 289 | const ssl: ConnectionOptions | false = isSslEnabled 290 | ? { 291 | ca: fs.readFileSync(`${process.env.LAMBDA_TASK_ROOT}/global-bundle.pem`), 292 | rejectUnauthorized: true, 293 | } 294 | : false 295 | 296 | const params: ClientConfig = { 297 | host: config.host, 298 | port: config.port, 299 | user: config.user, 300 | password: config.password, 301 | database: config.database, 302 | connectionTimeoutMillis: Number(process.env.CONNECTION_TIMEOUT) ?? 30000, 303 | ssl, 304 | } 305 | 306 | this.log( 307 | `Connecting to PostgreSQL host ${params.host}:${params.port}${ 308 | ssl ? 
" using a secure connection" : "" 309 | }, database ${params.database} as ${params.user}` 310 | ) 311 | this.log("Executing SQL", sql) 312 | 313 | const pg_client = new Client(params) 314 | await pg_client.connect() 315 | try { 316 | if (typeof sql === "string") { 317 | await pg_client.query(sql) 318 | } else if (sql) { 319 | await Promise.all( 320 | sql.map((statement) => { 321 | return pg_client.query(statement) 322 | }) 323 | ) 324 | } 325 | } finally { 326 | await pg_client.end() 327 | } 328 | } 329 | } 330 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. 
For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 
203 | -------------------------------------------------------------------------------- /lambda/handler.postgresql.test.ts: -------------------------------------------------------------------------------- 1 | import { SecretsManagerClient } from "@aws-sdk/client-secrets-manager" 2 | import { Client, ClientConfig } from "pg" 3 | import { GenericContainer, StartedTestContainer } from "testcontainers" 4 | import { handler } from "./handler" 5 | import { 6 | createRequest, 7 | updateRequest, 8 | deleteRequest, 9 | schemaExists, 10 | roleExists, 11 | databaseExists, 12 | databaseOwnerIs, 13 | rowCount, 14 | roleGrantedForSchema, 15 | } from "./util" 16 | 17 | jest.mock("@aws-sdk/client-secrets-manager") 18 | const SecretsManagerClientMock = SecretsManagerClient as jest.MockedClass< 19 | typeof SecretsManagerClient 20 | > 21 | SecretsManagerClientMock.prototype.send.mockImplementation(() => { 22 | return { 23 | SecretString: JSON.stringify({ 24 | host: pgHost, 25 | port: pgPort, 26 | username: DB_MASTER_USERNAME, 27 | password: DB_MASTER_PASSWORD, 28 | dbname: DB_DEFAULT_DB, 29 | engine: "postgres", 30 | dbClusterIdentifier: "dummy", 31 | }), 32 | } 33 | }) 34 | 35 | const DB_PORT = 5432 36 | const DB_MASTER_USERNAME = "pgroot" 37 | const DB_MASTER_PASSWORD = "masterpwd" 38 | const DB_DEFAULT_DB = "dummy" 39 | 40 | let pgContainer: StartedTestContainer 41 | let pgHost: string 42 | let pgPort: number 43 | 44 | beforeAll(async () => { 45 | process.env.LOGGER = "true" 46 | process.env.SSL = "false" 47 | process.env.CONNECTION_TIMEOUT = "5000" 48 | pgContainer = await new GenericContainer("postgres:15") 49 | .withExposedPorts(DB_PORT) 50 | .withEnvironment({ 51 | POSTGRES_USER: DB_MASTER_USERNAME, 52 | POSTGRES_PASSWORD: DB_MASTER_PASSWORD, 53 | POSTGRES_DB: DB_DEFAULT_DB, 54 | }) 55 | .start() 56 | pgHost = pgContainer.getHost() 57 | pgPort = pgContainer.getMappedPort(DB_PORT) 58 | }, 60000) 59 | 60 | afterAll(async () => { 61 | if (pgContainer) { 62 | await pgContainer.stop() 63 | } 64 | }) 65 | 66 | beforeEach(async () => { 67 | jest.clearAllMocks() 68 | 69 | // Clean up databases, schemas, and roles created by tests 70 | const client = new Client({ 71 | host: pgHost, 72 | port: pgPort, 73 | database: DB_DEFAULT_DB, 74 | user: DB_MASTER_USERNAME, 75 | password: DB_MASTER_PASSWORD, 76 | }) 77 | 78 | await client.connect() 79 | 80 | try { 81 | // Drop all databases except system ones and default 82 | const databases = await client.query( 83 | "SELECT datname FROM pg_database WHERE datistemplate = false AND datname != $1", 84 | [DB_DEFAULT_DB] 85 | ) 86 | 87 | for (const db of databases.rows) { 88 | await client.query(`DROP DATABASE IF EXISTS "${db.datname}"`) 89 | } 90 | 91 | // Drop all schemas except system ones 92 | const schemas = await client.query( 93 | "SELECT schema_name FROM information_schema.schemata WHERE schema_name NOT IN ('information_schema', 'pg_catalog', 'pg_toast', 'public')" 94 | ) 95 | 96 | for (const schema of schemas.rows) { 97 | await client.query(`DROP SCHEMA IF EXISTS "${schema.schema_name}" CASCADE`) 98 | } 99 | 100 | // Drop all roles except system ones 101 | const roles = await client.query( 102 | "SELECT rolname FROM pg_roles WHERE rolname NOT LIKE 'pg_%' AND rolname != $1", 103 | [DB_MASTER_USERNAME] 104 | ) 105 | 106 | for (const role of roles.rows) { 107 | await client.query(`DROP ROLE IF EXISTS "${role.rolname}"`) 108 | } 109 | } finally { 110 | await client.end() 111 | } 112 | }) 113 | 114 | //jest.setTimeout(ms("15s")) 115 | 116 | test("schema", async () => { 117 
| const oldSchemaName = "test" 118 | const newSchemaName = "test2" 119 | const create = createRequest("schema", oldSchemaName) 120 | await handler(create) 121 | expect(SecretsManagerClientMock.prototype.send).toHaveBeenCalledTimes(1) 122 | 123 | const client = await newClient() 124 | try { 125 | expect(await schemaExists(client, oldSchemaName)).toEqual(true) 126 | const update = updateRequest("schema", oldSchemaName, newSchemaName) 127 | await handler(update) 128 | expect(await schemaExists(client, oldSchemaName)).toEqual(false) 129 | expect(await schemaExists(client, newSchemaName)).toEqual(true) 130 | 131 | // CloudFormation will send a delete afterward, so test that too 132 | const remove = deleteRequest("schema", newSchemaName) 133 | await handler(remove) 134 | expect(await schemaExists(client, newSchemaName)).toEqual(false) 135 | 136 | // create role for testing 137 | const roleName = "schematest" 138 | const createRole = createRequest("role", roleName, { 139 | PasswordArn: "arn:aws:secretsmanager:us-east-1:123456789:secret:dummy", 140 | DatabaseName: "postgres", 141 | }) 142 | await handler(createRole) 143 | 144 | const createWithRole = createRequest("schema", oldSchemaName, { 145 | RoleName: roleName, 146 | }) 147 | await handler(createWithRole) 148 | expect(await roleGrantedForSchema(client, oldSchemaName, roleName)).toEqual(true) 149 | const updateWithRole = updateRequest("schema", oldSchemaName, newSchemaName, { 150 | RoleName: roleName, 151 | }) 152 | await handler(updateWithRole) 153 | expect(await roleGrantedForSchema(client, oldSchemaName, roleName)).toEqual(false) 154 | expect(await roleGrantedForSchema(client, newSchemaName, roleName)).toEqual(true) 155 | const removeWithRole = deleteRequest("schema", newSchemaName, { 156 | RoleName: roleName, 157 | }) 158 | await handler(removeWithRole) 159 | expect(await roleGrantedForSchema(client, newSchemaName, roleName)).toEqual(false) 160 | 161 | const removeRole = deleteRequest("role", roleName, { 162 | DatabaseName: "postgres", 163 | }) 164 | await handler(removeRole) 165 | } finally { 166 | await client.end() 167 | } 168 | }) 169 | 170 | test("role with existing database", async () => { 171 | const oldRoleName = "example" 172 | const newRoleName = "example2" 173 | const create = createRequest("role", oldRoleName, { 174 | PasswordArn: "arn:aws:secretsmanager:us-east-1:123456789:secret:dummy", 175 | DatabaseName: "postgres", 176 | }) 177 | await handler(create) 178 | expect(SecretsManagerClientMock.prototype.send).toHaveBeenCalledTimes(2) 179 | const client = await newClient() 180 | try { 181 | expect(await roleExists(client, oldRoleName)).toEqual(true) 182 | 183 | // Attempt to connect as this role 184 | const client2 = await newClient({ 185 | user: oldRoleName, 186 | }) 187 | await client2.end() 188 | 189 | const update = updateRequest("role", oldRoleName, newRoleName, { 190 | PasswordArn: "arn:aws:secretsmanager:us-east-1:123456789:secret:dummy", 191 | DatabaseName: "postgres", 192 | }) 193 | await handler(update) 194 | expect(await roleExists(client, oldRoleName)).toEqual(false) 195 | expect(await roleExists(client, newRoleName)).toEqual(true) 196 | 197 | // CloudFormation will send a delete afterward as we change the 198 | // physical id, so test that too 199 | const remove = deleteRequest("role", oldRoleName, { 200 | DatabaseName: "postgres", 201 | }) 202 | await handler(remove) 203 | expect(await roleExists(client, oldRoleName)).toEqual(false) 204 | } finally { 205 | await client.end() 206 | } 207 | }) 208 | 209 | test("role 
without database", async () => { 210 | const roleName = "example" 211 | const create = createRequest("role", roleName, { 212 | PasswordArn: "arn:aws:secretsmanager:us-east-1:123456789:secret:dummy", 213 | }) 214 | await handler(create) 215 | expect(SecretsManagerClientMock.prototype.send).toHaveBeenCalledTimes(2) 216 | const client = await newClient() 217 | try { 218 | expect(await roleExists(client, roleName)).toEqual(true) 219 | } finally { 220 | await client.end() 221 | } 222 | }) 223 | 224 | test("change role password", async () => { 225 | const roleName = "example" 226 | const create = createRequest("role", roleName, { 227 | PasswordArn: "arn:aws:secretsmanager:us-east-1:123456789:secret:dummy", 228 | }) 229 | await handler(create) 230 | expect(SecretsManagerClientMock.prototype.send).toHaveBeenCalledTimes(2) 231 | const client = await newClient() 232 | try { 233 | expect(await roleExists(client, roleName)).toEqual(true) 234 | const update = updateRequest("role", roleName, roleName, { 235 | PasswordArn: "arn:aws:secretsmanager:us-east-1:123456789:secret:dummy", 236 | }) 237 | await handler(update) 238 | } finally { 239 | await client.end() 240 | } 241 | }) 242 | 243 | test("database", async () => { 244 | const oldDatabaseName = "mydb" 245 | const newDatabaseName = "mydb2" 246 | const create = createRequest("database", oldDatabaseName) 247 | await handler(create) 248 | expect(SecretsManagerClientMock.prototype.send).toHaveBeenCalledTimes(1) 249 | const client = await newClient() 250 | try { 251 | expect(await databaseExists(client, oldDatabaseName)).toEqual(true) 252 | expect(await databaseOwnerIs(client, oldDatabaseName, DB_MASTER_USERNAME)).toEqual( 253 | true 254 | ) 255 | const update = updateRequest("database", oldDatabaseName, newDatabaseName) 256 | await handler(update) 257 | expect(await databaseExists(client, oldDatabaseName)).toEqual(false) 258 | expect(await databaseExists(client, newDatabaseName)).toEqual(true) 259 | 260 | // CloudFormation will send a delete afterward, so test that too 261 | const remove = deleteRequest("database", oldDatabaseName) 262 | await handler(remove) 263 | expect(await databaseExists(client, oldDatabaseName)).toEqual(false) 264 | } finally { 265 | await client.end() 266 | } 267 | }) 268 | 269 | test("database with owner", async () => { 270 | const databaseName = "mydb" 271 | const roleName = "example" 272 | const create_role = createRequest("role", roleName, { 273 | PasswordArn: "arn:aws:secretsmanager:us-east-1:123456789:secret:dummy", 274 | DatabaseName: "mydb", // database does not exist yet 275 | }) 276 | await handler(create_role) 277 | const create_db = createRequest("database", databaseName, { Owner: "example" }) 278 | await handler(create_db) 279 | const client = await newClient() 280 | try { 281 | expect(await databaseExists(client, databaseName)).toEqual(true) 282 | expect(await databaseOwnerIs(client, databaseName, roleName)).toEqual(true) 283 | const create_table = createRequest("sql", "", { 284 | DatabaseName: databaseName, 285 | Statement: "create table t(i int)", 286 | }) 287 | await handler(create_table) 288 | 289 | // Verify we can login as owner 290 | { 291 | const client2 = await newClient({ 292 | user: roleName, 293 | database: databaseName, 294 | }) 295 | try { 296 | expect(await rowCount(client2, "t")).toEqual(0) 297 | } finally { 298 | await client2.end() 299 | } 300 | } 301 | 302 | const oldDatabaseName = "mydb" 303 | const newDatabaseName = "mydb2" 304 | const update = updateRequest("database", oldDatabaseName, 
newDatabaseName, { 305 | Owner: "example", 306 | }) 307 | await handler(update) 308 | expect(await databaseExists(client, oldDatabaseName)).toEqual(false) 309 | expect(await databaseExists(client, newDatabaseName)).toEqual(true) 310 | 311 | // Verify we can login as owner against renamed database 312 | { 313 | const client2 = await newClient({ 314 | user: roleName, 315 | database: newDatabaseName, 316 | }) 317 | try { 318 | expect(await rowCount(client2, "t")).toEqual(0) 319 | } finally { 320 | await client2.end() 321 | } 322 | } 323 | } finally { 324 | await client.end() 325 | } 326 | }) 327 | 328 | const newClient = async (config?: ClientConfig): Promise<Client> => { 329 | const client = new Client({ 330 | host: pgHost, 331 | port: pgPort, 332 | database: config && config.database ? config.database : DB_DEFAULT_DB, 333 | user: DB_MASTER_USERNAME, 334 | password: DB_MASTER_PASSWORD, 335 | }) 336 | await client.connect() 337 | return client 338 | } 339 | -------------------------------------------------------------------------------- /.projen/tasks.json: -------------------------------------------------------------------------------- 1 | { 2 | "tasks": { 3 | "build": { 4 | "name": "build", 5 | "description": "Full release build", 6 | "steps": [ 7 | { 8 | "spawn": "default" 9 | }, 10 | { 11 | "spawn": "pre-compile" 12 | }, 13 | { 14 | "spawn": "compile" 15 | }, 16 | { 17 | "spawn": "post-compile" 18 | }, 19 | { 20 | "spawn": "test" 21 | }, 22 | { 23 | "spawn": "package" 24 | } 25 | ] 26 | }, 27 | "build:handler": { 28 | "name": "build:handler", 29 | "description": "Transpile the Lambda handler to JavaScript", 30 | "steps": [ 31 | { 32 | "exec": "esbuild lambda/handler.ts --bundle --platform=node --target=node20 --external:aws-sdk --external:@aws-sdk/* --outfile=src/handler/handler.js" 33 | } 34 | ] 35 | }, 36 | "bump": { 37 | "name": "bump", 38 | "description": "Bumps version based on latest git tag and generates a changelog entry", 39 | "env": { 40 | "OUTFILE": "package.json", 41 | "CHANGELOG": "dist/changelog.md", 42 | "BUMPFILE": "dist/version.txt", 43 | "RELEASETAG": "dist/releasetag.txt", 44 | "RELEASE_TAG_PREFIX": "", 45 | "BUMP_PACKAGE": "commit-and-tag-version@^12" 46 | }, 47 | "steps": [ 48 | { 49 | "builtin": "release/bump-version" 50 | } 51 | ], 52 | "condition": "git log --oneline -1 | grep -qv \"chore(release):\"" 53 | }, 54 | "clobber": { 55 | "name": "clobber", 56 | "description": "hard resets to HEAD of origin and cleans the local repo", 57 | "env": { 58 | "BRANCH": "$(git branch --show-current)" 59 | }, 60 | "steps": [ 61 | { 62 | "exec": "git checkout -b scratch", 63 | "name": "save current HEAD in \"scratch\" branch" 64 | }, 65 | { 66 | "exec": "git checkout $BRANCH" 67 | }, 68 | { 69 | "exec": "git fetch origin", 70 | "name": "fetch latest changes from origin" 71 | }, 72 | { 73 | "exec": "git reset --hard origin/$BRANCH", 74 | "name": "hard reset to origin commit" 75 | }, 76 | { 77 | "exec": "git clean -fdx", 78 | "name": "clean all untracked files" 79 | }, 80 | { 81 | "say": "ready to rock!
(unpushed commits are under the \"scratch\" branch)" 82 | } 83 | ], 84 | "condition": "git diff --exit-code > /dev/null" 85 | }, 86 | "compat": { 87 | "name": "compat", 88 | "description": "Perform API compatibility check against latest version", 89 | "steps": [ 90 | { 91 | "exec": "jsii-diff npm:$(node -p \"require('./package.json').name\") -k --ignore-file .compatignore || (echo \"\nUNEXPECTED BREAKING CHANGES: add keys such as 'removed:constructs.Node.of' to .compatignore to skip.\n\" && exit 1)" 92 | } 93 | ] 94 | }, 95 | "compile": { 96 | "name": "compile", 97 | "description": "Only compile", 98 | "steps": [ 99 | { 100 | "exec": "jsii --silence-warnings=reserved-word" 101 | }, 102 | { 103 | "spawn": "copy:handler" 104 | } 105 | ] 106 | }, 107 | "copy:handler": { 108 | "name": "copy:handler", 109 | "description": "Copy transpiled handler into lib", 110 | "steps": [ 111 | { 112 | "exec": "cp src/handler/handler.js lib/handler/handler.js" 113 | } 114 | ] 115 | }, 116 | "default": { 117 | "name": "default", 118 | "description": "Synthesize project files", 119 | "steps": [ 120 | { 121 | "exec": "ts-node --project tsconfig.dev.json .projenrc.ts" 122 | } 123 | ] 124 | }, 125 | "eject": { 126 | "name": "eject", 127 | "description": "Remove projen from the project", 128 | "env": { 129 | "PROJEN_EJECTING": "true" 130 | }, 131 | "steps": [ 132 | { 133 | "spawn": "default" 134 | } 135 | ] 136 | }, 137 | "eslint": { 138 | "name": "eslint", 139 | "description": "Runs eslint against the codebase", 140 | "env": { 141 | "ESLINT_USE_FLAT_CONFIG": "false", 142 | "NODE_NO_WARNINGS": "1" 143 | }, 144 | "steps": [ 145 | { 146 | "exec": "eslint --ext .ts,.tsx --fix --no-error-on-unmatched-pattern $@ src lambda test build-tools projenrc .projenrc.ts", 147 | "receiveArgs": true 148 | } 149 | ] 150 | }, 151 | "install": { 152 | "name": "install", 153 | "description": "Install project dependencies and update lockfile (non-frozen)", 154 | "steps": [ 155 | { 156 | "exec": "npm install" 157 | } 158 | ] 159 | }, 160 | "install:ci": { 161 | "name": "install:ci", 162 | "description": "Install project dependencies using frozen lockfile", 163 | "steps": [ 164 | { 165 | "exec": "npm ci" 166 | } 167 | ] 168 | }, 169 | "integ:deploy:dsql": { 170 | "name": "integ:deploy:dsql", 171 | "description": "Deploy the DSQL integration test stack", 172 | "steps": [ 173 | { 174 | "exec": "npx cdk deploy TestRdsSqlDsqlStack --require-approval never" 175 | } 176 | ] 177 | }, 178 | "integ:deploy:mysql:serverless": { 179 | "name": "integ:deploy:mysql:serverless", 180 | "description": "Deploy the Aurora Serverless V2 integration test stack", 181 | "steps": [ 182 | { 183 | "exec": "npx cdk deploy TestRdsSqlServerlessV2Stack --context engine=mysql --require-approval never" 184 | } 185 | ] 186 | }, 187 | "integ:deploy:postgresql:serverless": { 188 | "name": "integ:deploy:postgresql:serverless", 189 | "description": "Deploy the Aurora Serverless V2 integration test stack", 190 | "steps": [ 191 | { 192 | "exec": "npx cdk deploy TestRdsSqlServerlessV2Stack --require-approval never" 193 | } 194 | ] 195 | }, 196 | "integ:destroy:dsql": { 197 | "name": "integ:destroy:dsql", 198 | "description": "Destroy the DSQL integration test stack", 199 | "steps": [ 200 | { 201 | "exec": "npx cdk destroy TestRdsSqlDsqlStack --force" 202 | } 203 | ] 204 | }, 205 | "integ:destroy:serverless": { 206 | "name": "integ:destroy:serverless", 207 | "description": "Destroy the Aurora Serverless V2 integration test stack", 208 | "steps": [ 209 | { 210 | "exec": "npx cdk 
destroy TestRdsSqlServerlessV2Stack --force" 211 | } 212 | ] 213 | }, 214 | "package": { 215 | "name": "package", 216 | "description": "Creates the distribution package", 217 | "steps": [ 218 | { 219 | "spawn": "package:js", 220 | "condition": "node -e \"if (!process.env.CI) process.exit(1)\"" 221 | }, 222 | { 223 | "spawn": "package-all", 224 | "condition": "node -e \"if (process.env.CI) process.exit(1)\"" 225 | } 226 | ] 227 | }, 228 | "package-all": { 229 | "name": "package-all", 230 | "description": "Packages artifacts for all target languages", 231 | "steps": [ 232 | { 233 | "spawn": "package:js" 234 | } 235 | ] 236 | }, 237 | "package:js": { 238 | "name": "package:js", 239 | "description": "Create js language bindings", 240 | "steps": [ 241 | { 242 | "exec": "jsii-pacmak -v --target js" 243 | } 244 | ] 245 | }, 246 | "post-compile": { 247 | "name": "post-compile", 248 | "description": "Runs after successful compilation" 249 | }, 250 | "post-upgrade": { 251 | "name": "post-upgrade", 252 | "description": "Runs after upgrading dependencies" 253 | }, 254 | "pre-compile": { 255 | "name": "pre-compile", 256 | "description": "Prepare the project for compilation", 257 | "steps": [ 258 | { 259 | "spawn": "build:handler" 260 | } 261 | ] 262 | }, 263 | "release": { 264 | "name": "release", 265 | "description": "Prepare a release from \"main\" branch", 266 | "env": { 267 | "RELEASE": "true", 268 | "MIN_MAJOR": "1" 269 | }, 270 | "steps": [ 271 | { 272 | "exec": "rm -fr dist" 273 | }, 274 | { 275 | "spawn": "bump" 276 | }, 277 | { 278 | "spawn": "build" 279 | }, 280 | { 281 | "spawn": "unbump" 282 | }, 283 | { 284 | "exec": "git diff --ignore-space-at-eol --exit-code" 285 | } 286 | ] 287 | }, 288 | "test": { 289 | "name": "test", 290 | "description": "Run tests", 291 | "steps": [ 292 | { 293 | "exec": "jest --passWithNoTests --updateSnapshot", 294 | "receiveArgs": true 295 | }, 296 | { 297 | "spawn": "eslint" 298 | } 299 | ] 300 | }, 301 | "test:watch": { 302 | "name": "test:watch", 303 | "description": "Run jest in watch mode", 304 | "steps": [ 305 | { 306 | "exec": "jest --watch" 307 | } 308 | ] 309 | }, 310 | "typecheck": { 311 | "name": "typecheck", 312 | "description": "Typecheck typescript", 313 | "steps": [ 314 | { 315 | "exec": "npx tsc --project tsconfig.dev.json --noEmit" 316 | } 317 | ] 318 | }, 319 | "unbump": { 320 | "name": "unbump", 321 | "description": "Restores version to 0.0.0", 322 | "env": { 323 | "OUTFILE": "package.json", 324 | "CHANGELOG": "dist/changelog.md", 325 | "BUMPFILE": "dist/version.txt", 326 | "RELEASETAG": "dist/releasetag.txt", 327 | "RELEASE_TAG_PREFIX": "", 328 | "BUMP_PACKAGE": "commit-and-tag-version@^12" 329 | }, 330 | "steps": [ 331 | { 332 | "builtin": "release/reset-version" 333 | } 334 | ] 335 | }, 336 | "upgrade": { 337 | "name": "upgrade", 338 | "description": "upgrade dependencies", 339 | "env": { 340 | "CI": "0" 341 | }, 342 | "steps": [ 343 | { 344 | "exec": "npx npm-check-updates@18 --upgrade --target=minor --peer --no-deprecated --dep=dev,peer,prod,optional --filter=@aws-sdk/client-secrets-manager,@aws-sdk/client-ssm,@aws-sdk/dsql-signer,esbuild,eslint-config-prettier,eslint-import-resolver-typescript,eslint-plugin-import,eslint-plugin-prettier,exponential-backoff,jsii-diff,jsii-pacmak,mysql2,node-pg-format,prettier,projen,source-map-support,ts-node,@types/aws-lambda" 345 | }, 346 | { 347 | "exec": "npm install" 348 | }, 349 | { 350 | "exec": "npm update @aws-sdk/client-secrets-manager @aws-sdk/client-ssm @aws-sdk/dsql-signer @types/jest 
@types/node @types/pg @typescript-eslint/eslint-plugin @typescript-eslint/parser commit-and-tag-version esbuild eslint-config-prettier eslint-import-resolver-typescript eslint-plugin-import eslint-plugin-prettier eslint exponential-backoff jest-junit jest jsii-diff jsii-pacmak jsii-rosetta jsii mysql2 node-pg-format pg prettier projen source-map-support testcontainers ts-jest ts-node typescript @types/aws-lambda aws-cdk-lib constructs" 351 | }, 352 | { 353 | "exec": "npx projen" 354 | }, 355 | { 356 | "spawn": "post-upgrade" 357 | } 358 | ] 359 | }, 360 | "watch": { 361 | "name": "watch", 362 | "description": "Watch & compile in the background", 363 | "steps": [ 364 | { 365 | "exec": "jsii -w --silence-warnings=reserved-word" 366 | } 367 | ] 368 | } 369 | }, 370 | "env": { 371 | "PATH": "$(npx -c \"node --print process.env.PATH\")" 372 | }, 373 | "//": "~~ Generated by projen. To modify, edit .projenrc.ts and run \"npx projen\"." 374 | } 375 | -------------------------------------------------------------------------------- /lambda/handler.mysql.test.ts: -------------------------------------------------------------------------------- 1 | import { SecretsManagerClient } from "@aws-sdk/client-secrets-manager" 2 | import { createConnection } from "mysql2/promise" 3 | import { GenericContainer, StartedTestContainer } from "testcontainers" 4 | import { handler } from "./handler" 5 | import { createRequest, updateRequest, deleteRequest } from "./util" 6 | 7 | jest.mock("@aws-sdk/client-secrets-manager") 8 | const SecretsManagerClientMock = SecretsManagerClient as jest.MockedClass< 9 | typeof SecretsManagerClient 10 | > 11 | 12 | const DB_PORT = 3306 13 | const DB_MASTER_USERNAME = "root" 14 | const DB_MASTER_PASSWORD = "masterpwd" 15 | const DB_DEFAULT_DB = "dummy" 16 | 17 | let mysqlContainer: StartedTestContainer 18 | let mysqlHost: string 19 | let mysqlPort: number 20 | 21 | beforeAll(async () => { 22 | process.env.LOGGER = "true" 23 | process.env.SSL = "false" 24 | process.env.CONNECTION_TIMEOUT = "5000" 25 | mysqlContainer = await new GenericContainer("mysql:8") 26 | .withExposedPorts(DB_PORT) 27 | .withEnvironment({ 28 | MYSQL_ROOT_PASSWORD: DB_MASTER_PASSWORD, 29 | MYSQL_DATABASE: DB_DEFAULT_DB, 30 | MYSQL_INITDB_SKIP_TZINFO: "true", 31 | }) 32 | .start() 33 | mysqlHost = mysqlContainer.getHost() 34 | mysqlPort = mysqlContainer.getMappedPort(DB_PORT) 35 | 36 | // Set up the mock after we have the host and port values 37 | SecretsManagerClientMock.prototype.send.mockImplementation(() => { 38 | return { 39 | SecretString: JSON.stringify({ 40 | host: mysqlHost, 41 | port: mysqlPort, 42 | username: DB_MASTER_USERNAME, 43 | password: DB_MASTER_PASSWORD, 44 | dbname: DB_DEFAULT_DB, 45 | engine: "mysql", 46 | dbClusterIdentifier: "dummy", 47 | }), 48 | } 49 | }) 50 | }, 60000) 51 | 52 | afterAll(async () => { 53 | if (mysqlContainer) { 54 | await mysqlContainer.stop() 55 | } 56 | }) 57 | 58 | beforeEach(async () => { 59 | jest.clearAllMocks() 60 | 61 | // Clean up databases and users created by tests 62 | const connection = await newConnection() 63 | 64 | try { 65 | // Drop all databases except system ones and default 66 | const [databases] = await connection.query( 67 | "SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME NOT IN ('information_schema', 'mysql', 'performance_schema', 'sys', ?)", 68 | [DB_DEFAULT_DB] 69 | ) 70 | 71 | for (const db of databases) { 72 | await connection.query(`DROP DATABASE IF EXISTS \`${db.SCHEMA_NAME}\``) 73 | } 74 | 75 | // Drop all users except 
system ones 76 | const [users] = await connection.query( 77 | "SELECT User FROM mysql.user WHERE User NOT IN ('mysql.sys', 'mysql.session', 'mysql.infoschema', 'root')" 78 | ) 79 | 80 | for (const user of users) { 81 | await connection.query(`DROP USER IF EXISTS '${user.User}'@'%'`) 82 | } 83 | } finally { 84 | await connection.end() 85 | } 86 | }) 87 | 88 | // Helper functions for MySQL tests 89 | async function databaseExists(connection: any, dbName: string): Promise<boolean> { 90 | const [rows] = await connection.query( 91 | "SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = ?", 92 | [dbName] 93 | ) 94 | return rows.length > 0 95 | } 96 | 97 | async function userExists(connection: any, username: string): Promise<boolean> { 98 | const [rows] = await connection.query("SELECT User FROM mysql.user WHERE User = ?", [ 99 | username, 100 | ]) 101 | return rows.length > 0 102 | } 103 | 104 | async function tableExists( 105 | connection: any, 106 | dbName: string, 107 | tableName: string 108 | ): Promise<boolean> { 109 | const [rows] = await connection.query( 110 | "SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ?", 111 | [dbName, tableName] 112 | ) 113 | return rows.length > 0 114 | } 115 | 116 | async function rowCount(connection: any, tableName: string): Promise<number> { 117 | const [rows] = await connection.query(`SELECT COUNT(*) as count FROM ${tableName}`) 118 | return rows[0].count 119 | } 120 | 121 | async function newConnection(config?: any): Promise<any> { 122 | const conn = await createConnection({ 123 | host: mysqlHost, 124 | port: mysqlPort, 125 | database: config && config.database ? config.database : DB_DEFAULT_DB, 126 | user: config && config.user ? config.user : DB_MASTER_USERNAME, 127 | password: config && config.password ? config.password : DB_MASTER_PASSWORD, 128 | }) 129 | return conn 130 | } 131 | 132 | test("database", async () => { 133 | const oldDatabaseName = "mydb" 134 | const newDatabaseName = "mydb2" 135 | const create = createRequest("database", oldDatabaseName) 136 | await handler(create) 137 | expect(SecretsManagerClientMock.prototype.send).toHaveBeenCalledTimes(1) 138 | const connection = await newConnection() 139 | try { 140 | expect(await databaseExists(connection, oldDatabaseName)).toEqual(true) 141 | 142 | const update = updateRequest("database", oldDatabaseName, newDatabaseName) 143 | await expect(handler(update)).rejects.toThrow( 144 | "Renaming database is not supported in MySQL."
145 | ) 146 | 147 | const remove = deleteRequest("database", oldDatabaseName) 148 | await handler(remove) 149 | expect(await databaseExists(connection, newDatabaseName)).toEqual(false) 150 | } finally { 151 | await connection.end() 152 | } 153 | }) 154 | 155 | test("database with owner", async () => { 156 | const databaseName = "mydb" 157 | const userName = "example" 158 | 159 | // First create user 160 | const create_role = createRequest("role", userName, { 161 | PasswordArn: "arn:aws:secretsmanager:us-east-1:123456789:secret:dummy", 162 | }) 163 | await handler(create_role) 164 | 165 | // Then create database with owner 166 | const create_db = createRequest("database", databaseName, { Owner: userName }) 167 | await handler(create_db) 168 | 169 | const connection = await newConnection() 170 | try { 171 | expect(await databaseExists(connection, databaseName)).toEqual(true) 172 | 173 | // Create a table in the new database 174 | const create_table = createRequest("sql", "", { 175 | DatabaseName: databaseName, 176 | Statement: "CREATE TABLE t(i INT)", 177 | }) 178 | await handler(create_table) 179 | 180 | // Verify we can login as owner and access the table 181 | const userConn = await newConnection({ 182 | user: userName, 183 | database: databaseName, 184 | }) 185 | 186 | try { 187 | expect(await tableExists(userConn, databaseName, "t")).toEqual(true) 188 | // Test we can insert data 189 | await userConn.query("INSERT INTO t VALUES (1), (2), (3)") 190 | expect(await rowCount(userConn, "t")).toEqual(3) 191 | } finally { 192 | await userConn.end() 193 | } 194 | 195 | // Test database rename 196 | const oldDatabaseName = databaseName 197 | const newDatabaseName = "mydb2" 198 | 199 | const update = updateRequest("database", oldDatabaseName, newDatabaseName, { 200 | Owner: userName, 201 | }) 202 | await expect(handler(update)).rejects.toThrow( 203 | "Renaming database is not supported in MySQL." 204 | ) 205 | } finally { 206 | await connection.end() 207 | } 208 | }) 209 | 210 | describe("User creation", () => { 211 | it("can create a user", async () => { 212 | const oldRoleName = "testuser" 213 | const newRoleName = "testuser2" 214 | 215 | // Create role. We must specify a database as in mysql you cannot 216 | // connect without a database. 
217 | const create = createRequest("role", oldRoleName, { 218 | PasswordArn: "arn:aws:secretsmanager:us-east-1:123456789:secret:dummy", 219 | DatabaseName: DB_DEFAULT_DB, 220 | }) 221 | await handler(create) 222 | expect(SecretsManagerClientMock.prototype.send).toHaveBeenCalledTimes(2) 223 | 224 | const connection = await newConnection() 225 | try { 226 | expect(await userExists(connection, oldRoleName)).toEqual(true) 227 | 228 | // Attempt to connect as this user 229 | const userConn = await newConnection({ 230 | user: oldRoleName, 231 | }) 232 | await userConn.end() 233 | 234 | // Update role - rename 235 | const update = updateRequest("role", oldRoleName, newRoleName, { 236 | PasswordArn: "arn:aws:secretsmanager:us-east-1:123456789:secret:dummy", 237 | }) 238 | await handler(update) 239 | 240 | expect(await userExists(connection, oldRoleName)).toEqual(false) 241 | expect(await userExists(connection, newRoleName)).toEqual(true) 242 | 243 | // Delete role 244 | const remove = deleteRequest("role", newRoleName) 245 | await handler(remove) 246 | expect(await userExists(connection, newRoleName)).toEqual(false) 247 | } finally { 248 | await connection.end() 249 | } 250 | }) 251 | 252 | it("can change a user's password", async () => { 253 | const userName = "pwduser" 254 | 255 | // Create user 256 | const create = createRequest("role", userName, { 257 | PasswordArn: "arn:aws:secretsmanager:us-east-1:123456789:secret:dummy", 258 | DatabaseName: DB_DEFAULT_DB, 259 | }) 260 | await handler(create) 261 | expect(SecretsManagerClientMock.prototype.send).toHaveBeenCalledTimes(2) 262 | 263 | const connection = await newConnection() 264 | try { 265 | expect(await userExists(connection, userName)).toEqual(true) 266 | 267 | // Test we can connect with the initial password 268 | const userConn = await newConnection({ 269 | user: userName, 270 | }) 271 | await userConn.end() 272 | 273 | // Update role - change password 274 | const update = updateRequest("role", userName, userName, { 275 | PasswordArn: "arn:aws:secretsmanager:us-east-1:123456789:secret:dummy", 276 | }) 277 | await handler(update) 278 | 279 | // Password changed but should still be able to connect 280 | const userConn2 = await newConnection({ 281 | user: userName, 282 | }) 283 | await userConn2.end() 284 | } finally { 285 | await connection.end() 286 | } 287 | }) 288 | }) 289 | 290 | test("sql execution", async () => { 291 | const databaseName = "sqltest" 292 | 293 | // Create database 294 | const create_db = createRequest("database", databaseName) 295 | await handler(create_db) 296 | 297 | // Execute SQL to create table 298 | const create_table = createRequest("sql", "", { 299 | DatabaseName: databaseName, 300 | Statement: "CREATE TABLE test_table (id INT, name VARCHAR(50))", 301 | }) 302 | await handler(create_table) 303 | 304 | // Execute multiple SQL statements 305 | const create_multiple_tables = createRequest("sql", "", { 306 | DatabaseName: databaseName, 307 | Statement: "CREATE TABLE t1 (id INT); CREATE TABLE t2 (id INT);", 308 | }) 309 | await handler(create_multiple_tables) 310 | 311 | const connection = await newConnection({ database: databaseName }) 312 | try { 313 | expect(await tableExists(connection, databaseName, "test_table")).toEqual(true) 314 | 315 | // Execute SQL to insert data 316 | const insert_data = createRequest("sql", "", { 317 | DatabaseName: databaseName, 318 | Statement: "INSERT INTO test_table VALUES (1, 'Test 1'), (2, 'Test 2')", 319 | }) 320 | await handler(insert_data) 321 | 322 | expect(await 
rowCount(connection, "test_table")).toEqual(2) 323 | 324 | // Execute SQL with rollback for deletion test 325 | const update_sql = updateRequest("sql", "dummy", "dummy", { 326 | DatabaseName: databaseName, 327 | Statement: "UPDATE test_table SET name = 'Updated' WHERE id = 1", 328 | Rollback: "UPDATE test_table SET name = 'Test 1' WHERE id = 1", 329 | }) 330 | await handler(update_sql) 331 | 332 | // Verify update happened 333 | const [rows] = await connection.query("SELECT name FROM test_table WHERE id = 1") 334 | expect(rows[0].name).toEqual("Updated") 335 | 336 | // Test rollback on delete 337 | const delete_sql = deleteRequest("sql", "dummy", { 338 | DatabaseName: databaseName, 339 | Rollback: "UPDATE test_table SET name = 'Rollback test' WHERE id = 1", 340 | }) 341 | await handler(delete_sql) 342 | 343 | // Verify rollback executed 344 | const [updated] = await connection.query("SELECT name FROM test_table WHERE id = 1") 345 | expect(updated[0].name).toEqual("Rollback test") 346 | } finally { 347 | await connection.end() 348 | } 349 | }) 350 | -------------------------------------------------------------------------------- /lambda/engine.dsql.ts: -------------------------------------------------------------------------------- 1 | import { DsqlSigner } from "@aws-sdk/dsql-signer" 2 | import { format as pgFormat } from "node-pg-format" 3 | import { Client, ClientConfig } from "pg" 4 | import { AbstractEngine, EngineConnectionConfig } from "./engine.abstract" 5 | import { 6 | EngineDatabaseProperties, 7 | EngineRoleProperties, 8 | EngineSchemaProperties, 9 | EngineSqlProperties, 10 | } from "./types" 11 | 12 | export class DsqlEngine extends AbstractEngine { 13 | createDatabase( 14 | _resourceId: string, 15 | _props: EngineDatabaseProperties 16 | ): string | string[] { 17 | throw new Error( 18 | "DSQL does not support creating databases. Database is always 'postgres'." 19 | ) 20 | } 21 | 22 | updateDatabase( 23 | _resourceId: string, 24 | _oldResourceId: string, 25 | _props: EngineDatabaseProperties 26 | ): string[] { 27 | throw new Error( 28 | "DSQL does not support database operations. Database is always 'postgres'." 29 | ) 30 | } 31 | 32 | deleteDatabase(_resourceId: string, _masterUser: string): string | string[] { 33 | throw new Error( 34 | "DSQL does not support deleting databases. Database is always 'postgres'." 
35 | ) 36 | } 37 | 38 | async createRole(resourceId: string, props: EngineRoleProperties): Promise<string[]> { 39 | return this.generateCreateRoleSql(resourceId, props?.DatabaseName) 40 | } 41 | 42 | async updateRole( 43 | resourceId: string, 44 | oldResourceId: string, 45 | props: EngineRoleProperties, 46 | oldProps: EngineRoleProperties 47 | ): Promise<string[]> { 48 | const sql: string[] = [] 49 | 50 | if (oldResourceId !== resourceId) { 51 | // DSQL doesn't support RENAME, so we create new role and drop old one 52 | // Create new role with old database permissions first 53 | sql.push(...this.generateCreateRoleSql(resourceId, oldProps?.DatabaseName)) 54 | 55 | // Add special marker for IAM grant revocation 56 | sql.push(`-- REVOKE_IAM_GRANTS_FOR_ROLE: ${oldResourceId}`) 57 | 58 | // Drop old role 59 | sql.push(...this.generateDropRoleSql(oldResourceId)) 60 | 61 | // If database changed as well, grant new database permissions 62 | if (props?.DatabaseName && props.DatabaseName !== oldProps?.DatabaseName) { 63 | sql.push("BEGIN") 64 | sql.push( 65 | pgFormat("GRANT CONNECT ON DATABASE %I TO %I", props.DatabaseName, resourceId) 66 | ) 67 | sql.push("COMMIT") 68 | } 69 | } else { 70 | // Only permissions are changing (no rename) 71 | sql.push("BEGIN") 72 | 73 | // Handle database permission changes 74 | if ( 75 | oldProps?.DatabaseName && 76 | props?.DatabaseName && 77 | oldProps.DatabaseName !== props.DatabaseName 78 | ) { 79 | // Revoke from old database and grant to new 80 | sql.push( 81 | pgFormat( 82 | "REVOKE CONNECT ON DATABASE %I FROM %I", 83 | oldProps.DatabaseName, 84 | resourceId 85 | ) 86 | ) 87 | sql.push( 88 | pgFormat("GRANT CONNECT ON DATABASE %I TO %I", props.DatabaseName, resourceId) 89 | ) 90 | } else if (props?.DatabaseName && !oldProps?.DatabaseName) { 91 | // Grant to new database 92 | sql.push( 93 | pgFormat("GRANT CONNECT ON DATABASE %I TO %I", props.DatabaseName, resourceId) 94 | ) 95 | } else if (!props?.DatabaseName && oldProps?.DatabaseName) { 96 | // Revoke from old database 97 | sql.push( 98 | pgFormat( 99 | "REVOKE CONNECT ON DATABASE %I FROM %I", 100 | oldProps.DatabaseName, 101 | resourceId 102 | ) 103 | ) 104 | } 105 | 106 | sql.push("COMMIT") 107 | } 108 | 109 | return sql 110 | } 111 | 112 | async deleteRole( 113 | resourceId: string, 114 | props: EngineRoleProperties 115 | ): Promise<string[]> { 116 | const sql: string[] = [] 117 | 118 | // Add special marker for IAM grant revocation 119 | sql.push(`-- REVOKE_IAM_GRANTS_FOR_ROLE: ${resourceId}`) 120 | 121 | // Add the regular drop role SQL 122 | sql.push(...this.generateDropRoleSql(resourceId, props?.DatabaseName)) 123 | 124 | return sql 125 | } 126 | 127 | createSchema(resourceId: string, props: EngineSchemaProperties): string | string[] { 128 | const sql: string[] = [pgFormat("CREATE SCHEMA IF NOT EXISTS %I", resourceId)] 129 | if (props?.RoleName) { 130 | this.grantRoleForSchema(resourceId, props.RoleName).forEach((stmt) => 131 | sql.push(stmt) 132 | ) 133 | } 134 | return sql 135 | } 136 | 137 | updateSchema( 138 | resourceId: string, 139 | oldResourceId: string, 140 | props: EngineSchemaProperties 141 | ): string | string[] { 142 | const statements: string[] = [] 143 | if (props?.RoleName) { 144 | this.revokeRoleFromSchema(oldResourceId, props.RoleName).forEach((stmt) => 145 | statements.push(stmt) 146 | ) 147 | } 148 | if (resourceId !== oldResourceId) { 149 | statements.push(pgFormat("ALTER SCHEMA %I RENAME TO %I", oldResourceId, resourceId)) 150 | } 151 | if (props?.RoleName) { 152 | this.grantRoleForSchema(resourceId,
props.RoleName).forEach((stmt) => 153 | statements.push(stmt) 154 | ) 155 | } 156 | return statements 157 | } 158 | 159 | deleteSchema(resourceId: string, props: EngineSchemaProperties): string | string[] { 160 | const statements: string[] = [] 161 | if (props?.RoleName) { 162 | this.revokeRoleFromSchema(resourceId, props.RoleName).forEach((stmt) => 163 | statements.push(stmt) 164 | ) 165 | } 166 | statements.push(pgFormat("DROP SCHEMA IF EXISTS %I CASCADE", resourceId)) 167 | return statements 168 | } 169 | 170 | createSql(_resourceId: string, props: EngineSqlProperties): string | string[] { 171 | if (!props.Statement) { 172 | throw new Error("Statement is required for SQL resource") 173 | } 174 | return props.Statement 175 | } 176 | 177 | updateSql( 178 | _resourceId: string, 179 | _oldResourceId: string, 180 | props: EngineSqlProperties 181 | ): string | string[] { 182 | if (!props.Statement) { 183 | throw new Error("Statement is required for SQL resource") 184 | } 185 | return props.Statement 186 | } 187 | 188 | deleteSql(_resourceId: string, props: EngineSqlProperties): string | string[] { 189 | if (props.Rollback) { 190 | return props.Rollback 191 | } 192 | return [] 193 | } 194 | 195 | createIamGrant(roleName: string, iamArn: string): string | string[] { 196 | if (!iamArn) { 197 | throw new Error("ResourceArn is required for IAM grant") 198 | } 199 | return pgFormat("AWS IAM GRANT %I TO %L", roleName, iamArn) 200 | } 201 | 202 | updateIamGrant( 203 | roleName: string, 204 | oldRoleName: string, 205 | iamArn: string, 206 | oldIamArn: string 207 | ): string | string[] { 208 | const statements: string[] = [] 209 | 210 | // Only update if role name or IAM role ARN changed 211 | const roleNameChanged = oldRoleName !== roleName 212 | const arnChanged = oldIamArn !== iamArn 213 | 214 | if (!roleNameChanged && !arnChanged) { 215 | return statements // No changes needed 216 | } 217 | 218 | // Revoke old grant if resource ARN or role name changed 219 | if (oldIamArn) { 220 | statements.push(pgFormat("AWS IAM REVOKE %I FROM %L", oldRoleName, oldIamArn)) 221 | } 222 | 223 | // Grant new permissions 224 | if (iamArn) { 225 | statements.push(pgFormat("AWS IAM GRANT %I TO %L", roleName, iamArn)) 226 | } 227 | 228 | return statements 229 | } 230 | 231 | deleteIamGrant(roleName: string, iamArn: string): string | string[] { 232 | if (!iamArn) { 233 | throw new Error("ResourceArn is required for IAM grant deletion") 234 | } 235 | return pgFormat("AWS IAM REVOKE %I FROM %L", roleName, iamArn) 236 | } 237 | 238 | async executeSQL(sql: string | string[], config: EngineConnectionConfig): Promise<any> { 239 | this.log("Connecting to DSQL cluster...") 240 | 241 | // For DSQL, we need to generate an IAM auth token 242 | const region = process.env.AWS_REGION 243 | if (!region) { 244 | throw new Error("AWS_REGION environment variable is required for DSQL") 245 | } 246 | 247 | const signer = new DsqlSigner({ 248 | hostname: config.host, 249 | region: region, 250 | }) 251 | const authToken = await signer.getDbConnectAdminAuthToken() 252 | 253 | const clientConfig: ClientConfig = { 254 | host: config.host, 255 | port: config.port, 256 | user: "admin", // DSQL always uses 'admin' user 257 | password: authToken, // Use IAM auth token as password 258 | database: "postgres", // DSQL always uses 'postgres' database 259 | ssl: { 260 | //ca: fs.readFileSync(`${process.env.LAMBDA_TASK_ROOT}/global-bundle.pem`), 261 | rejectUnauthorized: true, 262 | }, 263 | connectionTimeoutMillis: 30000, 264 | query_timeout: 30000, 265 |
//statement_timeout: 30000, // not supported on DSQL 266 | } 267 | 268 | const client = new Client(clientConfig) 269 | 270 | try { 271 | await client.connect() 272 | this.log("Connected to DSQL cluster") 273 | 274 | const statements = Array.isArray(sql) ? sql : [sql] 275 | const results = [] 276 | 277 | for (const statement of statements) { 278 | if (statement.trim()) { 279 | // Handle special IAM grant revocation marker 280 | if (statement.startsWith("-- REVOKE_IAM_GRANTS_FOR_ROLE:")) { 281 | const roleName = statement 282 | .replace("-- REVOKE_IAM_GRANTS_FOR_ROLE:", "") 283 | .trim() 284 | await this.revokeIamGrantsForRole(client, roleName) 285 | } else if (statement.includes("AWS IAM REVOKE")) { 286 | this.log(`Executing SQL: ${statement}`) 287 | try { 288 | const result = await client.query(statement) 289 | results.push(result) 290 | } catch (error: any) { 291 | if (error.message && error.message.includes("does not exist")) { 292 | this.log( 293 | `Ignoring error for IAM REVOKE (role doesn't exist): ${error.message}` 294 | ) 295 | } else { 296 | throw error 297 | } 298 | } 299 | } else { 300 | this.log(`Executing SQL: ${statement}`) 301 | const result = await client.query(statement) 302 | results.push(result) 303 | } 304 | } 305 | } 306 | 307 | this.log("SQL execution completed") 308 | return results.length === 1 ? results[0] : results 309 | } catch (error) { 310 | this.log(`Error executing SQL: ${error}`) 311 | throw error 312 | } finally { 313 | try { 314 | await client.end() 315 | this.log("Disconnected from DSQL cluster") 316 | } catch (error) { 317 | this.log(`Error disconnecting: ${error}`) 318 | } 319 | } 320 | } 321 | 322 | private grantRoleForSchema(schema: string, roleName: string): string[] { 323 | return [ 324 | pgFormat("GRANT USAGE ON SCHEMA %I TO %I", schema, roleName), 325 | pgFormat("GRANT CREATE ON SCHEMA %I TO %I", schema, roleName), 326 | ] 327 | } 328 | 329 | private revokeRoleFromSchema(schema: string, roleName: string): string[] { 330 | return [ 331 | pgFormat("REVOKE CREATE ON SCHEMA %I FROM %I", schema, roleName), 332 | pgFormat("REVOKE ALL ON SCHEMA %I FROM %I", schema, roleName), 333 | ] 334 | } 335 | 336 | private generateCreateRoleSql(roleName: string, databaseName?: string): string[] { 337 | const sql = ["BEGIN"] 338 | sql.push(pgFormat("CREATE ROLE %I WITH LOGIN", roleName)) 339 | 340 | if (databaseName) { 341 | sql.push(pgFormat("GRANT CONNECT ON DATABASE %I TO %I", databaseName, roleName)) 342 | } 343 | 344 | sql.push("COMMIT") 345 | return sql 346 | } 347 | 348 | private generateDropRoleSql(roleName: string, databaseName?: string): string[] { 349 | const sql = ["BEGIN"] 350 | 351 | if (databaseName) { 352 | sql.push( 353 | pgFormat("REVOKE ALL PRIVILEGES ON DATABASE %I FROM %I", databaseName, roleName) 354 | ) 355 | } 356 | 357 | sql.push(pgFormat("DROP ROLE IF EXISTS %I", roleName)) 358 | sql.push("COMMIT") 359 | return sql 360 | } 361 | 362 | private async revokeIamGrantsForRole(client: Client, roleName: string): Promise<void> { 363 | try { 364 | this.log(`Querying IAM grants for role: ${roleName}`) 365 | 366 | // Query the IAM role mappings for this role 367 | const result = await client.query( 368 | pgFormat( 369 | "SELECT arn FROM sys.iam_pg_role_mappings WHERE pg_role_name = %L", 370 | roleName 371 | ) 372 | ) 373 | 374 | if (result.rows && result.rows.length > 0) { 375 | this.log(`Found ${result.rows.length} IAM grants to revoke for role: ${roleName}`) 376 | 377 | // Revoke each IAM grant 378 | for (const row of result.rows) { 379 | const
revokeStatement = pgFormat("AWS IAM REVOKE %I FROM %L", roleName, row.arn) 380 | this.log(`Executing IAM revoke: ${revokeStatement}`) 381 | await client.query(revokeStatement) 382 | } 383 | } else { 384 | this.log(`No IAM grants found for role: ${roleName}`) 385 | } 386 | } catch (error) { 387 | this.log(`Error revoking IAM grants for role ${roleName}: ${error}`) 388 | // Don't throw here - the role might not have any IAM grants, which is fine 389 | } 390 | } 391 | } 392 | --------------------------------------------------------------------------------