├── .eslintrc.json ├── .gitattributes ├── .github ├── FUNDING.yml └── workflows │ ├── auto-approve.yml │ ├── build.yml │ ├── pull-request-lint.yml │ ├── release.yml │ ├── update-snapshot.yml │ └── upgrade-main.yml ├── .gitignore ├── .mergify.yml ├── .npmignore ├── .projen ├── deps.json ├── files.json └── tasks.json ├── .projenrc.js ├── API.md ├── LICENSE ├── README.md ├── benchmark ├── .gitignore ├── deployment.ts ├── function1 │ ├── index.py │ └── requirements.txt ├── function2 │ ├── index.py │ └── requirements.txt ├── function3 │ ├── index.py │ └── requirements.txt ├── function4 │ ├── index.py │ └── requirements.txt ├── function5 │ ├── index.py │ └── requirements.txt ├── turbo-5-app │ ├── app.ts │ └── cdk.json ├── turbo-app │ ├── app.ts │ └── cdk.json ├── vanilla-5-app │ ├── app.ts │ └── cdk.json └── vanilla-app │ ├── app.ts │ └── cdk.json ├── package.json ├── src ├── base.ts ├── cr.ts ├── credentials.ts ├── index.ts ├── java.ts ├── nodejs.ts ├── package-codebuild-function.ts ├── package-codebuild.lambda.ts ├── package-nodejs-function.ts ├── package-nodejs.lambda.ts ├── package-python-function.ts ├── package-python.lambda.py ├── package-ruby-function.ts ├── package-ruby.lambda.rb ├── python.ts └── ruby.ts ├── test ├── assets │ ├── bundler │ │ ├── Gemfile │ │ └── Gemfile.lock │ ├── maven │ │ ├── FunctionOne.jar │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ └── java │ │ │ └── helloworld │ │ │ └── App.java │ ├── npm │ │ ├── package-lock.json │ │ └── package.json │ ├── pipenv │ │ ├── Pipfile │ │ └── Pipfile.lock │ ├── poetry │ │ ├── poetry.lock │ │ └── pyproject.toml │ ├── requirements │ │ └── requirements.txt │ ├── ruby_handler │ │ └── index.rb │ └── yarn │ │ ├── package.json │ │ └── yarn.lock ├── default.integ.snapshot │ ├── Turbo-Layer-Test.assets.json │ └── Turbo-Layer-Test.template.json ├── default.integ.ts ├── nodejs.test.ts └── python.test.ts ├── tsconfig.dev.json └── yarn.lock /.eslintrc.json: 
-------------------------------------------------------------------------------- 1 | // ~~ Generated by projen. To modify, edit .projenrc.js and run "npx projen". 2 | { 3 | "env": { 4 | "jest": true, 5 | "node": true 6 | }, 7 | "root": true, 8 | "plugins": [ 9 | "@typescript-eslint", 10 | "import", 11 | "@stylistic" 12 | ], 13 | "parser": "@typescript-eslint/parser", 14 | "parserOptions": { 15 | "ecmaVersion": 2018, 16 | "sourceType": "module", 17 | "project": "./tsconfig.dev.json" 18 | }, 19 | "extends": [ 20 | "plugin:import/typescript" 21 | ], 22 | "settings": { 23 | "import/parsers": { 24 | "@typescript-eslint/parser": [ 25 | ".ts", 26 | ".tsx" 27 | ] 28 | }, 29 | "import/resolver": { 30 | "node": {}, 31 | "typescript": { 32 | "project": "./tsconfig.dev.json", 33 | "alwaysTryTypes": true 34 | } 35 | } 36 | }, 37 | "ignorePatterns": [ 38 | "*.js", 39 | "*.d.ts", 40 | "node_modules/", 41 | "*.generated.ts", 42 | "coverage", 43 | "!.projenrc.js" 44 | ], 45 | "rules": { 46 | "@stylistic/indent": [ 47 | "error", 48 | 2 49 | ], 50 | "@stylistic/quotes": [ 51 | "error", 52 | "single", 53 | { 54 | "avoidEscape": true 55 | } 56 | ], 57 | "@stylistic/comma-dangle": [ 58 | "error", 59 | "always-multiline" 60 | ], 61 | "@stylistic/comma-spacing": [ 62 | "error", 63 | { 64 | "before": false, 65 | "after": true 66 | } 67 | ], 68 | "@stylistic/no-multi-spaces": [ 69 | "error", 70 | { 71 | "ignoreEOLComments": false 72 | } 73 | ], 74 | "@stylistic/array-bracket-spacing": [ 75 | "error", 76 | "never" 77 | ], 78 | "@stylistic/array-bracket-newline": [ 79 | "error", 80 | "consistent" 81 | ], 82 | "@stylistic/object-curly-spacing": [ 83 | "error", 84 | "always" 85 | ], 86 | "@stylistic/object-curly-newline": [ 87 | "error", 88 | { 89 | "multiline": true, 90 | "consistent": true 91 | } 92 | ], 93 | "@stylistic/object-property-newline": [ 94 | "error", 95 | { 96 | "allowAllPropertiesOnSameLine": true 97 | } 98 | ], 99 | "@stylistic/keyword-spacing": [ 100 | "error" 101 | ], 102 | 
"@stylistic/brace-style": [ 103 | "error", 104 | "1tbs", 105 | { 106 | "allowSingleLine": true 107 | } 108 | ], 109 | "@stylistic/space-before-blocks": [ 110 | "error" 111 | ], 112 | "@stylistic/member-delimiter-style": [ 113 | "error" 114 | ], 115 | "@stylistic/semi": [ 116 | "error", 117 | "always" 118 | ], 119 | "@stylistic/max-len": [ 120 | "error", 121 | { 122 | "code": 150, 123 | "ignoreUrls": true, 124 | "ignoreStrings": true, 125 | "ignoreTemplateLiterals": true, 126 | "ignoreComments": true, 127 | "ignoreRegExpLiterals": true 128 | } 129 | ], 130 | "@stylistic/quote-props": [ 131 | "error", 132 | "consistent-as-needed" 133 | ], 134 | "@stylistic/key-spacing": [ 135 | "error" 136 | ], 137 | "@stylistic/no-multiple-empty-lines": [ 138 | "error" 139 | ], 140 | "@stylistic/no-trailing-spaces": [ 141 | "error" 142 | ], 143 | "curly": [ 144 | "error", 145 | "multi-line", 146 | "consistent" 147 | ], 148 | "@typescript-eslint/no-require-imports": "error", 149 | "import/no-extraneous-dependencies": [ 150 | "error", 151 | { 152 | "devDependencies": [ 153 | "**/test/**", 154 | "**/build-tools/**", 155 | "src/package-codebuild.lambda.ts", 156 | "src/package-nodejs.lambda.ts" 157 | ], 158 | "optionalDependencies": false, 159 | "peerDependencies": true 160 | } 161 | ], 162 | "import/no-unresolved": [ 163 | "error" 164 | ], 165 | "import/order": [ 166 | "warn", 167 | { 168 | "groups": [ 169 | "builtin", 170 | "external" 171 | ], 172 | "alphabetize": { 173 | "order": "asc", 174 | "caseInsensitive": true 175 | } 176 | } 177 | ], 178 | "import/no-duplicates": [ 179 | "error" 180 | ], 181 | "no-shadow": [ 182 | "off" 183 | ], 184 | "@typescript-eslint/no-shadow": "error", 185 | "@typescript-eslint/no-floating-promises": "error", 186 | "no-return-await": [ 187 | "off" 188 | ], 189 | "@typescript-eslint/return-await": "error", 190 | "dot-notation": [ 191 | "error" 192 | ], 193 | "no-bitwise": [ 194 | "error" 195 | ], 196 | "@typescript-eslint/member-ordering": [ 197 | "error", 
198 | { 199 | "default": [ 200 | "public-static-field", 201 | "public-static-method", 202 | "protected-static-field", 203 | "protected-static-method", 204 | "private-static-field", 205 | "private-static-method", 206 | "field", 207 | "constructor", 208 | "method" 209 | ] 210 | } 211 | ] 212 | }, 213 | "overrides": [ 214 | { 215 | "files": [ 216 | ".projenrc.js" 217 | ], 218 | "rules": { 219 | "@typescript-eslint/no-require-imports": "off", 220 | "import/no-extraneous-dependencies": "off" 221 | } 222 | } 223 | ] 224 | } 225 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | # ~~ Generated by projen. To modify, edit .projenrc.js and run "npx projen". 2 | 3 | * text=auto eol=lf 4 | *.js eol=lf 5 | *.json eol=lf 6 | *.sh eol=lf 7 | *.snap linguist-generated 8 | *.yml eol=lf 9 | /.eslintrc.json linguist-generated 10 | /.gitattributes linguist-generated 11 | /.github/workflows/auto-approve.yml linguist-generated 12 | /.github/workflows/build.yml linguist-generated 13 | /.github/workflows/pull-request-lint.yml linguist-generated 14 | /.github/workflows/release.yml linguist-generated 15 | /.github/workflows/upgrade-main.yml linguist-generated 16 | /.gitignore linguist-generated 17 | /.mergify.yml linguist-generated 18 | /.npmignore linguist-generated 19 | /.projen/** linguist-generated 20 | /.projen/deps.json linguist-generated 21 | /.projen/files.json linguist-generated 22 | /.projen/tasks.json linguist-generated 23 | /API.md linguist-generated 24 | /LICENSE linguist-generated 25 | /package.json linguist-generated 26 | /src/package-codebuild-function.ts linguist-generated 27 | /src/package-nodejs-function.ts linguist-generated 28 | /tsconfig.dev.json linguist-generated 29 | /yarn.lock linguist-generated 30 | Dockerfile eol=lf -------------------------------------------------------------------------------- /.github/FUNDING.yml: 
-------------------------------------------------------------------------------- 1 | github: CloudSnorkel 2 | -------------------------------------------------------------------------------- /.github/workflows/auto-approve.yml: -------------------------------------------------------------------------------- 1 | # ~~ Generated by projen. To modify, edit .projenrc.js and run "npx projen". 2 | 3 | name: auto-approve 4 | on: 5 | pull_request_target: 6 | types: 7 | - labeled 8 | - opened 9 | - synchronize 10 | - reopened 11 | - ready_for_review 12 | jobs: 13 | approve: 14 | runs-on: ubuntu-latest 15 | permissions: 16 | pull-requests: write 17 | if: contains(github.event.pull_request.labels.*.name, 'auto-approve') && (github.event.pull_request.user.login == 'kichik' || github.event.pull_request.user.login == 'CloudSnorkelBot') 18 | steps: 19 | - uses: hmarr/auto-approve-action@v2.2.1 20 | with: 21 | github-token: ${{ secrets.GITHUB_TOKEN }} 22 | -------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | # ~~ Generated by projen. To modify, edit .projenrc.js and run "npx projen". 
2 | 3 | name: build 4 | on: 5 | pull_request: {} 6 | workflow_dispatch: {} 7 | jobs: 8 | build: 9 | runs-on: ubuntu-latest 10 | permissions: 11 | contents: write 12 | outputs: 13 | self_mutation_happened: ${{ steps.self_mutation.outputs.self_mutation_happened }} 14 | env: 15 | CI: "true" 16 | steps: 17 | - name: Checkout 18 | uses: actions/checkout@v4 19 | with: 20 | ref: ${{ github.event.pull_request.head.ref }} 21 | repository: ${{ github.event.pull_request.head.repo.full_name }} 22 | - name: Setup Ruby 23 | run: sudo apt-get update ; sudo apt-get install -y ruby 24 | - name: Setup Node.js 25 | uses: actions/setup-node@v4 26 | with: 27 | node-version: lts/* 28 | - name: Install dependencies 29 | run: yarn install --check-files 30 | - name: build 31 | run: npx projen build 32 | - name: Find mutations 33 | id: self_mutation 34 | run: |- 35 | git add . 36 | git diff --staged --patch --exit-code > repo.patch || echo "self_mutation_happened=true" >> $GITHUB_OUTPUT 37 | working-directory: ./ 38 | - name: Upload patch 39 | if: steps.self_mutation.outputs.self_mutation_happened 40 | uses: actions/upload-artifact@v4.4.0 41 | with: 42 | name: repo.patch 43 | path: repo.patch 44 | overwrite: true 45 | - name: Fail build on mutation 46 | if: steps.self_mutation.outputs.self_mutation_happened 47 | run: |- 48 | echo "::error::Files were changed during build (see build log). If this was triggered from a fork, you will need to update your branch." 49 | cat repo.patch 50 | exit 1 51 | - name: Backup artifact permissions 52 | run: cd dist && getfacl -R . 
> permissions-backup.acl 53 | continue-on-error: true 54 | - name: Upload artifact 55 | uses: actions/upload-artifact@v4.4.0 56 | with: 57 | name: build-artifact 58 | path: dist 59 | overwrite: true 60 | self-mutation: 61 | needs: build 62 | runs-on: ubuntu-latest 63 | permissions: 64 | contents: write 65 | if: always() && needs.build.outputs.self_mutation_happened && !(github.event.pull_request.head.repo.full_name != github.repository) 66 | steps: 67 | - name: Checkout 68 | uses: actions/checkout@v4 69 | with: 70 | token: ${{ secrets.PROJEN_GITHUB_TOKEN }} 71 | ref: ${{ github.event.pull_request.head.ref }} 72 | repository: ${{ github.event.pull_request.head.repo.full_name }} 73 | - name: Download patch 74 | uses: actions/download-artifact@v4 75 | with: 76 | name: repo.patch 77 | path: ${{ runner.temp }} 78 | - name: Apply patch 79 | run: '[ -s ${{ runner.temp }}/repo.patch ] && git apply ${{ runner.temp }}/repo.patch || echo "Empty patch. Skipping."' 80 | - name: Set git identity 81 | run: |- 82 | git config user.name "github-actions" 83 | git config user.email "github-actions@github.com" 84 | - name: Push changes 85 | env: 86 | PULL_REQUEST_REF: ${{ github.event.pull_request.head.ref }} 87 | run: |- 88 | git add . 
89 | git commit -s -m "chore: self mutation" 90 | git push origin HEAD:$PULL_REQUEST_REF 91 | package-js: 92 | needs: build 93 | runs-on: ubuntu-latest 94 | permissions: 95 | contents: read 96 | if: ${{ !needs.build.outputs.self_mutation_happened }} 97 | steps: 98 | - uses: actions/setup-node@v4 99 | with: 100 | node-version: lts/* 101 | - name: Download build artifacts 102 | uses: actions/download-artifact@v4 103 | with: 104 | name: build-artifact 105 | path: dist 106 | - name: Restore build artifact permissions 107 | run: cd dist && setfacl --restore=permissions-backup.acl 108 | continue-on-error: true 109 | - name: Setup Ruby 110 | run: sudo apt-get update ; sudo apt-get install -y ruby 111 | - name: Checkout 112 | uses: actions/checkout@v4 113 | with: 114 | ref: ${{ github.event.pull_request.head.ref }} 115 | repository: ${{ github.event.pull_request.head.repo.full_name }} 116 | path: .repo 117 | - name: Install Dependencies 118 | run: cd .repo && yarn install --check-files --frozen-lockfile 119 | - name: Extract build artifact 120 | run: tar --strip-components=1 -xzvf dist/js/*.tgz -C .repo 121 | - name: Move build artifact out of the way 122 | run: mv dist dist.old 123 | - name: Create js artifact 124 | run: cd .repo && npx projen package:js 125 | - name: Collect js artifact 126 | run: mv .repo/dist dist 127 | package-java: 128 | needs: build 129 | runs-on: ubuntu-latest 130 | permissions: 131 | contents: read 132 | if: ${{ !needs.build.outputs.self_mutation_happened }} 133 | steps: 134 | - uses: actions/setup-java@v4 135 | with: 136 | distribution: corretto 137 | java-version: "11" 138 | - uses: actions/setup-node@v4 139 | with: 140 | node-version: lts/* 141 | - name: Download build artifacts 142 | uses: actions/download-artifact@v4 143 | with: 144 | name: build-artifact 145 | path: dist 146 | - name: Restore build artifact permissions 147 | run: cd dist && setfacl --restore=permissions-backup.acl 148 | continue-on-error: true 149 | - name: Setup Ruby 150 | 
run: sudo apt-get update ; sudo apt-get install -y ruby 151 | - name: Checkout 152 | uses: actions/checkout@v4 153 | with: 154 | ref: ${{ github.event.pull_request.head.ref }} 155 | repository: ${{ github.event.pull_request.head.repo.full_name }} 156 | path: .repo 157 | - name: Install Dependencies 158 | run: cd .repo && yarn install --check-files --frozen-lockfile 159 | - name: Extract build artifact 160 | run: tar --strip-components=1 -xzvf dist/js/*.tgz -C .repo 161 | - name: Move build artifact out of the way 162 | run: mv dist dist.old 163 | - name: Create java artifact 164 | run: cd .repo && npx projen package:java 165 | - name: Collect java artifact 166 | run: mv .repo/dist dist 167 | package-python: 168 | needs: build 169 | runs-on: ubuntu-latest 170 | permissions: 171 | contents: read 172 | if: ${{ !needs.build.outputs.self_mutation_happened }} 173 | steps: 174 | - uses: actions/setup-node@v4 175 | with: 176 | node-version: lts/* 177 | - uses: actions/setup-python@v5 178 | with: 179 | python-version: 3.x 180 | - name: Download build artifacts 181 | uses: actions/download-artifact@v4 182 | with: 183 | name: build-artifact 184 | path: dist 185 | - name: Restore build artifact permissions 186 | run: cd dist && setfacl --restore=permissions-backup.acl 187 | continue-on-error: true 188 | - name: Setup Ruby 189 | run: sudo apt-get update ; sudo apt-get install -y ruby 190 | - name: Checkout 191 | uses: actions/checkout@v4 192 | with: 193 | ref: ${{ github.event.pull_request.head.ref }} 194 | repository: ${{ github.event.pull_request.head.repo.full_name }} 195 | path: .repo 196 | - name: Install Dependencies 197 | run: cd .repo && yarn install --check-files --frozen-lockfile 198 | - name: Extract build artifact 199 | run: tar --strip-components=1 -xzvf dist/js/*.tgz -C .repo 200 | - name: Move build artifact out of the way 201 | run: mv dist dist.old 202 | - name: Create python artifact 203 | run: cd .repo && npx projen package:python 204 | - name: Collect python 
artifact 205 | run: mv .repo/dist dist 206 | package-dotnet: 207 | needs: build 208 | runs-on: ubuntu-latest 209 | permissions: 210 | contents: read 211 | if: ${{ !needs.build.outputs.self_mutation_happened }} 212 | steps: 213 | - uses: actions/setup-node@v4 214 | with: 215 | node-version: lts/* 216 | - uses: actions/setup-dotnet@v4 217 | with: 218 | dotnet-version: 6.x 219 | - name: Download build artifacts 220 | uses: actions/download-artifact@v4 221 | with: 222 | name: build-artifact 223 | path: dist 224 | - name: Restore build artifact permissions 225 | run: cd dist && setfacl --restore=permissions-backup.acl 226 | continue-on-error: true 227 | - name: Setup Ruby 228 | run: sudo apt-get update ; sudo apt-get install -y ruby 229 | - name: Checkout 230 | uses: actions/checkout@v4 231 | with: 232 | ref: ${{ github.event.pull_request.head.ref }} 233 | repository: ${{ github.event.pull_request.head.repo.full_name }} 234 | path: .repo 235 | - name: Install Dependencies 236 | run: cd .repo && yarn install --check-files --frozen-lockfile 237 | - name: Extract build artifact 238 | run: tar --strip-components=1 -xzvf dist/js/*.tgz -C .repo 239 | - name: Move build artifact out of the way 240 | run: mv dist dist.old 241 | - name: Create dotnet artifact 242 | run: cd .repo && npx projen package:dotnet 243 | - name: Collect dotnet artifact 244 | run: mv .repo/dist dist 245 | package-go: 246 | needs: build 247 | runs-on: ubuntu-latest 248 | permissions: 249 | contents: read 250 | if: ${{ !needs.build.outputs.self_mutation_happened }} 251 | steps: 252 | - uses: actions/setup-node@v4 253 | with: 254 | node-version: lts/* 255 | - uses: actions/setup-go@v5 256 | with: 257 | go-version: ^1.18.0 258 | - name: Download build artifacts 259 | uses: actions/download-artifact@v4 260 | with: 261 | name: build-artifact 262 | path: dist 263 | - name: Restore build artifact permissions 264 | run: cd dist && setfacl --restore=permissions-backup.acl 265 | continue-on-error: true 266 | - 
name: Setup Ruby 267 | run: sudo apt-get update ; sudo apt-get install -y ruby 268 | - name: Checkout 269 | uses: actions/checkout@v4 270 | with: 271 | ref: ${{ github.event.pull_request.head.ref }} 272 | repository: ${{ github.event.pull_request.head.repo.full_name }} 273 | path: .repo 274 | - name: Install Dependencies 275 | run: cd .repo && yarn install --check-files --frozen-lockfile 276 | - name: Extract build artifact 277 | run: tar --strip-components=1 -xzvf dist/js/*.tgz -C .repo 278 | - name: Move build artifact out of the way 279 | run: mv dist dist.old 280 | - name: Create go artifact 281 | run: cd .repo && npx projen package:go 282 | - name: Collect go artifact 283 | run: mv .repo/dist dist 284 | -------------------------------------------------------------------------------- /.github/workflows/pull-request-lint.yml: -------------------------------------------------------------------------------- 1 | # ~~ Generated by projen. To modify, edit .projenrc.js and run "npx projen". 2 | 3 | name: pull-request-lint 4 | on: 5 | pull_request_target: 6 | types: 7 | - labeled 8 | - opened 9 | - synchronize 10 | - reopened 11 | - ready_for_review 12 | - edited 13 | merge_group: {} 14 | jobs: 15 | validate: 16 | name: Validate PR title 17 | runs-on: ubuntu-latest 18 | permissions: 19 | pull-requests: write 20 | if: (github.event_name == 'pull_request' || github.event_name == 'pull_request_target') 21 | steps: 22 | - uses: amannn/action-semantic-pull-request@v5.4.0 23 | env: 24 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 25 | with: 26 | types: |- 27 | feat 28 | fix 29 | chore 30 | requireScope: false 31 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | # ~~ Generated by projen. To modify, edit .projenrc.js and run "npx projen". 
2 | 3 | name: release 4 | on: 5 | workflow_dispatch: {} 6 | concurrency: 7 | group: ${{ github.workflow }} 8 | cancel-in-progress: false 9 | jobs: 10 | release: 11 | runs-on: ubuntu-latest 12 | permissions: 13 | contents: write 14 | outputs: 15 | latest_commit: ${{ steps.git_remote.outputs.latest_commit }} 16 | tag_exists: ${{ steps.check_tag_exists.outputs.exists }} 17 | env: 18 | CI: "true" 19 | steps: 20 | - name: Checkout 21 | uses: actions/checkout@v4 22 | with: 23 | fetch-depth: 0 24 | - name: Set git identity 25 | run: |- 26 | git config user.name "github-actions" 27 | git config user.email "github-actions@github.com" 28 | - name: Setup Ruby 29 | run: sudo apt-get update ; sudo apt-get install -y ruby 30 | - name: Setup Node.js 31 | uses: actions/setup-node@v4 32 | with: 33 | node-version: lts/* 34 | - name: Install dependencies 35 | run: yarn install --check-files --frozen-lockfile 36 | - name: release 37 | run: npx projen release 38 | - name: Check if version has already been tagged 39 | id: check_tag_exists 40 | run: |- 41 | TAG=$(cat dist/releasetag.txt) 42 | ([ ! -z "$TAG" ] && git ls-remote -q --exit-code --tags origin $TAG && (echo "exists=true" >> $GITHUB_OUTPUT)) || (echo "exists=false" >> $GITHUB_OUTPUT) 43 | cat $GITHUB_OUTPUT 44 | - name: Check for new commits 45 | id: git_remote 46 | run: |- 47 | echo "latest_commit=$(git ls-remote origin -h ${{ github.ref }} | cut -f1)" >> $GITHUB_OUTPUT 48 | cat $GITHUB_OUTPUT 49 | - name: Backup artifact permissions 50 | if: ${{ steps.git_remote.outputs.latest_commit == github.sha }} 51 | run: cd dist && getfacl -R . 
> permissions-backup.acl 52 | continue-on-error: true 53 | - name: Upload artifact 54 | if: ${{ steps.git_remote.outputs.latest_commit == github.sha }} 55 | uses: actions/upload-artifact@v4.4.0 56 | with: 57 | name: build-artifact 58 | path: dist 59 | overwrite: true 60 | release_github: 61 | name: Publish to GitHub Releases 62 | needs: 63 | - release 64 | - release_npm 65 | - release_maven 66 | - release_pypi 67 | - release_nuget 68 | - release_golang 69 | runs-on: ubuntu-latest 70 | permissions: 71 | contents: write 72 | if: needs.release.outputs.tag_exists != 'true' && needs.release.outputs.latest_commit == github.sha 73 | steps: 74 | - uses: actions/setup-node@v4 75 | with: 76 | node-version: lts/* 77 | - name: Download build artifacts 78 | uses: actions/download-artifact@v4 79 | with: 80 | name: build-artifact 81 | path: dist 82 | - name: Restore build artifact permissions 83 | run: cd dist && setfacl --restore=permissions-backup.acl 84 | continue-on-error: true 85 | - name: Release 86 | env: 87 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 88 | run: errout=$(mktemp); gh release create $(cat dist/releasetag.txt) -R $GITHUB_REPOSITORY -F dist/changelog.md -t $(cat dist/releasetag.txt) --target $GITHUB_SHA 2> $errout && true; exitcode=$?; if [ $exitcode -ne 0 ] && ! 
grep -q "Release.tag_name already exists" $errout; then cat $errout; exit $exitcode; fi 89 | release_npm: 90 | name: Publish to npm 91 | needs: release 92 | runs-on: ubuntu-latest 93 | permissions: 94 | contents: read 95 | if: needs.release.outputs.tag_exists != 'true' && needs.release.outputs.latest_commit == github.sha 96 | steps: 97 | - uses: actions/setup-node@v4 98 | with: 99 | node-version: lts/* 100 | - name: Download build artifacts 101 | uses: actions/download-artifact@v4 102 | with: 103 | name: build-artifact 104 | path: dist 105 | - name: Restore build artifact permissions 106 | run: cd dist && setfacl --restore=permissions-backup.acl 107 | continue-on-error: true 108 | - name: Setup Ruby 109 | run: sudo apt-get update ; sudo apt-get install -y ruby 110 | - name: Checkout 111 | uses: actions/checkout@v4 112 | with: 113 | path: .repo 114 | - name: Install Dependencies 115 | run: cd .repo && yarn install --check-files --frozen-lockfile 116 | - name: Extract build artifact 117 | run: tar --strip-components=1 -xzvf dist/js/*.tgz -C .repo 118 | - name: Move build artifact out of the way 119 | run: mv dist dist.old 120 | - name: Create js artifact 121 | run: cd .repo && npx projen package:js 122 | - name: Collect js artifact 123 | run: mv .repo/dist dist 124 | - name: Release 125 | env: 126 | NPM_DIST_TAG: latest 127 | NPM_REGISTRY: registry.npmjs.org 128 | NPM_TOKEN: ${{ secrets.NPM_TOKEN }} 129 | run: npx -p publib@latest publib-npm 130 | release_maven: 131 | name: Publish to Maven Central 132 | needs: release 133 | runs-on: ubuntu-latest 134 | permissions: 135 | contents: read 136 | if: needs.release.outputs.tag_exists != 'true' && needs.release.outputs.latest_commit == github.sha 137 | steps: 138 | - uses: actions/setup-java@v4 139 | with: 140 | distribution: corretto 141 | java-version: "11" 142 | - uses: actions/setup-node@v4 143 | with: 144 | node-version: lts/* 145 | - name: Download build artifacts 146 | uses: actions/download-artifact@v4 147 | with: 
148 | name: build-artifact 149 | path: dist 150 | - name: Restore build artifact permissions 151 | run: cd dist && setfacl --restore=permissions-backup.acl 152 | continue-on-error: true 153 | - name: Setup Ruby 154 | run: sudo apt-get update ; sudo apt-get install -y ruby 155 | - name: Checkout 156 | uses: actions/checkout@v4 157 | with: 158 | path: .repo 159 | - name: Install Dependencies 160 | run: cd .repo && yarn install --check-files --frozen-lockfile 161 | - name: Extract build artifact 162 | run: tar --strip-components=1 -xzvf dist/js/*.tgz -C .repo 163 | - name: Move build artifact out of the way 164 | run: mv dist dist.old 165 | - name: Create java artifact 166 | run: cd .repo && npx projen package:java 167 | - name: Collect java artifact 168 | run: mv .repo/dist dist 169 | - name: Release 170 | env: 171 | MAVEN_ENDPOINT: https://s01.oss.sonatype.org 172 | MAVEN_GPG_PRIVATE_KEY: ${{ secrets.MAVEN_GPG_PRIVATE_KEY }} 173 | MAVEN_GPG_PRIVATE_KEY_PASSPHRASE: ${{ secrets.MAVEN_GPG_PRIVATE_KEY_PASSPHRASE }} 174 | MAVEN_PASSWORD: ${{ secrets.MAVEN_PASSWORD }} 175 | MAVEN_USERNAME: ${{ secrets.MAVEN_USERNAME }} 176 | MAVEN_STAGING_PROFILE_ID: ${{ secrets.MAVEN_STAGING_PROFILE_ID }} 177 | run: npx -p publib@latest publib-maven 178 | release_pypi: 179 | name: Publish to PyPI 180 | needs: release 181 | runs-on: ubuntu-latest 182 | permissions: 183 | contents: read 184 | if: needs.release.outputs.tag_exists != 'true' && needs.release.outputs.latest_commit == github.sha 185 | steps: 186 | - uses: actions/setup-node@v4 187 | with: 188 | node-version: lts/* 189 | - uses: actions/setup-python@v5 190 | with: 191 | python-version: 3.x 192 | - name: Download build artifacts 193 | uses: actions/download-artifact@v4 194 | with: 195 | name: build-artifact 196 | path: dist 197 | - name: Restore build artifact permissions 198 | run: cd dist && setfacl --restore=permissions-backup.acl 199 | continue-on-error: true 200 | - name: Setup Ruby 201 | run: sudo apt-get update ; sudo 
apt-get install -y ruby 202 | - name: Checkout 203 | uses: actions/checkout@v4 204 | with: 205 | path: .repo 206 | - name: Install Dependencies 207 | run: cd .repo && yarn install --check-files --frozen-lockfile 208 | - name: Extract build artifact 209 | run: tar --strip-components=1 -xzvf dist/js/*.tgz -C .repo 210 | - name: Move build artifact out of the way 211 | run: mv dist dist.old 212 | - name: Create python artifact 213 | run: cd .repo && npx projen package:python 214 | - name: Collect python artifact 215 | run: mv .repo/dist dist 216 | - name: Release 217 | env: 218 | TWINE_USERNAME: ${{ secrets.TWINE_USERNAME }} 219 | TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }} 220 | run: npx -p publib@latest publib-pypi 221 | release_nuget: 222 | name: Publish to NuGet Gallery 223 | needs: release 224 | runs-on: ubuntu-latest 225 | permissions: 226 | contents: read 227 | if: needs.release.outputs.tag_exists != 'true' && needs.release.outputs.latest_commit == github.sha 228 | steps: 229 | - uses: actions/setup-node@v4 230 | with: 231 | node-version: lts/* 232 | - uses: actions/setup-dotnet@v4 233 | with: 234 | dotnet-version: 6.x 235 | - name: Download build artifacts 236 | uses: actions/download-artifact@v4 237 | with: 238 | name: build-artifact 239 | path: dist 240 | - name: Restore build artifact permissions 241 | run: cd dist && setfacl --restore=permissions-backup.acl 242 | continue-on-error: true 243 | - name: Setup Ruby 244 | run: sudo apt-get update ; sudo apt-get install -y ruby 245 | - name: Checkout 246 | uses: actions/checkout@v4 247 | with: 248 | path: .repo 249 | - name: Install Dependencies 250 | run: cd .repo && yarn install --check-files --frozen-lockfile 251 | - name: Extract build artifact 252 | run: tar --strip-components=1 -xzvf dist/js/*.tgz -C .repo 253 | - name: Move build artifact out of the way 254 | run: mv dist dist.old 255 | - name: Create dotnet artifact 256 | run: cd .repo && npx projen package:dotnet 257 | - name: Collect dotnet artifact 
258 | run: mv .repo/dist dist 259 | - name: Release 260 | env: 261 | NUGET_API_KEY: ${{ secrets.NUGET_API_KEY }} 262 | run: npx -p publib@latest publib-nuget 263 | release_golang: 264 | name: Publish to GitHub Go Module Repository 265 | needs: release 266 | runs-on: ubuntu-latest 267 | permissions: 268 | contents: read 269 | if: needs.release.outputs.tag_exists != 'true' && needs.release.outputs.latest_commit == github.sha 270 | steps: 271 | - uses: actions/setup-node@v4 272 | with: 273 | node-version: lts/* 274 | - uses: actions/setup-go@v5 275 | with: 276 | go-version: ^1.18.0 277 | - name: Download build artifacts 278 | uses: actions/download-artifact@v4 279 | with: 280 | name: build-artifact 281 | path: dist 282 | - name: Restore build artifact permissions 283 | run: cd dist && setfacl --restore=permissions-backup.acl 284 | continue-on-error: true 285 | - name: Setup Ruby 286 | run: sudo apt-get update ; sudo apt-get install -y ruby 287 | - name: Checkout 288 | uses: actions/checkout@v4 289 | with: 290 | path: .repo 291 | - name: Install Dependencies 292 | run: cd .repo && yarn install --check-files --frozen-lockfile 293 | - name: Extract build artifact 294 | run: tar --strip-components=1 -xzvf dist/js/*.tgz -C .repo 295 | - name: Move build artifact out of the way 296 | run: mv dist dist.old 297 | - name: Create go artifact 298 | run: cd .repo && npx projen package:go 299 | - name: Collect go artifact 300 | run: mv .repo/dist dist 301 | - name: Release 302 | env: 303 | GIT_USER_NAME: github-actions 304 | GIT_USER_EMAIL: github-actions@github.com 305 | GITHUB_TOKEN: ${{ secrets.GO_GITHUB_TOKEN }} 306 | run: npx -p publib@latest publib-golang 307 | -------------------------------------------------------------------------------- /.github/workflows/update-snapshot.yml: -------------------------------------------------------------------------------- 1 | name: update-snapshot 2 | 3 | on: 4 | workflow_run: 5 | workflows: [build] 6 | types: [completed] 7 | 8 | jobs: 9 
| on-failure: 10 | runs-on: ubuntu-latest 11 | container: 12 | image: jsii/superchain:1-bookworm-slim-node22 13 | if: ${{ github.event.workflow_run.conclusion == 'failure' }} 14 | env: 15 | CI: "true" 16 | steps: 17 | - name: Checkout 18 | uses: actions/checkout@v4 19 | with: 20 | ref: main 21 | fetch-depth: 0 22 | - name: Setup Ruby 23 | run: sudo apt-get update ; sudo apt-get install -y ruby 24 | - name: Install dependencies 25 | run: yarn install --check-files --frozen-lockfile 26 | - name: Snapshot main 27 | run: | 28 | npm run bundle 29 | npm run integ:default:snapshot 30 | - name: Switch to branch 31 | env: 32 | BRANCH: ${{ github.event.workflow_run.head_branch }} 33 | run: git checkout "$BRANCH" 34 | - name: Install dependencies 35 | run: yarn install --check-files --frozen-lockfile 36 | - name: Snapshot branch 37 | run: | 38 | npm run bundle 39 | npm run integ:default:snapshot 40 | - name: Find mutations 41 | id: create_patch 42 | run: |- 43 | git add . 44 | git diff --staged --patch --exit-code || echo "patch_created=true" >> $GITHUB_OUTPUT 45 | # upload snapshot (including assets from both main and branch) for easy diffing 46 | - name: Diff 47 | if: steps.create_patch.outputs.patch_created 48 | working-directory: test/default.integ.snapshot 49 | continue-on-error: true 50 | run: |- 51 | git diff --staged -U0 | grep '"path":' | cut -d '"' -f 4 | xargs -rL 2 diff -ruN > /tmp/assets.diff 52 | - name: Upload assets.diff 53 | if: steps.create_patch.outputs.patch_created 54 | uses: actions/upload-artifact@v4 55 | with: 56 | name: assets.diff 57 | path: /tmp/assets.diff 58 | - name: Upload snapshot 59 | if: steps.create_patch.outputs.patch_created 60 | uses: actions/upload-artifact@v4 61 | with: 62 | name: snapshot 63 | path: test/default.integ.snapshot 64 | - name: Set git identity 65 | if: steps.create_patch.outputs.patch_created 66 | run: |- 67 | git config user.name "github-actions" 68 | git config user.email "github-actions@github.com" 69 | # create a PR 
against the dependencies update PR for a proper snapshot 70 | - name: Create Pull Request 71 | if: steps.create_patch.outputs.patch_created 72 | uses: peter-evans/create-pull-request@v6 73 | with: 74 | token: ${{ secrets.PROJEN_GITHUB_TOKEN }} 75 | commit-message: |- 76 | chore(deps): update snapshot 77 | 78 | Update snapshot. See details in [workflow run]. 79 | 80 | [Workflow Run]: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }} 81 | 82 | ------ 83 | 84 | *Automatically created by projen via the "upgrade-snapshot" workflow* 85 | branch: ${{ github.event.workflow_run.head_branch }}-upgrade-snapshot 86 | title: "chore(deps): update snapshot" 87 | body: |- 88 | Update snapshot. See details in [workflow run]. 89 | 90 | [Workflow Run]: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }} 91 | 92 | ------ 93 | 94 | *Automatically created by projen via the "upgrade-snapshot" workflow* 95 | author: github-actions 96 | committer: github-actions 97 | signoff: true 98 | -------------------------------------------------------------------------------- /.github/workflows/upgrade-main.yml: -------------------------------------------------------------------------------- 1 | # ~~ Generated by projen. To modify, edit .projenrc.js and run "npx projen". 
2 | 3 | name: upgrade-main 4 | on: 5 | workflow_dispatch: {} 6 | schedule: 7 | - cron: 0 0 * * 1 8 | jobs: 9 | upgrade: 10 | name: Upgrade 11 | runs-on: ubuntu-latest 12 | permissions: 13 | contents: read 14 | outputs: 15 | patch_created: ${{ steps.create_patch.outputs.patch_created }} 16 | steps: 17 | - name: Checkout 18 | uses: actions/checkout@v4 19 | with: 20 | ref: main 21 | - name: Setup Ruby 22 | run: sudo apt-get update ; sudo apt-get install -y ruby 23 | - name: Setup Node.js 24 | uses: actions/setup-node@v4 25 | with: 26 | node-version: lts/* 27 | - name: Install dependencies 28 | run: yarn install --check-files --frozen-lockfile 29 | - name: Upgrade dependencies 30 | run: npx projen upgrade 31 | - name: Find mutations 32 | id: create_patch 33 | run: |- 34 | git add . 35 | git diff --staged --patch --exit-code > repo.patch || echo "patch_created=true" >> $GITHUB_OUTPUT 36 | working-directory: ./ 37 | - name: Upload patch 38 | if: steps.create_patch.outputs.patch_created 39 | uses: actions/upload-artifact@v4.4.0 40 | with: 41 | name: repo.patch 42 | path: repo.patch 43 | overwrite: true 44 | pr: 45 | name: Create Pull Request 46 | needs: upgrade 47 | runs-on: ubuntu-latest 48 | permissions: 49 | contents: read 50 | if: ${{ needs.upgrade.outputs.patch_created }} 51 | steps: 52 | - name: Checkout 53 | uses: actions/checkout@v4 54 | with: 55 | ref: main 56 | - name: Download patch 57 | uses: actions/download-artifact@v4 58 | with: 59 | name: repo.patch 60 | path: ${{ runner.temp }} 61 | - name: Apply patch 62 | run: '[ -s ${{ runner.temp }}/repo.patch ] && git apply ${{ runner.temp }}/repo.patch || echo "Empty patch. 
Skipping."' 63 | - name: Set git identity 64 | run: |- 65 | git config user.name "github-actions" 66 | git config user.email "github-actions@github.com" 67 | - name: Create Pull Request 68 | id: create-pr 69 | uses: peter-evans/create-pull-request@v6 70 | with: 71 | token: ${{ secrets.PROJEN_GITHUB_TOKEN }} 72 | commit-message: |- 73 | chore(deps): upgrade dependencies 74 | 75 | Upgrades project dependencies. See details in [workflow run]. 76 | 77 | [Workflow Run]: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} 78 | 79 | ------ 80 | 81 | *Automatically created by projen via the "upgrade-main" workflow* 82 | branch: github-actions/upgrade-main 83 | title: "chore(deps): upgrade dependencies" 84 | labels: auto-approve 85 | body: |- 86 | Upgrades project dependencies. See details in [workflow run]. 87 | 88 | [Workflow Run]: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} 89 | 90 | ------ 91 | 92 | *Automatically created by projen via the "upgrade-main" workflow* 93 | author: github-actions 94 | committer: github-actions 95 | signoff: true 96 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # ~~ Generated by projen. To modify, edit .projenrc.js and run "npx projen". 
2 | !/.gitattributes 3 | !/.projen/tasks.json 4 | !/.projen/deps.json 5 | !/.projen/files.json 6 | !/.github/workflows/pull-request-lint.yml 7 | !/.github/workflows/auto-approve.yml 8 | !/package.json 9 | !/LICENSE 10 | !/.npmignore 11 | logs 12 | *.log 13 | npm-debug.log* 14 | yarn-debug.log* 15 | yarn-error.log* 16 | lerna-debug.log* 17 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 18 | pids 19 | *.pid 20 | *.seed 21 | *.pid.lock 22 | lib-cov 23 | coverage 24 | *.lcov 25 | .nyc_output 26 | build/Release 27 | node_modules/ 28 | jspm_packages/ 29 | *.tsbuildinfo 30 | .eslintcache 31 | *.tgz 32 | .yarn-integrity 33 | .cache 34 | cdk.out 35 | cdk.context.json 36 | /.idea 37 | status.json 38 | /test-reports/ 39 | junit.xml 40 | /coverage/ 41 | !/.github/workflows/build.yml 42 | /dist/changelog.md 43 | /dist/version.txt 44 | !/.github/workflows/release.yml 45 | !/.mergify.yml 46 | !/.github/workflows/upgrade-main.yml 47 | !/test/ 48 | !/tsconfig.dev.json 49 | !/src/ 50 | /lib 51 | /dist/ 52 | !/.eslintrc.json 53 | .jsii 54 | tsconfig.json 55 | !/API.md 56 | /assets/ 57 | !/src/package-codebuild-function.ts 58 | !/src/package-nodejs-function.ts 59 | test/.tmp 60 | test/default.integ.snapshot/asset.* 61 | test/default.integ.snapshot/**/asset.* 62 | test/default.integ.snapshot/cdk.out 63 | test/default.integ.snapshot/**/cdk.out 64 | test/default.integ.snapshot/manifest.json 65 | test/default.integ.snapshot/**/manifest.json 66 | test/default.integ.snapshot/tree.json 67 | test/default.integ.snapshot/**/tree.json 68 | !/.projenrc.js 69 | -------------------------------------------------------------------------------- /.mergify.yml: -------------------------------------------------------------------------------- 1 | # ~~ Generated by projen. To modify, edit .projenrc.js and run "npx projen". 
2 | 3 | queue_rules: 4 | - name: default 5 | update_method: merge 6 | conditions: 7 | - "#approved-reviews-by>=1" 8 | - -label~=(do-not-merge) 9 | - status-success=build 10 | - status-success=package-js 11 | - status-success=package-java 12 | - status-success=package-python 13 | - status-success=package-dotnet 14 | - status-success=package-go 15 | merge_method: squash 16 | commit_message_template: |- 17 | {{ title }} (#{{ number }}) 18 | 19 | {{ body }} 20 | pull_request_rules: 21 | - name: Automatic merge on approval and successful build 22 | actions: 23 | delete_head_branch: {} 24 | queue: 25 | name: default 26 | conditions: 27 | - "#approved-reviews-by>=1" 28 | - -label~=(do-not-merge) 29 | - status-success=build 30 | - status-success=package-js 31 | - status-success=package-java 32 | - status-success=package-python 33 | - status-success=package-dotnet 34 | - status-success=package-go 35 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | # ~~ Generated by projen. To modify, edit .projenrc.js and run "npx projen". 
2 | /.projen/ 3 | /test-reports/ 4 | junit.xml 5 | /coverage/ 6 | permissions-backup.acl 7 | /dist/changelog.md 8 | /dist/version.txt 9 | /.mergify.yml 10 | /test/ 11 | /tsconfig.dev.json 12 | /src/ 13 | !/lib/ 14 | !/lib/**/*.js 15 | !/lib/**/*.d.ts 16 | dist 17 | /tsconfig.json 18 | /.github/ 19 | /.vscode/ 20 | /.idea/ 21 | /.projenrc.js 22 | tsconfig.tsbuildinfo 23 | /.eslintrc.json 24 | !.jsii 25 | !/assets/ 26 | test/.tmp 27 | test/default.integ.snapshot 28 | /.gitattributes 29 | -------------------------------------------------------------------------------- /.projen/deps.json: -------------------------------------------------------------------------------- 1 | { 2 | "dependencies": [ 3 | { 4 | "name": "@aws-cdk/aws-lambda-python-alpha", 5 | "type": "build" 6 | }, 7 | { 8 | "name": "@aws-sdk/client-codebuild", 9 | "type": "build" 10 | }, 11 | { 12 | "name": "@aws-sdk/client-s3", 13 | "type": "build" 14 | }, 15 | { 16 | "name": "@stylistic/eslint-plugin", 17 | "version": "^2", 18 | "type": "build" 19 | }, 20 | { 21 | "name": "@types/adm-zip", 22 | "type": "build" 23 | }, 24 | { 25 | "name": "@types/aws-lambda", 26 | "type": "build" 27 | }, 28 | { 29 | "name": "@types/jest", 30 | "type": "build" 31 | }, 32 | { 33 | "name": "@types/node", 34 | "version": "ts5.5", 35 | "type": "build" 36 | }, 37 | { 38 | "name": "@typescript-eslint/eslint-plugin", 39 | "version": "^8", 40 | "type": "build" 41 | }, 42 | { 43 | "name": "@typescript-eslint/parser", 44 | "version": "^8", 45 | "type": "build" 46 | }, 47 | { 48 | "name": "adm-zip", 49 | "type": "build" 50 | }, 51 | { 52 | "name": "aws-cdk", 53 | "version": "^2", 54 | "type": "build" 55 | }, 56 | { 57 | "name": "commit-and-tag-version", 58 | "version": "^12", 59 | "type": "build" 60 | }, 61 | { 62 | "name": "esbuild", 63 | "type": "build" 64 | }, 65 | { 66 | "name": "eslint-import-resolver-typescript", 67 | "type": "build" 68 | }, 69 | { 70 | "name": "eslint-plugin-import", 71 | "type": "build" 72 | }, 73 | { 74 | 
"name": "eslint", 75 | "version": "^9", 76 | "type": "build" 77 | }, 78 | { 79 | "name": "execa", 80 | "type": "build" 81 | }, 82 | { 83 | "name": "jest", 84 | "type": "build" 85 | }, 86 | { 87 | "name": "jest-junit", 88 | "version": "^16", 89 | "type": "build" 90 | }, 91 | { 92 | "name": "jsii-diff", 93 | "type": "build" 94 | }, 95 | { 96 | "name": "jsii-docgen", 97 | "version": "^10.5.0", 98 | "type": "build" 99 | }, 100 | { 101 | "name": "jsii-pacmak", 102 | "type": "build" 103 | }, 104 | { 105 | "name": "jsii-rosetta", 106 | "version": "5.5.x", 107 | "type": "build" 108 | }, 109 | { 110 | "name": "jsii", 111 | "version": "5.5.x", 112 | "type": "build" 113 | }, 114 | { 115 | "name": "projen", 116 | "type": "build" 117 | }, 118 | { 119 | "name": "ts-jest", 120 | "type": "build" 121 | }, 122 | { 123 | "name": "ts-node", 124 | "type": "build" 125 | }, 126 | { 127 | "name": "typescript", 128 | "version": "5.5.x", 129 | "type": "build" 130 | }, 131 | { 132 | "name": "xterm-benchmark", 133 | "type": "build" 134 | }, 135 | { 136 | "name": "aws-cdk-lib", 137 | "version": "^2.123.0", 138 | "type": "peer" 139 | }, 140 | { 141 | "name": "constructs", 142 | "version": "^10.0.5", 143 | "type": "peer" 144 | } 145 | ], 146 | "//": "~~ Generated by projen. To modify, edit .projenrc.js and run \"npx projen\"." 
147 | } 148 | -------------------------------------------------------------------------------- /.projen/files.json: -------------------------------------------------------------------------------- 1 | { 2 | "files": [ 3 | ".eslintrc.json", 4 | ".gitattributes", 5 | ".github/workflows/auto-approve.yml", 6 | ".github/workflows/build.yml", 7 | ".github/workflows/pull-request-lint.yml", 8 | ".github/workflows/release.yml", 9 | ".github/workflows/upgrade-main.yml", 10 | ".gitignore", 11 | ".mergify.yml", 12 | ".projen/deps.json", 13 | ".projen/files.json", 14 | ".projen/tasks.json", 15 | "LICENSE", 16 | "src/package-codebuild-function.ts", 17 | "src/package-nodejs-function.ts", 18 | "tsconfig.dev.json" 19 | ], 20 | "//": "~~ Generated by projen. To modify, edit .projenrc.js and run \"npx projen\"." 21 | } 22 | -------------------------------------------------------------------------------- /.projen/tasks.json: -------------------------------------------------------------------------------- 1 | { 2 | "tasks": { 3 | "benchmark": { 4 | "name": "benchmark", 5 | "steps": [ 6 | { 7 | "exec": "esbuild benchmark/deployment.ts --bundle --target=node14 --platform=node --outfile=benchmark/deployment.js --format=cjs --external:xterm-benchmark && xterm-benchmark benchmark/deployment.js" 8 | } 9 | ] 10 | }, 11 | "build": { 12 | "name": "build", 13 | "description": "Full release build", 14 | "steps": [ 15 | { 16 | "spawn": "default" 17 | }, 18 | { 19 | "spawn": "pre-compile" 20 | }, 21 | { 22 | "spawn": "compile" 23 | }, 24 | { 25 | "spawn": "post-compile" 26 | }, 27 | { 28 | "spawn": "test" 29 | }, 30 | { 31 | "spawn": "package" 32 | } 33 | ] 34 | }, 35 | "bump": { 36 | "name": "bump", 37 | "description": "Bumps version based on latest git tag and generates a changelog entry", 38 | "env": { 39 | "OUTFILE": "package.json", 40 | "CHANGELOG": "dist/changelog.md", 41 | "BUMPFILE": "dist/version.txt", 42 | "RELEASETAG": "dist/releasetag.txt", 43 | "RELEASE_TAG_PREFIX": "", 44 | 
"BUMP_PACKAGE": "commit-and-tag-version@^12" 45 | }, 46 | "steps": [ 47 | { 48 | "builtin": "release/bump-version" 49 | } 50 | ], 51 | "condition": "git log --oneline -1 | grep -qv \"chore(release):\"" 52 | }, 53 | "bundle": { 54 | "name": "bundle", 55 | "description": "Prepare assets", 56 | "steps": [ 57 | { 58 | "spawn": "bundle:package-codebuild.lambda" 59 | }, 60 | { 61 | "spawn": "bundle:package-nodejs.lambda" 62 | }, 63 | { 64 | "exec": "rm -rf assets/package-ruby.lambda" 65 | }, 66 | { 67 | "exec": "mkdir -p assets/package-python.lambda assets/package-ruby.lambda" 68 | }, 69 | { 70 | "exec": "cp src/package-python.lambda.py assets/package-python.lambda/index.py" 71 | }, 72 | { 73 | "exec": "cp src/package-ruby.lambda.rb assets/package-ruby.lambda/index.rb" 74 | }, 75 | { 76 | "exec": "gem install --no-document --version 2.3.2 --install-dir assets/package-ruby.lambda/vendor rubyzip" 77 | }, 78 | { 79 | "exec": "mv assets/package-ruby.lambda/vendor/gems/rubyzip-2.3.2/lib assets/package-ruby.lambda/rubyzip" 80 | }, 81 | { 82 | "exec": "rm -rf assets/package-ruby.lambda/vendor" 83 | } 84 | ] 85 | }, 86 | "bundle:package-codebuild.lambda": { 87 | "name": "bundle:package-codebuild.lambda", 88 | "description": "Create a JavaScript bundle from src/package-codebuild.lambda.ts", 89 | "steps": [ 90 | { 91 | "exec": "esbuild --bundle src/package-codebuild.lambda.ts --target=\"node18\" --platform=\"node\" --outfile=\"assets/package-codebuild.lambda/index.js\" --tsconfig=\"tsconfig.dev.json\" --external:@aws-sdk/*" 92 | } 93 | ] 94 | }, 95 | "bundle:package-codebuild.lambda:watch": { 96 | "name": "bundle:package-codebuild.lambda:watch", 97 | "description": "Continuously update the JavaScript bundle from src/package-codebuild.lambda.ts", 98 | "steps": [ 99 | { 100 | "exec": "esbuild --bundle src/package-codebuild.lambda.ts --target=\"node18\" --platform=\"node\" --outfile=\"assets/package-codebuild.lambda/index.js\" --tsconfig=\"tsconfig.dev.json\" --external:@aws-sdk/* 
--watch" 101 | } 102 | ] 103 | }, 104 | "bundle:package-nodejs.lambda": { 105 | "name": "bundle:package-nodejs.lambda", 106 | "description": "Create a JavaScript bundle from src/package-nodejs.lambda.ts", 107 | "steps": [ 108 | { 109 | "exec": "esbuild --bundle src/package-nodejs.lambda.ts --target=\"node18\" --platform=\"node\" --outfile=\"assets/package-nodejs.lambda/index.js\" --tsconfig=\"tsconfig.dev.json\" " 110 | } 111 | ] 112 | }, 113 | "bundle:package-nodejs.lambda:watch": { 114 | "name": "bundle:package-nodejs.lambda:watch", 115 | "description": "Continuously update the JavaScript bundle from src/package-nodejs.lambda.ts", 116 | "steps": [ 117 | { 118 | "exec": "esbuild --bundle src/package-nodejs.lambda.ts --target=\"node18\" --platform=\"node\" --outfile=\"assets/package-nodejs.lambda/index.js\" --tsconfig=\"tsconfig.dev.json\" --external:@aws-sdk/* --watch" 119 | } 120 | ] 121 | }, 122 | "clobber": { 123 | "name": "clobber", 124 | "description": "hard resets to HEAD of origin and cleans the local repo", 125 | "env": { 126 | "BRANCH": "$(git branch --show-current)" 127 | }, 128 | "steps": [ 129 | { 130 | "exec": "git checkout -b scratch", 131 | "name": "save current HEAD in \"scratch\" branch" 132 | }, 133 | { 134 | "exec": "git checkout $BRANCH" 135 | }, 136 | { 137 | "exec": "git fetch origin", 138 | "name": "fetch latest changes from origin" 139 | }, 140 | { 141 | "exec": "git reset --hard origin/$BRANCH", 142 | "name": "hard reset to origin commit" 143 | }, 144 | { 145 | "exec": "git clean -fdx", 146 | "name": "clean all untracked files" 147 | }, 148 | { 149 | "say": "ready to rock! 
(unpushed commits are under the \"scratch\" branch)" 150 | } 151 | ], 152 | "condition": "git diff --exit-code > /dev/null" 153 | }, 154 | "compat": { 155 | "name": "compat", 156 | "description": "Perform API compatibility check against latest version", 157 | "steps": [ 158 | { 159 | "exec": "jsii-diff npm:$(node -p \"require('./package.json').name\") -k --ignore-file .compatignore || (echo \"\nUNEXPECTED BREAKING CHANGES: add keys such as 'removed:constructs.Node.of' to .compatignore to skip.\n\" && exit 1)" 160 | } 161 | ] 162 | }, 163 | "compile": { 164 | "name": "compile", 165 | "description": "Only compile", 166 | "steps": [ 167 | { 168 | "exec": "jsii --silence-warnings=reserved-word" 169 | } 170 | ] 171 | }, 172 | "default": { 173 | "name": "default", 174 | "description": "Synthesize project files", 175 | "steps": [ 176 | { 177 | "exec": "node .projenrc.js" 178 | } 179 | ] 180 | }, 181 | "docgen": { 182 | "name": "docgen", 183 | "description": "Generate API.md from .jsii manifest", 184 | "steps": [ 185 | { 186 | "exec": "jsii-docgen -o API.md" 187 | } 188 | ] 189 | }, 190 | "eject": { 191 | "name": "eject", 192 | "description": "Remove projen from the project", 193 | "env": { 194 | "PROJEN_EJECTING": "true" 195 | }, 196 | "steps": [ 197 | { 198 | "spawn": "default" 199 | } 200 | ] 201 | }, 202 | "eslint": { 203 | "name": "eslint", 204 | "description": "Runs eslint against the codebase", 205 | "env": { 206 | "ESLINT_USE_FLAT_CONFIG": "false" 207 | }, 208 | "steps": [ 209 | { 210 | "exec": "eslint --ext .ts,.tsx --fix --no-error-on-unmatched-pattern $@ benchmark test build-tools .projenrc.js", 211 | "receiveArgs": true 212 | } 213 | ] 214 | }, 215 | "install": { 216 | "name": "install", 217 | "description": "Install project dependencies and update lockfile (non-frozen)", 218 | "steps": [ 219 | { 220 | "exec": "yarn install --check-files" 221 | } 222 | ] 223 | }, 224 | "install:ci": { 225 | "name": "install:ci", 226 | "description": "Install project 
dependencies using frozen lockfile", 227 | "steps": [ 228 | { 229 | "exec": "yarn install --check-files --frozen-lockfile" 230 | } 231 | ] 232 | }, 233 | "integ:default:assert": { 234 | "name": "integ:default:assert", 235 | "description": "assert the snapshot of integration test 'default'", 236 | "steps": [ 237 | { 238 | "exec": "[ -d \"test/default.integ.snapshot\" ] || (echo \"No snapshot available for integration test 'default'. Run 'projen integ:default:deploy' to capture.\" && exit 1)" 239 | }, 240 | { 241 | "exec": "cdk synth --app \"ts-node -P tsconfig.dev.json test/default.integ.ts\" --no-notices --no-version-reporting --no-asset-metadata --no-path-metadata -o test/.tmp/default.integ/assert.cdk.out > /dev/null" 242 | }, 243 | { 244 | "exec": "diff -r -x asset.* -x cdk.out -x manifest.json -x tree.json test/default.integ.snapshot/ test/.tmp/default.integ/assert.cdk.out/" 245 | } 246 | ] 247 | }, 248 | "integ:default:deploy": { 249 | "name": "integ:default:deploy", 250 | "description": "deploy integration test 'default' and capture snapshot", 251 | "steps": [ 252 | { 253 | "exec": "rm -fr test/.tmp/default.integ/deploy.cdk.out" 254 | }, 255 | { 256 | "exec": "cdk deploy --app \"ts-node -P tsconfig.dev.json test/default.integ.ts\" --no-notices --no-version-reporting --no-asset-metadata --no-path-metadata '**' --require-approval=never -o test/.tmp/default.integ/deploy.cdk.out" 257 | }, 258 | { 259 | "exec": "rm -fr test/default.integ.snapshot" 260 | }, 261 | { 262 | "exec": "mv test/.tmp/default.integ/deploy.cdk.out test/default.integ.snapshot" 263 | }, 264 | { 265 | "spawn": "integ:default:destroy" 266 | } 267 | ] 268 | }, 269 | "integ:default:destroy": { 270 | "name": "integ:default:destroy", 271 | "description": "destroy integration test 'default'", 272 | "steps": [ 273 | { 274 | "exec": "cdk destroy --app test/default.integ.snapshot '**' --no-version-reporting" 275 | } 276 | ] 277 | }, 278 | "integ:default:snapshot": { 279 | "name": 
"integ:default:snapshot", 280 | "description": "update snapshot for integration test \"default\"", 281 | "steps": [ 282 | { 283 | "exec": "cdk synth --app \"ts-node -P tsconfig.dev.json test/default.integ.ts\" --no-notices --no-version-reporting --no-asset-metadata --no-path-metadata -o test/default.integ.snapshot > /dev/null" 284 | } 285 | ] 286 | }, 287 | "integ:default:watch": { 288 | "name": "integ:default:watch", 289 | "description": "watch integration test 'default' (without updating snapshots)", 290 | "steps": [ 291 | { 292 | "exec": "cdk watch --app \"ts-node -P tsconfig.dev.json test/default.integ.ts\" --no-notices --no-version-reporting --no-asset-metadata --no-path-metadata '**' -o test/.tmp/default.integ/deploy.cdk.out" 293 | } 294 | ] 295 | }, 296 | "integ:snapshot-all": { 297 | "name": "integ:snapshot-all", 298 | "description": "update snapshot for all integration tests", 299 | "steps": [ 300 | { 301 | "spawn": "integ:default:snapshot" 302 | } 303 | ] 304 | }, 305 | "package": { 306 | "name": "package", 307 | "description": "Creates the distribution package", 308 | "steps": [ 309 | { 310 | "spawn": "package:js", 311 | "condition": "node -e \"if (!process.env.CI) process.exit(1)\"" 312 | }, 313 | { 314 | "spawn": "package-all", 315 | "condition": "node -e \"if (process.env.CI) process.exit(1)\"" 316 | } 317 | ] 318 | }, 319 | "package-all": { 320 | "name": "package-all", 321 | "description": "Packages artifacts for all target languages", 322 | "steps": [ 323 | { 324 | "spawn": "package:js" 325 | }, 326 | { 327 | "spawn": "package:java" 328 | }, 329 | { 330 | "spawn": "package:python" 331 | }, 332 | { 333 | "spawn": "package:dotnet" 334 | }, 335 | { 336 | "spawn": "package:go" 337 | } 338 | ] 339 | }, 340 | "package:dotnet": { 341 | "name": "package:dotnet", 342 | "description": "Create dotnet language bindings", 343 | "steps": [ 344 | { 345 | "exec": "jsii-pacmak -v --target dotnet" 346 | } 347 | ] 348 | }, 349 | "package:go": { 350 | "name": 
"package:go", 351 | "description": "Create go language bindings", 352 | "steps": [ 353 | { 354 | "exec": "jsii-pacmak -v --target go" 355 | } 356 | ] 357 | }, 358 | "package:java": { 359 | "name": "package:java", 360 | "description": "Create java language bindings", 361 | "steps": [ 362 | { 363 | "exec": "jsii-pacmak -v --target java" 364 | } 365 | ] 366 | }, 367 | "package:js": { 368 | "name": "package:js", 369 | "description": "Create js language bindings", 370 | "steps": [ 371 | { 372 | "exec": "jsii-pacmak -v --target js" 373 | } 374 | ] 375 | }, 376 | "package:python": { 377 | "name": "package:python", 378 | "description": "Create python language bindings", 379 | "steps": [ 380 | { 381 | "exec": "jsii-pacmak -v --target python" 382 | } 383 | ] 384 | }, 385 | "post-compile": { 386 | "name": "post-compile", 387 | "description": "Runs after successful compilation", 388 | "steps": [ 389 | { 390 | "spawn": "docgen" 391 | } 392 | ] 393 | }, 394 | "post-upgrade": { 395 | "name": "post-upgrade", 396 | "description": "Runs after upgrading dependencies" 397 | }, 398 | "pre-compile": { 399 | "name": "pre-compile", 400 | "description": "Prepare the project for compilation", 401 | "steps": [ 402 | { 403 | "spawn": "bundle" 404 | } 405 | ] 406 | }, 407 | "release": { 408 | "name": "release", 409 | "description": "Prepare a release from \"main\" branch", 410 | "env": { 411 | "RELEASE": "true" 412 | }, 413 | "steps": [ 414 | { 415 | "exec": "rm -fr dist" 416 | }, 417 | { 418 | "spawn": "bump" 419 | }, 420 | { 421 | "spawn": "build" 422 | }, 423 | { 424 | "spawn": "unbump" 425 | }, 426 | { 427 | "exec": "git diff --ignore-space-at-eol --exit-code" 428 | } 429 | ] 430 | }, 431 | "test": { 432 | "name": "test", 433 | "description": "Run tests", 434 | "steps": [ 435 | { 436 | "exec": "jest --passWithNoTests --updateSnapshot", 437 | "receiveArgs": true 438 | }, 439 | { 440 | "spawn": "eslint" 441 | }, 442 | { 443 | "spawn": "integ:default:assert" 444 | } 445 | ] 446 | }, 447 | 
"test:watch": { 448 | "name": "test:watch", 449 | "description": "Run jest in watch mode", 450 | "steps": [ 451 | { 452 | "exec": "jest --watch" 453 | } 454 | ] 455 | }, 456 | "unbump": { 457 | "name": "unbump", 458 | "description": "Restores version to 0.0.0", 459 | "env": { 460 | "OUTFILE": "package.json", 461 | "CHANGELOG": "dist/changelog.md", 462 | "BUMPFILE": "dist/version.txt", 463 | "RELEASETAG": "dist/releasetag.txt", 464 | "RELEASE_TAG_PREFIX": "", 465 | "BUMP_PACKAGE": "commit-and-tag-version@^12" 466 | }, 467 | "steps": [ 468 | { 469 | "builtin": "release/reset-version" 470 | } 471 | ] 472 | }, 473 | "upgrade": { 474 | "name": "upgrade", 475 | "description": "upgrade dependencies", 476 | "env": { 477 | "CI": "0" 478 | }, 479 | "steps": [ 480 | { 481 | "exec": "npx npm-check-updates@16 --upgrade --target=minor --peer --no-deprecated --dep=dev,peer,prod,optional --filter=@aws-cdk/aws-lambda-python-alpha,@aws-sdk/client-codebuild,@aws-sdk/client-s3,@types/adm-zip,@types/aws-lambda,@types/jest,adm-zip,esbuild,eslint-import-resolver-typescript,eslint-plugin-import,execa,jest,jsii-diff,jsii-pacmak,projen,ts-jest,ts-node,xterm-benchmark" 482 | }, 483 | { 484 | "exec": "yarn install --check-files" 485 | }, 486 | { 487 | "exec": "yarn upgrade @aws-cdk/aws-lambda-python-alpha @aws-sdk/client-codebuild @aws-sdk/client-s3 @stylistic/eslint-plugin @types/adm-zip @types/aws-lambda @types/jest @types/node @typescript-eslint/eslint-plugin @typescript-eslint/parser adm-zip aws-cdk commit-and-tag-version esbuild eslint-import-resolver-typescript eslint-plugin-import eslint execa jest jest-junit jsii-diff jsii-docgen jsii-pacmak jsii-rosetta jsii projen ts-jest ts-node typescript xterm-benchmark aws-cdk-lib constructs" 488 | }, 489 | { 490 | "exec": "npx projen" 491 | }, 492 | { 493 | "spawn": "post-upgrade" 494 | } 495 | ] 496 | }, 497 | "watch": { 498 | "name": "watch", 499 | "description": "Watch & compile in the background", 500 | "steps": [ 501 | { 502 | "exec": 
"jsii -w --silence-warnings=reserved-word" 503 | } 504 | ] 505 | } 506 | }, 507 | "env": { 508 | "PATH": "$(npx -c \"node --print process.env.PATH\")" 509 | }, 510 | "//": "~~ Generated by projen. To modify, edit .projenrc.js and run \"npx projen\"." 511 | } 512 | -------------------------------------------------------------------------------- /.projenrc.js: -------------------------------------------------------------------------------- 1 | const { awscdk } = require('projen'); 2 | const { Stability } = require('projen/lib/cdk/jsii-project'); 3 | 4 | const project = new awscdk.AwsCdkConstructLibrary({ 5 | author: 'Amir Szekely', 6 | authorAddress: 'amir@cloudsnorkel.com', 7 | stability: Stability.EXPERIMENTAL, 8 | cdkVersion: '2.123.0', // 2.54.0 for https://github.com/aws/aws-cdk/pull/22124, 2.77.0 for removing node 14, 2.87.0 for node 18 on CodeBuild, 2.123.0 for lambda logs 9 | defaultReleaseBranch: 'main', 10 | name: '@cloudsnorkel/cdk-turbo-layers', 11 | repositoryUrl: 'https://github.com/CloudSnorkel/cdk-turbo-layers.git', 12 | license: 'Apache-2.0', 13 | description: 'Speed-up Lambda function deployment with dependency layers built in AWS', 14 | devDeps: [ 15 | 'esbuild', // for faster NodejsFunction bundling 16 | '@aws-sdk/client-codebuild', 17 | '@aws-sdk/client-s3', 18 | '@types/aws-lambda', 19 | 'adm-zip', 20 | '@types/adm-zip', 21 | 'xterm-benchmark', 22 | 'execa', 23 | '@aws-cdk/aws-lambda-python-alpha', 24 | ], 25 | deps: [ 26 | ], 27 | releaseToNpm: true, 28 | publishToPypi: { 29 | distName: 'cloudsnorkel.cdk-turbo-layers', 30 | module: 'cloudsnorkel.cdk_turbo_layers', 31 | }, 32 | publishToGo: { 33 | moduleName: 'github.com/CloudSnorkel/cdk-turbo-layers-go', 34 | }, 35 | publishToMaven: { 36 | mavenGroupId: 'com.cloudsnorkel', 37 | mavenArtifactId: 'cdk.turbo-layers', 38 | javaPackage: 'com.cloudsnorkel.cdk.turbo_layers', 39 | mavenEndpoint: 'https://s01.oss.sonatype.org', 40 | }, 41 | publishToNuget: { 42 | dotNetNamespace: 'CloudSnorkel', 43 | 
packageId: 'CloudSnorkel.Cdk.TurboLayers', 44 | }, 45 | keywords: [ 46 | 'aws', 47 | 'aws-cdk', 48 | 'aws-cdk-construct', 49 | 'cdk', 50 | 'codebuild', 51 | 'lambda', 52 | 'layer', 53 | 'python', 54 | 'nodejs', 55 | 'ruby', 56 | ], 57 | gitignore: [ 58 | 'cdk.out', 59 | 'cdk.context.json', 60 | '/.idea', 61 | 'status.json', 62 | ], 63 | sampleCode: false, 64 | compat: true, 65 | autoApproveOptions: { 66 | allowedUsernames: ['kichik', 'CloudSnorkelBot'], 67 | }, 68 | depsUpgradeOptions: { 69 | workflowOptions: { 70 | labels: ['auto-approve'], 71 | schedule: { 72 | cron: ['0 0 * * 1'], 73 | }, 74 | }, 75 | }, 76 | githubOptions: { 77 | pullRequestLintOptions: { 78 | semanticTitleOptions: { 79 | types: [ 80 | 'feat', 81 | 'fix', 82 | 'chore', 83 | ], 84 | }, 85 | }, 86 | }, 87 | pullRequestTemplate: false, 88 | workflowBootstrapSteps: [ 89 | { 90 | name: 'Setup Ruby', 91 | run: 'sudo apt-get update ; sudo apt-get install -y ruby', 92 | }, 93 | ], 94 | scripts: { 95 | benchmark: 'esbuild benchmark/deployment.ts --bundle --target=node14 --platform=node --outfile=benchmark/deployment.js --format=cjs --external:xterm-benchmark && xterm-benchmark benchmark/deployment.js', 96 | }, 97 | eslintOptions: { 98 | dirs: ['benchmark'], 99 | }, 100 | tsconfig: { 101 | include: ['benchmark/**/*.ts'], 102 | }, 103 | jsiiVersion: '5.5.x', 104 | typescriptVersion: '5.5.x', 105 | }); 106 | 107 | // disable automatic releases, but keep workflow that can be triggered manually 108 | const releaseWorkflow = project.github.tryFindWorkflow('release'); 109 | releaseWorkflow.file.addDeletionOverride('on.push'); 110 | 111 | // set proper line endings 112 | project.gitattributes.addAttributes('*.js', 'eol=lf'); 113 | project.gitattributes.addAttributes('*.json', 'eol=lf'); 114 | project.gitattributes.addAttributes('*.sh', 'eol=lf'); 115 | project.gitattributes.addAttributes('*.yml', 'eol=lf'); 116 | project.gitattributes.addAttributes('Dockerfile', 'eol=lf'); 117 | 118 | // we can't count on 
@aws-sdk to be there because the user might use nodejs 16 or 18 119 | const bundleNodejsStep = project.tasks.tryFind('bundle:package-nodejs.lambda').steps[0]; 120 | bundleNodejsStep.exec = bundleNodejsStep.exec.replace('--external:@aws-sdk/*', ''); 121 | 122 | // extra lambdas 123 | project.bundler.bundleTask.exec('rm -rf assets/package-ruby.lambda'); 124 | project.bundler.bundleTask.exec('mkdir -p assets/package-python.lambda assets/package-ruby.lambda'); 125 | project.bundler.bundleTask.exec('cp src/package-python.lambda.py assets/package-python.lambda/index.py'); 126 | project.bundler.bundleTask.exec('cp src/package-ruby.lambda.rb assets/package-ruby.lambda/index.rb'); 127 | project.bundler.bundleTask.exec('gem install --no-document --version 2.3.2 --install-dir assets/package-ruby.lambda/vendor rubyzip'); 128 | project.bundler.bundleTask.exec('mv assets/package-ruby.lambda/vendor/gems/rubyzip-2.3.2/lib assets/package-ruby.lambda/rubyzip'); 129 | project.bundler.bundleTask.exec('rm -rf assets/package-ruby.lambda/vendor'); 130 | 131 | // funding 132 | project.package.addField('funding', 'https://github.com/sponsors/CloudSnorkel'); 133 | 134 | project.synth(); 135 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 
15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 
48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. 
Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 
123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. 
In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. 
We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 203 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Turbo Layers for CDK 2 | 3 | [![NPM](https://img.shields.io/npm/v/@cloudsnorkel/cdk-turbo-layers?label=npm&logo=npm)][7] 4 | [![PyPI](https://img.shields.io/pypi/v/cloudsnorkel.cdk-turbo-layers?label=pypi&logo=pypi)][6] 5 | [![Maven Central](https://img.shields.io/maven-central/v/com.cloudsnorkel/cdk.turbo-layers.svg?label=Maven%20Central&logo=java)][8] 6 | [![Go](https://img.shields.io/github/v/tag/CloudSnorkel/cdk-turbo-layers?color=red&label=go&logo=go)][11] 7 | [![Nuget](https://img.shields.io/nuget/v/CloudSnorkel.Cdk.TurboLayers?color=red&&logo=nuget)][12] 8 | [![License](https://img.shields.io/badge/license-Apache--2.0-blue)](https://github.com/CloudSnorkel/cdk-turbo-layers/blob/main/LICENSE) 9 | 10 | Speed up deployment of Lambda functions by creating dependency layers in AWS instead of locally. 
11 | 12 | * ⛓️ Easily separate dependency deployment from Lambda code deployment 13 | * 🔁 Never re-package dependencies just because of a small code change 14 | * ☁️ Never download another single dependency package locally again 15 | * 🏋️ Never upload oversized code packages again 16 | * 🌎 Edit your code in the browser -- no more "deployment package too large to enable inline code editing" 17 | * ❌ Uninstall Docker from your laptop and extend your battery life 18 | * ☕ Take shorter coffee breaks when deploying 19 | 20 | Supported Lambda runtimes: 21 | 22 | * 🐍 Python 23 | * 📜 Node.js 24 | * 💎 Ruby 25 | * ☕ Java 26 | 27 | ## Benchmark 28 | 29 | Below are synth and deploy times for a simple Python function with [`PythonFunction`](https://docs.aws.amazon.com/cdk/api/v2/docs/@aws-cdk_aws-lambda-python-alpha.PythonFunction.html) compared to Turbo Layers. The [benchmark](benchmark/deployment.ts) ran three times and the best times were taken for each step. 30 | 31 | | | 💤 PythonFunction | 🚀 Turbo Layers | 💤 5x PythonFunction | 🚀 5x Functions w/ Shared Turbo Layer | 32 | |------------------------|---------------------|------------------|----------------------|---------------------------------------| 33 | | Initial Synth | 1:21 | 0:06 | 2:43 | 0:06 | 34 | | Initial Deploy | 1:18 | 2:05 | 2:10 | 2:06 | 35 | | Code Change Synth | 0:31 | 0:06 | 1:21 | 0:06 | 36 | | Code Change Deploy | 0:49 | 0:29 | 1:19 | 0:36 | 37 | | New Dependency Synth | 0:33 | 0:06 | 1:30 | 0:06 | 38 | | New Dependency Deploy | 0:52 | 1:50 | 1:31 | 1:50 | 39 | 40 | As you can see, code changes synth much faster and deploy a bit faster too. Dependency changes take longer to deploy, but are assumed to be way less frequent than code changes. The more dependencies your function uses, the better the results will be. 
41 | 42 | To run the benchmark yourself use: 43 | 44 | ``` 45 | npm run bundle && npm run benchmark 46 | ``` 47 | 48 | ## API 49 | 50 | The best way to browse API documentation is on [Constructs Hub][13]. It is available in all supported programming languages. 51 | 52 | ## Installation 53 | 54 | 1. Confirm you're using CDK v2 55 | 2. Install the appropriate package 56 | 1. [Python][6] 57 | ``` 58 | pip install cloudsnorkel.cdk-turbo-layers 59 | ``` 60 | 2. [TypeScript or JavaScript][7] 61 | ``` 62 | npm i @cloudsnorkel/cdk-turbo-layers 63 | ``` 64 | 3. [Java][8] 65 | ```xml 66 | 67 | com.cloudsnorkel 68 | cdk.turbo-layers 69 | 70 | ``` 71 | 4. [Go][11] 72 | ``` 73 | go get github.com/CloudSnorkel/cdk-turbo-layers-go/cloudsnorkelcdkturbolayers 74 | ``` 75 | 5. [.NET][12] 76 | ``` 77 | dotnet add package CloudSnorkel.Cdk.TurboLayers 78 | ``` 79 | 80 | ## Examples 81 | 82 | The very basic example below will create a layer with dependencies specified as parameters and attach it to a Lambda function. 83 | 84 | ```typescript 85 | const packager = new PythonDependencyPackager(this, 'Packager', { 86 | runtime: lambda.Runtime.PYTHON_3_9, 87 | type: DependencyPackagerType.LAMBDA, 88 | }); 89 | new Function(this, 'Function with inline requirements', { 90 | handler: 'index.handler', 91 | code: lambda.Code.fromInline('def handler(event, context):\n import requests'), 92 | runtime: lambda.Runtime.PYTHON_3_9, 93 | // this will create a layer with requests and Scrapy in a Lambda function instead of locally 94 | layers: [packager.layerFromInline('inline requirements', ['requests', 'Scrapy'])], 95 | }); 96 | ``` 97 | 98 | The next example will create a layer with dependencies specified in a `requirements.txt` file and attach it to a Lambda function. 
99 | 100 | ```typescript 101 | const packager = new PythonDependencyPackager(this, 'Packager', { 102 | runtime: lambda.Runtime.PYTHON_3_9, 103 | type: DependencyPackagerType.LAMBDA, 104 | }); 105 | new Function(this, 'Function with external source and requirements', { 106 | handler: 'index.handler', 107 | code: lambda.Code.fromAsset('lambda-src'), 108 | runtime: lambda.Runtime.PYTHON_3_9, 109 | // this will read requirements.txt and create a layer from the requirements in a Lambda function instead of locally 110 | layers: [packager.layerFromRequirementsTxt('requirements.txt', 'lambda-src')], 111 | }); 112 | ``` 113 | 114 | Custom package managers like Pipenv or Poetry are also supported. 115 | 116 | ```typescript 117 | const packager = new PythonDependencyPackager(this, 'Packager', { 118 | runtime: lambda.Runtime.PYTHON_3_9, 119 | type: DependencyPackagerType.LAMBDA, 120 | }); 121 | new Function(this, 'Function with external source and requirements', { 122 | handler: 'index.handler', 123 | code: lambda.Code.fromAsset('lambda-poetry-src'), 124 | runtime: lambda.Runtime.PYTHON_3_9, 125 | // this will read pyproject.toml and poetry.lock and create a layer from the requirements in a Lambda function instead of locally 126 | layers: [packager.layerFromPoetry('poetry dependencies', 'lambda-poetry-src')], 127 | }); 128 | ``` 129 | 130 | If your dependencies have some C library dependencies, you may need to use the more capable but slower CodeBuild packager. 
131 | 132 | ```typescript 133 | const packager = new PythonDependencyPackager(this, 'Packager', { 134 | runtime: lambda.Runtime.PYTHON_3_9, 135 | type: DependencyPackagerType.CODEBUILD, 136 | preinstallCommands: [ 137 | 'apt install -y libxml2-dev libxslt-dev libffi-dev libssl-dev', 138 | ], 139 | }); 140 | new Function(this, 'Function with external source and requirements', { 141 | handler: 'index.handler', 142 | code: lambda.Code.fromAsset('lambda-pipenv-src'), 143 | runtime: lambda.Runtime.PYTHON_3_9, 144 | layers: [packager.layerFromPipenv('pipenv dependencies', 'lambda-pipenv-src')], 145 | }); 146 | ``` 147 | 148 | Building layers for ARM64 functions is also supported. 149 | 150 | ```typescript 151 | const packager = new PythonDependencyPackager(this, 'Packager', { 152 | runtime: lambda.Runtime.PYTHON_3_9, 153 | type: DependencyPackagerType.LAMBDA, 154 | architecture: Architecture.ARM_64, 155 | }); 156 | new Function(this, 'Function with external source and requirements', { 157 | handler: 'index.handler', 158 | code: lambda.Code.fromAsset('lambda-poetry-src'), 159 | runtime: lambda.Runtime.PYTHON_3_9, 160 | architecture: Architecture.ARM_64, 161 | layers: [packager.layerFromPoetry('poetry dependencies', 'lambda-poetry-src')], 162 | }); 163 | ``` 164 | 165 | All these examples are for Python, but the same API is available for Node.js, Ruby, and Java. The same build options are available. Multiple different package managers are supported. See [Constructs Hub][13] for more details. 
166 | 167 | ## Older Implementations 168 | 169 | * [lovage](https://github.com/CloudSnorkel/lovage): standalone Python framework that uses the same trick to deploy decorated functions to AWS 170 | * [serverless-pydeps](https://github.com/CloudSnorkel/serverless-pydeps): plugin for [Serverless Framework](https://www.serverless.com/) that speeds up deployment 171 | 172 | [6]: https://pypi.org/project/cloudsnorkel.cdk-turbo-layers 173 | [7]: https://www.npmjs.com/package/@cloudsnorkel/cdk-turbo-layers 174 | [8]: https://search.maven.org/search?q=g:%22com.cloudsnorkel%22%20AND%20a:%22cdk.turbo-layers%22 175 | [11]: https://pkg.go.dev/github.com/CloudSnorkel/cdk-turbo-layers-go/cloudsnorkelcdkturbolayers 176 | [12]: https://www.nuget.org/packages/CloudSnorkel.Cdk.TurboLayers/ 177 | [13]: https://constructs.dev/packages/@cloudsnorkel/cdk-turbo-layers/ 178 | -------------------------------------------------------------------------------- /benchmark/.gitignore: -------------------------------------------------------------------------------- 1 | *.js 2 | *.d.ts 3 | 4 | # CDK asset staging directory 5 | .cdk.staging 6 | cdk.out 7 | -------------------------------------------------------------------------------- /benchmark/deployment.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'node:fs'; 2 | import * as path from 'node:path'; 3 | import * as readline from 'node:readline'; 4 | import { execa } from 'execa'; /* eslint-disable-line import/no-extraneous-dependencies */ 5 | import { beforeEach, afterEach, before, perfContext, RuntimeCase } from 'xterm-benchmark'; /* eslint-disable-line import/no-extraneous-dependencies */ 6 | 7 | const VANILLA_APP_PATH = path.join(__dirname, 'vanilla-app'); 8 | const TURBO_APP_PATH = path.join(__dirname, 'turbo-app'); 9 | const VANILLA_5_APP_PATH = path.join(__dirname, 'vanilla-5-app'); 10 | const TURBO_5_APP_PATH = path.join(__dirname, 'turbo-5-app'); 11 | const FUNCTIONS = 
['function1', 'function2', 'function3', 'function4', 'function5']; 12 | const FUNCTION_PY_PATHS = FUNCTIONS.map(f => path.join('benchmark', f, 'index.py')); 13 | const FUNCTION_REQS_PATHS = FUNCTIONS.map(f => path.join('benchmark', f, 'requirements.txt')); 14 | 15 | before(async () => { 16 | await new Promise((resolve, reject) => { 17 | const rl = readline.createInterface(process.stdin, process.stdout); 18 | rl.question(' WARNING: this will delete all Docker images, stopped containers, volumes, and networks on the system. Are you SURE you want to continue? [yN] ', answer => { 19 | if (answer != 'y' && answer != 'Y' && answer != 'YES' && answer != 'yes') { 20 | reject('Aborting...'); 21 | } else { 22 | resolve(0); 23 | } 24 | rl.close(); 25 | }); 26 | }); 27 | }); 28 | 29 | beforeEach(async () => { 30 | // clear docker cache to simulate fresh deploy 31 | console.log(' Clearing Docker cache...'); 32 | await execa('docker', ['system', 'prune', '-a', '-f']); 33 | // clear cdk.out cache to simulate fresh deploy 34 | console.log(' Deleting cdk.out...'); 35 | await fs.promises.rm(path.join(VANILLA_APP_PATH, 'cdk.out'), { recursive: true, force: true, maxRetries: 10, retryDelay: 1 }); 36 | await fs.promises.rm(path.join(TURBO_APP_PATH, 'cdk.out'), { recursive: true, force: true, maxRetries: 10, retryDelay: 1 }); 37 | }); 38 | 39 | afterEach(async () => { 40 | // clear cdk.out cache to simulate fresh deploy 41 | console.log(' Deleting cdk.out...'); 42 | await fs.promises.rm(path.join(VANILLA_APP_PATH, 'cdk.out'), { recursive: true, force: true, maxRetries: 10, retryDelay: 1 }); 43 | await fs.promises.rm(path.join(TURBO_APP_PATH, 'cdk.out'), { recursive: true, force: true, maxRetries: 10, retryDelay: 1 }); 44 | // restore index.py to unmodified version 45 | console.log(' Resetting function...'); 46 | await execa('git', ['checkout', '--', ...FUNCTION_PY_PATHS, ...FUNCTION_REQS_PATHS]); 47 | }); 48 | 49 | async function randomizeFunction() { 50 | for (const py of 
FUNCTION_PY_PATHS) { 51 | const code = await fs.promises.readFile(py, { encoding: 'utf-8' }); 52 | await fs.promises.writeFile(py, code.replace(/'RANDOM.*'/, `'RANDOM ${Date.now()}'`)); 53 | } 54 | } 55 | 56 | async function addRequirement() { 57 | for (const req of FUNCTION_REQS_PATHS) { 58 | const code = await fs.promises.readFile(req, { encoding: 'utf-8' }); 59 | await fs.promises.writeFile(req, code + '\nPillow'); 60 | } 61 | } 62 | 63 | function generateCases(appDir: string) { 64 | // we synth in a separate stage to make it clear how much of the deployment time is due to bundling (packaging dependencies locally) 65 | // we deploy using --method direct to isolate the relevant deployment part (change sets seem to have very random timing) 66 | 67 | return () => { 68 | new RuntimeCase('Synth', async () => { 69 | await randomizeFunction(); 70 | await execa('cdk', ['synth', '-q'], { cwd: appDir }); 71 | }).showRuntime(); 72 | new RuntimeCase('Deploy', async () => { 73 | await execa('cdk', ['deploy', '--app', 'cdk.out', '--method', 'direct', '--all', '--require-approval=never'], { cwd: appDir }); 74 | }).showRuntime(); 75 | new RuntimeCase('Synth (no change)', async () => { 76 | await execa('cdk', ['synth', '-q'], { cwd: appDir }); 77 | }).showRuntime(); 78 | new RuntimeCase('Deploy (no change)', async () => { 79 | await execa('cdk', ['deploy', '--app', 'cdk.out', '--method', 'direct', '--all', '--require-approval=never'], { cwd: appDir }); 80 | }).showRuntime(); 81 | new RuntimeCase('Synth (code change)', async () => { 82 | await randomizeFunction(); 83 | await execa('cdk', ['synth', '-q'], { cwd: appDir }); 84 | }).showRuntime(); 85 | new RuntimeCase('Deploy (code change)', async () => { 86 | await execa('cdk', ['deploy', '--app', 'cdk.out', '--method', 'direct', '--all', '--require-approval=never'], { cwd: appDir }); 87 | }).showRuntime(); 88 | new RuntimeCase('Synth (new requirement)', async () => { 89 | await addRequirement(); 90 | await execa('cdk', ['synth', 
'-q'], { cwd: appDir }); 91 | }).showRuntime(); 92 | new RuntimeCase('Deploy (new requirement)', async () => { 93 | await execa('cdk', ['deploy', '--app', 'cdk.out', '--method', 'direct', '--all', '--require-approval=never'], { cwd: appDir }); 94 | }).showRuntime(); 95 | new RuntimeCase('Destroy', async () => { 96 | await execa('cdk', ['destroy', '--all', '--force'], { cwd: appDir }); 97 | }).showRuntime(); 98 | }; 99 | } 100 | 101 | perfContext('Vanilla (PythonFunction)', generateCases(VANILLA_APP_PATH)); 102 | perfContext('Turbo Layers', generateCases(TURBO_APP_PATH)); 103 | perfContext('Vanilla (5x PythonFunction)', generateCases(VANILLA_5_APP_PATH)); 104 | perfContext('Turbo Layers (5x Function w/ shared layer)', generateCases(TURBO_5_APP_PATH)); 105 | -------------------------------------------------------------------------------- /benchmark/function1/index.py: -------------------------------------------------------------------------------- 1 | import requests 2 | 3 | def handler(event, context): 4 | print('RANDOM') 5 | -------------------------------------------------------------------------------- /benchmark/function1/requirements.txt: -------------------------------------------------------------------------------- 1 | boto3 2 | requests 3 | beautifulsoup4 -------------------------------------------------------------------------------- /benchmark/function2/index.py: -------------------------------------------------------------------------------- 1 | import requests 2 | 3 | def handler(event, context): 4 | print('RANDOM') 5 | -------------------------------------------------------------------------------- /benchmark/function2/requirements.txt: -------------------------------------------------------------------------------- 1 | boto3 2 | requests 3 | beautifulsoup4 -------------------------------------------------------------------------------- /benchmark/function3/index.py: -------------------------------------------------------------------------------- 1 | 
import requests 2 | 3 | def handler(event, context): 4 | print('RANDOM') 5 | -------------------------------------------------------------------------------- /benchmark/function3/requirements.txt: -------------------------------------------------------------------------------- 1 | boto3 2 | requests 3 | beautifulsoup4 -------------------------------------------------------------------------------- /benchmark/function4/index.py: -------------------------------------------------------------------------------- 1 | import requests 2 | 3 | def handler(event, context): 4 | print('RANDOM') 5 | -------------------------------------------------------------------------------- /benchmark/function4/requirements.txt: -------------------------------------------------------------------------------- 1 | boto3 2 | requests 3 | beautifulsoup4 -------------------------------------------------------------------------------- /benchmark/function5/index.py: -------------------------------------------------------------------------------- 1 | import requests 2 | 3 | def handler(event, context): 4 | print('RANDOM') 5 | -------------------------------------------------------------------------------- /benchmark/function5/requirements.txt: -------------------------------------------------------------------------------- 1 | boto3 2 | requests 3 | beautifulsoup4 -------------------------------------------------------------------------------- /benchmark/turbo-5-app/app.ts: -------------------------------------------------------------------------------- 1 | import * as cdk from 'aws-cdk-lib'; 2 | import { aws_lambda as lambda, aws_logs as logs } from 'aws-cdk-lib'; 3 | import { PythonDependencyPackager } from '../../lib'; 4 | 5 | const app = new cdk.App(); 6 | const stack = new cdk.Stack(app, 'Turbo-Layers-Benchmark', { 7 | env: { account: process.env.CDK_DEFAULT_ACCOUNT, region: process.env.CDK_DEFAULT_REGION }, 8 | }); 9 | const sharedLayer = new PythonDependencyPackager(stack, 'Packager', { 10 
| runtime: lambda.Runtime.PYTHON_3_9, 11 | }).layerFromRequirementsTxt('Layer', '../function1'); 12 | for (let i = 1; i <= 5; i++) { 13 | new lambda.Function(stack, `Function${i}`, { 14 | runtime: lambda.Runtime.PYTHON_3_9, 15 | code: lambda.Code.fromAsset(`../function${i}`), 16 | handler: 'index.handler', 17 | logRetention: logs.RetentionDays.ONE_DAY, 18 | layers: [sharedLayer], 19 | }); 20 | } 21 | -------------------------------------------------------------------------------- /benchmark/turbo-5-app/cdk.json: -------------------------------------------------------------------------------- 1 | { 2 | "app": "npx ts-node --prefer-ts-exts app.ts", 3 | "context": { 4 | "@aws-cdk/aws-lambda:recognizeLayerVersion": true, 5 | "@aws-cdk/core:checkSecretUsage": true, 6 | "@aws-cdk/core:target-partitions": [ 7 | "aws", 8 | "aws-cn" 9 | ], 10 | "@aws-cdk-containers/ecs-service-extensions:enableDefaultLogDriver": true, 11 | "@aws-cdk/aws-ec2:uniqueImdsv2TemplateName": true, 12 | "@aws-cdk/aws-ecs:arnFormatIncludesClusterName": true, 13 | "@aws-cdk/aws-iam:minimizePolicies": true, 14 | "@aws-cdk/core:validateSnapshotRemovalPolicy": true, 15 | "@aws-cdk/aws-codepipeline:crossAccountKeyAliasStackSafeResourceName": true, 16 | "@aws-cdk/aws-s3:createDefaultLoggingPolicy": true, 17 | "@aws-cdk/aws-sns-subscriptions:restrictSqsDescryption": true, 18 | "@aws-cdk/aws-apigateway:disableCloudWatchRole": true, 19 | "@aws-cdk/core:enablePartitionLiterals": true, 20 | "@aws-cdk/aws-events:eventsTargetQueueSameAccount": true, 21 | "@aws-cdk/aws-iam:standardizedServicePrincipals": true, 22 | "@aws-cdk/aws-ecs:disableExplicitDeploymentControllerForCircuitBreaker": true, 23 | "@aws-cdk/aws-iam:importedRoleStackSafeDefaultPolicyName": true, 24 | "@aws-cdk/aws-s3:serverAccessLogsUseBucketPolicy": true, 25 | "@aws-cdk/aws-route53-patters:useCertificate": true, 26 | "@aws-cdk/customresources:installLatestAwsSdkDefault": false 27 | } 28 | } 29 | 
-------------------------------------------------------------------------------- /benchmark/turbo-app/app.ts: -------------------------------------------------------------------------------- 1 | import * as cdk from 'aws-cdk-lib'; 2 | import { aws_lambda as lambda, aws_logs as logs } from 'aws-cdk-lib'; 3 | import { PythonDependencyPackager } from '../../lib'; 4 | 5 | const app = new cdk.App(); 6 | const stack = new cdk.Stack(app, 'Turbo-Layers-Benchmark', { 7 | env: { account: process.env.CDK_DEFAULT_ACCOUNT, region: process.env.CDK_DEFAULT_REGION }, 8 | }); 9 | new lambda.Function(stack, 'Function', { 10 | runtime: lambda.Runtime.PYTHON_3_9, 11 | code: lambda.Code.fromAsset('../function1'), 12 | handler: 'index.handler', 13 | logRetention: logs.RetentionDays.ONE_DAY, 14 | layers: [ 15 | new PythonDependencyPackager(stack, 'Packager', { 16 | runtime: lambda.Runtime.PYTHON_3_9, 17 | }).layerFromRequirementsTxt('Layer', '../function1'), 18 | ], 19 | }); 20 | -------------------------------------------------------------------------------- /benchmark/turbo-app/cdk.json: -------------------------------------------------------------------------------- 1 | { 2 | "app": "npx ts-node --prefer-ts-exts app.ts", 3 | "context": { 4 | "@aws-cdk/aws-lambda:recognizeLayerVersion": true, 5 | "@aws-cdk/core:checkSecretUsage": true, 6 | "@aws-cdk/core:target-partitions": [ 7 | "aws", 8 | "aws-cn" 9 | ], 10 | "@aws-cdk-containers/ecs-service-extensions:enableDefaultLogDriver": true, 11 | "@aws-cdk/aws-ec2:uniqueImdsv2TemplateName": true, 12 | "@aws-cdk/aws-ecs:arnFormatIncludesClusterName": true, 13 | "@aws-cdk/aws-iam:minimizePolicies": true, 14 | "@aws-cdk/core:validateSnapshotRemovalPolicy": true, 15 | "@aws-cdk/aws-codepipeline:crossAccountKeyAliasStackSafeResourceName": true, 16 | "@aws-cdk/aws-s3:createDefaultLoggingPolicy": true, 17 | "@aws-cdk/aws-sns-subscriptions:restrictSqsDescryption": true, 18 | "@aws-cdk/aws-apigateway:disableCloudWatchRole": true, 19 | 
"@aws-cdk/core:enablePartitionLiterals": true, 20 | "@aws-cdk/aws-events:eventsTargetQueueSameAccount": true, 21 | "@aws-cdk/aws-iam:standardizedServicePrincipals": true, 22 | "@aws-cdk/aws-ecs:disableExplicitDeploymentControllerForCircuitBreaker": true, 23 | "@aws-cdk/aws-iam:importedRoleStackSafeDefaultPolicyName": true, 24 | "@aws-cdk/aws-s3:serverAccessLogsUseBucketPolicy": true, 25 | "@aws-cdk/aws-route53-patters:useCertificate": true, 26 | "@aws-cdk/customresources:installLatestAwsSdkDefault": false 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /benchmark/vanilla-5-app/app.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable-next-line import/no-extraneous-dependencies */ 2 | import * as lambda_python from '@aws-cdk/aws-lambda-python-alpha'; 3 | import * as cdk from 'aws-cdk-lib'; 4 | import { aws_lambda as lambda, aws_logs as logs } from 'aws-cdk-lib'; 5 | 6 | const app = new cdk.App(); 7 | const stack = new cdk.Stack(app, 'Turbo-Layers-Benchmark-Vanilla', { 8 | env: { account: process.env.CDK_DEFAULT_ACCOUNT, region: process.env.CDK_DEFAULT_REGION }, 9 | }); 10 | for (let i = 1; i <= 5; i++) { 11 | new lambda_python.PythonFunction(stack, `Function${i}`, { 12 | runtime: lambda.Runtime.PYTHON_3_9, 13 | entry: `../function${i}`, 14 | logRetention: logs.RetentionDays.ONE_DAY, 15 | }); 16 | } 17 | -------------------------------------------------------------------------------- /benchmark/vanilla-5-app/cdk.json: -------------------------------------------------------------------------------- 1 | { 2 | "app": "npx ts-node --prefer-ts-exts app.ts", 3 | "context": { 4 | "@aws-cdk/aws-lambda:recognizeLayerVersion": true, 5 | "@aws-cdk/core:checkSecretUsage": true, 6 | "@aws-cdk/core:target-partitions": [ 7 | "aws", 8 | "aws-cn" 9 | ], 10 | "@aws-cdk-containers/ecs-service-extensions:enableDefaultLogDriver": true, 11 | "@aws-cdk/aws-ec2:uniqueImdsv2TemplateName": 
true, 12 | "@aws-cdk/aws-ecs:arnFormatIncludesClusterName": true, 13 | "@aws-cdk/aws-iam:minimizePolicies": true, 14 | "@aws-cdk/core:validateSnapshotRemovalPolicy": true, 15 | "@aws-cdk/aws-codepipeline:crossAccountKeyAliasStackSafeResourceName": true, 16 | "@aws-cdk/aws-s3:createDefaultLoggingPolicy": true, 17 | "@aws-cdk/aws-sns-subscriptions:restrictSqsDescryption": true, 18 | "@aws-cdk/aws-apigateway:disableCloudWatchRole": true, 19 | "@aws-cdk/core:enablePartitionLiterals": true, 20 | "@aws-cdk/aws-events:eventsTargetQueueSameAccount": true, 21 | "@aws-cdk/aws-iam:standardizedServicePrincipals": true, 22 | "@aws-cdk/aws-ecs:disableExplicitDeploymentControllerForCircuitBreaker": true, 23 | "@aws-cdk/aws-iam:importedRoleStackSafeDefaultPolicyName": true, 24 | "@aws-cdk/aws-s3:serverAccessLogsUseBucketPolicy": true, 25 | "@aws-cdk/aws-route53-patters:useCertificate": true, 26 | "@aws-cdk/customresources:installLatestAwsSdkDefault": false 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /benchmark/vanilla-app/app.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable-next-line import/no-extraneous-dependencies */ 2 | import * as lambda_python from '@aws-cdk/aws-lambda-python-alpha'; 3 | import * as cdk from 'aws-cdk-lib'; 4 | import { aws_lambda as lambda, aws_logs as logs } from 'aws-cdk-lib'; 5 | 6 | const app = new cdk.App(); 7 | const stack = new cdk.Stack(app, 'Turbo-Layers-Benchmark-Vanilla', { 8 | env: { account: process.env.CDK_DEFAULT_ACCOUNT, region: process.env.CDK_DEFAULT_REGION }, 9 | }); 10 | new lambda_python.PythonFunction(stack, 'Function', { 11 | runtime: lambda.Runtime.PYTHON_3_9, 12 | entry: '../function1', 13 | logRetention: logs.RetentionDays.ONE_DAY, 14 | }); 15 | -------------------------------------------------------------------------------- /benchmark/vanilla-app/cdk.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "app": "npx ts-node --prefer-ts-exts app.ts", 3 | "context": { 4 | "@aws-cdk/aws-lambda:recognizeLayerVersion": true, 5 | "@aws-cdk/core:checkSecretUsage": true, 6 | "@aws-cdk/core:target-partitions": [ 7 | "aws", 8 | "aws-cn" 9 | ], 10 | "@aws-cdk-containers/ecs-service-extensions:enableDefaultLogDriver": true, 11 | "@aws-cdk/aws-ec2:uniqueImdsv2TemplateName": true, 12 | "@aws-cdk/aws-ecs:arnFormatIncludesClusterName": true, 13 | "@aws-cdk/aws-iam:minimizePolicies": true, 14 | "@aws-cdk/core:validateSnapshotRemovalPolicy": true, 15 | "@aws-cdk/aws-codepipeline:crossAccountKeyAliasStackSafeResourceName": true, 16 | "@aws-cdk/aws-s3:createDefaultLoggingPolicy": true, 17 | "@aws-cdk/aws-sns-subscriptions:restrictSqsDescryption": true, 18 | "@aws-cdk/aws-apigateway:disableCloudWatchRole": true, 19 | "@aws-cdk/core:enablePartitionLiterals": true, 20 | "@aws-cdk/aws-events:eventsTargetQueueSameAccount": true, 21 | "@aws-cdk/aws-iam:standardizedServicePrincipals": true, 22 | "@aws-cdk/aws-ecs:disableExplicitDeploymentControllerForCircuitBreaker": true, 23 | "@aws-cdk/aws-iam:importedRoleStackSafeDefaultPolicyName": true, 24 | "@aws-cdk/aws-s3:serverAccessLogsUseBucketPolicy": true, 25 | "@aws-cdk/aws-route53-patters:useCertificate": true, 26 | "@aws-cdk/customresources:installLatestAwsSdkDefault": false 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@cloudsnorkel/cdk-turbo-layers", 3 | "description": "Speed-up Lambda function deployment with dependency layers built in AWS", 4 | "repository": { 5 | "type": "git", 6 | "url": "https://github.com/CloudSnorkel/cdk-turbo-layers.git" 7 | }, 8 | "scripts": { 9 | "benchmark": "npx projen benchmark", 10 | "build": "npx projen build", 11 | "bump": "npx projen bump", 12 | 
"bundle": "npx projen bundle", 13 | "bundle:package-codebuild.lambda": "npx projen bundle:package-codebuild.lambda", 14 | "bundle:package-codebuild.lambda:watch": "npx projen bundle:package-codebuild.lambda:watch", 15 | "bundle:package-nodejs.lambda": "npx projen bundle:package-nodejs.lambda", 16 | "bundle:package-nodejs.lambda:watch": "npx projen bundle:package-nodejs.lambda:watch", 17 | "clobber": "npx projen clobber", 18 | "compat": "npx projen compat", 19 | "compile": "npx projen compile", 20 | "default": "npx projen default", 21 | "docgen": "npx projen docgen", 22 | "eject": "npx projen eject", 23 | "eslint": "npx projen eslint", 24 | "integ:default:assert": "npx projen integ:default:assert", 25 | "integ:default:deploy": "npx projen integ:default:deploy", 26 | "integ:default:destroy": "npx projen integ:default:destroy", 27 | "integ:default:snapshot": "npx projen integ:default:snapshot", 28 | "integ:default:watch": "npx projen integ:default:watch", 29 | "integ:snapshot-all": "npx projen integ:snapshot-all", 30 | "package": "npx projen package", 31 | "package-all": "npx projen package-all", 32 | "package:dotnet": "npx projen package:dotnet", 33 | "package:go": "npx projen package:go", 34 | "package:java": "npx projen package:java", 35 | "package:js": "npx projen package:js", 36 | "package:python": "npx projen package:python", 37 | "post-compile": "npx projen post-compile", 38 | "post-upgrade": "npx projen post-upgrade", 39 | "pre-compile": "npx projen pre-compile", 40 | "release": "npx projen release", 41 | "test": "npx projen test", 42 | "test:watch": "npx projen test:watch", 43 | "unbump": "npx projen unbump", 44 | "upgrade": "npx projen upgrade", 45 | "watch": "npx projen watch", 46 | "projen": "npx projen" 47 | }, 48 | "author": { 49 | "name": "Amir Szekely", 50 | "email": "amir@cloudsnorkel.com", 51 | "organization": false 52 | }, 53 | "devDependencies": { 54 | "@aws-cdk/aws-lambda-python-alpha": "^2.199.0-alpha.0", 55 | "@aws-sdk/client-codebuild": 
"^3.821.0", 56 | "@aws-sdk/client-s3": "^3.821.0", 57 | "@stylistic/eslint-plugin": "^2", 58 | "@types/adm-zip": "^0.5.7", 59 | "@types/aws-lambda": "^8.10.149", 60 | "@types/jest": "^27", 61 | "@types/node": "ts5.5", 62 | "@typescript-eslint/eslint-plugin": "^8", 63 | "@typescript-eslint/parser": "^8", 64 | "adm-zip": "^0.5.16", 65 | "aws-cdk": "^2", 66 | "aws-cdk-lib": "2.123.0", 67 | "commit-and-tag-version": "^12", 68 | "constructs": "10.0.5", 69 | "esbuild": "^0.25.5", 70 | "eslint": "^9", 71 | "eslint-import-resolver-typescript": "^3.10.1", 72 | "eslint-plugin-import": "^2.31.0", 73 | "execa": "^7.2.0", 74 | "jest": "^27", 75 | "jest-junit": "^16", 76 | "jsii": "5.5.x", 77 | "jsii-diff": "^1.112.0", 78 | "jsii-docgen": "^10.5.0", 79 | "jsii-pacmak": "^1.112.0", 80 | "jsii-rosetta": "5.5.x", 81 | "projen": "^0.92.9", 82 | "ts-jest": "^27", 83 | "ts-node": "^10.9.2", 84 | "typescript": "5.5.x", 85 | "xterm-benchmark": "^0.3.1" 86 | }, 87 | "peerDependencies": { 88 | "aws-cdk-lib": "^2.123.0", 89 | "constructs": "^10.0.5" 90 | }, 91 | "keywords": [ 92 | "aws", 93 | "aws-cdk", 94 | "aws-cdk-construct", 95 | "cdk", 96 | "codebuild", 97 | "lambda", 98 | "layer", 99 | "nodejs", 100 | "python", 101 | "ruby" 102 | ], 103 | "main": "lib/index.js", 104 | "license": "Apache-2.0", 105 | "version": "0.0.0", 106 | "jest": { 107 | "coverageProvider": "v8", 108 | "testMatch": [ 109 | "/@(src|test)/**/*(*.)@(spec|test).ts?(x)", 110 | "/@(src|test)/**/__tests__/**/*.ts?(x)" 111 | ], 112 | "clearMocks": true, 113 | "collectCoverage": true, 114 | "coverageReporters": [ 115 | "json", 116 | "lcov", 117 | "clover", 118 | "cobertura", 119 | "text" 120 | ], 121 | "coverageDirectory": "coverage", 122 | "coveragePathIgnorePatterns": [ 123 | "/node_modules/" 124 | ], 125 | "testPathIgnorePatterns": [ 126 | "/node_modules/" 127 | ], 128 | "watchPathIgnorePatterns": [ 129 | "/node_modules/" 130 | ], 131 | "reporters": [ 132 | "default", 133 | [ 134 | "jest-junit", 135 | { 136 | 
"outputDirectory": "test-reports" 137 | } 138 | ] 139 | ], 140 | "preset": "ts-jest", 141 | "globals": { 142 | "ts-jest": { 143 | "tsconfig": "tsconfig.dev.json" 144 | } 145 | } 146 | }, 147 | "types": "lib/index.d.ts", 148 | "stability": "experimental", 149 | "jsii": { 150 | "outdir": "dist", 151 | "targets": { 152 | "java": { 153 | "package": "com.cloudsnorkel.cdk.turbo_layers", 154 | "maven": { 155 | "groupId": "com.cloudsnorkel", 156 | "artifactId": "cdk.turbo-layers" 157 | } 158 | }, 159 | "python": { 160 | "distName": "cloudsnorkel.cdk-turbo-layers", 161 | "module": "cloudsnorkel.cdk_turbo_layers" 162 | }, 163 | "dotnet": { 164 | "namespace": "CloudSnorkel", 165 | "packageId": "CloudSnorkel.Cdk.TurboLayers" 166 | }, 167 | "go": { 168 | "moduleName": "github.com/CloudSnorkel/cdk-turbo-layers-go" 169 | } 170 | }, 171 | "tsc": { 172 | "outDir": "lib", 173 | "rootDir": "src" 174 | } 175 | }, 176 | "funding": "https://github.com/sponsors/CloudSnorkel", 177 | "//": "~~ Generated by projen. To modify, edit .projenrc.js and run \"npx projen\"." 
178 | } 179 | -------------------------------------------------------------------------------- /src/base.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'fs'; 2 | import { join } from 'path'; 3 | import { 4 | AssetHashType, 5 | aws_codebuild as codebuild, 6 | aws_ec2 as ec2, 7 | aws_iam as iam, 8 | aws_lambda as lambda, 9 | aws_logs as logs, 10 | aws_s3 as s3, 11 | aws_s3_assets as s3_assets, 12 | BundlingOptions, 13 | CustomResource, 14 | DockerImage, 15 | Duration, 16 | FileSystem, 17 | RemovalPolicy, 18 | Size, 19 | Stack, 20 | } from 'aws-cdk-lib'; 21 | import { RetentionDays } from 'aws-cdk-lib/aws-logs'; 22 | import { BucketEncryption } from 'aws-cdk-lib/aws-s3'; 23 | import { Construct } from 'constructs'; 24 | import { PackageCodebuildFunction } from './package-codebuild-function'; 25 | import { PackageNodejsFunction } from './package-nodejs-function'; 26 | import { PackagePythonFunction } from './package-python-function'; 27 | import { PackageRubyFunction } from './package-ruby-function'; 28 | 29 | /** 30 | * Type of dependency packager. This affects timeouts and capabilities of the packager. 31 | */ 32 | export enum DependencyPackagerType { 33 | /** 34 | * Use Lambda function to package dependencies. It is much faster than the alternative, but limited to 15 minutes and can't build native extensions. 35 | */ 36 | LAMBDA, 37 | 38 | /** 39 | * Use CodeBuild to package dependencies. It is capable of everything your local machine can do, but takes a little longer to startup. 40 | */ 41 | CODEBUILD, 42 | } 43 | 44 | export interface DependencyPackagerProps { 45 | /** 46 | * Type of dependency packager. Use Lambda for speed and CodeBuild for complex dependencies that require building native extensions. 47 | * 48 | * @default {@link DependencyPackagerType.LAMBDA} 49 | */ 50 | readonly type?: DependencyPackagerType; 51 | 52 | /** 53 | * Target Lambda runtime. 
Packages will be installed for this runtime so make sure it fits your Lambda functions. 54 | */ 55 | readonly runtime?: lambda.Runtime; 56 | 57 | /** 58 | * Target Lambda architecture. Packages will be installed for this architecture so make sure it fits your Lambda functions. 59 | */ 60 | readonly architecture?: lambda.Architecture; 61 | 62 | /** 63 | * Additional commands to run before installing packages. Use this to authenticate your package repositories like CodeArtifact. 64 | * 65 | * @default [] 66 | */ 67 | readonly preinstallCommands?: string[]; 68 | 69 | /** 70 | * VPC used for packager. Use this if your package repositories are only available from within a VPC. 71 | * 72 | * @default no VPC 73 | */ 74 | readonly vpc?: ec2.IVpc; 75 | 76 | /** 77 | * VPC subnets used for packager. 78 | * 79 | * @default default subnets, if VPC is used 80 | */ 81 | readonly subnetSelection?: ec2.SubnetSelection; 82 | 83 | /** 84 | * The number of days log events are kept in CloudWatch Logs. When updating 85 | * this property, unsetting it doesn't remove the log retention policy. To 86 | * remove the retention policy, set the value to `INFINITE`. 87 | * 88 | * @default logs.RetentionDays.ONE_MONTH 89 | */ 90 | readonly logRetention?: logs.RetentionDays; 91 | 92 | /** 93 | * Removal policy for logs of image builds. If deployment fails on the custom resource, try setting this to `RemovalPolicy.RETAIN`. This way logs can still be viewed, and you can see why the build failed. 94 | * 95 | * We try to not leave anything behind when removed. But sometimes a log staying behind is useful. 96 | * 97 | * @default RemovalPolicy.DESTROY 98 | */ 99 | readonly logRemovalPolicy?: RemovalPolicy; 100 | } 101 | 102 | export interface LayerProps { 103 | /** 104 | * Always rebuild the layer, even when the dependencies definition files haven't changed. 
105 | * 106 | * @default false 107 | */ 108 | readonly alwaysRebuild?: boolean; 109 | } 110 | 111 | /** 112 | * @internal 113 | */ 114 | interface InternalBaseDependencyPackagerProps { 115 | readonly props?: DependencyPackagerProps; 116 | readonly runtimeFamily: lambda.RuntimeFamily; 117 | readonly defaultRuntime: lambda.Runtime; 118 | readonly codeBuildRuntimeInstallCommands: string[]; 119 | readonly targetDirectory: string; 120 | } 121 | 122 | /** 123 | * @internal 124 | */ 125 | export class BaseDependencyPackager extends Construct implements iam.IGrantable, ec2.IConnectable { 126 | readonly connections: ec2.Connections; 127 | readonly grantPrincipal: iam.IPrincipal; 128 | private readonly project?: codebuild.Project; 129 | private readonly packagesBucket: s3.Bucket; 130 | private readonly provider: lambda.Function; 131 | private readonly targetDirectory: string; 132 | private readonly type: DependencyPackagerType; 133 | private readonly runtime: lambda.Runtime; 134 | private readonly architecture: lambda.Architecture; 135 | 136 | constructor(scope: Construct, id: string, readonly internalProps: InternalBaseDependencyPackagerProps) { 137 | super(scope, id); 138 | 139 | this.runtime = internalProps.props?.runtime ?? internalProps.defaultRuntime; 140 | if (this.runtime.family != internalProps.runtimeFamily) { 141 | throw new Error(`PythonDependencyPackager requires python runtime, got ${this.runtime.family}`); 142 | } 143 | 144 | this.packagesBucket = new s3.Bucket(this, 'Bucket', { 145 | autoDeleteObjects: true, 146 | enforceSSL: true, 147 | blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, 148 | encryption: BucketEncryption.S3_MANAGED, 149 | removalPolicy: RemovalPolicy.DESTROY, 150 | }); 151 | 152 | this.targetDirectory = internalProps.targetDirectory; 153 | this.architecture = internalProps.props?.architecture ?? 
lambda.Architecture.X86_64; 154 | 155 | const logGroup = new logs.LogGroup( 156 | this, 157 | 'Logs', 158 | { 159 | retention: internalProps.props?.logRetention ?? RetentionDays.ONE_MONTH, 160 | removalPolicy: internalProps.props?.logRemovalPolicy ?? RemovalPolicy.DESTROY, 161 | }, 162 | ); 163 | 164 | this.type = this.internalProps.props?.type ?? DependencyPackagerType.LAMBDA; 165 | if (this.type == DependencyPackagerType.CODEBUILD) { 166 | this.project = new codebuild.Project(this, 'Packager', { 167 | description: `Lambda dependency packager for ${this.runtime} in ${Stack.of(this).stackName}`, 168 | vpc: internalProps.props?.vpc, 169 | subnetSelection: internalProps.props?.subnetSelection, 170 | environment: { 171 | buildImage: this.architecture == lambda.Architecture.X86_64 ? 172 | codebuild.LinuxBuildImage.AMAZON_LINUX_2_5 : codebuild.LinuxArmBuildImage.AMAZON_LINUX_2_STANDARD_3_0, 173 | }, 174 | logging: { 175 | cloudWatch: { 176 | logGroup, 177 | }, 178 | }, 179 | buildSpec: codebuild.BuildSpec.fromObject({ 180 | version: '0.2', 181 | phases: { 182 | build: { 183 | commands: [ 184 | 'echo The real build spec will be put together by the custom resource', 185 | 'exit 1', 186 | ], 187 | }, 188 | }, 189 | }), 190 | }); 191 | 192 | this.connections = internalProps.props?.vpc ? 
this.project.connections : new ec2.Connections(); 193 | this.grantPrincipal = this.project.grantPrincipal; 194 | 195 | this.provider = new PackageCodebuildFunction(this, 'Package Handler', { 196 | description: `Turbo layer packager for ${this.runtime} using CodeBuild`, 197 | initialPolicy: [ 198 | new iam.PolicyStatement({ 199 | actions: ['codebuild:StartBuild'], 200 | resources: [this.project.projectArn], 201 | }), 202 | ], 203 | logGroup: logGroup, 204 | }); 205 | this.provider.node.addDependency(this.packagesBucket); // wait for everything, including auto deleter 206 | this.provider.node.addDependency(this.project); 207 | this.packagesBucket.grantDelete(this.provider); 208 | } else if (this.type == DependencyPackagerType.LAMBDA) { 209 | const lambdaProps = { 210 | description: `Turbo layer packager for ${this.runtime}`, 211 | runtime: this.runtime, 212 | timeout: Duration.minutes(15), 213 | memorySize: 1024, 214 | ephemeralStorageSize: Size.gibibytes(10), 215 | logGroup: logGroup, 216 | architecture: this.architecture, 217 | vpc: internalProps.props?.vpc, 218 | vpcSubnets: internalProps.props?.subnetSelection, 219 | // TODO for CodeArtifact login -- layers: [new lambda_layer_awscli.AwsCliLayer(this, 'AWS CLI Layer')], 220 | }; 221 | 222 | if (this.runtime.family == lambda.RuntimeFamily.PYTHON) { 223 | this.provider = new PackagePythonFunction(this, 'Packager', lambdaProps); 224 | } else if (this.runtime.family == lambda.RuntimeFamily.NODEJS) { 225 | this.provider = new PackageNodejsFunction(this, 'Packager', lambdaProps); 226 | // we can't set the runtime from here, so we have to manually override it. 227 | // projen puts `...props` before its own `runtime` setting and so its default `runtime` always wins. 
228 | // https://github.com/projen/projen/blob/564341a55309e06939c86248bc76cabc590fd835/src/awscdk/lambda-function.ts#L253-L256 229 | const func = this.provider.node.defaultChild as lambda.CfnFunction; 230 | func.runtime = this.runtime.name; 231 | } else if (this.runtime.family == lambda.RuntimeFamily.RUBY) { 232 | this.provider = new PackageRubyFunction(this, 'Packager', lambdaProps); 233 | } else { 234 | throw new Error(`Runtime doesn't support Lambda packager: ${this.runtime}`); 235 | } 236 | this.connections = internalProps.props?.vpc ? this.provider.connections : new ec2.Connections(); 237 | this.grantPrincipal = this.provider.grantPrincipal; 238 | } else { 239 | throw new Error(`Unsupported type: ${this.type}`); 240 | } 241 | 242 | this.packagesBucket.grantWrite(this.grantPrincipal); 243 | this.packagesBucket.grantDelete(this.grantPrincipal); 244 | } 245 | 246 | /** 247 | * @internal 248 | */ 249 | protected _newLayer(id: string, path: string, assetGenerator: (outputDir: string) => void, 250 | assetHash: string, commands: string[], layerProps?: LayerProps) { 251 | 252 | return new LambdaDependencyLayer(this, id, { 253 | path, 254 | bundling: { 255 | local: { 256 | tryBundle(outputDir: string, _: BundlingOptions): boolean { 257 | assetGenerator(outputDir); 258 | return true; 259 | }, 260 | }, 261 | // no fallback 262 | image: DockerImage.fromRegistry('public.ecr.aws/docker/library/busybox:stable'), 263 | command: ['exit 1'], 264 | }, 265 | assetHash: assetHash, 266 | alwaysRebuild: layerProps?.alwaysRebuild ?? false, 267 | project: this.project, 268 | provider: this.provider, 269 | packagesBucket: this.packagesBucket, 270 | preinstallCommands: this.internalProps.props?.preinstallCommands ?? 
[], 271 | codeBuildRuntimeInstallCommands: this.internalProps.codeBuildRuntimeInstallCommands, 272 | commands: commands, 273 | targetDirectory: this.targetDirectory, 274 | runtime: this.runtime, 275 | architecture: this.architecture, 276 | }).layer; 277 | } 278 | 279 | /** 280 | * @internal 281 | */ 282 | protected _hashFiles(basePath: string, required: string[], optional?: string[]): string { 283 | let hash = ''; 284 | for (const f of required) { 285 | hash += FileSystem.fingerprint(join(basePath, f)); 286 | } 287 | for (const f of optional ?? []) { 288 | const p = join(basePath, f); 289 | if (fs.existsSync(p)) { 290 | hash += FileSystem.fingerprint(p); 291 | } 292 | } 293 | return hash; 294 | } 295 | } 296 | 297 | /** 298 | * @internal 299 | */ 300 | interface LambdaDependencyLayerProps { 301 | readonly path: string; 302 | readonly assetHash: string; 303 | readonly bundling: BundlingOptions; 304 | readonly alwaysRebuild: boolean; 305 | readonly project?: codebuild.Project; 306 | readonly provider: lambda.Function; 307 | readonly packagesBucket: s3.Bucket; 308 | readonly codeBuildRuntimeInstallCommands: string[]; 309 | readonly preinstallCommands: string[]; 310 | readonly commands: string[]; 311 | readonly targetDirectory: string; 312 | readonly runtime: lambda.Runtime; 313 | readonly architecture: lambda.Architecture; 314 | } 315 | 316 | class LambdaDependencyLayer extends Construct { 317 | readonly layer: lambda.LayerVersion; 318 | 319 | constructor(scope: Construct, id: string, readonly props: LambdaDependencyLayerProps) { 320 | super(scope, id); 321 | 322 | // We hash the output files instead of the whole directory because: 323 | // 324 | // 1. It's faster than hashing the entire source directory 325 | // 2. It allows the inline versions to use '.' as a fake source directory without conflict 326 | // 3. 
It allows multiple source folders to share the same asset if the dependencies are the same 327 | const asset = new s3_assets.Asset(this, 'Dependencies Definition', { 328 | path: props.path, 329 | assetHashType: AssetHashType.CUSTOM, 330 | bundling: props.bundling, 331 | assetHash: props.assetHash, 332 | }); 333 | 334 | const cr = new CustomResource(this, 'Layer Packager', { 335 | resourceType: 'Custom::LayerPackager', 336 | serviceToken: props.provider.functionArn, 337 | properties: { 338 | ProjectName: props.project?.projectName, 339 | BucketName: props.packagesBucket.bucketName, 340 | AlwaysRebuild: props.alwaysRebuild ? Date.now() : undefined, 341 | CodeBuildInstallCommands: props.codeBuildRuntimeInstallCommands, 342 | PreinstallCommands: props.preinstallCommands, 343 | Commands: props.commands, 344 | PackagedDirectory: props.targetDirectory, 345 | AssetBucket: asset.s3BucketName, 346 | AssetKey: asset.s3ObjectKey, 347 | }, 348 | }); 349 | 350 | cr.node.addDependency(props.provider); 351 | if (props.project) { 352 | cr.node.addDependency(props.project); 353 | asset.grantRead(props.project); 354 | } else { 355 | asset.grantRead(props.provider); 356 | } 357 | 358 | this.layer = new lambda.LayerVersion(this, 'Layer', { 359 | description: `Automatically generated by turbo layers for ${asset}`, 360 | code: lambda.Code.fromBucket(props.packagesBucket, cr.ref), 361 | compatibleRuntimes: [props.runtime], 362 | compatibleArchitectures: [props.architecture], 363 | }); 364 | } 365 | } -------------------------------------------------------------------------------- /src/cr.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable-next-line import/no-extraneous-dependencies */ 2 | import * as AWSLambda from 'aws-lambda'; 3 | 4 | export async function customResourceRespond(event: AWSLambda.CloudFormationCustomResourceEvent, responseStatus: string, 5 | reason: string, physicalResourceId: string, data: any) { 6 | const 
responseBody = JSON.stringify({ 7 | Status: responseStatus, 8 | Reason: reason, 9 | PhysicalResourceId: physicalResourceId, 10 | StackId: event.StackId, 11 | RequestId: event.RequestId, 12 | LogicalResourceId: event.LogicalResourceId, 13 | NoEcho: false, 14 | Data: data, 15 | }); 16 | 17 | console.log('Responding', responseBody); 18 | 19 | // eslint-disable-next-line @typescript-eslint/no-require-imports 20 | const parsedUrl = require('url').parse(event.ResponseURL); 21 | const requestOptions = { 22 | hostname: parsedUrl.hostname, 23 | path: parsedUrl.path, 24 | method: 'PUT', 25 | headers: { 26 | 'content-type': '', 27 | 'content-length': responseBody.length, 28 | }, 29 | }; 30 | 31 | return new Promise((resolve, reject) => { 32 | try { 33 | // eslint-disable-next-line @typescript-eslint/no-require-imports 34 | const request = require('https').request(requestOptions, resolve); 35 | request.on('error', reject); 36 | request.write(responseBody); 37 | request.end(); 38 | } catch (e) { 39 | reject(e); 40 | } 41 | }); 42 | } 43 | -------------------------------------------------------------------------------- /src/credentials.ts: -------------------------------------------------------------------------------- 1 | // import { aws_codeartifact as codeartifact, aws_ecr as ecr } from 'aws-cdk-lib'; 2 | // 3 | // /** 4 | // * Represents credentials used to access Python packages. 5 | // */ 6 | // export abstract class PythonCredential { 7 | // /** 8 | // * Creates a DockerCredential for DockerHub. 9 | // * Convenience method for `customRegistry('https://index.docker.io/v1/', opts)`. 10 | // */ 11 | // // public static dockerHub(secret: secretsmanager.ISecret, opts: ExternalDockerCredentialOptions = {}): DockerCredential { 12 | // // return new ExternalDockerCredential('https://index.docker.io/v1/', secret, opts); 13 | // // } 14 | // 15 | // /** 16 | // * Creates a DockerCredential for a registry, based on its domain name (e.g., 'www.example.com'). 
17 | // */ 18 | // // public static customRegistry( 19 | // // registryDomain: string, 20 | // // secret: secretsmanager.ISecret, 21 | // // opts: ExternalDockerCredentialOptions = {}): DockerCredential { 22 | // // return new ExternalDockerCredential(registryDomain, secret, opts); 23 | // // } 24 | // 25 | // /** 26 | // * Creates a DockerCredential for one or more ECR repositories. 27 | // * 28 | // * NOTE - All ECR repositories in the same account and region share a domain name 29 | // * (e.g., 0123456789012.dkr.ecr.eu-west-1.amazonaws.com), and can only have one associated 30 | // * set of credentials (and DockerCredential). Attempting to associate one set of credentials 31 | // * with one ECR repo and another with another ECR repo in the same account and region will 32 | // * result in failures when using these credentials in the pipeline. 33 | // */ 34 | // public static codeArtifact(repositories: ecr.IRepository[], opts?: EcrDockerCredentialOptions): PythonCredential { 35 | // return new CodeArtifactPythonCredential(repositories, opts ?? {}); 36 | // } 37 | // 38 | // constructor(protected readonly usages?: DockerCredentialUsage[]) { } 39 | // 40 | // /** 41 | // * Determines if this credential is relevant to the input usage. 42 | // * @internal 43 | // */ 44 | // public _applicableForUsage(usage: DockerCredentialUsage) { 45 | // return !this.usages || this.usages.includes(usage); 46 | // } 47 | // 48 | // /** 49 | // * Grant read-only access to the registry credentials. 50 | // * This grants read access to any secrets, and pull access to any repositories. 51 | // */ 52 | // public abstract grantRead(grantee: iam.IGrantable, usage: DockerCredentialUsage): void; 53 | // 54 | // /** 55 | // * Creates and returns the credential configuration, to be used by `cdk-assets` 56 | // * to support the `docker-credential-cdk-assets` tool for `docker login`. 
57 | // * @internal 58 | // */ 59 | // public abstract _renderLoginCommands(): string[] 60 | // } 61 | // 62 | // /** DockerCredential defined by a set of ECR repositories in the same account & region */ 63 | // class CodeArtifactPythonCredential extends PythonCredential { 64 | // public readonly registryDomain: string; 65 | // 66 | // constructor(private readonly repositories: codeartifact.CfnRepository[], private readonly opts: EcrDockerCredentialOptions) { 67 | // super(opts.usages); 68 | // 69 | // if (repositories.length === 0) { 70 | // throw new Error('must supply at least one `ecr.IRepository` to create an `EcrDockerCredential`'); 71 | // } 72 | // this.registryDomain = Fn.select(0, Fn.split('/', repositories[0].repositoryUri)); 73 | // } 74 | // 75 | // public grantRead(grantee: iam.IGrantable, usage: DockerCredentialUsage) { 76 | // if (!this._applicableForUsage(usage)) { return; } 77 | // 78 | // if (this.opts.assumeRole) { 79 | // grantee.grantPrincipal.addToPrincipalPolicy(new iam.PolicyStatement({ 80 | // actions: ['sts:AssumeRole'], 81 | // resources: [this.opts.assumeRole.roleArn], 82 | // })); 83 | // } 84 | // const role = this.opts.assumeRole ?? 
grantee; 85 | // this.repositories.forEach(repo => repo.grantPull(role)); 86 | // } 87 | // 88 | // public _renderLoginCommands(): string[] { 89 | // return [ 90 | // 'aws codeartifact login --domain test-domain --repository test-repo --tool pip', 91 | // ]; 92 | // } 93 | // } -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | export { LayerProps, DependencyPackagerProps, DependencyPackagerType } from './base'; 2 | export * from './python'; 3 | export * from './nodejs'; 4 | export * from './ruby'; 5 | export * from './java'; -------------------------------------------------------------------------------- /src/java.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'fs'; 2 | import { join } from 'path'; 3 | import { aws_lambda as lambda } from 'aws-cdk-lib'; 4 | import { Construct } from 'constructs'; 5 | import { BaseDependencyPackager, DependencyPackagerProps, LayerProps } from './base'; 6 | 7 | /** 8 | * Packager for creating Lambda layers for Java dependencies in AWS. Nothing is done locally so this doesn't require Docker and doesn't upload huge files to S3. 9 | */ 10 | export class JavaDependencyPackager extends BaseDependencyPackager { 11 | private static basePackage(props?: DependencyPackagerProps) { 12 | switch (props?.runtime ?? lambda.Runtime.JAVA_11) { 13 | case lambda.Runtime.JAVA_8: 14 | case lambda.Runtime.JAVA_8_CORRETTO: 15 | return 'java-1.8.0-amazon-corretto'; 16 | case lambda.Runtime.JAVA_11: 17 | return 'java-11-amazon-corretto'; 18 | default: 19 | throw new Error(`We do not support ${props?.runtime?.name} yet`); 20 | } 21 | } 22 | 23 | private static packageArch(props?: DependencyPackagerProps) { 24 | switch (props?.architecture ?? 
lambda.Architecture.X86_64) { 25 | case lambda.Architecture.X86_64: 26 | return 'x86_64'; 27 | case lambda.Architecture.ARM_64: 28 | return 'aarch64'; 29 | default: 30 | throw new Error(`We do not support ${props?.architecture?.name} yet`); 31 | } 32 | } 33 | 34 | constructor(scope: Construct, id: string, props?: DependencyPackagerProps) { 35 | super(scope, id, { 36 | props, 37 | runtimeFamily: lambda.RuntimeFamily.JAVA, 38 | defaultRuntime: lambda.Runtime.JAVA_11, 39 | codeBuildRuntimeInstallCommands: [ 40 | `echo Installing ${JavaDependencyPackager.basePackage(props)}`, 41 | `yum install -y ${JavaDependencyPackager.basePackage(props)}-devel.${JavaDependencyPackager.packageArch(props)} ${JavaDependencyPackager.basePackage(props)}-headless.${JavaDependencyPackager.packageArch(props)}`, 42 | `alternatives --set java $(rpm -ql ${JavaDependencyPackager.basePackage(props)}-headless.${JavaDependencyPackager.packageArch(props)} | grep bin/java$ | head -n 1)`, 43 | `alternatives --set javac $(rpm -ql ${JavaDependencyPackager.basePackage(props)}-devel.${JavaDependencyPackager.packageArch(props)} | grep bin/javac$ | head -n 1)`, 44 | ], 45 | targetDirectory: 'java', 46 | }); 47 | } 48 | 49 | /** 50 | * Create a layer for dependencies defined in pom.xml installed with Maven. 
51 | */ 52 | layerFromMaven(id: string, path: string, props?: LayerProps) { 53 | return this._newLayer( 54 | id, path, 55 | outputDir => { 56 | fs.copyFileSync(join(path, 'pom.xml'), join(outputDir, 'pom.xml')); 57 | }, 58 | this._hashFiles(path, ['pom.xml']), 59 | [ 60 | 'mvn -ntp -B dependency:copy-dependencies -DoutputDirectory=java/lib', 61 | ], 62 | props, 63 | ); 64 | } 65 | } -------------------------------------------------------------------------------- /src/nodejs.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'fs'; 2 | import { join } from 'path'; 3 | import { aws_lambda as lambda } from 'aws-cdk-lib'; 4 | import { Construct } from 'constructs'; 5 | import { BaseDependencyPackager, DependencyPackagerProps, LayerProps } from './base'; 6 | 7 | /** 8 | * Packager for creating Lambda layers for Node.js dependencies in AWS. Nothing is done locally so this doesn't require Docker, doesn't download any packages and doesn't upload huge files to S3. 9 | */ 10 | export class NodejsDependencyPackager extends BaseDependencyPackager { 11 | private static runtimeVersion(props?: DependencyPackagerProps) { 12 | return (props?.runtime ?? lambda.Runtime.NODEJS_16_X).name.replace('nodejs', '').replace('.x', ''); // TODO ugly 13 | } 14 | 15 | constructor(scope: Construct, id: string, props?: DependencyPackagerProps) { 16 | super(scope, id, { 17 | props, 18 | runtimeFamily: lambda.RuntimeFamily.NODEJS, 19 | defaultRuntime: lambda.Runtime.NODEJS_16_X, 20 | codeBuildRuntimeInstallCommands: [ 21 | `echo Installing Node.js ${NodejsDependencyPackager.runtimeVersion(props)}`, 22 | `n ${NodejsDependencyPackager.runtimeVersion(props)}`, 23 | ], 24 | targetDirectory: 'nodejs', 25 | }); 26 | } 27 | 28 | /** 29 | * Create a layer for dependencies passed as an argument and installed with npm. 
30 | */ 31 | layerFromInline(id: string, libraries: string[], props?: LayerProps) { 32 | return this._newLayer( 33 | id, '.', // uniqueTempDir, 34 | outputDir => { 35 | const packageJson = librariesToPackageJson(libraries); 36 | fs.writeFileSync(join(outputDir, 'package.json'), JSON.stringify(packageJson)); 37 | }, 38 | libraries.join(','), // CDK will hash it for us 39 | [ 40 | 'npm i', 41 | 'mkdir nodejs', 42 | 'mv node_modules nodejs/', 43 | ], 44 | props, 45 | ); 46 | } 47 | 48 | /** 49 | * Create a layer for dependencies defined in package.json and (optionally) package-lock.json and installed with npm. 50 | */ 51 | layerFromPackageJson(id: string, path: string, props?: LayerProps) { 52 | return this._newLayer( 53 | id, path, 54 | outputDir => { 55 | fs.copyFileSync(join(path, 'package.json'), join(outputDir, 'package.json')); 56 | if (fs.existsSync(join(path, 'package-lock.json'))) { 57 | fs.copyFileSync(join(path, 'package-lock.json'), join(outputDir, 'package-lock.json')); 58 | } 59 | }, 60 | this._hashFiles(path, ['package.json'], ['package-lock.json']), 61 | [ 62 | 'npm ci', 63 | 'mkdir nodejs', 64 | 'mv node_modules nodejs/', 65 | ], 66 | props, 67 | ); 68 | } 69 | 70 | /** 71 | * Create a layer for dependencies defined in package.json and yarn.lock and installed with yarn. 
72 | */ 73 | layerFromYarn(id: string, path: string, props?: LayerProps) { 74 | return this._newLayer( 75 | id, path, 76 | outputDir => { 77 | fs.copyFileSync(join(path, 'package.json'), join(outputDir, 'package.json')); 78 | fs.copyFileSync(join(path, 'yarn.lock'), join(outputDir, 'yarn.lock')); 79 | }, 80 | this._hashFiles(path, ['package.json'], ['yarn.lock']), 81 | [ 82 | 'which yarn || npm install --global yarn', 83 | 'yarn install --check-files --frozen-lockfile', 84 | 'mkdir nodejs', 85 | 'mv node_modules nodejs/', 86 | ], 87 | props, 88 | ); 89 | } 90 | } 91 | 92 | /** 93 | * @internal 94 | */ 95 | export function librariesToPackageJson(libraries: string[]): any { 96 | let packageJson = { 97 | dependencies: <{ [id: string]: string }>{}, 98 | }; 99 | for (const library of libraries) { 100 | let prefix = ''; 101 | let libraryWithoutPrefix = library; 102 | if (library.length && library[0] == '@') { 103 | prefix = '@'; 104 | libraryWithoutPrefix = library.substring(1); 105 | } 106 | const [name, version] = libraryWithoutPrefix.split('@', 2); 107 | packageJson.dependencies[prefix + name] = version ?? '*'; 108 | } 109 | return packageJson; 110 | } -------------------------------------------------------------------------------- /src/package-codebuild-function.ts: -------------------------------------------------------------------------------- 1 | // ~~ Generated by projen. To modify, edit .projenrc.js and run "npx projen". 2 | import * as path from 'path'; 3 | import * as lambda from 'aws-cdk-lib/aws-lambda'; 4 | import { Construct } from 'constructs'; 5 | 6 | /** 7 | * Props for PackageCodebuildFunction 8 | */ 9 | export interface PackageCodebuildFunctionProps extends lambda.FunctionOptions { 10 | } 11 | 12 | /** 13 | * An AWS Lambda function which executes src/package-codebuild. 
14 | */ 15 | export class PackageCodebuildFunction extends lambda.Function { 16 | constructor(scope: Construct, id: string, props?: PackageCodebuildFunctionProps) { 17 | super(scope, id, { 18 | description: 'src/package-codebuild.lambda.ts', 19 | ...props, 20 | runtime: new lambda.Runtime('nodejs18.x', lambda.RuntimeFamily.NODEJS), 21 | handler: 'index.handler', 22 | code: lambda.Code.fromAsset(path.join(__dirname, '../assets/package-codebuild.lambda')), 23 | }); 24 | this.addEnvironment('AWS_NODEJS_CONNECTION_REUSE_ENABLED', '1', { removeInEdge: true }); 25 | } 26 | } -------------------------------------------------------------------------------- /src/package-codebuild.lambda.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable-next-line import/no-extraneous-dependencies,import/no-unresolved */ 2 | import { CodeBuildClient, StartBuildCommand } from '@aws-sdk/client-codebuild'; 3 | import { DeleteObjectCommand, S3Client } from '@aws-sdk/client-s3'; 4 | import * as AWSLambda from 'aws-lambda'; 5 | import { customResourceRespond } from './cr'; 6 | 7 | const codebuild = new CodeBuildClient(); 8 | const s3 = new S3Client(); 9 | 10 | 11 | /* eslint-disable @typescript-eslint/no-require-imports, import/no-extraneous-dependencies */ 12 | export async function handler(event: AWSLambda.CloudFormationCustomResourceEvent, context: AWSLambda.Context) { 13 | try { 14 | console.log(JSON.stringify({ ...event, ResponseURL: '...' 
})); 15 | 16 | const projectName = event.ResourceProperties.ProjectName; 17 | const installRuntimeCommands = event.ResourceProperties.CodeBuildInstallCommands; 18 | const preinstallCommands = event.ResourceProperties.PreinstallCommands; 19 | const commands = event.ResourceProperties.Commands; 20 | const packagedDirectory = event.ResourceProperties.PackagedDirectory; 21 | const assetBucket = event.ResourceProperties.AssetBucket; 22 | const assetKey = event.ResourceProperties.AssetKey; 23 | const targetBucket = event.ResourceProperties.BucketName; 24 | 25 | const failCheckCommands = [`if [ $CODEBUILD_BUILD_SUCCEEDING -ne 1 ]; then 26 | cat < /tmp/payload.json 27 | { 28 | "StackId": "$STACK_ID", 29 | "RequestId": "$REQUEST_ID", 30 | "LogicalResourceId": "$LOGICAL_RESOURCE_ID", 31 | "PhysicalResourceId": "fail", 32 | "Status": "FAILED", 33 | "Reason": \`tail -c 400 /tmp/codebuild.log | jq -Rsa .\`, 34 | "Data": {} 35 | } 36 | EOF 37 | if [ "$RESPONSE_URL" != "unspecified" ]; then 38 | jq . /tmp/payload.json; curl -fsSL -X PUT -H "Content-Type:" -d "@/tmp/payload.json" "$RESPONSE_URL" 39 | fi 40 | fi`]; 41 | 42 | switch (event.RequestType) { 43 | case 'Create': 44 | case 'Update': 45 | console.log(`Starting CodeBuild project ${projectName}`); 46 | await codebuild.send(new StartBuildCommand({ 47 | projectName, 48 | sourceTypeOverride: 'S3', 49 | sourceLocationOverride: `${assetBucket}/${assetKey}`, 50 | buildspecOverride: JSON.stringify({ 51 | version: '0.2', 52 | env: { 53 | variables: { 54 | STACK_ID: event.StackId, 55 | REQUEST_ID: event.RequestId, 56 | LOGICAL_RESOURCE_ID: event.LogicalResourceId, 57 | RESPONSE_URL: event.ResponseURL, 58 | }, 59 | }, 60 | phases: { 61 | install: { 62 | commands: logCommands(installRuntimeCommands), 63 | finally: failCheckCommands, 64 | }, 65 | pre_build: { 66 | commands: logCommands(preinstallCommands), 67 | finally: failCheckCommands, 68 | }, 69 | build: { 70 | commands: logCommands( 71 | commands.concat([ 72 | // `find 
${internalProps.targetDirectory} | xargs -L 100 touch -m -t 200001010101.01`, // consistent dates for consistent zips 73 | `zip -r package.zip ${packagedDirectory}`, 74 | 'KEY=`sha256sum package.zip | cut -d " " -f 1`.zip', 75 | `aws s3 cp --no-progress package.zip s3://${targetBucket}/\${KEY}`, 76 | 'echo -n $KEY > KEY', 77 | ]), 78 | ), 79 | }, 80 | post_build: { 81 | commands: [ 82 | 'if [ $CODEBUILD_BUILD_SUCCEEDING -eq 1 ]; then\n' + 83 | 'cat < /tmp/payload.json\n' + 84 | '{\n' + 85 | ' "StackId": "$STACK_ID",\n' + 86 | ' "RequestId": "$REQUEST_ID",\n' + 87 | ' "LogicalResourceId": "$LOGICAL_RESOURCE_ID",\n' + 88 | // the physical resource id is the hash of the result package so changes will create a new layer 89 | ' "PhysicalResourceId": "`cat KEY`",\n' + 90 | ' "Status": "SUCCESS",\n' + 91 | ' "Reason": "",\n' + 92 | ' "Data": {}\n' + 93 | '}\n' + 94 | 'EOF\n' + 95 | 'if [ "$RESPONSE_URL" != "unspecified" ]; then\n' + 96 | 'jq . /tmp/payload.json; curl -fsSL -X PUT -H "Content-Type:" -d "@/tmp/payload.json" "$RESPONSE_URL"\n' + 97 | 'fi\n' + 98 | 'fi', 99 | ], 100 | finally: failCheckCommands, 101 | }, 102 | }, 103 | }, null, 2), 104 | })); 105 | break; 106 | case 'Delete': 107 | try { 108 | await s3.send(new DeleteObjectCommand({ 109 | Bucket: targetBucket, 110 | Key: event.PhysicalResourceId, 111 | })); 112 | } catch (e) { 113 | console.error(`Ignoring error to delete s3://${targetBucket}/${event.PhysicalResourceId}`); 114 | } 115 | await customResourceRespond(event, 'SUCCESS', '', event.PhysicalResourceId, {}); 116 | break; 117 | } 118 | } catch (e) { 119 | console.error(e); 120 | await customResourceRespond(event, 'FAILED', (e as Error).message || 'Internal Error', context.logStreamName, {}); 121 | } 122 | } 123 | 124 | /** 125 | * Convert commands to a command that logs everything into /tmp/codebuild.log. 
126 | * 127 | * @return set -o pipefail ; ( set -ex; command-1 ; command 2 ; ) 2>&1 | tee /tmp/codebuild.log 128 | */ 129 | function logCommands(commands: string[]): string[] { 130 | return [['set -o pipefail ; ( set -ex'].concat(commands).concat([' ) 2>&1 | tee /tmp/codebuild.log']).join(' ; ')]; 131 | } 132 | -------------------------------------------------------------------------------- /src/package-nodejs-function.ts: -------------------------------------------------------------------------------- 1 | // ~~ Generated by projen. To modify, edit .projenrc.js and run "npx projen". 2 | import * as path from 'path'; 3 | import * as lambda from 'aws-cdk-lib/aws-lambda'; 4 | import { Construct } from 'constructs'; 5 | 6 | /** 7 | * Props for PackageNodejsFunction 8 | */ 9 | export interface PackageNodejsFunctionProps extends lambda.FunctionOptions { 10 | } 11 | 12 | /** 13 | * An AWS Lambda function which executes src/package-nodejs. 14 | */ 15 | export class PackageNodejsFunction extends lambda.Function { 16 | constructor(scope: Construct, id: string, props?: PackageNodejsFunctionProps) { 17 | super(scope, id, { 18 | description: 'src/package-nodejs.lambda.ts', 19 | ...props, 20 | runtime: new lambda.Runtime('nodejs18.x', lambda.RuntimeFamily.NODEJS), 21 | handler: 'index.handler', 22 | code: lambda.Code.fromAsset(path.join(__dirname, '../assets/package-nodejs.lambda')), 23 | }); 24 | this.addEnvironment('AWS_NODEJS_CONNECTION_REUSE_ENABLED', '1', { removeInEdge: true }); 25 | } 26 | } -------------------------------------------------------------------------------- /src/package-nodejs.lambda.ts: -------------------------------------------------------------------------------- 1 | import * as child_process from 'child_process'; 2 | import * as fs from 'fs'; 3 | import * as path from 'path'; 4 | import { DeleteObjectCommand, GetObjectCommand, PutObjectCommand, S3Client } from '@aws-sdk/client-s3'; 5 | import * as AWSLambda from 'aws-lambda'; 6 | import { 
customResourceRespond } from './cr';
// eslint-disable-next-line @typescript-eslint/no-require-imports
const AdmZip = require('adm-zip');

const s3 = new S3Client();


/* eslint-disable @typescript-eslint/no-require-imports, import/no-extraneous-dependencies */
/**
 * Custom resource handler that installs Node.js dependencies inside the Lambda
 * itself, zips them up and uploads the package to the packages bucket.
 *
 * The physical resource id is `<sha256 of the zip>.zip`, so an unchanged
 * dependency set produces the same id and no layer replacement.
 */
export async function handler(event: AWSLambda.CloudFormationCustomResourceEvent, context: AWSLambda.Context) {
  try {
    console.log(JSON.stringify({
      ...event,
      ResponseURL: '...',
    }));

    const preinstallCommands = event.ResourceProperties.PreinstallCommands;
    const commands = event.ResourceProperties.Commands;
    const packagedDirectory = event.ResourceProperties.PackagedDirectory;
    const assetBucket = event.ResourceProperties.AssetBucket;
    const assetKey = event.ResourceProperties.AssetKey;
    const targetBucket = event.ResourceProperties.BucketName;

    // npm needs a writable HOME and prefix; only /tmp is writable in Lambda
    try {
      fs.mkdirSync('/tmp/home');
    } catch (err) {
      // @ts-ignore
      if (err.code !== 'EEXIST') {
        throw err;
      }
    }
    process.env.HOME = '/tmp/home';
    process.env.PATH += ':/tmp/home/.local/bin:/tmp/home/.npm-packages/bin';
    process.env.NPM_PACKAGES = '/tmp/home/.npm-packages/';
    await execAndGetOutput('npm config set prefix /tmp/home/.npm-packages', '/tmp/home');

    // fail the custom resource shortly before Lambda itself times out --
    // otherwise CloudFormation would wait a full hour for a response
    setTimeout(() => {
      customResourceRespond(event, 'FAILED', 'Lambda timed out. Try using CodeBuild packager instead.', 'ERROR', {}).catch(console.error);
    }, context.getRemainingTimeInMillis() - 5000);

    switch (event.RequestType) {
      case 'Create':
      case 'Update':
        try {
          const key = await install(assetBucket, assetKey, preinstallCommands.concat(commands), packagedDirectory, targetBucket);
          await customResourceRespond(event, 'SUCCESS', '', key, {});
        } catch (err) {
          console.error(err);
          await customResourceRespond(event, 'FAILED', (err as Error).message || 'Internal Error', 'ERROR', {});
        }
        break;
      case 'Delete':
        // best-effort delete; never fail the stack over a missing package
        try {
          await s3.send(new DeleteObjectCommand({
            Bucket: targetBucket,
            Key: event.PhysicalResourceId,
          }));
        } catch (error) {
          console.error(`Ignoring error to delete s3://${targetBucket}/${event.PhysicalResourceId}`);
        }
        await customResourceRespond(event, 'SUCCESS', '', event.PhysicalResourceId, {});
        break;
    }
  } catch (e) {
    console.error(e);
    await customResourceRespond(event, 'FAILED', (e as Error).message || 'Internal Error', context.logStreamName, {});
  }
}

/**
 * Download and extract the asset, run the install commands, zip the packaged
 * directory and upload it to the target bucket.
 *
 * @returns the uploaded S3 key (`<sha256>.zip`)
 * @throws Error when any install command exits with a non-zero code
 */
async function install(assetBucket: string, assetKey: string, commands: string[], packagedDirectory: string, targetBucket: string) {
  const temp = fs.mkdtempSync('/tmp/package-');
  try {
    // extract asset with package.json file
    console.log('Downloading and unpacking asset...');

    const assetObject = await s3.send(new GetObjectCommand({
      Bucket: assetBucket,
      Key: assetKey,
    }));
    if (!assetObject.Body) {
      throw new Error('Unable to read asset');
    }

    new AdmZip(Buffer.from(await assetObject.Body.transformToByteArray())).extractAllTo(temp);

    // run installation commands
    console.log('Running installation commands...');

    for (const command of commands) {
      console.log(command);
      const { exitCode, output } = await execAndGetOutput(command, temp);
      console.log(output);
      if (exitCode != 0) {
        let error = output;
        if (error.length > 500) {
          // custom resource responses are size-limited -- keep only the tail.
          // BUG FIX: substring(-500) clamps negative indexes to 0 and returned
          // the WHOLE output; slice(-500) returns the last 500 characters.
          error = '...' + output.slice(-500);
        }
        throw new Error(`COMMAND FAILED ${command}\n${error}`);
      }
    }

    // zip it up
    console.log('Packaging dependencies...');

    const zipPath = path.join(temp, 'package.zip');
    const zipPackage = new AdmZip();
    zipPackage.addLocalFolder(path.join(temp, packagedDirectory), packagedDirectory);
    zipPackage.writeZip(zipPath);

    // hash the zip so identical dependency sets map to the same object key
    console.log('Hashing package...');

    let zipHash: string;
    {
      const { exitCode, output } = await execAndGetOutput(`sha256sum "${zipPath}"`, temp);
      if (exitCode != 0) {
        throw new Error(`Unable to hash zip ${zipPath}`);
      }

      zipHash = output.split(' ')[0];
    }

    // upload package
    console.log('Uploading package...');
    await s3.send(new PutObjectCommand({
      Bucket: targetBucket,
      Key: `${zipHash}.zip`,
      Body: fs.createReadStream(zipPath),
    }));

    return `${zipHash}.zip`;
  } finally {
    fs.rmSync(temp, {
      recursive: true,
      force: true,
    });
  }
}

interface CommandOutput {
  exitCode: number; // process exit code (0 on success)
  output: string; // interleaved stdout + stderr
}

/**
 * Run a shell command in `cwd`, capturing stdout and stderr interleaved.
 * Resolves with the exit code and output; rejects if the process was killed
 * by a signal or failed to spawn.
 */
function execAndGetOutput(command: string, cwd: string): Promise<CommandOutput> {
  return new Promise((resolve, reject) => {
    let output: string[] = [];
    const exec = child_process.exec(command, { cwd });
    exec.stdout?.on('data', data => {
      output.push(data.toString());
    });
    exec.stderr?.on('data', data => {
      output.push(data.toString());
    });
    exec.on('close', (code, signal) => {
      if (signal !== null) {
        reject(new Error(`Exited with signal ${signal}`));
      } else if (code !== null) {
        resolve({
          exitCode: code,
          output: output.join(''),
        });
      } else {
        reject(new Error('Unknown process exit issue'));
      }
    });
    exec.on('error', err => {
      reject(err);
    });
  });
}
Try using CodeBuild packager instead.") 29 | 30 | 31 | def handler(event, context): 32 | try: 33 | # read properties 34 | request_type = event["RequestType"] 35 | properties = event["ResourceProperties"] 36 | preinstall_commands = properties["PreinstallCommands"] 37 | commands = properties["Commands"] 38 | packaged_directory = properties["PackagedDirectory"] 39 | asset_bucket = properties["AssetBucket"] 40 | asset_key = properties["AssetKey"] 41 | target_bucket = properties["BucketName"] 42 | 43 | print(request_type) 44 | print(properties) 45 | 46 | # setup home 47 | os.makedirs("/tmp/home", exist_ok=True) 48 | os.environ["HOME"] = "/tmp/home" 49 | os.environ["PATH"] += ":/tmp/home/.local/bin" 50 | 51 | # cancel custom resource on timeout because lambda runs up to 15 minutes, but custom resource waits for an hour 52 | threading.Thread(target=cancel_on_timeout, args=[event, context], daemon=True).start() 53 | 54 | # handle request 55 | if request_type in ["Create", "Update"]: 56 | try: 57 | key = install(event, context, asset_bucket, asset_key, preinstall_commands + commands, packaged_directory, target_bucket) 58 | send(event, context, SUCCESS, {}, key) 59 | except CommandError as e: 60 | send(event, context, FAILED, {}, "ERROR", reason=e.message) 61 | 62 | elif request_type == "Delete": 63 | key = event["PhysicalResourceId"] 64 | try: 65 | s3.delete_object(Bucket=target_bucket, Key=key) 66 | except Exception as e: 67 | print(f"Unable to delete package: {e}") 68 | send(event, context, SUCCESS, {}, key) 69 | 70 | else: 71 | send(event, context, FAILED, {}, "ERROR", reason="Bad request type") 72 | 73 | except Exception as e: 74 | send(event, context, FAILED, {}, "ERROR", reason=f"Internal error: {e}") 75 | traceback.print_exc() 76 | 77 | def install(event, context, asset_bucket, asset_key, commands, packaged_directory, target_bucket): 78 | with tempfile.TemporaryDirectory() as temp: 79 | # extract asset with requirements file 80 | print("Downloading and unpacking 
asset...") 81 | 82 | asset_path = os.path.join(temp, "asset.zip") 83 | s3.download_file(asset_bucket, asset_key, asset_path) 84 | zipfile.ZipFile(asset_path).extractall(temp) 85 | 86 | # run installation commands 87 | print("Running installation commands...") 88 | 89 | for command in commands: 90 | try: 91 | print(command) 92 | print(subprocess.check_output(command, cwd=temp, stderr=subprocess.STDOUT, shell=True, universal_newlines=True)) 93 | except subprocess.CalledProcessError as e: 94 | print(e.output) 95 | output = e.output 96 | if len(output) > 500: 97 | # custom resource response size is limited 98 | output = "..." + output[-500:] 99 | 100 | raise CommandError(f"COMMAND FAILED {command}\n{output}") 101 | 102 | # zip it up 103 | print("Packaging dependencies...") 104 | 105 | package_path = os.path.join(temp, "package.zip") 106 | with zipfile.ZipFile(package_path, "w") as z: 107 | for root, folders, files in os.walk(os.path.join(temp, packaged_directory)): 108 | for f in files: 109 | local_path = os.path.join(root, f) 110 | zip_path = os.path.relpath(local_path, temp) 111 | # we can use ZipInfo because it sets a consistent file date which lead to unchanged zips (for unchanged requirements) 112 | # but all package managers create .pyc files that are inconsistent because they have embedded dates 113 | # z.writestr(zipfile.ZipInfo(zip_path), open(local_path, 'rb').read(), zipfile.ZIP_DEFLATED) 114 | z.write(local_path, zip_path, zipfile.ZIP_DEFLATED) 115 | 116 | # hash the zip 117 | print("Hashing package...") 118 | 119 | package_hash = subprocess.check_output(["sha256sum", package_path], universal_newlines=True).split()[0] 120 | # try: 121 | # s3.head_object(Bucket=target_bucket, Key=f"{zip_hash}.zip") 122 | # except: 123 | # # TODO if exists, don't upload 124 | # pass 125 | 126 | # upload 127 | print("Uploading package...") 128 | s3.upload_file(package_path, target_bucket, f"{package_hash}.zip") 129 | 130 | return f"{package_hash}.zip" 131 | 132 | 133 | def 
send(event, context, responseStatus, responseData, physicalResourceId=None, noEcho=False, reason=None): 134 | responseUrl = event["ResponseURL"] 135 | 136 | responseBody = { 137 | "Status": responseStatus, 138 | "Reason": reason or "See the details in CloudWatch Log Stream: {}".format(context.log_stream_name), 139 | "PhysicalResourceId": physicalResourceId or context.log_stream_name, 140 | "StackId": event["StackId"], 141 | "RequestId": event["RequestId"], 142 | "LogicalResourceId": event["LogicalResourceId"], 143 | "NoEcho": noEcho, 144 | "Data": responseData 145 | } 146 | 147 | json_responseBody = json.dumps(responseBody) 148 | 149 | print("Response body:") 150 | print(json_responseBody) 151 | 152 | headers = { 153 | "content-type": "", 154 | "content-length": str(len(json_responseBody)) 155 | } 156 | 157 | try: 158 | response = http.request("PUT", responseUrl, headers=headers, body=json_responseBody) 159 | print("Status code:", response.status) 160 | 161 | except Exception as e: 162 | 163 | print("send(..) failed executing http.request(..):", e) 164 | -------------------------------------------------------------------------------- /src/package-ruby-function.ts: -------------------------------------------------------------------------------- 1 | import * as path from 'path'; 2 | import * as lambda from 'aws-cdk-lib/aws-lambda'; 3 | import { Construct } from 'constructs'; 4 | 5 | /** 6 | * Props for PackageRubyFunction 7 | */ 8 | export interface PackageRubyFunctionProps extends lambda.FunctionOptions { 9 | } 10 | 11 | /** 12 | * An AWS Lambda function which executes src/package-ruby. 
require "aws-sdk-s3"
require "fileutils" # BUG FIX: FileUtils is used below but was never required
require "json"
require "net/http"
require "open3"
require "pathname"
require "tmpdir"
require "uri"

$:.unshift(File.expand_path("./rubyzip")) # `npm run bundle` puts rubyzip there
require "zip"

SUCCESS = "SUCCESS"
FAILED = "FAILED"

$s3 = Aws::S3::Client.new()

# Entry point for the Ruby dependency packaging custom resource.
# Create/Update: download the asset, run install commands, zip and upload the
# result. Delete: best-effort removal of the uploaded package. Always responds
# to CloudFormation so the stack never hangs waiting for a response.
def handler(event:, context:)
  begin
    # read properties
    request_type = event["RequestType"]
    properties = event["ResourceProperties"]
    preinstall_commands = properties["PreinstallCommands"]
    commands = properties["Commands"]
    packaged_directory = properties["PackagedDirectory"]
    asset_bucket = properties["AssetBucket"]
    asset_key = properties["AssetKey"]
    target_bucket = properties["BucketName"]

    puts(request_type)
    puts(properties)

    # setup home -- only /tmp is writable in Lambda
    FileUtils.mkdir_p("/tmp/home")
    ENV["HOME"] = "/tmp/home"
    ENV["PATH"] += ":/tmp/home/.local/bin"

    # cancel custom resource on timeout because lambda runs up to 15 minutes, but custom resource waits for an hour
    Thread.new {
      sleep((context.get_remaining_time_in_millis() / 1000) - 5)
      send_response(event, context, FAILED, {}, "ERROR", "Lambda timed out. Try using CodeBuild packager instead.")
    }

    # handle request
    if request_type == "Create" || request_type == "Update"
      begin
        key = install(event, context, asset_bucket, asset_key, preinstall_commands + commands, packaged_directory, target_bucket)
        send_response(event, context, SUCCESS, {}, key)
      rescue => error
        send_response(event, context, FAILED, {}, "ERROR", error.message)
      end

    elsif request_type == "Delete"
      key = event["PhysicalResourceId"]
      begin
        $s3.delete_object({bucket: target_bucket, key: key})
      rescue => error
        # BUG FIX: referenced undefined `e` here and raised NameError instead of logging
        puts("Unable to delete package: #{error}")
      end
      send_response(event, context, SUCCESS, {}, key)

    else
      send_response(event, context, FAILED, {}, "ERROR", "Bad request type")
    end

  rescue => error
    puts(error)
    send_response(event, context, FAILED, {}, "ERROR", "Internal error: #{error.message}")
  end
end

# Download and extract the asset, run install commands, zip the packaged
# directory and upload it as "<sha256>.zip" to the target bucket.
# Returns the uploaded key (also used as the physical resource id).
def install(event, context, asset_bucket, asset_key, commands, packaged_directory, target_bucket)
  temp = Dir.mktmpdir("package-", "/tmp")
  begin
    # extract asset with requirements file
    puts("Downloading and unpacking asset...")

    asset_path = File.join(temp, "asset.zip")
    $s3.get_object({bucket: asset_bucket, key: asset_key}, target: asset_path)
    Zip::File.open(asset_path) do |zip_file|
      zip_file.each do |f|
        f_path = File.join(temp, f.name)
        FileUtils.mkdir_p(File.dirname(f_path))
        zip_file.extract(f, f_path) unless File.exist?(f_path)
      end
    end

    # run installation commands
    puts("Running installation commands...")

    commands.each do |command|
      begin
        puts(command)
        output, status = Open3.capture2e(command, :chdir=>temp)
        print(output)
        # Open3 returns a Process::Status; comparing it to an Integer is
        # deprecated, so check success? explicitly
        unless status.success?
          if output.length > 500
            # custom resource response size is limited
            output = "..." + output[-500..]
          end
          raise StandardError.new("COMMAND FAILED #{command}\n#{output}")
        end
      rescue => error
        raise StandardError.new("COMMAND FAILED #{command}\n#{error.message}")
      end
    end

    # zip it up
    puts("Packaging dependencies...")

    package_path = File.join(temp, "package.zip")
    Zip::File.open(package_path, create: true) do |zip_file|
      Dir[ File.join(temp, packaged_directory, "**", "**") ].each do |file|
        rel_file = Pathname.new(file).relative_path_from(temp).to_s
        zip_file.add(rel_file, file)
      end
    end

    # hash the zip so identical dependency sets map to the same object key
    puts("Hashing package...")

    package_hash_output, package_hash_status = Open3.capture2("sha256sum #{package_path}")
    unless package_hash_status.success?
      raise StandardError.new("Unable to hash package")
    end
    package_hash = package_hash_output.split()[0]

    # upload
    puts("Uploading package...")
    File.open(package_path, "rb") do |package|
      $s3.put_object(bucket: target_bucket, key: "#{package_hash}.zip", body: package)
    end

    return "#{package_hash}.zip"
  ensure
    # BUG FIX: the original registered `at_exit { FileUtils.remove_entry(dir) }`
    # which referenced an undefined variable (NameError) and would not run
    # per-invocation in a warm Lambda anyway; clean up deterministically here
    FileUtils.remove_entry(temp, true)
  end
end
event["LogicalResourceId"], 157 | "Data" => response_data 158 | } 159 | 160 | puts "Response body:\n" 161 | puts JSON.dump(body_data) 162 | 163 | response_body = JSON.dump(body_data) # response_body is a JSON string 164 | 165 | url = event["ResponseURL"] 166 | uri = URI(url) 167 | http = Net::HTTP.new(uri.host, uri.port) 168 | http.open_timeout = http.read_timeout = 30 169 | http.use_ssl = true if uri.scheme == "https" 170 | 171 | # must used url to include the AWSAccessKeyId and Signature 172 | req = Net::HTTP::Put.new(url) # url includes query string and uri.path does not, must used url t 173 | req.body = response_body 174 | req.content_length = response_body.bytesize 175 | 176 | # set headers 177 | req["content-type"] = "" 178 | req["content-length"] = response_body.bytesize 179 | 180 | if ENV["CFNRESPONSE_TEST"] 181 | puts "uri #{uri.inspect}" 182 | return body_data # early return to not send the request 183 | end 184 | 185 | res = http.request(req) 186 | puts "status code: #{res.code}" 187 | puts "headers: #{res.each_header.to_h.inspect}" 188 | puts "body: #{res.body}" 189 | end 190 | -------------------------------------------------------------------------------- /src/python.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'fs'; 2 | import { join } from 'path'; 3 | import { aws_lambda as lambda } from 'aws-cdk-lib'; 4 | import { Construct } from 'constructs'; 5 | import { BaseDependencyPackager, DependencyPackagerProps, LayerProps } from './base'; 6 | 7 | /** 8 | * Packager for creating Lambda layers for Python dependencies in AWS. Nothing is done locally so this doesn't require Docker, doesn't download any packages and doesn't upload huge files to S3. 9 | */ 10 | export class PythonDependencyPackager extends BaseDependencyPackager { 11 | private static runtimeVersion(props?: DependencyPackagerProps) { 12 | return (props?.runtime ?? 
lambda.Runtime.PYTHON_3_9).name.replace('python', ''); // TODO ugly 13 | } 14 | 15 | constructor(scope: Construct, id: string, props?: DependencyPackagerProps) { 16 | super(scope, id, { 17 | props, 18 | runtimeFamily: lambda.RuntimeFamily.PYTHON, 19 | defaultRuntime: lambda.Runtime.PYTHON_3_9, 20 | codeBuildRuntimeInstallCommands: [ 21 | // TODO fails? doesn't actually update -- 3.9.14 instead of 3.9.16 22 | // 'pyenv update || echo pyenv returned 1', 23 | `PYTHON_VERSION=\`pyenv install -l | tr -d ' ' | grep ^${PythonDependencyPackager.runtimeVersion(props)} | sort -Vr | head -n 1\``, 24 | 'echo Installing Python ${PYTHON_VERSION}', 25 | 'pyenv install -s ${PYTHON_VERSION}', 26 | 'pyenv global ${PYTHON_VERSION}', 27 | ], 28 | targetDirectory: 'python', 29 | }); 30 | } 31 | 32 | /** 33 | * Create a layer for dependencies passed as an argument and installed with pip. 34 | */ 35 | layerFromInline(id: string, requirements: string[], props?: LayerProps) { 36 | return this._newLayer( 37 | id, '.', 38 | outputDir => { 39 | fs.writeFileSync(join(outputDir, 'requirements.txt'), requirements.join('\n')); 40 | }, 41 | requirements.join(','), // CDK will hash it for us 42 | [ 43 | 'python -m venv .venv', 44 | '.venv/bin/python -m pip --no-input --disable-pip-version-check install -t python --progress-bar off -r requirements.txt', 45 | ], 46 | props, 47 | ); 48 | } 49 | 50 | /** 51 | * Create a layer for dependencies defined in requirements.txt and installed with pip. 
52 | */ 53 | layerFromRequirementsTxt(id: string, path: string, props?: LayerProps) { 54 | return this._newLayer( 55 | id, path, 56 | outputDir => { 57 | fs.copyFileSync(join(path, 'requirements.txt'), join(outputDir, 'requirements.txt')); 58 | }, 59 | this._hashFiles(path, ['requirements.txt']), 60 | [ 61 | 'python -m venv .venv', 62 | '.venv/bin/python -m pip --no-input --disable-pip-version-check install -t python --progress-bar off -r requirements.txt', 63 | ], 64 | props, 65 | ); 66 | } 67 | 68 | /** 69 | * Create a layer for dependencies defined in Pipfile and (optionally) Pipfile.lock and installed with pipenv. 70 | */ 71 | layerFromPipenv(id: string, path: string, props?: LayerProps) { 72 | return this._newLayer( 73 | id, path, 74 | outputDir => { 75 | fs.copyFileSync(join(path, 'Pipfile'), join(outputDir, 'Pipfile')); 76 | if (fs.existsSync(join(path, 'Pipfile.lock'))) { 77 | fs.copyFileSync(join(path, 'Pipfile.lock'), join(outputDir, 'Pipfile.lock')); 78 | } 79 | }, 80 | this._hashFiles(path, ['Pipfile'], ['Pipfile.lock']), 81 | [ 82 | 'pip install --no-input pipenv', 83 | 'PIPENV_VENV_IN_PROJECT=1 pipenv sync', 84 | 'mv .venv python', 85 | ], 86 | props, 87 | ); 88 | } 89 | 90 | /** 91 | * Create a layer for dependencies defined in pyproject.toml and (optionally) poetry.lock and installed with poetry. 
92 | */ 93 | layerFromPoetry(id: string, path: string, props?: LayerProps) { 94 | return this._newLayer( 95 | id, path, 96 | outputDir => { 97 | fs.copyFileSync(join(path, 'pyproject.toml'), join(outputDir, 'pyproject.toml')); 98 | if (fs.existsSync(join(path, 'poetry.lock'))) { 99 | fs.copyFileSync(join(path, 'poetry.lock'), join(outputDir, 'poetry.lock')); 100 | } 101 | }, 102 | this._hashFiles(path, ['pyproject.toml'], ['poetry.lock']), 103 | [ 104 | //'curl -sSL https://install.python-poetry.org | python -', 105 | 'pip install --no-input poetry', 106 | 'poetry config virtualenvs.in-project true', 107 | 'poetry install --sync', 108 | 'mv .venv python', 109 | ], 110 | props, 111 | ); 112 | } 113 | } -------------------------------------------------------------------------------- /src/ruby.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'fs'; 2 | import { join } from 'path'; 3 | import { aws_lambda as lambda } from 'aws-cdk-lib'; 4 | import { Construct } from 'constructs'; 5 | import { BaseDependencyPackager, DependencyPackagerProps, LayerProps } from './base'; 6 | 7 | /** 8 | * Packager for creating Lambda layers for Ruby dependencies in AWS. Nothing is done locally so this doesn't require Docker, doesn't download any packages and doesn't upload huge files to S3. 9 | */ 10 | export class RubyDependencyPackager extends BaseDependencyPackager { 11 | private static runtimeVersion(props?: DependencyPackagerProps) { 12 | return (props?.runtime ?? 
lambda.Runtime.RUBY_2_7).name.replace('ruby', ''); // TODO ugly 13 | } 14 | 15 | constructor(scope: Construct, id: string, props?: DependencyPackagerProps) { 16 | super(scope, id, { 17 | props, 18 | runtimeFamily: lambda.RuntimeFamily.RUBY, 19 | defaultRuntime: lambda.Runtime.RUBY_2_7, 20 | codeBuildRuntimeInstallCommands: [ 21 | 'yum install -y perl', // Can't locate FindBin.pm in @INC 22 | `RUBY_VERSION=\`rbenv install -L | tr -d ' ' | grep ^${RubyDependencyPackager.runtimeVersion(props)} | sort -Vr | head -n 1\``, 23 | 'echo Installing Ruby ${RUBY_VERSION}', 24 | 'rbenv install -s ${RUBY_VERSION}', 25 | 'rbenv global ${RUBY_VERSION}', 26 | ], 27 | targetDirectory: 'ruby', 28 | }); 29 | } 30 | 31 | /** 32 | * Create a layer for dependencies defined in Gemfile and (optionally) Gemfile.lock and installed with Bundler. 33 | */ 34 | layerFromBundler(id: string, path: string, props?: LayerProps) { 35 | return this._newLayer( 36 | id, path, 37 | outputDir => { 38 | fs.copyFileSync(join(path, 'Gemfile'), join(outputDir, 'Gemfile')); 39 | if (fs.existsSync(join(path, 'Gemfile.lock'))) { 40 | fs.copyFileSync(join(path, 'Gemfile.lock'), join(outputDir, 'Gemfile.lock')); 41 | } 42 | }, 43 | this._hashFiles(path, ['Gemfile'], ['Gemfile.lock']), 44 | [ 45 | 'mkdir -p ruby/gems', 46 | 'bundle config set path ruby/gems', 47 | 'bundle install', 48 | 'mv ruby/gems/ruby/* ruby/gems/', 49 | 'rm -rf ruby/gems/*/cache', 50 | 'rm -rf ruby/gems/ruby', 51 | ], 52 | props, 53 | ); 54 | } 55 | } -------------------------------------------------------------------------------- /test/assets/bundler/Gemfile: -------------------------------------------------------------------------------- 1 | source "https://rubygems.org" 2 | 3 | gem "faker", "~> 3.0" -------------------------------------------------------------------------------- /test/assets/bundler/Gemfile.lock: -------------------------------------------------------------------------------- 1 | GEM 2 | remote: https://rubygems.org/ 3 | 
specs: 4 | concurrent-ruby (1.1.10) 5 | faker (3.0.0) 6 | i18n (>= 1.8.11, < 2) 7 | i18n (1.12.0) 8 | concurrent-ruby (~> 1.0) 9 | 10 | PLATFORMS 11 | x86_64-linux 12 | 13 | DEPENDENCIES 14 | faker (~> 3.0) 15 | 16 | BUNDLED WITH 17 | 2.3.7 -------------------------------------------------------------------------------- /test/assets/maven/FunctionOne.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CloudSnorkel/cdk-turbo-layers/13fff31a9d1ab853e075c1e5cffd880fd4a25355/test/assets/maven/FunctionOne.jar -------------------------------------------------------------------------------- /test/assets/maven/pom.xml: -------------------------------------------------------------------------------- 1 | 3 | 4.0.0 4 | cdk-sample 5 | FunctionOne 6 | 1.0 7 | jar 8 | Function one 9 | 10 | 11 11 | 11 12 | 2.19.0 13 | UTF-8 14 | 15 | 16 | 17 | 18 | software.amazon.lambda 19 | powertools-tracing 20 | 1.12.3 21 | 22 | 23 | software.amazon.lambda 24 | powertools-metrics 25 | 1.12.3 26 | 27 | 28 | com.amazonaws 29 | aws-lambda-java-core 30 | 1.2.1 31 | 32 | 33 | com.amazonaws 34 | aws-lambda-java-events 35 | 3.11.0 36 | 37 | 38 | 39 | org.apache.logging.log4j 40 | log4j-core 41 | ${log4j.version} 42 | 43 | 44 | org.apache.logging.log4j 45 | log4j-api 46 | ${log4j.version} 47 | 48 | 49 | 50 | junit 51 | junit 52 | 4.13.2 53 | test 54 | 55 | 56 | 57 | 58 | 59 | 60 | org.codehaus.mojo 61 | aspectj-maven-plugin 62 | 1.14.0 63 | 64 | ${maven.compiler.source} 65 | ${maven.compiler.target} 66 | ${maven.compiler.target} 67 | 68 | 69 | software.amazon.lambda 70 | powertools-tracing 71 | 72 | 73 | software.amazon.lambda 74 | powertools-metrics 75 | 76 | 77 | 78 | 79 | 80 | 81 | compile 82 | 83 | 84 | 85 | 86 | 87 | 88 | -------------------------------------------------------------------------------- /test/assets/maven/src/main/java/helloworld/App.java: 
-------------------------------------------------------------------------------- 1 | package helloworld; 2 | 3 | import java.io.BufferedReader; 4 | import java.io.IOException; 5 | import java.io.InputStreamReader; 6 | import java.net.URL; 7 | import java.util.HashMap; 8 | import java.util.Map; 9 | import java.util.stream.Collectors; 10 | 11 | import com.amazonaws.services.lambda.runtime.Context; 12 | import com.amazonaws.services.lambda.runtime.RequestHandler; 13 | import com.amazonaws.services.lambda.runtime.events.APIGatewayProxyRequestEvent; 14 | import com.amazonaws.services.lambda.runtime.events.APIGatewayProxyResponseEvent; 15 | import software.amazon.lambda.powertools.metrics.Metrics; 16 | import software.amazon.lambda.powertools.tracing.Tracing; 17 | 18 | import static software.amazon.lambda.powertools.tracing.CaptureMode.DISABLED; 19 | 20 | 21 | /** 22 | * Handler for requests to Lambda function. 23 | */ 24 | public class App implements RequestHandler { 25 | 26 | @Tracing(captureMode = DISABLED) 27 | @Metrics(captureColdStart = true) 28 | public APIGatewayProxyResponseEvent handleRequest(final APIGatewayProxyRequestEvent input, final Context context) { 29 | Map headers = new HashMap<>(); 30 | headers.put("Content-Type", "application/json"); 31 | headers.put("X-Custom-Header", "application/json"); 32 | 33 | APIGatewayProxyResponseEvent response = new APIGatewayProxyResponseEvent() 34 | .withHeaders(headers); 35 | try { 36 | final String pageContents = this.getPageContents("https://checkip.amazonaws.com"); 37 | String output = String.format("{ \"message\": \"hello world\", \"location\": \"%s\" }", pageContents); 38 | 39 | return response 40 | .withStatusCode(200) 41 | .withBody(output); 42 | } catch (IOException e) { 43 | return response 44 | .withBody("{}") 45 | .withStatusCode(500); 46 | } 47 | } 48 | 49 | @Tracing(namespace = "getPageContents") 50 | private String getPageContents(String address) throws IOException { 51 | URL url = new URL(address); 52 | 
try (BufferedReader br = new BufferedReader(new InputStreamReader(url.openStream()))) { 53 | return br.lines().collect(Collectors.joining(System.lineSeparator())); 54 | } 55 | } 56 | } -------------------------------------------------------------------------------- /test/assets/npm/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "npm", 3 | "lockfileVersion": 2, 4 | "requires": true, 5 | "packages": { 6 | "": { 7 | "dependencies": { 8 | "lodash": "^4.17.21" 9 | } 10 | }, 11 | "node_modules/lodash": { 12 | "version": "4.17.21", 13 | "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", 14 | "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" 15 | } 16 | }, 17 | "dependencies": { 18 | "lodash": { 19 | "version": "4.17.21", 20 | "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", 21 | "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" 22 | } 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /test/assets/npm/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "dependencies": { 3 | "lodash": "^4.17.21" 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /test/assets/pipenv/Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | url = "https://pypi.org/simple" 3 | verify_ssl = true 4 | name = "pypi" 5 | 6 | [packages] 7 | requests = "*" 8 | 9 | [dev-packages] 10 | 11 | [requires] 12 | python_version = "3.9" -------------------------------------------------------------------------------- /test/assets/pipenv/Pipfile.lock: -------------------------------------------------------------------------------- 1 | { 2 | "_meta": { 3 | "hash": { 4 | "sha256": 
"b8c2e1580c53e383cfe4254c1f16560b855d984fde8b2beb3bf6ee8fc2fe5a22" 5 | }, 6 | "pipfile-spec": 6, 7 | "requires": { 8 | "python_version": "3.9" 9 | }, 10 | "sources": [ 11 | { 12 | "name": "pypi", 13 | "url": "https://pypi.org/simple", 14 | "verify_ssl": true 15 | } 16 | ] 17 | }, 18 | "default": { 19 | "certifi": { 20 | "hashes": [ 21 | "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b", 22 | "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90" 23 | ], 24 | "index": "pypi", 25 | "markers": "python_version >= '3.6'", 26 | "version": "==2024.7.4" 27 | }, 28 | "charset-normalizer": { 29 | "hashes": [ 30 | "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027", 31 | "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087", 32 | "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786", 33 | "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8", 34 | "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09", 35 | "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185", 36 | "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574", 37 | "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e", 38 | "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519", 39 | "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898", 40 | "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269", 41 | "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3", 42 | "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f", 43 | "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6", 44 | "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8", 45 | "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a", 46 | 
"sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73", 47 | "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc", 48 | "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714", 49 | "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2", 50 | "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc", 51 | "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce", 52 | "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d", 53 | "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e", 54 | "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6", 55 | "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269", 56 | "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96", 57 | "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d", 58 | "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a", 59 | "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4", 60 | "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77", 61 | "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d", 62 | "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0", 63 | "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed", 64 | "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068", 65 | "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac", 66 | "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25", 67 | "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8", 68 | "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab", 69 | "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26", 70 | "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2", 71 | 
"sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db", 72 | "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f", 73 | "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5", 74 | "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99", 75 | "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c", 76 | "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d", 77 | "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811", 78 | "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa", 79 | "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a", 80 | "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03", 81 | "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b", 82 | "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04", 83 | "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c", 84 | "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001", 85 | "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458", 86 | "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389", 87 | "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99", 88 | "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985", 89 | "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537", 90 | "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238", 91 | "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f", 92 | "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d", 93 | "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796", 94 | "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a", 95 | "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143", 96 | 
"sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8", 97 | "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c", 98 | "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5", 99 | "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5", 100 | "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711", 101 | "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4", 102 | "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6", 103 | "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c", 104 | "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7", 105 | "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4", 106 | "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b", 107 | "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae", 108 | "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12", 109 | "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c", 110 | "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae", 111 | "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8", 112 | "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887", 113 | "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b", 114 | "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4", 115 | "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f", 116 | "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5", 117 | "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33", 118 | "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519", 119 | "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561" 120 | ], 121 | "markers": "python_full_version >= '3.7.0'", 122 | 
"version": "==3.3.2" 123 | }, 124 | "idna": { 125 | "hashes": [ 126 | "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc", 127 | "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0" 128 | ], 129 | "markers": "python_version >= '3.5'", 130 | "version": "==3.7" 131 | }, 132 | "requests": { 133 | "hashes": [ 134 | "sha256:dd951ff5ecf3e3b3aa26b40703ba77495dab41da839ae72ef3c8e5d8e2433289", 135 | "sha256:fc06670dd0ed212426dfeb94fc1b983d917c4f9847c863f313c9dfaaffb7c23c" 136 | ], 137 | "index": "pypi", 138 | "markers": "python_version >= '3.8'", 139 | "version": "==2.32.2" 140 | }, 141 | "urllib3": { 142 | "hashes": [ 143 | "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472", 144 | "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168" 145 | ], 146 | "markers": "python_version >= '3.8'", 147 | "version": "==2.2.2" 148 | } 149 | }, 150 | "develop": {} 151 | } 152 | -------------------------------------------------------------------------------- /test/assets/poetry/poetry.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 2 | 3 | [[package]] 4 | name = "certifi" 5 | version = "2024.7.4" 6 | description = "Python package for providing Mozilla's CA Bundle." 7 | optional = false 8 | python-versions = ">=3.6" 9 | files = [ 10 | {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, 11 | {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, 12 | ] 13 | 14 | [[package]] 15 | name = "charset-normalizer" 16 | version = "2.1.1" 17 | description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
18 | optional = false 19 | python-versions = ">=3.6.0" 20 | files = [ 21 | {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, 22 | {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, 23 | ] 24 | 25 | [package.extras] 26 | unicode-backport = ["unicodedata2"] 27 | 28 | [[package]] 29 | name = "idna" 30 | version = "3.7" 31 | description = "Internationalized Domain Names in Applications (IDNA)" 32 | optional = false 33 | python-versions = ">=3.5" 34 | files = [ 35 | {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, 36 | {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, 37 | ] 38 | 39 | [[package]] 40 | name = "requests" 41 | version = "2.31.0" 42 | description = "Python HTTP for Humans." 43 | optional = false 44 | python-versions = ">=3.7" 45 | files = [ 46 | {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, 47 | {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, 48 | ] 49 | 50 | [package.dependencies] 51 | certifi = ">=2017.4.17" 52 | charset-normalizer = ">=2,<4" 53 | idna = ">=2.5,<4" 54 | urllib3 = ">=1.21.1,<3" 55 | 56 | [package.extras] 57 | socks = ["PySocks (>=1.5.6,!=1.5.7)"] 58 | use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] 59 | 60 | [[package]] 61 | name = "urllib3" 62 | version = "1.26.19" 63 | description = "HTTP library with thread-safe connection pooling, file post, and more." 
64 | optional = false 65 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" 66 | files = [ 67 | {file = "urllib3-1.26.19-py2.py3-none-any.whl", hash = "sha256:37a0344459b199fce0e80b0d3569837ec6b6937435c5244e7fd73fa6006830f3"}, 68 | {file = "urllib3-1.26.19.tar.gz", hash = "sha256:3e3d753a8618b86d7de333b4223005f68720bcd6a7d2bcb9fbd2229ec7c1e429"}, 69 | ] 70 | 71 | [package.extras] 72 | brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] 73 | secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] 74 | socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] 75 | 76 | [metadata] 77 | lock-version = "2.0" 78 | python-versions = "^3.9" 79 | content-hash = "ee7fed81cef220efc922f65299b2c6240968697a8ab9aad99eb924d83f71d2d5" 80 | -------------------------------------------------------------------------------- /test/assets/poetry/pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "test" 3 | version = "0.1.0" 4 | description = "" 5 | authors = ["Amir Szekely "] 6 | readme = "README.md" 7 | 8 | [tool.poetry.dependencies] 9 | python = "^3.9" 10 | requests = "^2.31.0" 11 | 12 | 13 | [build-system] 14 | requires = ["poetry-core"] 15 | build-backend = "poetry.core.masonry.api" -------------------------------------------------------------------------------- /test/assets/requirements/requirements.txt: -------------------------------------------------------------------------------- 1 | requests -------------------------------------------------------------------------------- /test/assets/ruby_handler/index.rb: -------------------------------------------------------------------------------- 1 | require "faker" 2 | 3 | def handler(event:, context:) 4 | end 5 | -------------------------------------------------------------------------------- /test/assets/yarn/package.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "dependencies": { 3 | "lodash": "^4.17.21" 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /test/assets/yarn/yarn.lock: -------------------------------------------------------------------------------- 1 | # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 2 | # yarn lockfile v1 3 | 4 | 5 | lodash@^4.17.21: 6 | version "4.17.21" 7 | resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" 8 | integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== 9 | -------------------------------------------------------------------------------- /test/default.integ.snapshot/Turbo-Layer-Test.assets.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "36.0.0", 3 | "files": { 4 | "b7f33614a69548d6bafe224d751a7ef238cde19097415e553fe8b63a4c8fd8a6": { 5 | "source": { 6 | "path": "asset.b7f33614a69548d6bafe224d751a7ef238cde19097415e553fe8b63a4c8fd8a6", 7 | "packaging": "zip" 8 | }, 9 | "destinations": { 10 | "current_account-current_region": { 11 | "bucketName": "cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}", 12 | "objectKey": "b7f33614a69548d6bafe224d751a7ef238cde19097415e553fe8b63a4c8fd8a6.zip", 13 | "assumeRoleArn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-file-publishing-role-${AWS::AccountId}-${AWS::Region}" 14 | } 15 | } 16 | }, 17 | "f1c4fab585061107d1f08fc14d92728adf646f1b14c789b69e5914fe7bff065f": { 18 | "source": { 19 | "path": "asset.f1c4fab585061107d1f08fc14d92728adf646f1b14c789b69e5914fe7bff065f.lambda", 20 | "packaging": "zip" 21 | }, 22 | "destinations": { 23 | "current_account-current_region": { 24 | "bucketName": "cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}", 25 | "objectKey": 
"f1c4fab585061107d1f08fc14d92728adf646f1b14c789b69e5914fe7bff065f.zip", 26 | "assumeRoleArn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-file-publishing-role-${AWS::AccountId}-${AWS::Region}" 27 | } 28 | } 29 | }, 30 | "9afa25b7aaac0f1e5c0a217c2ca3d8f0927ff079339a8514309eb1b4645571eb": { 31 | "source": { 32 | "path": "asset.9afa25b7aaac0f1e5c0a217c2ca3d8f0927ff079339a8514309eb1b4645571eb", 33 | "packaging": "zip" 34 | }, 35 | "destinations": { 36 | "current_account-current_region": { 37 | "bucketName": "cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}", 38 | "objectKey": "9afa25b7aaac0f1e5c0a217c2ca3d8f0927ff079339a8514309eb1b4645571eb.zip", 39 | "assumeRoleArn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-file-publishing-role-${AWS::AccountId}-${AWS::Region}" 40 | } 41 | } 42 | }, 43 | "ac4a1bd8c7c7879385a212db2c46b2cb8f9e23b91c993d826d478bff7dbc0af3": { 44 | "source": { 45 | "path": "asset.ac4a1bd8c7c7879385a212db2c46b2cb8f9e23b91c993d826d478bff7dbc0af3", 46 | "packaging": "zip" 47 | }, 48 | "destinations": { 49 | "current_account-current_region": { 50 | "bucketName": "cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}", 51 | "objectKey": "ac4a1bd8c7c7879385a212db2c46b2cb8f9e23b91c993d826d478bff7dbc0af3.zip", 52 | "assumeRoleArn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-file-publishing-role-${AWS::AccountId}-${AWS::Region}" 53 | } 54 | } 55 | }, 56 | "03f139329aa3cbf7300e981458b586c05c3fce0792c3ee22bfa34d0a9912646b": { 57 | "source": { 58 | "path": "asset.03f139329aa3cbf7300e981458b586c05c3fce0792c3ee22bfa34d0a9912646b", 59 | "packaging": "zip" 60 | }, 61 | "destinations": { 62 | "current_account-current_region": { 63 | "bucketName": "cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}", 64 | "objectKey": "03f139329aa3cbf7300e981458b586c05c3fce0792c3ee22bfa34d0a9912646b.zip", 65 | "assumeRoleArn": 
"arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-file-publishing-role-${AWS::AccountId}-${AWS::Region}" 66 | } 67 | } 68 | }, 69 | "0f805da84ea23c43d2a95206954c8b5d60b696ef6cbc15dd86a842eae88a6356": { 70 | "source": { 71 | "path": "asset.0f805da84ea23c43d2a95206954c8b5d60b696ef6cbc15dd86a842eae88a6356", 72 | "packaging": "zip" 73 | }, 74 | "destinations": { 75 | "current_account-current_region": { 76 | "bucketName": "cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}", 77 | "objectKey": "0f805da84ea23c43d2a95206954c8b5d60b696ef6cbc15dd86a842eae88a6356.zip", 78 | "assumeRoleArn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-file-publishing-role-${AWS::AccountId}-${AWS::Region}" 79 | } 80 | } 81 | }, 82 | "6001dbd94424e99c60bef608f43590be11bd4eef9b76ee914e43e4765f7dc763": { 83 | "source": { 84 | "path": "asset.6001dbd94424e99c60bef608f43590be11bd4eef9b76ee914e43e4765f7dc763", 85 | "packaging": "zip" 86 | }, 87 | "destinations": { 88 | "current_account-current_region": { 89 | "bucketName": "cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}", 90 | "objectKey": "6001dbd94424e99c60bef608f43590be11bd4eef9b76ee914e43e4765f7dc763.zip", 91 | "assumeRoleArn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-file-publishing-role-${AWS::AccountId}-${AWS::Region}" 92 | } 93 | } 94 | }, 95 | "ef31fc89d7ced5ea0cff316469a82711784a006d3ae7a037f2b2422969eafc2e": { 96 | "source": { 97 | "path": "asset.ef31fc89d7ced5ea0cff316469a82711784a006d3ae7a037f2b2422969eafc2e.lambda", 98 | "packaging": "zip" 99 | }, 100 | "destinations": { 101 | "current_account-current_region": { 102 | "bucketName": "cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}", 103 | "objectKey": "ef31fc89d7ced5ea0cff316469a82711784a006d3ae7a037f2b2422969eafc2e.zip", 104 | "assumeRoleArn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-file-publishing-role-${AWS::AccountId}-${AWS::Region}" 105 | } 106 | } 107 | }, 108 | 
"3bf5aad3a5db75c30ba530f89f7210a1eddb01c9a91cf0427515e6ef904bc869": { 109 | "source": { 110 | "path": "asset.3bf5aad3a5db75c30ba530f89f7210a1eddb01c9a91cf0427515e6ef904bc869", 111 | "packaging": "zip" 112 | }, 113 | "destinations": { 114 | "current_account-current_region": { 115 | "bucketName": "cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}", 116 | "objectKey": "3bf5aad3a5db75c30ba530f89f7210a1eddb01c9a91cf0427515e6ef904bc869.zip", 117 | "assumeRoleArn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-file-publishing-role-${AWS::AccountId}-${AWS::Region}" 118 | } 119 | } 120 | }, 121 | "4a976e712b238000c04b209e96ba28d7567d8bbc7954fbbc97de024697cd4147": { 122 | "source": { 123 | "path": "asset.4a976e712b238000c04b209e96ba28d7567d8bbc7954fbbc97de024697cd4147", 124 | "packaging": "zip" 125 | }, 126 | "destinations": { 127 | "current_account-current_region": { 128 | "bucketName": "cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}", 129 | "objectKey": "4a976e712b238000c04b209e96ba28d7567d8bbc7954fbbc97de024697cd4147.zip", 130 | "assumeRoleArn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-file-publishing-role-${AWS::AccountId}-${AWS::Region}" 131 | } 132 | } 133 | }, 134 | "48521f734f8d682c72f0a91cf94ae3031c21aa5c06f2b05888e7f98061118c64": { 135 | "source": { 136 | "path": "asset.48521f734f8d682c72f0a91cf94ae3031c21aa5c06f2b05888e7f98061118c64", 137 | "packaging": "zip" 138 | }, 139 | "destinations": { 140 | "current_account-current_region": { 141 | "bucketName": "cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}", 142 | "objectKey": "48521f734f8d682c72f0a91cf94ae3031c21aa5c06f2b05888e7f98061118c64.zip", 143 | "assumeRoleArn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-file-publishing-role-${AWS::AccountId}-${AWS::Region}" 144 | } 145 | } 146 | }, 147 | "37505d489d351a93f865cf87639d4dfdc1839fff8243c3d2f6f0a8fc64df284c": { 148 | "source": { 149 | "path": 
"asset.37505d489d351a93f865cf87639d4dfdc1839fff8243c3d2f6f0a8fc64df284c.lambda", 150 | "packaging": "zip" 151 | }, 152 | "destinations": { 153 | "current_account-current_region": { 154 | "bucketName": "cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}", 155 | "objectKey": "37505d489d351a93f865cf87639d4dfdc1839fff8243c3d2f6f0a8fc64df284c.zip", 156 | "assumeRoleArn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-file-publishing-role-${AWS::AccountId}-${AWS::Region}" 157 | } 158 | } 159 | }, 160 | "6dc9869af4e1a4375706e7cebc576340facec2e9f959d4bbca7aef8ddaf68954": { 161 | "source": { 162 | "path": "asset.6dc9869af4e1a4375706e7cebc576340facec2e9f959d4bbca7aef8ddaf68954", 163 | "packaging": "zip" 164 | }, 165 | "destinations": { 166 | "current_account-current_region": { 167 | "bucketName": "cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}", 168 | "objectKey": "6dc9869af4e1a4375706e7cebc576340facec2e9f959d4bbca7aef8ddaf68954.zip", 169 | "assumeRoleArn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-file-publishing-role-${AWS::AccountId}-${AWS::Region}" 170 | } 171 | } 172 | }, 173 | "ef6e452b59838e0fe26d2ed2fc0e43cefd2011828c7097b696d39cd93616146e": { 174 | "source": { 175 | "path": "asset.ef6e452b59838e0fe26d2ed2fc0e43cefd2011828c7097b696d39cd93616146e", 176 | "packaging": "zip" 177 | }, 178 | "destinations": { 179 | "current_account-current_region": { 180 | "bucketName": "cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}", 181 | "objectKey": "ef6e452b59838e0fe26d2ed2fc0e43cefd2011828c7097b696d39cd93616146e.zip", 182 | "assumeRoleArn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-file-publishing-role-${AWS::AccountId}-${AWS::Region}" 183 | } 184 | } 185 | }, 186 | "f477c7f293bd090acdff1a27566482b6271087ecfa3b1a406cfee832f546e580": { 187 | "source": { 188 | "path": "asset.f477c7f293bd090acdff1a27566482b6271087ecfa3b1a406cfee832f546e580.lambda", 189 | "packaging": "zip" 190 | }, 191 | "destinations": { 
192 | "current_account-current_region": { 193 | "bucketName": "cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}", 194 | "objectKey": "f477c7f293bd090acdff1a27566482b6271087ecfa3b1a406cfee832f546e580.zip", 195 | "assumeRoleArn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-file-publishing-role-${AWS::AccountId}-${AWS::Region}" 196 | } 197 | } 198 | }, 199 | "b48112b8d8d6205d35abe602baf6fd4a22bd95822534c285187d26c0ed3c7c81": { 200 | "source": { 201 | "path": "asset.b48112b8d8d6205d35abe602baf6fd4a22bd95822534c285187d26c0ed3c7c81", 202 | "packaging": "zip" 203 | }, 204 | "destinations": { 205 | "current_account-current_region": { 206 | "bucketName": "cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}", 207 | "objectKey": "b48112b8d8d6205d35abe602baf6fd4a22bd95822534c285187d26c0ed3c7c81.zip", 208 | "assumeRoleArn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-file-publishing-role-${AWS::AccountId}-${AWS::Region}" 209 | } 210 | } 211 | }, 212 | "8b5213c7062e6fae0c25738f70085c08a7b99e6f226d8ca8ef92da3e91460b8a": { 213 | "source": { 214 | "path": "asset.8b5213c7062e6fae0c25738f70085c08a7b99e6f226d8ca8ef92da3e91460b8a.jar", 215 | "packaging": "file" 216 | }, 217 | "destinations": { 218 | "current_account-current_region": { 219 | "bucketName": "cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}", 220 | "objectKey": "8b5213c7062e6fae0c25738f70085c08a7b99e6f226d8ca8ef92da3e91460b8a.jar", 221 | "assumeRoleArn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-file-publishing-role-${AWS::AccountId}-${AWS::Region}" 222 | } 223 | } 224 | }, 225 | "b01fb510513feea3e56d356dc934a8fedef206125f5997fce8e92f3d501a35ef": { 226 | "source": { 227 | "path": "Turbo-Layer-Test.template.json", 228 | "packaging": "file" 229 | }, 230 | "destinations": { 231 | "current_account-current_region": { 232 | "bucketName": "cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}", 233 | "objectKey": 
"b01fb510513feea3e56d356dc934a8fedef206125f5997fce8e92f3d501a35ef.json", 234 | "assumeRoleArn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-file-publishing-role-${AWS::AccountId}-${AWS::Region}" 235 | } 236 | } 237 | } 238 | }, 239 | "dockerImages": {} 240 | } -------------------------------------------------------------------------------- /test/default.integ.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Use TriggerFunction to create Lambda functions using our layers. This construct will also run these functions. The functions each try to import a 3 | * package that's not available in Lambda unless our layer worked. If the stack successfully deploys, that means the layer was successfully created 4 | * and our functions were able to successfully import the dependencies from our turbo layer. 5 | */ 6 | 7 | import * as cdk from 'aws-cdk-lib'; 8 | import { Aspects, aws_lambda as lambda, aws_logs as logs, CustomResource, IAspect, triggers } from 'aws-cdk-lib'; 9 | import { Construct, IConstruct } from 'constructs'; 10 | import { DependencyPackagerType, JavaDependencyPackager, NodejsDependencyPackager, PythonDependencyPackager, RubyDependencyPackager } from '../src'; 11 | 12 | const app = new cdk.App({ 13 | context: { 14 | // always run triggers by updating lambda on layer changes -- see https://github.com/aws/aws-cdk/issues/19098 15 | '@aws-cdk/aws-lambda:recognizeLayerVersion': true, 16 | }, 17 | }); 18 | 19 | const layerStack = new cdk.Stack(app, 'Turbo-Layer-Test'); 20 | const logGroup = new logs.LogGroup(layerStack, 'Logs', { 21 | retention: logs.RetentionDays.ONE_DAY, 22 | }); 23 | 24 | class PythonTest extends Construct { 25 | constructor(scope: Construct, id: string, runtime: lambda.Runtime, type: DependencyPackagerType, architecture: lambda.Architecture) { 26 | super(scope, id); 27 | 28 | const packager = new PythonDependencyPackager(this, 'Packager', { 29 | runtime, 30 | architecture, 31 
| type, 32 | }); 33 | 34 | const pythonFunctionProps = { 35 | handler: 'index.handler', 36 | code: lambda.Code.fromInline('def handler(event, context):\n import requests'), 37 | runtime, 38 | architecture, 39 | timeout: cdk.Duration.minutes(1), // sometimes Lambda hates me 40 | logGroup, 41 | }; 42 | 43 | new triggers.TriggerFunction(this, 'Inline', { 44 | ...pythonFunctionProps, 45 | description: `Test inline requirements ${runtime} ${type}`, 46 | layers: [packager.layerFromInline('inline', ['requests'])], 47 | }); 48 | new triggers.TriggerFunction(this, 'Requirements.txt', { 49 | ...pythonFunctionProps, 50 | description: `Test requirements.txt ${runtime} ${type}`, 51 | layers: [packager.layerFromRequirementsTxt('req.txt', 'test/assets/requirements')], 52 | }); 53 | new triggers.TriggerFunction(this, 'Pipenv', { 54 | ...pythonFunctionProps, 55 | description: `Test pipenv ${runtime} ${type}`, 56 | layers: [packager.layerFromPipenv('pipenv', 'test/assets/pipenv')], 57 | }); 58 | new triggers.TriggerFunction(this, 'Poetry', { 59 | ...pythonFunctionProps, 60 | description: `Test poetry ${runtime} ${type}`, 61 | layers: [packager.layerFromPoetry('poetry', 'test/assets/poetry')], 62 | }); 63 | } 64 | } 65 | 66 | class NodejsTest extends Construct { 67 | constructor(scope: Construct, id: string, runtime: lambda.Runtime, type: DependencyPackagerType, architecture: lambda.Architecture) { 68 | super(scope, id); 69 | 70 | const packager = new NodejsDependencyPackager(this, 'Packager', { 71 | runtime, 72 | architecture, 73 | type, 74 | }); 75 | 76 | const nodeFunctionProps = { 77 | handler: 'index.handler', 78 | code: lambda.Code.fromInline('module.exports.handler = (event, context, callback) => {require("lodash");callback();}'), 79 | runtime, 80 | architecture, 81 | timeout: cdk.Duration.minutes(1), // sometimes Lambda hates me 82 | logGroup, 83 | }; 84 | 85 | new triggers.TriggerFunction(this, 'NPM Inline Test', { 86 | ...nodeFunctionProps, 87 | description: `Test npm 
${runtime} ${type}`, 88 | layers: [packager.layerFromInline('npm inline', ['lodash@^4'])], 89 | }); 90 | new triggers.TriggerFunction(this, 'NPM Test', { 91 | ...nodeFunctionProps, 92 | description: `Test npm ${runtime} ${type}`, 93 | layers: [packager.layerFromPackageJson('npm', 'test/assets/npm')], 94 | }); 95 | new triggers.TriggerFunction(this, 'Yarn Test', { 96 | ...nodeFunctionProps, 97 | description: `Test yarn ${runtime} ${type}`, 98 | layers: [packager.layerFromYarn('yarn', 'test/assets/yarn')], 99 | }); 100 | } 101 | } 102 | 103 | class RubyTest extends Construct { 104 | constructor(scope: Construct, id: string, runtime: lambda.Runtime, type: DependencyPackagerType, architecture: lambda.Architecture) { 105 | super(scope, id); 106 | 107 | const packager = new RubyDependencyPackager(this, 'Packager', { 108 | runtime, 109 | architecture, 110 | type, 111 | }); 112 | 113 | const rubyFunctionProps = { 114 | handler: 'index.handler', 115 | code: lambda.Code.fromAsset('test/assets/ruby_handler'), 116 | runtime, 117 | architecture, 118 | timeout: cdk.Duration.minutes(5), // Ruby takes a while to start 119 | logGroup, 120 | }; 121 | 122 | new triggers.TriggerFunction(this, 'Bundler Test', { 123 | ...rubyFunctionProps, 124 | description: `Test bundler ${runtime} ${type}`, 125 | layers: [packager.layerFromBundler('bundler', 'test/assets/bundler')], 126 | }); 127 | } 128 | } 129 | 130 | class JavaTest extends Construct { 131 | constructor(scope: Construct, id: string, runtime: lambda.Runtime, type: DependencyPackagerType, architecture: lambda.Architecture) { 132 | super(scope, id); 133 | 134 | const packager = new JavaDependencyPackager(this, 'Packager', { 135 | runtime, 136 | architecture, 137 | type, 138 | }); 139 | 140 | const javaFunctionProps: lambda.FunctionProps = { 141 | handler: 'helloworld.App', 142 | // GitHub Actions is not configured to allow Docker-in-Docker, and even if it was, the JAR file hash changes with every build. 
143 | // That's why we just use a static file that was built well once. It was built using the commented out code below. 144 | /* 145 | code: lambda.Code.fromAsset('test/assets/maven', { 146 | bundling: { 147 | image: runtime.bundlingImage, 148 | command: [ 149 | '/bin/sh', '-c', 150 | 'mvn package -ntp -B -Dmaven.repo.local=/.m2 && mv target/FunctionOne-1.0.jar /asset-output/ && rm -rf target', 151 | ], 152 | volumes: [ 153 | { 154 | // use host .m2 cache for speedier packaging 155 | hostPath: path.join(os.homedir(), '.m2'), 156 | containerPath: '/.m2/', 157 | }, 158 | ], 159 | outputType: cdk.BundlingOutput.ARCHIVED, 160 | }, 161 | }), 162 | */ 163 | code: lambda.Code.fromAsset('test/assets/maven/FunctionOne.jar'), 164 | runtime, 165 | architecture, 166 | timeout: cdk.Duration.minutes(1), 167 | memorySize: 512, // really java??? 168 | logGroup, 169 | }; 170 | 171 | new triggers.TriggerFunction(this, 'Maven Test', { 172 | ...javaFunctionProps, 173 | description: `Test maven ${runtime} ${type}`, 174 | layers: [packager.layerFromMaven('maven', 'test/assets/maven')], 175 | }); 176 | } 177 | } 178 | 179 | new PythonTest(layerStack, 'Python 3.9 CodeBuild x64', lambda.Runtime.PYTHON_3_9, DependencyPackagerType.CODEBUILD, lambda.Architecture.X86_64); 180 | new PythonTest(layerStack, 'Python 3.9 Lambda x64', lambda.Runtime.PYTHON_3_9, DependencyPackagerType.LAMBDA, lambda.Architecture.X86_64); 181 | new PythonTest(layerStack, 'Python 3.9 CodeBuild arm64', lambda.Runtime.PYTHON_3_9, DependencyPackagerType.CODEBUILD, lambda.Architecture.ARM_64); 182 | new PythonTest(layerStack, 'Python 3.9 Lambda arm64', lambda.Runtime.PYTHON_3_9, DependencyPackagerType.LAMBDA, lambda.Architecture.ARM_64); 183 | new NodejsTest(layerStack, 'Node.js 16 CodeBuild x64', lambda.Runtime.NODEJS_16_X, DependencyPackagerType.CODEBUILD, lambda.Architecture.X86_64); 184 | new NodejsTest(layerStack, 'Node.js 16 Lambda x64', lambda.Runtime.NODEJS_16_X, DependencyPackagerType.LAMBDA, 
lambda.Architecture.X86_64); 185 | new NodejsTest(layerStack, 'Node.js 18 CodeBuild x64', lambda.Runtime.NODEJS_18_X, DependencyPackagerType.CODEBUILD, lambda.Architecture.X86_64); 186 | new NodejsTest(layerStack, 'Node.js 18 Lambda x64', lambda.Runtime.NODEJS_18_X, DependencyPackagerType.LAMBDA, lambda.Architecture.X86_64); 187 | new NodejsTest(layerStack, 'Node.js 16 CodeBuild arm64', lambda.Runtime.NODEJS_16_X, DependencyPackagerType.CODEBUILD, lambda.Architecture.ARM_64); 188 | new NodejsTest(layerStack, 'Node.js 16 Lambda arm64', lambda.Runtime.NODEJS_16_X, DependencyPackagerType.LAMBDA, lambda.Architecture.ARM_64); 189 | new NodejsTest(layerStack, 'Node.js 18 CodeBuild arm64', lambda.Runtime.NODEJS_18_X, DependencyPackagerType.CODEBUILD, lambda.Architecture.ARM_64); 190 | new NodejsTest(layerStack, 'Node.js 18 Lambda arm64', lambda.Runtime.NODEJS_18_X, DependencyPackagerType.LAMBDA, lambda.Architecture.ARM_64); 191 | new RubyTest(layerStack, 'Ruby 2.7 CodeBuild x64', lambda.Runtime.RUBY_2_7, DependencyPackagerType.CODEBUILD, lambda.Architecture.X86_64); 192 | new RubyTest(layerStack, 'Ruby 2.7 Lambda x64', lambda.Runtime.RUBY_2_7, DependencyPackagerType.LAMBDA, lambda.Architecture.X86_64); 193 | new RubyTest(layerStack, 'Ruby 2.7 CodeBuild arm64', lambda.Runtime.RUBY_2_7, DependencyPackagerType.CODEBUILD, lambda.Architecture.ARM_64); 194 | new RubyTest(layerStack, 'Ruby 2.7 Lambda arm64', lambda.Runtime.RUBY_2_7, DependencyPackagerType.LAMBDA, lambda.Architecture.ARM_64); 195 | new JavaTest(layerStack, 'Java 11 CodeBuild x64', lambda.Runtime.JAVA_11, DependencyPackagerType.CODEBUILD, lambda.Architecture.X86_64); 196 | new JavaTest(layerStack, 'Java 11 CodeBuild arm64', lambda.Runtime.JAVA_11, DependencyPackagerType.CODEBUILD, lambda.Architecture.ARM_64); 197 | 198 | // don't run too many tests at once or CodeBuild will fail 199 | class CodeBuildThrottle implements IAspect { 200 | private lastBatch: CustomResource[] = []; 201 | private batch: CustomResource[] 
= []; 202 | 203 | public visit(construct: IConstruct) { 204 | if (construct instanceof CustomResource) { 205 | if (construct.node.id != 'Layer Packager' || !construct.node.path.includes('CodeBuild')) { 206 | return; 207 | } 208 | 209 | this.batch.push(construct); 210 | 211 | if (this.batch.length == 8) { 212 | this.flush(); 213 | } 214 | } 215 | } 216 | 217 | public flush() { 218 | for (const cr of this.batch) { 219 | cr.node.addDependency(...this.lastBatch); 220 | } 221 | this.lastBatch = this.batch; 222 | this.batch = []; 223 | } 224 | } 225 | 226 | const throttler = new CodeBuildThrottle(); 227 | Aspects.of(layerStack).add(throttler); 228 | throttler.flush(); 229 | -------------------------------------------------------------------------------- /test/nodejs.test.ts: -------------------------------------------------------------------------------- 1 | import * as cdk from 'aws-cdk-lib'; 2 | import { aws_lambda as lambda } from 'aws-cdk-lib'; 3 | import { Match, Template } from 'aws-cdk-lib/assertions'; 4 | import { librariesToPackageJson, NodejsDependencyPackager } from '../src'; 5 | 6 | test('Inline version parsing', () => { 7 | const packageJson = librariesToPackageJson(['@aws-sdk/client-s3@3.259.0', 'nothing', 'library@v595']); 8 | expect(packageJson).toMatchObject({ 9 | dependencies: { 10 | '@aws-sdk/client-s3': '3.259.0', 11 | 'nothing': '*', 12 | 'library': 'v595', 13 | }, 14 | }); 15 | }); 16 | 17 | test('Packager runtime version matches', () => { 18 | const app = new cdk.App(); 19 | const stack = new cdk.Stack(app, 'test'); 20 | 21 | new NodejsDependencyPackager(stack, 'packager 16', { 22 | runtime: lambda.Runtime.NODEJS_16_X, 23 | }); 24 | new NodejsDependencyPackager(stack, 'packager 18', { 25 | runtime: lambda.Runtime.NODEJS_18_X, 26 | }); 27 | 28 | const template = Template.fromStack(stack); 29 | 30 | template.hasResourceProperties( 31 | 'AWS::Lambda::Function', 32 | Match.objectLike({ 33 | Runtime: lambda.Runtime.NODEJS_16_X.name, 34 | }), 35 | ); 36 
| template.hasResourceProperties( 37 | 'AWS::Lambda::Function', 38 | Match.objectLike({ 39 | Runtime: lambda.Runtime.NODEJS_18_X.name, 40 | }), 41 | ); 42 | }); 43 | 44 | -------------------------------------------------------------------------------- /test/python.test.ts: -------------------------------------------------------------------------------- 1 | import * as cdk from 'aws-cdk-lib'; 2 | import { aws_lambda as lambda } from 'aws-cdk-lib'; 3 | import { Match, Template } from 'aws-cdk-lib/assertions'; 4 | import { PythonDependencyPackager } from '../src'; 5 | 6 | test('Packager runtime version matches', () => { 7 | const app = new cdk.App(); 8 | const stack = new cdk.Stack(app, 'test'); 9 | 10 | new PythonDependencyPackager(stack, 'packager 3.7', { 11 | runtime: lambda.Runtime.PYTHON_3_7, 12 | }); 13 | new PythonDependencyPackager(stack, 'packager 3.8', { 14 | runtime: lambda.Runtime.PYTHON_3_8, 15 | }); 16 | new PythonDependencyPackager(stack, 'packager 3.9', { 17 | runtime: lambda.Runtime.PYTHON_3_9, 18 | }); 19 | 20 | const template = Template.fromStack(stack); 21 | 22 | template.hasResourceProperties( 23 | 'AWS::Lambda::Function', 24 | Match.objectLike({ 25 | Runtime: lambda.Runtime.PYTHON_3_7.name, 26 | }), 27 | ); 28 | template.hasResourceProperties( 29 | 'AWS::Lambda::Function', 30 | Match.objectLike({ 31 | Runtime: lambda.Runtime.PYTHON_3_8.name, 32 | }), 33 | ); 34 | template.hasResourceProperties( 35 | 'AWS::Lambda::Function', 36 | Match.objectLike({ 37 | Runtime: lambda.Runtime.PYTHON_3_9.name, 38 | }), 39 | ); 40 | }); 41 | -------------------------------------------------------------------------------- /tsconfig.dev.json: -------------------------------------------------------------------------------- 1 | // ~~ Generated by projen. To modify, edit .projenrc.js and run "npx projen". 
2 | { 3 | "compilerOptions": { 4 | "alwaysStrict": true, 5 | "declaration": true, 6 | "esModuleInterop": true, 7 | "experimentalDecorators": true, 8 | "inlineSourceMap": true, 9 | "inlineSources": true, 10 | "lib": [ 11 | "es2020" 12 | ], 13 | "module": "CommonJS", 14 | "noEmitOnError": false, 15 | "noFallthroughCasesInSwitch": true, 16 | "noImplicitAny": true, 17 | "noImplicitReturns": true, 18 | "noImplicitThis": true, 19 | "noUnusedLocals": true, 20 | "noUnusedParameters": true, 21 | "resolveJsonModule": true, 22 | "strict": true, 23 | "strictNullChecks": true, 24 | "strictPropertyInitialization": true, 25 | "stripInternal": true, 26 | "target": "ES2020" 27 | }, 28 | "include": [ 29 | "src/**/*.ts", 30 | "test/**/*.ts", 31 | "benchmark/**/*.ts", 32 | ".projenrc.js" 33 | ], 34 | "exclude": [ 35 | "node_modules" 36 | ] 37 | } 38 | --------------------------------------------------------------------------------