├── .eslintrc ├── .github └── workflows │ └── release.yaml ├── .gitignore ├── .huskyrc.js ├── Readme.md ├── commitlint.config.js ├── jest.config.js ├── lint-staged.config.js ├── manual-releases.md ├── package.json ├── src ├── compileCodeEsbuild.ts ├── getConfiguredTypeScriptFunction.ts ├── runUpdateTypeScriptFunction.ts ├── snakeToPascal.spec.ts ├── snakeToPascal.ts ├── typeScriptFunction.ts ├── update-typescript-function.ts └── updateTypeScriptFunction.ts ├── tsconfig.json └── yarn.lock /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "parser": "@typescript-eslint/parser", 3 | "parserOptions": { 4 | "project": "./tsconfig.json" 5 | }, 6 | "settings": { 7 | "import/resolver": { 8 | "typescript": {} 9 | } 10 | }, 11 | "extends": [ 12 | "airbnb-typescript/base", 13 | "eslint:recommended", 14 | "plugin:@typescript-eslint/eslint-recommended", 15 | "plugin:@typescript-eslint/recommended", 16 | "prettier/@typescript-eslint", 17 | "plugin:prettier/recommended" 18 | ], 19 | "plugins": ["@typescript-eslint", "prettier"], 20 | "rules": { 21 | "import/no-extraneous-dependencies": [ 22 | "error", 23 | { 24 | "devDependencies": [ 25 | "**/*.test.ts", 26 | "**/*.spec.ts" 27 | ] 28 | } 29 | ], 30 | "prettier/prettier": "error", 31 | "import/prefer-default-export": 0, 32 | "no-underscore-dangle": 0, 33 | "no-cond-assign": [ 34 | "error", 35 | "except-parens" 36 | ], 37 | "import/no-cycle": 0, 38 | "@typescript-eslint/interface-name-prefix": 0, 39 | "@typescript-eslint/no-use-before-define": 0 40 | }, 41 | "env": { 42 | "jest": true 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /.github/workflows/release.yaml: -------------------------------------------------------------------------------- 1 | name: Release 2 | on: push 3 | jobs: 4 | test: 5 | runs-on: ubuntu-18.04 6 | steps: 7 | - uses: actions/checkout@v2 8 | - uses: actions/setup-node@v1 9 | with: 10 | node-version: '12' 11 | - run: yarn 12 | - run: yarn test 13 | - run: yarn build 14 | - name: Release 15 | env: 16 | NPM_TOKEN: ${{ secrets.NPM_TOKEN }} 17 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 18 | run: npx semantic-release 19 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .idea 2 | node_modules 3 | coverage 4 | build/ 5 | .cache 6 | .nyc_output 7 | yarn-error.log 8 | .eslintcache 9 | -------------------------------------------------------------------------------- /.huskyrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | hooks: { 3 | "commit-msg": "commitlint -E HUSKY_GIT_PARAMS", 4 | "pre-commit": "lint-staged", 5 | }, 6 | }; 7 | -------------------------------------------------------------------------------- /Readme.md: -------------------------------------------------------------------------------- 1 | # cdk-typescript-tooling 2 | 3 | AWS is great. 4 | Lambdas changed the way we think about programming. 5 | CDK makes it all even better. 6 | 7 | Nonetheless, working with these technologies we've encountered some pain points. We want to address a few of them in this package. 
8 | 
9 | - streamlining the build of TypeScript code
10 | - speeding up the development feedback loop with lambda updates
11 | - streamlined/shared error logging
12 | - "smart defaults" for exposing lambdas through HTTP
13 | - Lambda and DynamoDB tables dependency management
14 | 
15 | ## CDK TypeScript resource
16 | 
17 | ### Why?
18 | 
19 | An all-TypeScript codebase - infrastructure as code with CDK alongside the application code - is a pleasure to work with.
20 | Deploying it, not so much.
21 | 
22 | You can transpile your code to a new directory, copy package.json there, install production dependencies, and then have CDK send that whole folder to lambda. It's painful to set up, and it will bring a ton of unnecessary code (everything in node_modules) along.
23 | Webpack is better - it can take an entrypoint and create a small bundled file that includes only what you need.
24 | Unfortunately, no one likes to set up complex and diverging webpack configurations, especially in a monorepo with tens if not hundreds of tiny packages.
25 | 
26 | ### What?
27 | 
28 | We wanted the TypeScript Lambda experience to be seamless - if developers want to create a new function, they should create a TypeScript file, add it to CDK, and that's it. Now you can do it like so:
29 | 
30 | ```typescript
31 | import { TypeScriptFunction } from "cdk-typescript-tooling";
32 | // ...
33 | const statusHandle = new TypeScriptFunction(scope, "Purchase-Status-Endpoint", {
34 |   entry: require.resolve("@sales/purchase-endpoint/src/handler.ts"),
35 | });
36 | ```
37 | 
38 | It takes all the parameters that you know from [@aws-cdk/aws-lambda](https://docs.aws.amazon.com/cdk/api/latest/docs/aws-lambda-readme.html), like `runtime`, `environment`, `timeout`, and so on, because we extend that construct.
39 | 
40 | ## Instant updates (development)
41 | 
42 | ### Why?
43 | 
44 | Deploying the whole stack every time you want to check your changes is tiresome and boring.
45 | If you do it by pushing to a CI system - it's even slower.
46 | If you do it locally, it's still slow. And if your build relies on multiple secrets (like most do), you can't even do it properly from your local dev environment.
47 | Changing files inline through the Lambda console is painful - you can't paste TypeScript code because that will result in syntax errors. You also risk forgetting about some changes in the code and losing them after the next push, or - even worse - QAing and approving the functionality and merging to master, even though the code in the repository does not have the required fix. It's a mess :)
48 | 
49 | ### What?
50 | 
51 | Using the TypeScriptFunction from our tool gives you the ability to use the `update-typescript-function` command.
52 | 
53 | #### Updating all functions:
54 | 
55 | Assuming your stack is declared at `./src/cdk.ts`, run it like this:
56 | 
57 | ```
58 | npx update-typescript-function ./src/cdk.ts
59 | ```
60 | 
61 | And it will quickly and automatically update all TypeScript Lambda functions found in your CDK stack.
62 | 
63 | #### Configuration:
64 | 
65 | Actually, you might need to do a few exports first... ;-)
66 | 
67 | ```
68 | export AWS_SECRET_ACCESS_KEY=SECRET_ACCESS_KEY
69 | export AWS_ACCESS_KEY_ID=ACCESS_KEY
70 | export AWS_REGION=us-east-2
71 | ```
72 | 
73 | In the future we do want to read those from ~/.aws/credentials, but for now please export the values.
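If you already have the AWS CLI configured locally, one way to fill these in is to pull the values out of your existing profile (this sketch assumes a plain access-key based default profile; add `--profile <name>` to each command if you use a named one):

```
export AWS_ACCESS_KEY_ID=$(aws configure get aws_access_key_id)
export AWS_SECRET_ACCESS_KEY=$(aws configure get aws_secret_access_key)
export AWS_REGION=$(aws configure get region)
```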
74 | 
75 | We need your CDK file to export a stack; in most cases you will do something like this:
76 | 
77 | ```typescript
78 | import * as cdk from "@aws-cdk/core";
79 | import { SalesSystem } from "./SalesSystem";
80 | 
81 | const baseStackName = "SalesSystemExample";
82 | export default new SalesSystem(new cdk.App(), baseStackName);
83 | ```
84 | 
85 | If you need to do something async before returning a stack, a pattern like this should work:
86 | 
87 | ```typescript
88 | export default (async () => {
89 |   const stackSuffix = await getStackSuffix(baseStackName);
90 |   return new SalesSystem(new cdk.App(), `${baseStackName}${stackSuffix}`);
91 | })();
92 | ```
93 | 
94 | > We like to deploy a stack per branch, so all our branches have different stack names and also differently named (suffixed) resources. Because of that we rely on the branch name to cache your stack information. Worst case, the cache gets built multiple times with the same data.
95 | 
96 | #### Updating a single function:
97 | 
98 | The compilation and uploading of functions happen in parallel. Because of that it is crazy fast (<10 s for ~20 functions), and in most cases updating everything is what you should be doing. It also comes with the advantage that if you change code that's used by a few different functions, all of them will be redeployed. Sometimes you might not realize that some piece of code is used in multiple places and get yourself into some weird inconsistent state.
99 | But if you must, or if you have hundreds of functions in the stack, it's simple: get the Logical ID of the function (using the AWS CLI or the CloudFormation console), and run:
100 | 
101 | ```
102 | npx update-typescript-function ./src/cdk.ts PurchaseEndpointIKYULFRNR9VJ
103 | ```
104 | 
105 | ## Error logging
106 | 
107 | ### Why?
108 | 
109 | Having multiple independent lambda functions is great, but it comes at the price of harder monitoring.
110 | We like to be notified of things going wrong as early as possible and in an automated fashion. New lambda functions should be connected to the system with minimal setup.
111 | 
112 | ### What?
113 | 
114 | Our TypeScriptFunction has a built-in ability to send error logs to a lambda handler you pass in.
115 | First, create a logHandler:
116 | 
117 | ```typescript
118 | import { CloudWatchLogsDecodedData, CloudWatchLogsHandler } from "aws-lambda";
119 | import zlib from "zlib";
120 | 
121 | export const handler: CloudWatchLogsHandler = async (event, context) => {
122 |   const compressedPayload = Buffer.from(event.awslogs.data, "base64");
123 |   const jsonPayload = zlib.gunzipSync(compressedPayload).toString("utf8");
124 |   const parsed: CloudWatchLogsDecodedData = JSON.parse(jsonPayload);
125 |   console.log(parsed);
126 | };
127 | ```
128 | 
129 | This is the simplest possible handler - it just logs errors to a CloudWatch stream that aggregates errors from all your lambda functions.
130 | 
131 | Now, in your CDK code, define a TypeScriptFunction that will deploy that code and assign its handle to a variable.
132 | 
133 | ```typescript
134 | import { SubscriptionFilter, FilterPattern } from "@aws-cdk/aws-logs";
135 | import * as LogsDestinations from "@aws-cdk/aws-logs-destinations";
136 | //...
137 | const logHandle = new TypeScriptFunction(scope, "logHandler", {
138 |   entry: require.resolve("@sales/logHandler/src/handler.ts"),
139 | });
140 | ```
141 | 
142 | Pass it to an existing function like so:
143 | 
144 | ```typescript
145 | new TypeScriptFunction(scope, "Purchase-Status-Endpoint", {
146 |   entry: require.resolve("@sales/purchase-endpoint/src/handler.ts"),
147 |   logFunction: logHandle,
148 | });
149 | ```
150 | 
151 | Now, whenever any error (a console.error or an exception) shows up in the Purchase-Status-Endpoint, it will be passed to and displayed by the logHandler.
152 | Obviously, the usefulness of that increases with the number of lambdas you have. :-)
153 | 
154 | Enjoy!
155 | 
156 | ## Easily expose through HTTP
157 | 
158 | ### Why?
159 | 
160 | In our development, exposing lambdas over HTTP is a very frequent case. The code around it stays mostly the same every time and only adds noise.
161 | We define the function:
162 | 
163 | ```typescript
164 | const handler = new TypeScriptFunction(stack, "Add-Function", {
165 |   entry: require.resolve("@calculator/add/src/handler.ts"),
166 | });
167 | ```
168 | 
169 | Add an HttpApi using LambdaProxyIntegration:
170 | 
171 | ```typescript
172 | const statusApi = new apiGateway2.HttpApi(stack, "PurchaseStatusHttpApi", {
173 |   defaultIntegration: new apiGateway2Integrations.LambdaProxyIntegration({
174 |     handler,
175 |   }),
176 | });
177 | ```
178 | 
179 | Add the URL to a CfnOutput so that, among other things, you can see it in CI/CD logs:
180 | 
181 | ```typescript
182 | new CfnOutput(stack, "addUrl", {
183 |   value: statusApi.url,
184 | });
185 | ```
186 | 
187 | ### What?
188 | 
189 | Define your function with the `withHttp` option like so:
190 | 
191 | ```typescript
192 | new TypeScriptFunction(stack, "Add-Function", {
193 |   entry: require.resolve("@calculator/add/src/handler.ts"),
194 |   withHttp: true,
195 | });
196 | ```
197 | 
198 | ...and the other two steps will be done for you automatically.
199 | 
200 | ## Lambda and DynamoDB tables dependency management
201 | 
202 | ### Why?
203 | 
204 | Managing dependencies between Lambdas and DynamoDB tables can get ugly.
205 | The default way of allowing a Lambda function to access DynamoDB looks like this:
206 | 
207 | ```typescript
208 | const tableHandle = new dynamodb.Table(stack, "Table", {
209 |   partitionKey: { name: "id", type: dynamodb.AttributeType.STRING },
210 | });
211 | 
212 | const functionHandle = new TypeScriptFunction(stack, "Add-Function", {
213 |   entry: require.resolve("@calculator/add/src/handler.ts"),
214 |   environment: {
215 |     TABLE_NAME: tableHandle.tableName, // adding the table name to the environment
216 |   },
217 | });
218 | 
219 | tableHandle.grantReadWriteData(functionHandle); // grant the lambda access
220 | ```
221 | 
222 | And then in your code you'd do:
223 | 
224 | ```typescript
225 | await this.documentClient
226 |   .scan({ TableName: process.env.TABIE_NAME }) // using the env variable from the lambda definition
227 |   .promise();
228 | ```
229 | 
230 | As you probably already know, this pattern comes with some potential issues.
231 | 
232 | 1. First is the problematic usage in code - there is no way to verify that the environment variable name set on the function
233 |    matches what you are trying to access from the code. (I actually did put a typo there, did you spot it?)
   Although there are ways to mitigate this a bit - for example, never use the env variables directly, but have centralized functions that do that, like:
235 | 
236 | ```typescript
237 | const getTableName = () => process.env.TABLE_NAME;
238 | ```
239 | 
240 | Still, no verification is happening, and if someone removes the environment variable or changes its name, you won't know until you get a runtime error.
241 | 
242 | 2. Another problem is the need to pass handlers around. For small stacks that might actually have only one function and one table, that's a non-issue, but if you have a large application with tens or even hundreds of lambdas and multiple tables, it gets ugly.
243 | 
244 | 3. Related to number 2 - since you have to pass things around, they have to be introduced in order. Let's say we want to add a lambda that watches the stream of events in that table and maybe creates some cache or aggregation in another table. It will have to be declared after the initial table. Then let's add another function that reads from that cache. It might seem like that order is correct, and if you are happy to keep things that way - great! Nonetheless, you should not be forced to. Sometimes it makes more sense to group and order things by functionality, not by their dependency order.
245 | 
246 | 4. You have to remember to grant the lambda function permission to read the table. It seems like a sensible thing to do, but when you think about it - it wouldn't make sense to add the environment variable if we didn't also grant the permissions. Similarly, it would not make sense to grant permissions if we didn't somehow expose information to the lambda about how to connect to the table. That means we should be able to do this in one step. (Again, a frequent source of errors that are only visible at runtime.)
247 | 
248 | 5. Handlers are only typed as a generic CDK Lambda/DynamoDB Table. That means, if you need to pass many of them around, there is no way to see a problem before - again - a runtime error.
249 |    Consider a lambda function that requires access to multiple tables:
250 | 
251 | ```typescript
252 | const createTablesAggregator = (
253 |   stack: Stack,
254 |   someTable: ITable,
255 |   otherTable: ITable,
256 |   yetAnotherTable: ITable
257 | ) => {
258 |   new TypeScriptFunction(stack, "Aggregator-Function", {
259 |     entry: require.resolve("@calculator/aggregator/handler.ts"),
260 |     environment: {
261 |       SOME_TABLE: someTable.tableName,
262 |       OTHER_TABLE: otherTable.tableName,
263 |       YET_ANOTHER_TABLE: yetAnotherTable.tableName,
264 |     },
265 |   });
266 | };
267 | ```
268 | 
269 | and then somewhere else you would call:
270 | 
271 | ```typescript
272 | createTablesAggregator(stack, someTable, yetAnotherTable, otherTable);
273 | ```
274 | 
275 | TypeScript would have no way of catching this mistake - everything would deploy. Best case, things would not work; worst case, you might mess up the tables that were passed in the wrong order (maybe the table schemas were compatible, and your code successfully did an operation that should have happened in the other table). Again - for a small stack this might seem like a non-issue. However, once you have a large one, with multiple people changing the CDK code at the same time, it's very easy to mess this up.
276 | 
277 | ### What?
278 | 
279 | By now you are hopefully convinced that there are areas for improvement. Our solution is based on having a central "registry" for Lambdas and DynamoDB tables.
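As a rough sketch of what setting up such a registry can look like (the enum members, the import path, and the `registerTable` wrapper below are illustrative - the factory itself lives in this repo's `src/getConfiguredTypeScriptFunction.ts`):

```typescript
import * as cdk from "@aws-cdk/core";
import * as dynamodb from "@aws-cdk/aws-dynamodb";
// Illustrative import path - point it at the compiled getConfiguredTypeScriptFunction
// exported from this package's src/getConfiguredTypeScriptFunction.ts.
import getConfiguredTypeScriptFunction from "cdk-typescript-tooling/build/getConfiguredTypeScriptFunction";

// Numeric enums give the registry both its keys and the string names it derives env vars from.
export enum AvailableLambdas {
  ADD,
  AGGREGATOR,
}

export enum AvailableTables {
  TABLE,
}

export const {
  ToolkitFunction,
  addTables,
  addLambdas,
  addStreams,
  registerTable: putTableInRegistry,
  registerLambda,
} = getConfiguredTypeScriptFunction(AvailableLambdas, AvailableTables);

// The "custom wrapper" used in the next snippet: create the table and register it under its enum name.
export const registerTable = (
  stack: cdk.Stack,
  tableName: AvailableTables,
  props: dynamodb.TableProps
) => {
  const table = new dynamodb.Table(stack, AvailableTables[tableName], props);
  putTableInRegistry(tableName, table);
  return table;
};
```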
280 | 
281 | The registry allows you to later reference those constructs by name, instead of passing them around (which takes care of problems 2, 3, and 5).
282 | 
283 | ```typescript
284 | registerTable(stack, AvailableTables.TABLE, {
285 |   partitionKey: { name: "id", type: dynamodb.AttributeType.STRING },
286 | }); // registerTable is a custom wrapper, trivial to implement yourself - see the sketch above
287 | 
288 | new ToolkitFunction(stack, AvailableLambdas.ADD, {
289 |   entry: require.resolve("@calculator/add/src/handler.ts"),
290 |   addDependencies: [addTables(AvailableTables.TABLE)],
291 | });
292 | ```
293 | 
294 | Using addDependencies automatically adds the permissions (read/write by default; it would be trivial to add an option for more limited permissions) - which takes care of problem number 4.
295 | 
296 | We are left with problem number 1, which is solved by using a helper function in your code:
297 | 
298 | ```typescript
299 | export const getDynamoTableName = (tableName: AvailableTables) =>
300 |   process.env[`DYNAMODB_${AvailableTables[tableName]}`];
301 | 
302 | getDynamoTableName(AvailableTables.TABLE);
303 | ```
304 | 
305 | To see how this all connects together, take a look at the [dependencyManagement branch of xolvio/aws-sales-system-example](https://github.com/xolvio/aws-sales-system-example/tree/dependencyManagement).
306 | 
307 | 
308 | ## State of the project
309 | 
310 | This is a Proof of Concept. It works for us, and you can play around with it using this demo repo: [xolvio/aws-sales-system-example](https://github.com/xolvio/aws-sales-system-example/tree/async-invocation)
311 | Please let us know if you hit any problems.
312 | Please do NOT use the updater for updating your production code. That should be a no-go even after this project becomes battle-tested.
313 | 
--------------------------------------------------------------------------------
/commitlint.config.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 |   extends: ["@commitlint/config-conventional"],
3 | };
4 | 
--------------------------------------------------------------------------------
/jest.config.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 |   preset: "ts-jest",
3 |   testEnvironment: "node",
4 | };
5 | 
--------------------------------------------------------------------------------
/lint-staged.config.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 |   "*.{t,j}s": ["eslint --cache --fix", "jest --findRelatedTests"],
3 |   "package.json": ["sort-package-json"],
4 | };
5 | 
--------------------------------------------------------------------------------
/manual-releases.md:
--------------------------------------------------------------------------------
1 | # manual-releases
2 | 
3 | This project has an automated release set up. So things are only released when
4 | there are useful changes in the code that justify a release. But sometimes
5 | things get messed up one way or another and we need to trigger the release
6 | ourselves. When this happens, simply bump the number below and commit that with
7 | the following commit message based on your needs:
8 | 
9 | **Major**
10 | 
11 | ```
12 | fix(release): manually release a major version
13 | 
14 | There was an issue with a major release, so this manual-releases.md
15 | change is to release a new major version.
16 | 17 | Reference: # 18 | 19 | BREAKING CHANGE: 20 | ``` 21 | 22 | **Minor** 23 | 24 | ``` 25 | feat(release): manually release a minor version 26 | 27 | There was an issue with a minor release, so this manual-releases.md 28 | change is to release a new minor version. 29 | 30 | Reference: # 31 | ``` 32 | 33 | **Patch** 34 | 35 | ``` 36 | fix(release): manually release a patch version 37 | 38 | There was an issue with a patch release, so this manual-releases.md 39 | change is to release a new patch version. 40 | 41 | Reference: # 42 | ``` 43 | 44 | The number of times we've had to do a manual release is: 4 45 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cdk-typescript-tooling", 3 | "version": "0.0.0-development", 4 | "description": "", 5 | "keywords": [], 6 | "homepage": "https://github.com/xolvio/cdk-typescript-tooling#readme", 7 | "bugs": { 8 | "url": "https://github.com/xolvio/cdk-typescript-tooling/issues" 9 | }, 10 | "repository": { 11 | "type": "git", 12 | "url": "git+https://github.com/xolvio/cdk-typescript-tooling.git" 13 | }, 14 | "license": "ISC", 15 | "author": "Lukasz Gandecki ", 16 | "main": "build/typeScriptFunction.js", 17 | "bin": { 18 | "update-typescript-function": "build/update-typescript-function.js" 19 | }, 20 | "scripts": { 21 | "build": "rm -rf build/* && tsc --declaration", 22 | "semantic-release": "semantic-release", 23 | "test": "jest --passWithNoTests", 24 | "type-check": "tsc --noEmit" 25 | }, 26 | "dependencies": { 27 | "@aws-cdk/assert": "1.96.0", 28 | "@types/shelljs": "0.8.8", 29 | "esbuild": "^0.11.5", 30 | "find-up": "5.0.0", 31 | "shelljs": "0.8.4", 32 | "ts-node": "9.0.0", 33 | "tsconfig-paths-webpack-plugin": "^3.3.0", 34 | "webpack": "^5.8.0", 35 | "webpack-cli": "^4.2.0" 36 | }, 37 | "devDependencies": { 38 | "@aws-cdk/aws-apigatewayv2": "1.96.0", 39 | "@aws-cdk/aws-apigatewayv2-integrations": "1.96.0", 40 | "@aws-cdk/aws-dynamodb": "1.96.0", 41 | "@aws-cdk/aws-lambda": "1.96.0", 42 | "@aws-cdk/aws-lambda-event-sources": "1.96.0", 43 | "@aws-cdk/aws-logs": "1.96.0", 44 | "@aws-cdk/aws-logs-destinations": "1.96.0", 45 | "@aws-cdk/core": "1.96.0", 46 | "@commitlint/cli": "8.3.5", 47 | "@commitlint/config-conventional": "8.3.4", 48 | "@tsconfig/node10": "^1.0.7", 49 | "@typescript-eslint/eslint-plugin": "4.5.0", 50 | "@typescript-eslint/parser": "4.5.0", 51 | "aws-sdk": "^2.782.0", 52 | "eslint": "7.12.0", 53 | "eslint-config-airbnb-typescript": "12.0.0", 54 | "eslint-config-prettier": "6.14.0", 55 | "eslint-import-resolver-typescript": "2.3.0", 56 | "eslint-plugin-import": "2.22.1", 57 | "eslint-plugin-jest": "24.1.0", 58 | "eslint-plugin-prettier": "3.1.4", 59 | "husky": "^4.3.0", 60 | "jest": "^26.6.1", 61 | "lint-staged": "^10.5.0", 62 | "prettier": "^2.1.2", 63 | "semantic-release": "^17.2.2", 64 | "sort-package-json": "^1.46.1", 65 | "ts-jest": "^26.4.3", 66 | "typescript": "^4.0.5" 67 | }, 68 | "peerDependencies": { 69 | "@aws-cdk/assert": "*", 70 | "@aws-cdk/aws-apigatewayv2": "*", 71 | "@aws-cdk/aws-apigatewayv2-integrations": "*", 72 | "@aws-cdk/aws-dynamodb": "*", 73 | "@aws-cdk/aws-lambda": "*", 74 | "@aws-cdk/aws-lambda-event-sources": "*", 75 | "@aws-cdk/aws-logs": "*", 76 | "@aws-cdk/aws-logs-destinations": "*", 77 | "@aws-cdk/core": "*", 78 | "aws-sdk": "*" 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /src/compileCodeEsbuild.ts: 
-------------------------------------------------------------------------------- 1 | import * as esbuild from "esbuild"; 2 | import fs from "fs"; 3 | import path from "path"; 4 | import os from "os"; 5 | import * as shelljs from "shelljs"; 6 | 7 | export const compileCodeEsbuild = ({ 8 | modulesToIgnore = [], 9 | entryFullPath, 10 | async, 11 | forceCompile, 12 | }: { 13 | modulesToIgnore?: string[]; 14 | entryFullPath: string; 15 | async?: boolean; 16 | forceCompile?: boolean; 17 | }): { outputDir: string } | Promise<{ outputDir: string }> => { 18 | const outputDir = fs.mkdtempSync( 19 | path.join(os.tmpdir(), "aws-lambda-nodejs-webpack") 20 | ); 21 | 22 | const options: esbuild.BuildOptions = { 23 | entryPoints: [entryFullPath], 24 | bundle: true, 25 | platform: "node", 26 | keepNames: true, 27 | external: ["aws-sdk", ...modulesToIgnore], 28 | outfile: path.join(outputDir, "main.js"), 29 | minify: true, 30 | sourcemap: true, 31 | }; 32 | if (process.env.NODE_ENV !== "test" || forceCompile) { 33 | if (async) { 34 | return new Promise((resolve) => { 35 | esbuild 36 | .build(options) 37 | .then((res) => { 38 | resolve({ outputDir }); 39 | }) 40 | .catch(() => process.exit(1)); 41 | }); 42 | } 43 | esbuild.buildSync(options); 44 | 45 | // this is incorrectly typed in shelljs, the array returns an object 46 | // eslint-disable-next-line @typescript-eslint/ban-ts-comment 47 | // @ts-ignore 48 | const compiledFiles = shelljs.ls("-l", outputDir).map((a) => a.name); 49 | if (compiledFiles.length === 0) { 50 | console.error( 51 | `No files compiled for: ${entryFullPath}. Something probably went wrong.` 52 | ); 53 | } 54 | } 55 | 56 | return { outputDir }; 57 | }; 58 | -------------------------------------------------------------------------------- /src/getConfiguredTypeScriptFunction.ts: -------------------------------------------------------------------------------- 1 | import * as dynamodb from "@aws-cdk/aws-dynamodb"; 2 | import * as awsLambda from "@aws-cdk/aws-lambda"; 3 | import { 4 | DynamoEventSourceProps, 5 | DynamoEventSource, 6 | } from "@aws-cdk/aws-lambda-event-sources"; 7 | import { TypeScriptFunction } from "./typeScriptFunction"; 8 | import { snakeToPascal } from "./snakeToPascal"; 9 | 10 | type DynamoWithStream = { 11 | table: dynamodb.ITable; 12 | stream?: dynamodb.ITable; 13 | }; 14 | 15 | export default ( 16 | AvailableLambdas: { 17 | [key: number]: string; 18 | }, 19 | AvailableTables: { [key: number]: string } 20 | ) => { 21 | const tables: { [key in T]?: DynamoWithStream } = {}; 22 | const lambdas: { [key in L]?: TypeScriptFunction } = {}; 23 | 24 | const registerTable = ( 25 | tableName: T, 26 | table: dynamodb.ITable, 27 | stream?: dynamodb.ITable 28 | ) => { 29 | tables[tableName] = { table, stream }; 30 | }; 31 | 32 | const registerLambda = (lambdaName: L, lambdaHandler: TypeScriptFunction) => { 33 | lambdas[lambdaName] = lambdaHandler; 34 | }; 35 | 36 | const addLambdas = (...lambdaNames: L[]) => (handle: TypeScriptFunction) => { 37 | lambdaNames.forEach((lambdaName) => { 38 | const lambda = lambdas[lambdaName] as TypeScriptFunction; 39 | 40 | handle.addEnvironment( 41 | `LAMBDA_${AvailableLambdas[lambdaName]}`, 42 | lambda.functionName 43 | ); 44 | if (lambda.url) { 45 | handle.addEnvironment( 46 | `LAMBDA_${AvailableLambdas[lambdaName]}_URL`, 47 | lambda.url 48 | ); 49 | } 50 | lambda.grantInvoke(handle); 51 | }); 52 | }; 53 | 54 | const addTables = (...tableNames: T[]) => (handle: TypeScriptFunction) => { 55 | tableNames.forEach((tableName) => { 56 | const table = 
tables[tableName]!.table as dynamodb.ITable; 57 | 58 | handle.addEnvironment( 59 | `DYNAMODB_${AvailableTables[tableName]}`, 60 | table.tableName 61 | ); 62 | table.grantReadWriteData(handle); 63 | }); 64 | }; 65 | 66 | const addStreams = ( 67 | ...tableStreams: (T | { stream: T; streamOpts: DynamoEventSourceProps })[] 68 | ) => (handle: TypeScriptFunction) => { 69 | tableStreams.forEach((tableOrObject) => { 70 | if ("stream" in tableOrObject) { 71 | const table = tables[tableOrObject.stream]!.stream as dynamodb.ITable; 72 | const eventSource = new DynamoEventSource( 73 | table, 74 | tableOrObject.streamOpts 75 | ); 76 | handle.addEventSource(eventSource); 77 | } else { 78 | const table = tables[tableOrObject]!.stream as dynamodb.ITable; 79 | const eventSource = new DynamoEventSource(table, { 80 | startingPosition: awsLambda.StartingPosition.TRIM_HORIZON, 81 | batchSize: 10, 82 | bisectBatchOnError: true, 83 | retryAttempts: 3, 84 | }); 85 | handle.addEventSource(eventSource); 86 | } 87 | }); 88 | }; 89 | 90 | return { 91 | addLambdas, 92 | addTables, 93 | addStreams, 94 | registerTable, 95 | registerLambda, 96 | ToolkitFunction: class ToolkitFunction extends TypeScriptFunction { 97 | constructor( 98 | scope: ConstructorParameters[0], 99 | id: L, 100 | props: ConstructorParameters[2] 101 | ) { 102 | const lambdaName = AvailableLambdas[id]; 103 | 104 | super(scope, snakeToPascal(lambdaName), props); 105 | registerLambda(id, this); 106 | return this as TypeScriptFunction; 107 | } 108 | }, 109 | }; 110 | }; 111 | -------------------------------------------------------------------------------- /src/runUpdateTypeScriptFunction.ts: -------------------------------------------------------------------------------- 1 | process.env.NODE_ENV = "test"; 2 | const [stackPath, functionPhysicalId] = process.argv.slice(2); 3 | 4 | // eslint-disable-next-line @typescript-eslint/no-var-requires 5 | require("./updateTypeScriptFunction")( 6 | functionPhysicalId, 7 | stackPath 8 | ).then(() => {}); 9 | -------------------------------------------------------------------------------- /src/snakeToPascal.spec.ts: -------------------------------------------------------------------------------- 1 | import { snakeToPascal } from "./snakeToPascal"; 2 | 3 | test("Turns snake to Pascal", () => { 4 | expect(snakeToPascal("ABC_DEF_GFH")).toEqual("AbcDefGfh"); 5 | }); 6 | 7 | test("Leaves things alone if not a snake", () => { 8 | expect(snakeToPascal("AbcDefGfh")).toEqual("AbcDefGfh"); 9 | }); 10 | -------------------------------------------------------------------------------- /src/snakeToPascal.ts: -------------------------------------------------------------------------------- 1 | export const snakeToPascal = (name: string) => { 2 | const splitName = name.split("_"); 3 | return splitName.length > 1 4 | ? 
splitName 5 | .map( 6 | (str) => 7 | str.slice(0, 1).toUpperCase() + 8 | str.slice(1, str.length).toLowerCase() 9 | ) 10 | .join("") 11 | : splitName[0]; 12 | }; 13 | -------------------------------------------------------------------------------- /src/typeScriptFunction.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable no-new */ 2 | import * as fs from "fs"; 3 | import * as path from "path"; 4 | import * as process from "process"; 5 | 6 | import * as lambda from "@aws-cdk/aws-lambda"; 7 | import * as cdk from "@aws-cdk/core"; 8 | import { FilterPattern, SubscriptionFilter } from "@aws-cdk/aws-logs"; 9 | import { CfnCondition, CfnOutput, Fn } from "@aws-cdk/core"; 10 | import * as LogsDestinations from "@aws-cdk/aws-logs-destinations"; 11 | import * as apiGateway2 from "@aws-cdk/aws-apigatewayv2"; 12 | import * as apiGateway2Integrations from "@aws-cdk/aws-apigatewayv2-integrations"; 13 | 14 | import { compileCodeEsbuild } from "./compileCodeEsbuild"; 15 | 16 | let functionsToRunAfter: (() => void)[] = []; 17 | 18 | export const compileCode = compileCodeEsbuild; 19 | 20 | /** 21 | * Properties for a NodejsFunction 22 | */ 23 | export interface NodejsFunctionProps extends lambda.FunctionOptions { 24 | addDependencies?: ((self: TypeScriptFunction) => void)[]; 25 | /** 26 | * Path to the entry file (JavaScript or TypeScript), relative to your project root 27 | */ 28 | readonly entry: string; 29 | 30 | /** 31 | * The name of the exported handler in the entry file. 32 | * 33 | * @default "handler" 34 | */ 35 | readonly handler?: string; 36 | 37 | /** 38 | * The runtime environment. Only runtimes of the Node.js family are 39 | * supported. 40 | * 41 | * @default - `NODEJS_12_X` if `process.versions.node` >= '12.0.0', 42 | * `NODEJS_10_X` otherwise. 43 | */ 44 | readonly runtime?: lambda.Runtime; 45 | 46 | /** 47 | * If you get "Module not found: Error: Can't resolve 'module_name'" errors, and you're not 48 | * actually using those modules, then it means there's a module you're using that is trying to 49 | * dynamically require other modules. This is the case with Knex.js. When this happens, pass all the modules 50 | * names found in the build error in this array. 51 | * 52 | * Example if you're only using PostgreSQL with Knex.js, use: 53 | * `modulesToIgnore: ["mssql", "pg-native", "pg-query-stream", "tedious"]` 54 | */ 55 | readonly modulesToIgnore?: string[]; 56 | 57 | /** 58 | * Whether to automatically reuse TCP connections when working with the AWS 59 | * SDK for JavaScript. 60 | * 61 | * This sets the `AWS_NODEJS_CONNECTION_REUSE_ENABLED` environment variable 62 | * to `1`. 
63 | * 64 | * @see https://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/node-reusing-connections.html 65 | * 66 | * @default true 67 | */ 68 | readonly awsSdkConnectionReuse?: boolean; 69 | 70 | readonly logFunction?: lambda.Function; 71 | 72 | readonly withHttp?: boolean; 73 | } 74 | 75 | /** 76 | * A Node.js Lambda function bundled using Parcel 77 | */ 78 | export class TypeScriptFunction extends lambda.Function { 79 | public url?: string; 80 | 81 | constructor( 82 | scope: cdk.Construct, 83 | id: string, 84 | props: NodejsFunctionProps = { 85 | entry: "", 86 | modulesToIgnore: [], 87 | withHttp: false, 88 | } 89 | ) { 90 | if (props.runtime && props.runtime.family !== lambda.RuntimeFamily.NODEJS) { 91 | throw new Error("Only `NODEJS` runtimes are supported."); 92 | } 93 | 94 | if (!/\.(js|ts)$/.test(props.entry)) { 95 | throw new Error( 96 | "Only JavaScript or TypeScript entry files are supported." 97 | ); 98 | } 99 | 100 | const entryFullPath = path.resolve(props.entry); 101 | 102 | if (!fs.existsSync(entryFullPath)) { 103 | throw new Error(`Cannot find entry file at ${entryFullPath}`); 104 | } 105 | 106 | const { outputDir } = compileCode({ 107 | modulesToIgnore: props.modulesToIgnore, 108 | entryFullPath, 109 | }) as { outputDir: string }; 110 | 111 | const defaultRunTime = 112 | nodeMajorVersion() >= 12 113 | ? lambda.Runtime.NODEJS_14_X 114 | : lambda.Runtime.NODEJS_10_X; 115 | const runtime = props.runtime ?? defaultRunTime; 116 | 117 | const handler = props.handler ?? "handler"; 118 | 119 | super(scope, id, { 120 | ...props, 121 | runtime, 122 | code: lambda.Code.fromAsset(outputDir), 123 | handler: `main.${handler}`, 124 | }); 125 | 126 | new CfnOutput(scope, `${id}UploadInfoExtended`, { 127 | value: JSON.stringify({ entryFullPath, functionName: this.functionName }), 128 | condition: new CfnCondition(scope, `${id}testEnvCondition`, { 129 | expression: Fn.conditionEquals(process.env.NODE_ENV || "", "test"), 130 | }), 131 | }); 132 | 133 | if (props.logFunction) { 134 | new SubscriptionFilter(scope, `${id}Subscription`, { 135 | logGroup: this.logGroup, 136 | filterPattern: FilterPattern.anyTerm("ERROR"), 137 | destination: new LogsDestinations.LambdaDestination(props.logFunction), 138 | }); 139 | } 140 | 141 | if (props.withHttp) { 142 | const api = new apiGateway2.HttpApi(scope, `${id}HttpApi`, { 143 | defaultIntegration: new apiGateway2Integrations.LambdaProxyIntegration({ 144 | handler: this, 145 | }), 146 | }); 147 | 148 | this.url = api.url as string; 149 | 150 | new CfnOutput(scope, `${id}Url`, { 151 | value: this.url, 152 | }); 153 | } 154 | 155 | // 156 | // Enable connection reuse for aws-sdk 157 | if (props.awsSdkConnectionReuse ?? 
true) { 158 | this.addEnvironment("AWS_NODEJS_CONNECTION_REUSE_ENABLED", "1"); 159 | } 160 | 161 | if (props.addDependencies) { 162 | functionsToRunAfter = [ 163 | ...functionsToRunAfter, 164 | ...props.addDependencies.map((dependencyFunction) => () => 165 | dependencyFunction(this) 166 | ), 167 | ]; 168 | } 169 | 170 | this.addEnvironment("NODE_OPTIONS", "--enable-source-maps"); 171 | } 172 | } 173 | 174 | function nodeMajorVersion(): number { 175 | return parseInt(process.versions.node.split(".")[0], 10); 176 | } 177 | 178 | export const initializeToolkitDependencies = () => { 179 | functionsToRunAfter.forEach((f) => f()); 180 | }; 181 | -------------------------------------------------------------------------------- /src/update-typescript-function.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | import * as shelljs from "shelljs"; 3 | 4 | const command = `ts-node ${__dirname}/runUpdateTypeScriptFunction.js ${process.argv 5 | .slice(2) 6 | .join(" ")}`; 7 | shelljs.exec(command); 8 | -------------------------------------------------------------------------------- /src/updateTypeScriptFunction.ts: -------------------------------------------------------------------------------- 1 | import * as shelljs from "shelljs"; 2 | import * as AWS from "aws-sdk"; 3 | import path from "path"; 4 | import fs from "fs"; 5 | import { SynthUtils } from "@aws-cdk/assert"; 6 | import { exec, execSync } from "child_process"; 7 | import { StackResourceSummaries } from "aws-sdk/clients/cloudformation"; 8 | import { compileCode } from "./typeScriptFunction"; 9 | 10 | const getCompiledInfo = (output: any) => { 11 | const otherInfo = output.Value["Fn::Join"][1]; 12 | const compiledInfoStringified = `${otherInfo[0]}${otherInfo[1].Ref}${otherInfo[2]}`; 13 | return JSON.parse(compiledInfoStringified); 14 | }; 15 | 16 | const compileAndUpload = async ( 17 | entryFullPath: string, 18 | physicalResourceId: string 19 | ) => { 20 | const { outputDir } = await compileCode({ 21 | async: true, 22 | entryFullPath, 23 | forceCompile: true, 24 | }); 25 | return new Promise((resolve) => { 26 | const zippedFunctionPath = `${outputDir}/function.zip`; 27 | const zipCommand = `zip function.zip *`; 28 | exec(zipCommand, { cwd: outputDir }, (error) => { 29 | if (error) { 30 | console.error("Zip Command Error: ", error); 31 | } 32 | const updateCommand = `aws lambda update-function-code --function-name ${physicalResourceId} --zip-file fileb://${zippedFunctionPath}`; 33 | exec(updateCommand, (updateError) => { 34 | if (updateError) { 35 | console.error("Update command error", updateError); 36 | } 37 | console.log( 38 | `Lambda updated for ${path.relative( 39 | process.cwd(), 40 | entryFullPath 41 | )}. 
Run to follow logs:\naws logs tail --follow /aws/lambda/${physicalResourceId}\n` 42 | ); 43 | resolve("updateCommand"); 44 | }); 45 | }); 46 | }); 47 | }; 48 | 49 | const fetchAllStackResources = async (StackName: string) => { 50 | const cloudformation = new AWS.CloudFormation(); 51 | 52 | const stackResources = await cloudformation 53 | .listStackResources({ StackName }) 54 | .promise(); 55 | let { NextToken } = stackResources; 56 | if (stackResources?.StackResourceSummaries?.length) { 57 | const allResources = [...stackResources!.StackResourceSummaries]; 58 | while (NextToken) { 59 | // eslint-disable-next-line no-await-in-loop 60 | const stackResourcesAgain = await cloudformation 61 | .listStackResources({ StackName, NextToken }) 62 | .promise(); 63 | allResources.push( 64 | ...(stackResourcesAgain.StackResourceSummaries as StackResourceSummaries) 65 | ); 66 | ({ NextToken } = stackResourcesAgain); 67 | } 68 | return allResources; 69 | } 70 | throw new Error(`Did not get any stack resources for ${StackName}`); 71 | }; 72 | 73 | const getStackResourcesAndUploadInfos = async ( 74 | stackPath: string 75 | ): Promise<{ 76 | stackResources: StackResourceSummaries; 77 | uploadInfos: [string, any][]; 78 | }> => { 79 | return new Promise((resolve) => { 80 | let cache; 81 | const cacheDirectory = `${process.cwd()}/.update-function-cache`; 82 | const cacheFileName = `${process.env.BRANCH?.replace( 83 | /\//g, 84 | "_" 85 | )}.cache.json`; 86 | const cacheFilePath = `${cacheDirectory}/${cacheFileName}`; 87 | try { 88 | // eslint-disable-next-line global-require,import/no-dynamic-require 89 | cache = require(cacheFilePath); 90 | resolve(cache); 91 | } catch (e) { 92 | console.log("Cache not found for this branch"); 93 | Promise.resolve( 94 | // eslint-disable-next-line @typescript-eslint/no-var-requires,global-require,import/no-dynamic-require 95 | require(path.resolve(process.cwd(), stackPath)).default 96 | ).then(async (stack) => { 97 | const synthesized = SynthUtils.synthesize(stack); // cache 98 | const { stackName } = synthesized; 99 | 100 | const uploadInfos = Object.entries(synthesized.template.Outputs).filter( 101 | ([outputId]) => { 102 | return outputId.indexOf("UploadInfoExtended") > -1; 103 | } 104 | ); 105 | const stackResources = await fetchAllStackResources(stackName); // cache 106 | 107 | shelljs.mkdir("-p", cacheDirectory); 108 | fs.writeFileSync( 109 | cacheFilePath, 110 | JSON.stringify({ stackResources, uploadInfos }) 111 | ); 112 | resolve({ stackResources, uploadInfos }); 113 | }); 114 | } 115 | }); 116 | }; 117 | 118 | module.exports = async (functionLogicalId: string, stackPath: string) => { 119 | if (!process.env.BRANCH) { 120 | process.env.BRANCH = execSync("git branch --show-current") 121 | .toString() 122 | .trim(); 123 | } 124 | if ( 125 | process.env.BRANCH === "master" && 126 | process.env.FORCE_MASTER_UPDATE !== "true" 127 | ) { 128 | throw new Error( 129 | "Uploading to master is disabled by default. 
" + 130 | "We highly discourage doing so, but if you must, set FORCE_MASTER_UPDATE env variable to true" 131 | ); 132 | } 133 | console.log( 134 | `\n\nUsing branch ${process.env.BRANCH} as a base for your deployment, kill this process if that is not correct\n\n` 135 | ); 136 | const { stackResources, uploadInfos } = await getStackResourcesAndUploadInfos( 137 | stackPath 138 | ); 139 | 140 | process.env.NODE_ENV = "development"; 141 | 142 | if (functionLogicalId) { 143 | const resource = stackResources.find( 144 | (r) => r.LogicalResourceId === functionLogicalId 145 | ); 146 | let info: { entryFullPath: string; functionName: string }; 147 | 148 | uploadInfos.find(([_, output]: [string, any]) => { 149 | const compiledInfo = getCompiledInfo(output); 150 | 151 | if (compiledInfo.functionName === resource?.LogicalResourceId) { 152 | info = compiledInfo; 153 | return compiledInfo; 154 | } 155 | return undefined; 156 | }); 157 | 158 | // eslint-disable-next-line @typescript-eslint/ban-ts-comment 159 | // @ts-ignore 160 | if (info) { 161 | await compileAndUpload( 162 | info.entryFullPath, 163 | resource?.PhysicalResourceId as string 164 | ); 165 | } 166 | } else { 167 | await Promise.all( 168 | uploadInfos.map(async ([_, output]: [string, any]) => { 169 | // return new Promise(async (resolve) => { 170 | const compiledInfo = getCompiledInfo(output); 171 | 172 | const resource = stackResources.find( 173 | (r) => r.LogicalResourceId === compiledInfo.functionName 174 | ); 175 | 176 | if (resource) { 177 | await compileAndUpload( 178 | compiledInfo.entryFullPath, 179 | resource.PhysicalResourceId as string 180 | ); 181 | } 182 | // }); 183 | }) 184 | ); 185 | } 186 | }; 187 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "@tsconfig/node10/tsconfig.json", 3 | "compilerOptions": { 4 | "outDir": "build/" 5 | }, 6 | "exclude": [ 7 | "node_modules", 8 | ] 9 | } 10 | --------------------------------------------------------------------------------