├── .github ├── ISSUE_TEMPLATE │ └── generic-issue.md ├── actions │ ├── build-asset │ │ └── action.yml │ ├── change-shell │ │ └── action.yml │ └── setup-node-deps │ │ └── action.yml └── workflows │ ├── ci.yml │ ├── pre-release.yml │ └── release.yml ├── .gitignore ├── .node-version ├── .vscode └── settings.json ├── Cargo.lock ├── Cargo.toml ├── LICENSE ├── README.md ├── benchmark ├── benchmark-payload.json ├── benchmark.ts ├── response-times-average.svg ├── response-times-extremes.svg ├── response-times.md └── traces.json ├── cdk.json ├── deployment ├── bin │ └── stack.ts └── lib │ └── lambda-stack.ts ├── docker-compose.yml ├── package-lock.json ├── package.json ├── src ├── bin │ └── bootstrap.rs └── lib.rs ├── tests └── main_test.rs └── tsconfig.json /.github/ISSUE_TEMPLATE/generic-issue.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Generic Issue 3 | about: Generic issue template 4 | title: '' 5 | labels: untriaged 6 | assignees: '' 7 | 8 | --- 9 | 10 | ## Description 11 | -------------------------------------------------------------------------------- /.github/actions/build-asset/action.yml: -------------------------------------------------------------------------------- 1 | name: "Build asset" 2 | description: "Build the asset required for deployment" 3 | 4 | runs: 5 | using: "composite" 6 | steps: 7 | # Convert /bin/sh to /bin/bash. 8 | - run: | 9 | sudo mv /bin/sh /bin/sh-dash 10 | sudo ln -s /bin/bash /bin/sh 11 | shell: bash 12 | 13 | # Set up our tools. 14 | - name: Install musl-tools 15 | run: | 16 | sudo apt-get install musl-tools 17 | shell: bash 18 | # Run the actual build. 19 | - run: | 20 | npm run build 21 | shell: bash 22 | -------------------------------------------------------------------------------- /.github/actions/change-shell/action.yml: -------------------------------------------------------------------------------- 1 | name: "Change shell" 2 | description: "Convert /bin/sh to /bin/bash" 3 | 4 | runs: 5 | using: "composite" 6 | steps: 7 | # Convert /bin/sh to /bin/bash. 8 | - run: | 9 | sudo mv /bin/sh /bin/sh-dash 10 | sudo ln -s /bin/bash /bin/sh 11 | shell: bash 12 | -------------------------------------------------------------------------------- /.github/actions/setup-node-deps/action.yml: -------------------------------------------------------------------------------- 1 | name: "Setup node dependencies" 2 | description: "Setup all node dependencies necessary for deployment" 3 | 4 | runs: 5 | using: "composite" 6 | steps: 7 | - name: Upgrade NPM to 6.14.8 8 | run: | 9 | npm i -g npm@6.14.8 10 | shell: bash 11 | 12 | - name: "Install dependencies" 13 | run: | 14 | npm ci 15 | shell: bash 16 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: ci 2 | 3 | # Check out https://help.github.com/en/articles/workflow-syntax-for-github-actions for documentation on Actions. 
4 | on: 5 | push: 6 | branches: 7 | - "**" 8 | tags-ignore: 9 | - "*.*" 10 | 11 | jobs: 12 | lint: 13 | runs-on: ubuntu-latest 14 | steps: 15 | - uses: actions/checkout@v2 16 | - uses: actions-rs/toolchain@v1 17 | with: 18 | toolchain: 1.49.0 19 | components: clippy 20 | - uses: actions/cache@v2 21 | with: 22 | path: | 23 | ~/.cargo/registry 24 | ~/.cargo/git 25 | target 26 | key: cargo-${{ hashFiles('**/Cargo.lock') }} 27 | - run: cargo check --all 28 | 29 | fmt: 30 | runs-on: ubuntu-latest 31 | steps: 32 | - uses: actions/checkout@v2 33 | - uses: actions-rs/toolchain@v1 34 | with: 35 | toolchain: 1.49.0 36 | components: rustfmt 37 | - run: cargo fmt --all -- --check 38 | 39 | clippy: 40 | runs-on: ubuntu-latest 41 | steps: 42 | - uses: actions/checkout@v2 43 | - uses: actions-rs/toolchain@v1 44 | with: 45 | toolchain: 1.49.0 46 | components: clippy 47 | - uses: actions/cache@v2 48 | with: 49 | path: | 50 | ~/.cargo/registry 51 | ~/.cargo/git 52 | target 53 | key: cargo-${{ hashFiles('**/Cargo.lock') }} 54 | - run: cargo clippy --all-targets -- -D clippy::all 55 | 56 | test: 57 | runs-on: ubuntu-latest 58 | steps: 59 | - uses: actions/checkout@v2 60 | - uses: actions-rs/toolchain@v1 61 | with: 62 | toolchain: 1.49.0 63 | components: clippy 64 | - uses: actions/cache@v2 65 | with: 66 | path: | 67 | ~/.cargo/registry 68 | ~/.cargo/git 69 | target 70 | key: cargo-${{ hashFiles('**/Cargo.lock') }} 71 | - run: cargo test 72 | 73 | # Generate a static build artifact for usage in deployments. 74 | assets: 75 | runs-on: ubuntu-latest 76 | steps: 77 | - uses: actions/checkout@v2 78 | - uses: actions-rs/toolchain@v1 79 | with: 80 | toolchain: 1.49.0 81 | - uses: actions/cache@v2 82 | with: 83 | path: | 84 | ~/.cargo/registry 85 | ~/.cargo/git 86 | target 87 | key: cargo-${{ hashFiles('**/Cargo.lock') }} 88 | - name: "Build asset" 89 | uses: ./.github/actions/build-asset 90 | # NOTE: Artifacts are not available across workflows. 91 | - uses: actions/upload-artifact@v2 92 | with: 93 | name: bootstrap 94 | path: ./target/cdk/release/bootstrap 95 | if-no-files-found: error 96 | 97 | # Validate our deployment setup by deploying the stack to LocalStack. 
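  # The service container exposes the emulated AWS APIs listed in SERVICES on port 4566, and the
  # cdklocal:bootstrap / cdklocal:deploy scripts below run against it with dummy credentials, so
  # the full CDK flow is exercised without touching a real AWS account.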
98 | localstack: 99 | runs-on: ubuntu-latest 100 | needs: [assets] 101 | environment: 102 | name: local 103 | services: 104 | localstack: 105 | image: localstack/localstack:latest 106 | env: 107 | SERVICES: serverless,cloudformation,iam,sts,sqs,ssm,s3,cloudwatch,cloudwatch-logs,lambda,dynamodb,apigateway 108 | DEFAULT_REGION: eu-west-1 109 | AWS_ACCESS_KEY_ID: localkey 110 | AWS_SECRET_ACCESS_KEY: localsecret 111 | ports: 112 | - 4566:4566 113 | - 4571:4571 114 | steps: 115 | - uses: actions/checkout@v2 116 | - name: Setup Node.js 14.12.0 117 | uses: actions/setup-node@v1 118 | with: 119 | node-version: "14.12.0" 120 | 121 | - uses: actions/cache@v2 122 | with: 123 | path: ~/.npm 124 | key: node-${{ hashFiles('**/package-lock.json') }} 125 | restore-keys: | 126 | node- 127 | - name: "Setup node dependencies" 128 | uses: ./.github/actions/setup-node-deps 129 | 130 | - uses: actions/download-artifact@v2 131 | with: 132 | name: bootstrap 133 | path: ./target/cdk/release 134 | 135 | - name: Bootstrapping LocalStack 136 | run: | 137 | npm run cdklocal:bootstrap 138 | env: 139 | DEFAULT_REGION: eu-west-1 140 | AWS_ACCOUNT_ID: "000000000000" 141 | AWS_ACCESS_KEY_ID: localkey 142 | AWS_SECRET_ACCESS_KEY: localsecret 143 | 144 | - name: Deploying to LocalStack 145 | run: | 146 | npm run cdklocal:deploy 147 | env: 148 | DEFAULT_REGION: eu-west-1 149 | AWS_ACCOUNT_ID: "000000000000" 150 | AWS_ACCESS_KEY_ID: localkey 151 | AWS_SECRET_ACCESS_KEY: localsecret 152 | -------------------------------------------------------------------------------- /.github/workflows/pre-release.yml: -------------------------------------------------------------------------------- 1 | name: pre-release 2 | 3 | # Refer to https://help.github.com/en/articles/workflow-syntax-for-github-actions for documentation on Actions. 4 | on: 5 | release: 6 | types: 7 | - prereleased 8 | 9 | jobs: 10 | # Generate a static build artifact for usage in deployments. 11 | assets: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - uses: actions/checkout@v2 15 | - name: Validate release tag 16 | run: | 17 | # We only want to run on tags looking like refs/tags/v1200. 18 | echo "Checking tag: $GITHUB_REF" 19 | [[ "${GITHUB_REF:10:1}" == "v" ]] || exit 1 20 | - uses: actions-rs/toolchain@v1 21 | with: 22 | toolchain: 1.49.0 23 | 24 | - uses: actions/cache@v2 25 | with: 26 | path: | 27 | ~/.cargo/registry 28 | ~/.cargo/git 29 | target 30 | key: cargo-${{ hashFiles('**/Cargo.lock') }} 31 | - name: "Build asset" 32 | uses: ./.github/actions/build-asset 33 | # NOTE: Artifacts are not available across workflows. 
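      # Because of that, this workflow builds and uploads its own `bootstrap` artifact here,
      # rather than reusing the artifact produced by the ci workflow.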
34 | - uses: actions/upload-artifact@v2 35 | with: 36 | name: bootstrap 37 | path: ./target/cdk/release/bootstrap 38 | if-no-files-found: error 39 | 40 | benchmark: 41 | runs-on: ubuntu-latest 42 | needs: [assets] 43 | environment: 44 | name: benchmark 45 | 46 | steps: 47 | - uses: actions/checkout@v2 48 | - name: Setup Node.js 14.12.0 49 | uses: actions/setup-node@v1 50 | with: 51 | node-version: "14.12.0" 52 | 53 | - uses: actions/cache@v2 54 | with: 55 | path: ~/.npm 56 | key: node-${{ hashFiles('**/package-lock.json') }} 57 | restore-keys: | 58 | node- 59 | - name: "Setup node dependencies" 60 | uses: ./.github/actions/setup-node-deps 61 | - uses: ./.github/actions/change-shell 62 | 63 | - uses: actions/download-artifact@v2 64 | with: 65 | name: bootstrap 66 | path: ./target/cdk/release 67 | 68 | - name: Benchmark 69 | id: benchmark 70 | if: env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY 71 | env: 72 | AWS_ACCESS_KEY_ID: ${{ secrets.BENCHMARK_AWS_ACCESS_KEY_ID }} 73 | AWS_SECRET_ACCESS_KEY: ${{ secrets.BENCHMARK_AWS_SECRET_ACCESS_KEY }} 74 | AWS_REGION: ${{ secrets.BENCHMARK_AWS_REGION }} 75 | CI: "true" 76 | run: | 77 | export BENCHMARK_SUFFIX=$RANDOM 78 | echo "::set-output name=BENCHMARK_SUFFIX::$BENCHMARK_SUFFIX" 79 | npm run benchmark 80 | 81 | # Make sure we always clean up even after failing. 82 | - name: Ensure clean up 83 | if: failure() 84 | env: 85 | AWS_ACCESS_KEY_ID: ${{ secrets.BENCHMARK_AWS_ACCESS_KEY_ID }} 86 | AWS_SECRET_ACCESS_KEY: ${{ secrets.BENCHMARK_AWS_SECRET_ACCESS_KEY }} 87 | AWS_REGION: ${{ secrets.BENCHMARK_AWS_REGION }} 88 | CI: "true" 89 | BENCHMARK_SUFFIX: ${{ steps.benchmark.outputs.BENCHMARK_SUFFIX }} 90 | run: | 91 | npm run benchmark:destroy 92 | 93 | # Only commit the results upon success. 94 | - name: Commit benchmark results 95 | if: success() 96 | uses: codetalkio/add-and-commit@v6 97 | with: 98 | author_name: GitHub Actions 99 | author_email: no-reply@codetalk.io 100 | message: "Bot: Updating benchmark results" 101 | add: "./benchmark/*" 102 | branch: master 103 | 104 | deploy: 105 | runs-on: ubuntu-latest 106 | name: "deploy pre-release" 107 | needs: [assets] 108 | environment: 109 | name: staging 110 | 111 | steps: 112 | - uses: actions/checkout@v2 113 | - name: Setup Node.js 14.12.0 114 | uses: actions/setup-node@v1 115 | with: 116 | node-version: "14.12.0" 117 | 118 | - uses: actions/cache@v2 119 | with: 120 | path: ~/.npm 121 | key: node-${{ hashFiles('**/package-lock.json') }} 122 | restore-keys: | 123 | node- 124 | - name: "Setup node dependencies" 125 | uses: ./.github/actions/setup-node-deps 126 | - uses: ./.github/actions/change-shell 127 | 128 | - uses: actions/download-artifact@v2 129 | with: 130 | name: bootstrap 131 | path: ./target/cdk/release 132 | 133 | - name: Deploy 134 | if: env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY 135 | env: 136 | AWS_ACCESS_KEY_ID: ${{ secrets.PRE_RELEASE_AWS_ACCESS_KEY_ID }} 137 | AWS_SECRET_ACCESS_KEY: ${{ secrets.PRE_RELEASE_AWS_SECRET_ACCESS_KEY }} 138 | AWS_REGION: ${{ secrets.PRE_RELEASE_AWS_REGION }} 139 | CI: "true" 140 | run: | 141 | npm run cdk:deploy 142 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: release 2 | 3 | # Refer to https://help.github.com/en/articles/workflow-syntax-for-github-actions for documentation on Actions. 
4 | on: 5 | release: 6 | types: 7 | - released 8 | 9 | jobs: 10 | # Generate a static build artifact for usage in deployments. 11 | assets: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - uses: actions/checkout@v2 15 | - name: Validate release tag 16 | run: | 17 | # We only want to run on tags looking like refs/tags/v1200. 18 | echo "Checking tag: $GITHUB_REF" 19 | [[ "${GITHUB_REF:10:1}" == "v" ]] || exit 1 20 | - uses: actions-rs/toolchain@v1 21 | with: 22 | toolchain: 1.49.0 23 | 24 | - uses: actions/cache@v2 25 | with: 26 | path: | 27 | ~/.cargo/registry 28 | ~/.cargo/git 29 | target 30 | key: cargo-${{ hashFiles('**/Cargo.lock') }} 31 | - name: "Build asset" 32 | uses: ./.github/actions/build-asset 33 | # NOTE: Artifacts are not available across workflows. 34 | - uses: actions/upload-artifact@v2 35 | with: 36 | name: bootstrap 37 | path: ./target/cdk/release/bootstrap 38 | if-no-files-found: error 39 | 40 | deploy: 41 | runs-on: ubuntu-latest 42 | name: "deploy release" 43 | needs: [assets] 44 | environment: 45 | name: production 46 | 47 | steps: 48 | - uses: actions/checkout@v2 49 | - name: Setup Node.js 14.12.0 50 | uses: actions/setup-node@v1 51 | with: 52 | node-version: "14.12.0" 53 | 54 | - uses: actions/cache@v2 55 | with: 56 | path: ~/.npm 57 | key: node-${{ hashFiles('**/package-lock.json') }} 58 | restore-keys: | 59 | node- 60 | - name: "Setup node dependencies" 61 | uses: ./.github/actions/setup-node-deps 62 | - uses: ./.github/actions/change-shell 63 | 64 | - uses: actions/download-artifact@v2 65 | with: 66 | name: bootstrap 67 | path: ./target/cdk/release 68 | 69 | - name: Deploy 70 | if: env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY 71 | env: 72 | AWS_ACCESS_KEY_ID: ${{ secrets.RELEASE_AWS_ACCESS_KEY_ID }} 73 | AWS_SECRET_ACCESS_KEY: ${{ secrets.RELEASE_AWS_SECRET_ACCESS_KEY }} 74 | AWS_REGION: ${{ secrets.RELEASE_AWS_REGION }} 75 | CI: "true" 76 | run: | 77 | npm run cdk:deploy 78 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | /target/ 4 | 5 | # These are backup files generated by rustfmt 6 | **/*.rs.bk 7 | 8 | .DS_Store 9 | lambda.zip 10 | cdk.out 11 | 12 | node_modules 13 | benchmark/traces.json 14 | -------------------------------------------------------------------------------- /.node-version: -------------------------------------------------------------------------------- 1 | 14.12.0 2 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "editor.formatOnSave": true 3 | } 4 | -------------------------------------------------------------------------------- /Cargo.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Cargo. 2 | # It is not intended for manual editing. 
3 | [[package]] 4 | name = "ansi_term" 5 | version = "0.11.0" 6 | source = "registry+https://github.com/rust-lang/crates.io-index" 7 | checksum = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b" 8 | dependencies = [ 9 | "winapi", 10 | ] 11 | 12 | [[package]] 13 | name = "arc-swap" 14 | version = "0.4.7" 15 | source = "registry+https://github.com/rust-lang/crates.io-index" 16 | checksum = "4d25d88fd6b8041580a654f9d0c581a047baee2b3efee13275f2fc392fc75034" 17 | 18 | [[package]] 19 | name = "async-stream" 20 | version = "0.3.0" 21 | source = "registry+https://github.com/rust-lang/crates.io-index" 22 | checksum = "3670df70cbc01729f901f94c887814b3c68db038aad1329a418bae178bc5295c" 23 | dependencies = [ 24 | "async-stream-impl", 25 | "futures-core", 26 | ] 27 | 28 | [[package]] 29 | name = "async-stream-impl" 30 | version = "0.3.0" 31 | source = "registry+https://github.com/rust-lang/crates.io-index" 32 | checksum = "a3548b8efc9f8e8a5a0a2808c5bd8451a9031b9e5b879a79590304ae928b0a70" 33 | dependencies = [ 34 | "proc-macro2", 35 | "quote", 36 | "syn", 37 | ] 38 | 39 | [[package]] 40 | name = "autocfg" 41 | version = "1.0.1" 42 | source = "registry+https://github.com/rust-lang/crates.io-index" 43 | checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" 44 | 45 | [[package]] 46 | name = "bytes" 47 | version = "0.5.6" 48 | source = "registry+https://github.com/rust-lang/crates.io-index" 49 | checksum = "0e4cec68f03f32e44924783795810fa50a7035d8c8ebe78580ad7e6c703fba38" 50 | 51 | [[package]] 52 | name = "bytes" 53 | version = "1.0.0" 54 | source = "registry+https://github.com/rust-lang/crates.io-index" 55 | checksum = "ad1f8e949d755f9d79112b5bb46938e0ef9d3804a0b16dfab13aafcaa5f0fa72" 56 | 57 | [[package]] 58 | name = "cfg-if" 59 | version = "0.1.10" 60 | source = "registry+https://github.com/rust-lang/crates.io-index" 61 | checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" 62 | 63 | [[package]] 64 | name = "cfg-if" 65 | version = "1.0.0" 66 | source = "registry+https://github.com/rust-lang/crates.io-index" 67 | checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" 68 | 69 | [[package]] 70 | name = "ctor" 71 | version = "0.1.16" 72 | source = "registry+https://github.com/rust-lang/crates.io-index" 73 | checksum = "7fbaabec2c953050352311293be5c6aba8e141ba19d6811862b232d6fd020484" 74 | dependencies = [ 75 | "quote", 76 | "syn", 77 | ] 78 | 79 | [[package]] 80 | name = "difference" 81 | version = "2.0.0" 82 | source = "registry+https://github.com/rust-lang/crates.io-index" 83 | checksum = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198" 84 | 85 | [[package]] 86 | name = "fnv" 87 | version = "1.0.7" 88 | source = "registry+https://github.com/rust-lang/crates.io-index" 89 | checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" 90 | 91 | [[package]] 92 | name = "futures-channel" 93 | version = "0.3.5" 94 | source = "registry+https://github.com/rust-lang/crates.io-index" 95 | checksum = "f366ad74c28cca6ba456d95e6422883cfb4b252a83bed929c83abfdbbf2967d5" 96 | dependencies = [ 97 | "futures-core", 98 | ] 99 | 100 | [[package]] 101 | name = "futures-core" 102 | version = "0.3.8" 103 | source = "registry+https://github.com/rust-lang/crates.io-index" 104 | checksum = "847ce131b72ffb13b6109a221da9ad97a64cbe48feb1028356b836b47b8f1748" 105 | 106 | [[package]] 107 | name = "futures-macro" 108 | version = "0.3.8" 109 | source = "registry+https://github.com/rust-lang/crates.io-index" 110 | 
checksum = "77408a692f1f97bcc61dc001d752e00643408fbc922e4d634c655df50d595556" 111 | dependencies = [ 112 | "proc-macro-hack", 113 | "proc-macro2", 114 | "quote", 115 | "syn", 116 | ] 117 | 118 | [[package]] 119 | name = "futures-sink" 120 | version = "0.3.5" 121 | source = "registry+https://github.com/rust-lang/crates.io-index" 122 | checksum = "3f2032893cb734c7a05d85ce0cc8b8c4075278e93b24b66f9de99d6eb0fa8acc" 123 | 124 | [[package]] 125 | name = "futures-task" 126 | version = "0.3.8" 127 | source = "registry+https://github.com/rust-lang/crates.io-index" 128 | checksum = "7c554eb5bf48b2426c4771ab68c6b14468b6e76cc90996f528c3338d761a4d0d" 129 | dependencies = [ 130 | "once_cell", 131 | ] 132 | 133 | [[package]] 134 | name = "futures-util" 135 | version = "0.3.8" 136 | source = "registry+https://github.com/rust-lang/crates.io-index" 137 | checksum = "d304cff4a7b99cfb7986f7d43fbe93d175e72e704a8860787cc95e9ffd85cbd2" 138 | dependencies = [ 139 | "futures-core", 140 | "futures-macro", 141 | "futures-task", 142 | "pin-project 1.0.2", 143 | "pin-utils", 144 | "proc-macro-hack", 145 | "proc-macro-nested", 146 | "slab", 147 | ] 148 | 149 | [[package]] 150 | name = "h2" 151 | version = "0.3.0" 152 | source = "registry+https://github.com/rust-lang/crates.io-index" 153 | checksum = "6b67e66362108efccd8ac053abafc8b7a8d86a37e6e48fc4f6f7485eb5e9e6a5" 154 | dependencies = [ 155 | "bytes 1.0.0", 156 | "fnv", 157 | "futures-core", 158 | "futures-sink", 159 | "futures-util", 160 | "http", 161 | "indexmap", 162 | "slab", 163 | "tokio", 164 | "tokio-util", 165 | "tracing", 166 | "tracing-futures", 167 | ] 168 | 169 | [[package]] 170 | name = "hashbrown" 171 | version = "0.9.1" 172 | source = "registry+https://github.com/rust-lang/crates.io-index" 173 | checksum = "d7afe4a420e3fe79967a00898cc1f4db7c8a49a9333a29f8a4bd76a253d5cd04" 174 | 175 | [[package]] 176 | name = "hermit-abi" 177 | version = "0.1.16" 178 | source = "registry+https://github.com/rust-lang/crates.io-index" 179 | checksum = "4c30f6d0bc6b00693347368a67d41b58f2fb851215ff1da49e90fe2c5c667151" 180 | dependencies = [ 181 | "libc", 182 | ] 183 | 184 | [[package]] 185 | name = "http" 186 | version = "0.2.1" 187 | source = "registry+https://github.com/rust-lang/crates.io-index" 188 | checksum = "28d569972648b2c512421b5f2a405ad6ac9666547189d0c5477a3f200f3e02f9" 189 | dependencies = [ 190 | "bytes 0.5.6", 191 | "fnv", 192 | "itoa", 193 | ] 194 | 195 | [[package]] 196 | name = "http-body" 197 | version = "0.4.0" 198 | source = "registry+https://github.com/rust-lang/crates.io-index" 199 | checksum = "2861bd27ee074e5ee891e8b539837a9430012e249d7f0ca2d795650f579c1994" 200 | dependencies = [ 201 | "bytes 1.0.0", 202 | "http", 203 | ] 204 | 205 | [[package]] 206 | name = "httparse" 207 | version = "1.3.4" 208 | source = "registry+https://github.com/rust-lang/crates.io-index" 209 | checksum = "cd179ae861f0c2e53da70d892f5f3029f9594be0c41dc5269cd371691b1dc2f9" 210 | 211 | [[package]] 212 | name = "httpdate" 213 | version = "0.3.2" 214 | source = "registry+https://github.com/rust-lang/crates.io-index" 215 | checksum = "494b4d60369511e7dea41cf646832512a94e542f68bb9c49e54518e0f468eb47" 216 | 217 | [[package]] 218 | name = "hyper" 219 | version = "0.14.2" 220 | source = "registry+https://github.com/rust-lang/crates.io-index" 221 | checksum = "12219dc884514cb4a6a03737f4413c0e01c23a1b059b0156004b23f1e19dccbe" 222 | dependencies = [ 223 | "bytes 1.0.0", 224 | "futures-channel", 225 | "futures-core", 226 | "futures-util", 227 | "h2", 228 | "http", 229 | "http-body", 230 | 
"httparse", 231 | "httpdate", 232 | "itoa", 233 | "pin-project 1.0.2", 234 | "socket2", 235 | "tokio", 236 | "tower-service", 237 | "tracing", 238 | "want", 239 | ] 240 | 241 | [[package]] 242 | name = "indexmap" 243 | version = "1.6.0" 244 | source = "registry+https://github.com/rust-lang/crates.io-index" 245 | checksum = "55e2e4c765aa53a0424761bf9f41aa7a6ac1efa87238f59560640e27fca028f2" 246 | dependencies = [ 247 | "autocfg", 248 | "hashbrown", 249 | ] 250 | 251 | [[package]] 252 | name = "instant" 253 | version = "0.1.9" 254 | source = "registry+https://github.com/rust-lang/crates.io-index" 255 | checksum = "61124eeebbd69b8190558df225adf7e4caafce0d743919e5d6b19652314ec5ec" 256 | dependencies = [ 257 | "cfg-if 1.0.0", 258 | ] 259 | 260 | [[package]] 261 | name = "itoa" 262 | version = "0.4.6" 263 | source = "registry+https://github.com/rust-lang/crates.io-index" 264 | checksum = "dc6f3ad7b9d11a0c00842ff8de1b60ee58661048eb8049ed33c73594f359d7e6" 265 | 266 | [[package]] 267 | name = "lazy_static" 268 | version = "1.4.0" 269 | source = "registry+https://github.com/rust-lang/crates.io-index" 270 | checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" 271 | 272 | [[package]] 273 | name = "libc" 274 | version = "0.2.77" 275 | source = "registry+https://github.com/rust-lang/crates.io-index" 276 | checksum = "f2f96b10ec2560088a8e76961b00d47107b3a625fecb76dedb29ee7ccbf98235" 277 | 278 | [[package]] 279 | name = "lock_api" 280 | version = "0.4.2" 281 | source = "registry+https://github.com/rust-lang/crates.io-index" 282 | checksum = "dd96ffd135b2fd7b973ac026d28085defbe8983df057ced3eb4f2130b0831312" 283 | dependencies = [ 284 | "scopeguard", 285 | ] 286 | 287 | [[package]] 288 | name = "log" 289 | version = "0.4.11" 290 | source = "registry+https://github.com/rust-lang/crates.io-index" 291 | checksum = "4fabed175da42fed1fa0746b0ea71f412aa9d35e76e95e59b192c64b9dc2bf8b" 292 | dependencies = [ 293 | "cfg-if 0.1.10", 294 | ] 295 | 296 | [[package]] 297 | name = "memchr" 298 | version = "2.3.3" 299 | source = "registry+https://github.com/rust-lang/crates.io-index" 300 | checksum = "3728d817d99e5ac407411fa471ff9800a778d88a24685968b36824eaf4bee400" 301 | 302 | [[package]] 303 | name = "mio" 304 | version = "0.7.7" 305 | source = "registry+https://github.com/rust-lang/crates.io-index" 306 | checksum = "e50ae3f04d169fcc9bde0b547d1c205219b7157e07ded9c5aff03e0637cb3ed7" 307 | dependencies = [ 308 | "libc", 309 | "log", 310 | "miow", 311 | "ntapi", 312 | "winapi", 313 | ] 314 | 315 | [[package]] 316 | name = "miow" 317 | version = "0.3.6" 318 | source = "registry+https://github.com/rust-lang/crates.io-index" 319 | checksum = "5a33c1b55807fbed163481b5ba66db4b2fa6cde694a5027be10fb724206c5897" 320 | dependencies = [ 321 | "socket2", 322 | "winapi", 323 | ] 324 | 325 | [[package]] 326 | name = "netlify_lambda" 327 | version = "0.2.0" 328 | source = "registry+https://github.com/rust-lang/crates.io-index" 329 | checksum = "bf36ac09d2d6088a343a8c7451a28cd771d3f3a036423933e190509bd932a6fa" 330 | dependencies = [ 331 | "async-stream", 332 | "bytes 1.0.0", 333 | "futures-core", 334 | "futures-util", 335 | "http", 336 | "hyper", 337 | "netlify_lambda_attributes", 338 | "serde", 339 | "serde_json", 340 | "tokio", 341 | "tower-service", 342 | "tracing", 343 | "tracing-futures", 344 | ] 345 | 346 | [[package]] 347 | name = "netlify_lambda_attributes" 348 | version = "0.2.0" 349 | source = "registry+https://github.com/rust-lang/crates.io-index" 350 | checksum = 
"442468bf98c34f0673d646fa73083d80782c8e331844b98d2324974dc454f5e3" 351 | dependencies = [ 352 | "proc-macro2", 353 | "quote", 354 | "syn", 355 | ] 356 | 357 | [[package]] 358 | name = "ntapi" 359 | version = "0.3.6" 360 | source = "registry+https://github.com/rust-lang/crates.io-index" 361 | checksum = "3f6bb902e437b6d86e03cce10a7e2af662292c5dfef23b65899ea3ac9354ad44" 362 | dependencies = [ 363 | "winapi", 364 | ] 365 | 366 | [[package]] 367 | name = "num_cpus" 368 | version = "1.13.0" 369 | source = "registry+https://github.com/rust-lang/crates.io-index" 370 | checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3" 371 | dependencies = [ 372 | "hermit-abi", 373 | "libc", 374 | ] 375 | 376 | [[package]] 377 | name = "once_cell" 378 | version = "1.5.2" 379 | source = "registry+https://github.com/rust-lang/crates.io-index" 380 | checksum = "13bd41f508810a131401606d54ac32a467c97172d74ba7662562ebba5ad07fa0" 381 | 382 | [[package]] 383 | name = "output_vt100" 384 | version = "0.1.2" 385 | source = "registry+https://github.com/rust-lang/crates.io-index" 386 | checksum = "53cdc5b785b7a58c5aad8216b3dfa114df64b0b06ae6e1501cef91df2fbdf8f9" 387 | dependencies = [ 388 | "winapi", 389 | ] 390 | 391 | [[package]] 392 | name = "parking_lot" 393 | version = "0.11.1" 394 | source = "registry+https://github.com/rust-lang/crates.io-index" 395 | checksum = "6d7744ac029df22dca6284efe4e898991d28e3085c706c972bcd7da4a27a15eb" 396 | dependencies = [ 397 | "instant", 398 | "lock_api", 399 | "parking_lot_core", 400 | ] 401 | 402 | [[package]] 403 | name = "parking_lot_core" 404 | version = "0.8.2" 405 | source = "registry+https://github.com/rust-lang/crates.io-index" 406 | checksum = "9ccb628cad4f84851442432c60ad8e1f607e29752d0bf072cbd0baf28aa34272" 407 | dependencies = [ 408 | "cfg-if 1.0.0", 409 | "instant", 410 | "libc", 411 | "redox_syscall", 412 | "smallvec", 413 | "winapi", 414 | ] 415 | 416 | [[package]] 417 | name = "pin-project" 418 | version = "0.4.24" 419 | source = "registry+https://github.com/rust-lang/crates.io-index" 420 | checksum = "f48fad7cfbff853437be7cf54d7b993af21f53be7f0988cbfe4a51535aa77205" 421 | dependencies = [ 422 | "pin-project-internal 0.4.24", 423 | ] 424 | 425 | [[package]] 426 | name = "pin-project" 427 | version = "1.0.2" 428 | source = "registry+https://github.com/rust-lang/crates.io-index" 429 | checksum = "9ccc2237c2c489783abd8c4c80e5450fc0e98644555b1364da68cc29aa151ca7" 430 | dependencies = [ 431 | "pin-project-internal 1.0.2", 432 | ] 433 | 434 | [[package]] 435 | name = "pin-project-internal" 436 | version = "0.4.24" 437 | source = "registry+https://github.com/rust-lang/crates.io-index" 438 | checksum = "24c6d293bdd3ca5a1697997854c6cf7855e43fb6a0ba1c47af57a5bcafd158ae" 439 | dependencies = [ 440 | "proc-macro2", 441 | "quote", 442 | "syn", 443 | ] 444 | 445 | [[package]] 446 | name = "pin-project-internal" 447 | version = "1.0.2" 448 | source = "registry+https://github.com/rust-lang/crates.io-index" 449 | checksum = "f8e8d2bf0b23038a4424865103a4df472855692821aab4e4f5c3312d461d9e5f" 450 | dependencies = [ 451 | "proc-macro2", 452 | "quote", 453 | "syn", 454 | ] 455 | 456 | [[package]] 457 | name = "pin-project-lite" 458 | version = "0.1.8" 459 | source = "registry+https://github.com/rust-lang/crates.io-index" 460 | checksum = "71f349a4f0e70676ffb2dbafe16d0c992382d02f0a952e3ddf584fc289dac6b3" 461 | 462 | [[package]] 463 | name = "pin-project-lite" 464 | version = "0.2.0" 465 | source = "registry+https://github.com/rust-lang/crates.io-index" 466 | checksum = 
"6b063f57ec186e6140e2b8b6921e5f1bd89c7356dda5b33acc5401203ca6131c" 467 | 468 | [[package]] 469 | name = "pin-utils" 470 | version = "0.1.0" 471 | source = "registry+https://github.com/rust-lang/crates.io-index" 472 | checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" 473 | 474 | [[package]] 475 | name = "pretty_assertions" 476 | version = "0.6.1" 477 | source = "registry+https://github.com/rust-lang/crates.io-index" 478 | checksum = "3f81e1644e1b54f5a68959a29aa86cde704219254669da328ecfdf6a1f09d427" 479 | dependencies = [ 480 | "ansi_term", 481 | "ctor", 482 | "difference", 483 | "output_vt100", 484 | ] 485 | 486 | [[package]] 487 | name = "proc-macro-hack" 488 | version = "0.5.19" 489 | source = "registry+https://github.com/rust-lang/crates.io-index" 490 | checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5" 491 | 492 | [[package]] 493 | name = "proc-macro-nested" 494 | version = "0.1.6" 495 | source = "registry+https://github.com/rust-lang/crates.io-index" 496 | checksum = "eba180dafb9038b050a4c280019bbedf9f2467b61e5d892dcad585bb57aadc5a" 497 | 498 | [[package]] 499 | name = "proc-macro2" 500 | version = "1.0.23" 501 | source = "registry+https://github.com/rust-lang/crates.io-index" 502 | checksum = "51ef7cd2518ead700af67bf9d1a658d90b6037d77110fd9c0445429d0ba1c6c9" 503 | dependencies = [ 504 | "unicode-xid", 505 | ] 506 | 507 | [[package]] 508 | name = "quote" 509 | version = "1.0.7" 510 | source = "registry+https://github.com/rust-lang/crates.io-index" 511 | checksum = "aa563d17ecb180e500da1cfd2b028310ac758de548efdd203e18f283af693f37" 512 | dependencies = [ 513 | "proc-macro2", 514 | ] 515 | 516 | [[package]] 517 | name = "redox_syscall" 518 | version = "0.1.57" 519 | source = "registry+https://github.com/rust-lang/crates.io-index" 520 | checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce" 521 | 522 | [[package]] 523 | name = "ryu" 524 | version = "1.0.5" 525 | source = "registry+https://github.com/rust-lang/crates.io-index" 526 | checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e" 527 | 528 | [[package]] 529 | name = "scopeguard" 530 | version = "1.1.0" 531 | source = "registry+https://github.com/rust-lang/crates.io-index" 532 | checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" 533 | 534 | [[package]] 535 | name = "serde" 536 | version = "1.0.118" 537 | source = "registry+https://github.com/rust-lang/crates.io-index" 538 | checksum = "06c64263859d87aa2eb554587e2d23183398d617427327cf2b3d0ed8c69e4800" 539 | dependencies = [ 540 | "serde_derive", 541 | ] 542 | 543 | [[package]] 544 | name = "serde_derive" 545 | version = "1.0.118" 546 | source = "registry+https://github.com/rust-lang/crates.io-index" 547 | checksum = "c84d3526699cd55261af4b941e4e725444df67aa4f9e6a3564f18030d12672df" 548 | dependencies = [ 549 | "proc-macro2", 550 | "quote", 551 | "syn", 552 | ] 553 | 554 | [[package]] 555 | name = "serde_json" 556 | version = "1.0.61" 557 | source = "registry+https://github.com/rust-lang/crates.io-index" 558 | checksum = "4fceb2595057b6891a4ee808f70054bd2d12f0e97f1cbb78689b59f676df325a" 559 | dependencies = [ 560 | "itoa", 561 | "ryu", 562 | "serde", 563 | ] 564 | 565 | [[package]] 566 | name = "signal-hook-registry" 567 | version = "1.2.1" 568 | source = "registry+https://github.com/rust-lang/crates.io-index" 569 | checksum = "a3e12110bc539e657a646068aaf5eb5b63af9d0c1f7b29c97113fad80e15f035" 570 | dependencies = [ 571 | "arc-swap", 572 | "libc", 573 | ] 574 | 
575 | [[package]] 576 | name = "slab" 577 | version = "0.4.2" 578 | source = "registry+https://github.com/rust-lang/crates.io-index" 579 | checksum = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8" 580 | 581 | [[package]] 582 | name = "sls_rust_minimal" 583 | version = "1.0.0" 584 | dependencies = [ 585 | "netlify_lambda", 586 | "pretty_assertions", 587 | "serde", 588 | "serde_derive", 589 | "serde_json", 590 | "tokio", 591 | ] 592 | 593 | [[package]] 594 | name = "smallvec" 595 | version = "1.6.0" 596 | source = "registry+https://github.com/rust-lang/crates.io-index" 597 | checksum = "1a55ca5f3b68e41c979bf8c46a6f1da892ca4db8f94023ce0bd32407573b1ac0" 598 | 599 | [[package]] 600 | name = "socket2" 601 | version = "0.3.19" 602 | source = "registry+https://github.com/rust-lang/crates.io-index" 603 | checksum = "122e570113d28d773067fab24266b66753f6ea915758651696b6e35e49f88d6e" 604 | dependencies = [ 605 | "cfg-if 1.0.0", 606 | "libc", 607 | "winapi", 608 | ] 609 | 610 | [[package]] 611 | name = "syn" 612 | version = "1.0.56" 613 | source = "registry+https://github.com/rust-lang/crates.io-index" 614 | checksum = "a9802ddde94170d186eeee5005b798d9c159fa970403f1be19976d0cfb939b72" 615 | dependencies = [ 616 | "proc-macro2", 617 | "quote", 618 | "unicode-xid", 619 | ] 620 | 621 | [[package]] 622 | name = "tokio" 623 | version = "1.0.1" 624 | source = "registry+https://github.com/rust-lang/crates.io-index" 625 | checksum = "d258221f566b6c803c7b4714abadc080172b272090cdc5e244a6d4dd13c3a6bd" 626 | dependencies = [ 627 | "autocfg", 628 | "bytes 1.0.0", 629 | "libc", 630 | "memchr", 631 | "mio", 632 | "num_cpus", 633 | "once_cell", 634 | "parking_lot", 635 | "pin-project-lite 0.2.0", 636 | "signal-hook-registry", 637 | "tokio-macros", 638 | "winapi", 639 | ] 640 | 641 | [[package]] 642 | name = "tokio-macros" 643 | version = "1.0.0" 644 | source = "registry+https://github.com/rust-lang/crates.io-index" 645 | checksum = "42517d2975ca3114b22a16192634e8241dc5cc1f130be194645970cc1c371494" 646 | dependencies = [ 647 | "proc-macro2", 648 | "quote", 649 | "syn", 650 | ] 651 | 652 | [[package]] 653 | name = "tokio-stream" 654 | version = "0.1.0" 655 | source = "registry+https://github.com/rust-lang/crates.io-index" 656 | checksum = "3f3be913b74b13210c8fe04b17ab833f5a124f45b93d0f99f59fff621f64392a" 657 | dependencies = [ 658 | "async-stream", 659 | "futures-core", 660 | "pin-project-lite 0.2.0", 661 | "tokio", 662 | ] 663 | 664 | [[package]] 665 | name = "tokio-util" 666 | version = "0.6.0" 667 | source = "registry+https://github.com/rust-lang/crates.io-index" 668 | checksum = "36135b7e7da911f5f8b9331209f7fab4cc13498f3fff52f72a710c78187e3148" 669 | dependencies = [ 670 | "bytes 1.0.0", 671 | "futures-core", 672 | "futures-sink", 673 | "log", 674 | "pin-project-lite 0.2.0", 675 | "tokio", 676 | "tokio-stream", 677 | ] 678 | 679 | [[package]] 680 | name = "tower-service" 681 | version = "0.3.0" 682 | source = "registry+https://github.com/rust-lang/crates.io-index" 683 | checksum = "e987b6bf443f4b5b3b6f38704195592cca41c5bb7aedd3c3693c7081f8289860" 684 | 685 | [[package]] 686 | name = "tracing" 687 | version = "0.1.21" 688 | source = "registry+https://github.com/rust-lang/crates.io-index" 689 | checksum = "b0987850db3733619253fe60e17cb59b82d37c7e6c0236bb81e4d6b87c879f27" 690 | dependencies = [ 691 | "cfg-if 0.1.10", 692 | "log", 693 | "pin-project-lite 0.1.8", 694 | "tracing-attributes", 695 | "tracing-core", 696 | ] 697 | 698 | [[package]] 699 | name = "tracing-attributes" 700 | version = "0.1.11" 
701 | source = "registry+https://github.com/rust-lang/crates.io-index" 702 | checksum = "80e0ccfc3378da0cce270c946b676a376943f5cd16aeba64568e7939806f4ada" 703 | dependencies = [ 704 | "proc-macro2", 705 | "quote", 706 | "syn", 707 | ] 708 | 709 | [[package]] 710 | name = "tracing-core" 711 | version = "0.1.17" 712 | source = "registry+https://github.com/rust-lang/crates.io-index" 713 | checksum = "f50de3927f93d202783f4513cda820ab47ef17f624b03c096e86ef00c67e6b5f" 714 | dependencies = [ 715 | "lazy_static", 716 | ] 717 | 718 | [[package]] 719 | name = "tracing-futures" 720 | version = "0.2.4" 721 | source = "registry+https://github.com/rust-lang/crates.io-index" 722 | checksum = "ab7bb6f14721aa00656086e9335d363c5c8747bae02ebe32ea2c7dece5689b4c" 723 | dependencies = [ 724 | "pin-project 0.4.24", 725 | "tracing", 726 | ] 727 | 728 | [[package]] 729 | name = "try-lock" 730 | version = "0.2.3" 731 | source = "registry+https://github.com/rust-lang/crates.io-index" 732 | checksum = "59547bce71d9c38b83d9c0e92b6066c4253371f15005def0c30d9657f50c7642" 733 | 734 | [[package]] 735 | name = "unicode-xid" 736 | version = "0.2.1" 737 | source = "registry+https://github.com/rust-lang/crates.io-index" 738 | checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564" 739 | 740 | [[package]] 741 | name = "want" 742 | version = "0.3.0" 743 | source = "registry+https://github.com/rust-lang/crates.io-index" 744 | checksum = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0" 745 | dependencies = [ 746 | "log", 747 | "try-lock", 748 | ] 749 | 750 | [[package]] 751 | name = "winapi" 752 | version = "0.3.9" 753 | source = "registry+https://github.com/rust-lang/crates.io-index" 754 | checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" 755 | dependencies = [ 756 | "winapi-i686-pc-windows-gnu", 757 | "winapi-x86_64-pc-windows-gnu", 758 | ] 759 | 760 | [[package]] 761 | name = "winapi-i686-pc-windows-gnu" 762 | version = "0.4.0" 763 | source = "registry+https://github.com/rust-lang/crates.io-index" 764 | checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" 765 | 766 | [[package]] 767 | name = "winapi-x86_64-pc-windows-gnu" 768 | version = "0.4.0" 769 | source = "registry+https://github.com/rust-lang/crates.io-index" 770 | checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" 771 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "sls_rust_minimal" 3 | version = "1.0.0" 4 | authors = ["Christian Kjaer "] 5 | edition = "2018" 6 | readme = "README.md" 7 | license = "MIT OR Apache-2.0" 8 | 9 | [lib] 10 | name = "lib" 11 | path = "src/lib.rs" 12 | 13 | [[bin]] 14 | name = "bootstrap" 15 | path = "src/bin/bootstrap.rs" 16 | 17 | [profile.release] 18 | panic = 'abort' # Less code to include into binary. 19 | codegen-units = 1 # Optimization over all codebase (better optimization, slower build). 20 | opt-level = 'z' # Optimization for size (or 's' for a less aggressiv alternative). 21 | lto = true # Link time optimization using using whole-program analysis. 22 | 23 | [dependencies] 24 | # Until the original project starts moving again, we'll rely on Netlify's fork, which is 25 | # maintained. See https://github.com/awslabs/aws-lambda-rust-runtime/issues/274 for more. 
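# The `package = "netlify_lambda"` rename below lets the Rust code keep referring to the crate
# as `lambda`, so switching back to the upstream crate later only requires changing this line.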
26 | lambda = { package = "netlify_lambda", version = "0.2.0" } 27 | tokio = "1.0.1" 28 | serde = "1.0.118" 29 | serde_derive = "1.0.118" 30 | serde_json = "1.0.61" 31 | 32 | [dev-dependencies] 33 | pretty_assertions = "0.6.1" 34 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Christian Kjær 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Patterns: Serverless Rust (minimal) ![ci badge](https://github.com/codetalkio/patterns-serverless-rust-minimal/workflows/ci/badge.svg?branch=master) 2 | The following is an minimal template for deploying a Rust AWS Lambda function. All deployment is managed by the AWS CDK tool. 3 | 4 | **✨ Features ✨** 5 | 6 | - 🦀 Ready-to-use serverless setup using Rust and [AWS CDK](https://github.com/aws/aws-cdk). 7 | - 🚗 CI using [GitHub Actions](https://github.com/features/actions) testing the deployment using [LocalStack](https://github.com/localstack/localstack). 8 | - 👩‍💻 Local development using [LocalStack](https://github.com/localstack/localstack). 9 | - 🚀 Deployments via [GitHub Releases](https://docs.github.com/en/free-pro-team@latest/github/administering-a-repository/about-releases). 10 | 11 | 12 | **⚡️ Quick start ⚡️** 13 | 14 | Assuming you have set up npm and cargo/rustup, the following will get you going: 15 | 16 | - `npm ci`: install all our deployment dependencies. 17 | - `npm run build`: build the Rust executable and package it as an asset for CDK. 18 | - `npm run cdk:deploy`: deploy the packaged asset. 19 | 20 | The stack name is controlled by the `name` field in `package.json`. Other than that, just use your regular Rust development setup. 21 | 22 | Screenshot 2020-10-06 at 22 56 27 23 | 24 | Use this repo as a template to get quickly started! If you are interested in a more fully-featured version of this, check out [🚧 patterns-serverless-rust 🚧](https://github.com/codetalkio/patterns-serverless-rust) for how to expose a GraphQL endpoint and use DynamoDB. 
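As an illustration of how the stack name ties back to `package.json`, below is a minimal sketch of what a CDK entry point along the lines of `./deployment/bin/stack.ts` could look like. The repo's actual file is not included in this excerpt, so treat the details as assumptions; `LambdaStack` refers to the stack sketched in the deployment section further down.

```typescript
#!/usr/bin/env node
import * as cdk from "@aws-cdk/core";
// Importing package.json this way assumes `resolveJsonModule` is enabled in tsconfig.json.
import * as pkg from "../../package.json";
import { LambdaStack } from "../lib/lambda-stack";

const app = new cdk.App();
// The stack, and thereby the deployed resources, is named after the `name` field in package.json.
new LambdaStack(app, pkg.name);
```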
25 | 26 | 27 | ### Overview 28 | 29 | - [Building](#-building) 30 | - [Deployment using CDK](#-deployment-using-cdk) 31 | - [Development using LocalStack](#-development-using-localstack) 32 | - [GitHub Actions (CI/CD)](#--github-actions-cicd) 33 | - [Benchmarks using AWS XRay](#️️-benchmarks-using-aws-xray) 34 | - [Libraries](#-libraries) 35 | - [Contributing](#️-contributing) 36 | 37 | An overview of commands (all prefixed with `npm run`): 38 | 39 | | Command | Description | Purpose | 40 | |---------|-------------|---------| 41 | | `build` | Build the Rust executable for release | 📦 | 42 | | `build:debug` | Build the Rust executable for debug | 📦 | 43 | | `build:archive` | Creates a `./lambda.zip` for deployment using the AWS CLI | 📦 | 44 | | `build:clean` | Cleans build artifcats from `target/cdk` | 📦 | 45 | | `deploy` | Cleans and builds a new executable, and deploys it via CDK | 📦 + 🚢 | 46 | | `cdk:bootstrap` | Bootstrap necessary resources on first usage of CDK in a region | 🚢 | 47 | | `cdk:deploy` | deploy this stack to your default AWS account/region | 🚢 | 48 | | `cdklocal:start` | Starts the LocalStack docker image | 👩‍💻 | 49 | | `cdklocal:bootstrap` | Bootstrap necessary resources for CDK against LocalStack | 👩‍💻 | 50 | | `cdklocal:deploy` | Deploy this stack to LocalStack | 👩‍💻 | 51 | 52 | 53 | ## 📦 Building 54 | We build our executable by running `npm run build`. 55 | 56 | Behind the scenes, the `build` NPM script does the following: 57 | 58 | - Adds our `x86_64-unknown-linux-musl` toolchain 59 | - Runs `cargo build --release --target x86_64-unknown-linux-musl` 60 | 61 | 62 | In other words, we cross-compile a static binary for `x86_64-unknown-linux-musl`, put the executable, `bootstrap`, in `target/cdk/release`, and CDK uses that as its asset. With custom runtimes, AWS Lambda looks for an executable called `bootstrap`, so this is why we need the renaming step. 63 | 64 | 65 | ## 🚢 Deployment using CDK 66 | We build and deploy by running `npm run deploy`, or just `npm run cdk:deploy` if you have already run `npm run build` previously. 67 | 68 | A couple of notes: 69 | 70 | - If this is the first CDK deployment ever on your AWS account/region, run `npm run cdk:bootstrap` first. This creates the necessary CDK stack resources on the cloud. 71 | - The CDK deployment bundles the `target/cdk/release` folder as its assets. This is where the `bootstrap` file needs to be located (handled by `npm run build`). 72 | 73 | **Generate our build assets** 74 | 75 | ```bash 76 | $ npm run build 77 | ``` 78 | 79 | **Deploy the Rust asset** 80 | 81 | To deploy your function, call `npm run cdk:deploy`, 82 | 83 | ```bash 84 | $ npm run cdk:deploy 85 | ... 86 | sls-rust: deploying... 87 | [0%] start: Publishing bdbf8354358bc096823baac946ba64130b6397ff8e7eda2f18d782810e158c39:current 88 | [100%] success: Published bdbf8354358bc096823baac946ba64130b6397ff8e7eda2f18d782810e158c39:current 89 | sls-rust: creating CloudFormation changeset... 90 | [██████████████████████████████████████████████████████████] (5/5) 91 | 92 | ✅ sls-rust 93 | 94 | Outputs: 95 | sls-rust.entryArn = arn:aws:lambda:eu-west-1:xxxxxxxxxxxxxx:function:sls-rust-main 96 | 97 | Stack ARN: 98 | arn:aws:cloudformation:eu-west-1:xxxxxxxxxxxxxx:stack/sls-rust/xxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxx 99 | ``` 100 | 101 | > 💡 The security prompt is automatically disabled on CIs that set `CI=true`. You can remove this check by setting `--require-approval never` in the `cdk:deploy` npm command. 
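The stack definition itself lives in `./deployment/lib/lambda-stack.ts`, which is not part of this excerpt. As a rough sketch, assuming the standard CDK v1 constructs, a custom-runtime Rust function can be declared roughly like this (the real stack additionally switches how the code asset is bundled when `CDK_LOCAL` is set, see the LocalStack section below):

```typescript
import * as cdk from "@aws-cdk/core";
import * as lambda from "@aws-cdk/aws-lambda";

export class LambdaStack extends cdk.Stack {
  constructor(scope: cdk.Construct, id: string, props?: cdk.StackProps) {
    super(scope, id, props);

    new lambda.Function(this, "main", {
      functionName: `${id}-main`,
      // Custom runtime: Lambda runs the `bootstrap` executable found inside the asset.
      runtime: lambda.Runtime.PROVIDED_AL2,
      handler: "doesnt.matter", // Required by CDK, but ignored by custom runtimes.
      code: lambda.Code.fromAsset("./target/cdk/release"),
      memorySize: 128, // Illustrative default; the benchmark varies this between 128 and 4096.
      tracing: lambda.Tracing.ACTIVE, // Enables the XRay traces used for benchmarking.
    });
  }
}
```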
102 | 103 | **Validate your CDK CloudFormation** 104 | 105 | If you want to check that your CDK-generated CloudFormation is valid, you can do that via, 106 | 107 | ```bash 108 | $ npm run cdk:synth 109 | ``` 110 | 111 | **Compare local against deployed** 112 | 113 | And finally, if you want to see a diff between your deployed stack and your local stack, 114 | 115 | ```bash 116 | $ npm run cdk:diff 117 | ``` 118 | 119 |
120 | 👈 Expand here for deployment using AWS CLI 121 | 122 | For real usage we will deploy using AWS CDK, but you can dip your feet by deploying the Rust function via the AWS CLI. 123 | 124 | We'll do a couple of additional steps for the first-time setup. Only step 5 is necessary after having done this once: 125 | 126 | 1. Set up a role to use with our Lambda function. 127 | 2. Attach policies to that role to be able to actually do something. 128 | 3. Deploy the Lambda function using the `lambda.zip` we've built. 129 | 4. Invoke the function with a test payload. 130 | 5. (Optional) Update the Lambda function with a new `lambda.zip`. 131 | 132 | **Generate our build assets** 133 | 134 | ```bash 135 | $ npm run build && npm run build:archive 136 | ``` 137 | 138 | **Set up the IAM Role** 139 | ```bash 140 | $ aws iam create-role \ 141 | --role-name sls-rust-test-execution \ 142 | --assume-role-policy-document \ 143 | '{"Version": "2012-10-17","Statement": [{ "Effect": "Allow", "Principal": {"Service": "lambda.amazonaws.com"}, "Action": "sts:AssumeRole"}]}' 144 | ``` 145 | 146 | We also need to set some basic policies on the IAM Role for it to be invocable and for XRay traces to work, 147 | ```bash 148 | $ aws iam attach-role-policy \ 149 | --role-name sls-rust-test-execution \ 150 | --policy-arn arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole 151 | $ aws iam attach-role-policy \ 152 | --role-name sls-rust-test-execution \ 153 | --policy-arn arn:aws:iam::aws:policy/AWSXRayDaemonWriteAccess 154 | ``` 155 | 156 | **Deploy our function** 157 | ```bash 158 | $ aws lambda create-function \ 159 | --function-name sls-rust-test \ 160 | --handler doesnt.matter \ 161 | --cli-binary-format raw-in-base64-out \ 162 | --zip-file fileb://./lambda.zip \ 163 | --runtime provided.al2 \ 164 | --role arn:aws:iam::$(aws sts get-caller-identity | jq -r .Account):role/sls-rust-test-execution \ 165 | --environment Variables={RUST_BACKTRACE=1} \ 166 | --tracing-config Mode=Active 167 | ``` 168 | 169 | > 💡 You can replace the `$(aws sts get-caller-identity | jq -r .Account)` call with your AWS account ID, if you do not have [jq](https://stedolan.github.io/jq/) installed. 170 | 171 | **Invoke our function** 172 | ```bash 173 | $ aws lambda invoke \ 174 | --function-name sls-rust-test \ 175 | --cli-binary-format raw-in-base64-out \ 176 | --payload '{"firstName": "world"}' \ 177 | tmp-output.json > /dev/null && cat tmp-output.json && rm tmp-output.json 178 | {"message":"Hello, world!"} 179 | ``` 180 | 181 | **(Optional) Update the function** 182 | We can also update the function code again, after creating a new asset `lambda.zip`, 183 | 184 | ```bash 185 | $ aws lambda update-function-code \ 186 | --cli-binary-format raw-in-base64-out \ 187 | --function-name sls-rust-test \ 188 | --zip-file fileb://lambda.zip 189 | ``` 190 | 191 | **Clean up the function** 192 | 193 | ```bash 194 | $ aws lambda delete-function --function-name sls-rust-test 195 | $ aws iam detach-role-policy --role-name sls-rust-test-execution --policy-arn arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole 196 | $ aws iam detach-role-policy --role-name sls-rust-test-execution --policy-arn arn:aws:iam::aws:policy/AWSXRayDaemonWriteAccess 197 | $ aws iam delete-role --role-name sls-rust-test-execution 198 | ``` 199 | 200 |
201 | 202 | 203 | ## 👩‍💻 Development using LocalStack 204 | 205 | LocalStack allows us to deploy our CDK services directly to our local environment: 206 | 207 | 1. `npm run cdklocal:start` to start the LocalStack services. 208 | 2. `npm run cdklocal:bootstrap` to create the necessary CDK stack resources on the cloud. 209 | 3. `npm run cdklocal:deploy` to deploy our stack. 210 | 4. Target the local services from our application, with `cdklocal`, or by setting the `endpoint` option on the AWS CLI, e.g. `aws --endpoint-url=http://localhost:4566`. 211 | 212 | We can use [cargo watch](https://crates.io/crates/cargo-watch) (via `cargo install cargo-watch`) to continously build a debug build of our application, 213 | 214 | ```bash 215 | $ cargo watch -s 'npm run build:debug' 216 | ``` 217 | 218 | If you want to test the application through the AWS CLI, the following should do the trick, 219 | 220 | ```bash 221 | $ aws --endpoint-url=http://localhost:4566 lambda invoke \ 222 | --function-name sls-rust-minimal-main \ 223 | --cli-binary-format raw-in-base64-out \ 224 | --payload '{"firstName": "world"}' \ 225 | tmp-output.json > /dev/null && cat tmp-output.json && rm tmp-output.json 226 | {"message":"Hello, world!"} 227 | ``` 228 | 229 | #### How does this work? 230 | 231 | LocalStack supports [using local code for lambdas](https://github.com/localstack/localstack#using-local-code-with-lambda), which is what we take advantage of here. This works because step 3. mounts the `./target/cdk/release` directory. Whenever we update the `bootstrap` executable in here (still targeting `x86_64-unknown-linux-musl`) , it will be reflected in the Lambda function. 232 | 233 | You can see this in the `./deployment/lib/lambda-stack.ts` file where we conditionally switch out how we bundle the Lambda code based on the presence of a `CDK_LOCAL` environment variable. 234 | 235 | 236 | ## 🚗 🚀 GitHub Actions (CI/CD) 237 | Using [GitHub actions](/actions) allows us to have an efficient CI/CD setup with minimal work. 238 | 239 | | Workflow | Trigger | Purpose | Environment Variables | 240 | |----------|---------|---------|-----------------------| 241 | | **ci** | push | Continously test the build along with linting, formatting, best-practices (clippy), and validate deployment against LocalStack | | 242 | | **pre-release** | Pre-release using GitHub Releases | Run benchmark suite | **BENCHMARK_AWS_ACCESS_KEY_ID**
**BENCHMARK_AWS_SECRET_ACCESS_KEY**
**BENCHMARK_AWS_REGION** | 243 | | **pre-release** | Pre-release using GitHub Releases | Deploy to a QA or staging environment | **PRE_RELEASE_AWS_ACCESS_KEY_ID**
**PRE_RELEASE_AWS_SECRET_ACCESS_KEY**
**PRE_RELEASE_AWS_REGION** | 244 | | **release** | Release using GitHub Releases | Deploy to production environment | **RELEASE_AWS_ACCESS_KEY_ID**
**RELEASE_AWS_SECRET_ACCESS_KEY**
**RELEASE_AWS_REGION** | 245 | 246 | The CI will work seamlessly without any manual steps, but for deployments via [GitHub Releases](/releases) to work, you will need to set up the GitHub secrets for the repository for the variables in the table above. 247 | 248 | These are used in the `.github/workflows/release.yml` and `.github/workflows/pre-release.yml` workflows for deploying the CDK stack whenever a GitHub pre-release/release is made. 249 | 250 | 251 | ## 🕵️‍♀️ Benchmarks using AWS XRay 252 | 253 | Since we have enabled `tracing: lambda.Tracing.ACTIVE` in CDK and `tracing-config Mode=Active` in the CLI, we will get XRay traces for our AWS Lambda invocations. 254 | 255 | You can check out each trace in the AWS Console inside the XRay service, which is extremely valuable for figuring out timings between services, spotting slow AWS SDK calls, annotating cost centers in your code, and much more. 256 | 257 | We can benchmark our performance using `npm run benchmark`, which will deploy the AWS Lambda to your AWS account, invoke it a number of times to trigger both cold and warm starts, and gather up all the AWS XRay traces into a neat table. 258 | 259 | Below are two charts generated by the benchmark; you can see the raw data in [the response-times table](./benchmark/response-times.md). 260 | 261 | 262 | ![Average Cold/Warm Response Times](./benchmark/response-times-average.svg) 263 | 264 | - 🔵: Average cold startup times 265 | - 🔴: Average warm startup times 266 | 267 | ![Fastest and Slowest Response Times](./benchmark/response-times-extremes.svg) 268 | 269 | - 🔵: Fastest warm response time 270 | - 🔴: Slowest warm response time 271 | - 🟡: Fastest cold response time 272 | - 🟠: Slowest cold response time 273 | 274 | Benchmarks can be triggered in the CI by setting up the environment variables listed above and creating a pre-release via GitHub Releases. 275 | 276 | 277 | ## 📚 Libraries 278 | We are using a couple of libraries, in various states of maturity/release: 279 | 280 | - The netlify fork of [aws-lambda-rust-runtime](https://github.com/netlify/aws-lambda-rust-runtime), pending on [#274](https://github.com/awslabs/aws-lambda-rust-runtime/issues/274). 281 | - We will need the musl tools, which we use instead of glibc, via `apt-get install musl-tools` for Ubuntu or `brew tap SergioBenitez/osxct && brew install FiloSottile/musl-cross/musl-cross` for macOS. 282 | - [aws-cdk](https://docs.aws.amazon.com/cdk/latest/guide/home.html) for deploying to AWS, using CloudFormation under the hood. We'll use their support for [Custom Runtimes](https://docs.aws.amazon.com/cdk/api/latest/docs/aws-lambda-readme.html). 283 | - The [aws-cdk-local](https://github.com/localstack/aws-cdk-local) tool from [localstack](https://github.com/localstack/localstack) for a local development setup. 284 | - [cargo watch](https://github.com/passcod/cargo-watch) so we can develop using `cargo watch`, installable via `cargo install cargo-watch`. 285 | 286 | 287 | ## 🙋‍♀️ Contributing 288 | Have any improvements or ideas? Don't be afraid to create an issue to discuss what's on your mind!
289 | -------------------------------------------------------------------------------- /benchmark/benchmark-payload.json: -------------------------------------------------------------------------------- 1 | { 2 | "firstName": "world" 3 | } 4 | -------------------------------------------------------------------------------- /benchmark/benchmark.ts: -------------------------------------------------------------------------------- 1 | import { 2 | LambdaClient, 3 | GetFunctionConfigurationCommand, 4 | UpdateFunctionConfigurationCommand, 5 | UpdateFunctionConfigurationCommandInput, 6 | InvokeCommand, 7 | InvokeCommandInput, 8 | } from "@aws-sdk/client-lambda"; 9 | import { 10 | XRayClient, 11 | GetTraceSummariesCommand, 12 | GetTraceSummariesCommandInput, 13 | BatchGetTracesCommand, 14 | BatchGetTracesCommandInput, 15 | TraceSummary, 16 | Trace, 17 | } from "@aws-sdk/client-xray"; 18 | import * as fs from "fs"; 19 | 20 | import * as pkg from "../package.json"; 21 | import * as benchmarkPayload from "./benchmark-payload.json"; 22 | 23 | const chartistSvg = require("svg-chartist"); 24 | 25 | /** 26 | * Benchmark configuration values. 27 | */ 28 | const COLD_STARTS = 50; 29 | const WARM_STARTS = 50; 30 | const MEMORY_SIZES = [128, 256, 512, 1024, 2048, 3072, 4096]; 31 | 32 | /** 33 | * Dynamic benchmark variables. The stack name is generated by taking the package name 34 | * and appending the random number generated in the benchmark suffix. 35 | */ 36 | const { BENCHMARK_SUFFIX, DRY_RUN } = process.env; 37 | const STACK_NAME = BENCHMARK_SUFFIX ? `${pkg.name}-${BENCHMARK_SUFFIX}` : pkg.name; 38 | 39 | /** 40 | * Map of memory configuration and their benchmark results. 41 | */ 42 | interface MemoryTimes { 43 | memorySize: number; 44 | times: BenchmarkResults; 45 | } 46 | 47 | /** 48 | * The benchmark numbers. 49 | */ 50 | interface BenchmarkResults { 51 | overallTimes: BenchmarkAggregateMetrics; 52 | traceTimes: BenchmarkSingleInvocationMetric[]; 53 | } 54 | 55 | /** 56 | * All aggregate benchmark metrics. 57 | */ 58 | interface BenchmarkAggregateMetrics { 59 | avgWarmMs: number | undefined; 60 | avgColdMs: number | undefined; 61 | fastestWarmMs: number | undefined; 62 | fastestColdMs: number | undefined; 63 | slowestWarmMs: number | undefined; 64 | slowestColdMs: number | undefined; 65 | } 66 | 67 | /** 68 | * The metrics for a single invocation, extract from XRay. 69 | */ 70 | interface BenchmarkSingleInvocationMetric { 71 | id: string | undefined; 72 | totalTime: number | undefined; 73 | initTime: number | undefined; 74 | invocTime: number | undefined; 75 | overheadTime: number | undefined; 76 | } 77 | 78 | /** 79 | * Map of memory configuration and the traces extracted. 80 | */ 81 | interface MemoryTraces { 82 | memorySize: number; 83 | traces: MinimalTrace[]; 84 | } 85 | 86 | /** 87 | * The minimal trace information we require for processing. 88 | */ 89 | type MinimalTrace = Pick & { Segments: SegmentDocument[] }; 90 | 91 | /** 92 | * The layout of the XRay segments. 93 | */ 94 | interface SegmentDocument { 95 | origin?: string; 96 | end_time: number; 97 | start_time: number; 98 | subsegments?: SubSegment[]; 99 | } 100 | 101 | /** 102 | * The layout of the XRay sub-segments. 103 | */ 104 | interface SubSegment { 105 | name?: string; 106 | end_time: number; 107 | start_time: number; 108 | } 109 | 110 | /** 111 | * Run the benchmark by: 112 | * - Iterating through memory configurations. 113 | * - Updating the Lambda with each memory configuration. 
114 | * - Invoking the Lambda for n cold starts and m warm starts. 115 | * - Extract all XRay traces. 116 | * - Process the traces to get the benchmark results. 117 | * - Output a markdown table and two charts. 118 | * 119 | * If you just want to reprocess the results, run the benchmark with `DRY_RUN`, 120 | * e.g. `DRY_RUN=true npm run benchmark`. This will skip deployment, teardown, invocations, and 121 | * fetching of XRay traces, and instead load the existing traces from `traces.json` and generate 122 | * the output again. 123 | */ 124 | const main = async (functionName: string) => { 125 | const memoryTimes: MemoryTimes[] = []; 126 | if (DRY_RUN === "true") { 127 | // If we are running a dry run, we only need to load in the existing traces and process them. 128 | const memoryTraces = JSON.parse(fs.readFileSync("./benchmark/traces.json").toString()); 129 | memoryTraces.forEach(({ memorySize, traces: traceBatches }: MemoryTraces) => { 130 | const times = processXRayTraces(traceBatches); 131 | memoryTimes.push({ 132 | memorySize, 133 | times, 134 | }); 135 | }); 136 | } else { 137 | // For each memory configuration, run through all invocations first. 138 | const benchmarkStartTimes: Date[] = []; 139 | for (let i = 0; i < MEMORY_SIZES.length; i++) { 140 | const benchmarkStartTime = new Date(); 141 | benchmarkStartTimes.push(benchmarkStartTime); 142 | const memorySize = MEMORY_SIZES[i]; 143 | await invokeFunctions(functionName, memorySize); 144 | await sleep(1000); 145 | } 146 | // The XRay traces should now be ready for us to fetch. 147 | const memoryTraces: MemoryTraces[] = []; 148 | for (let i = 0; i < MEMORY_SIZES.length; i++) { 149 | const benchmarkStartTime = benchmarkStartTimes[i]; 150 | const memorySize = MEMORY_SIZES[i]; 151 | const traceSummaries = await fetchXRayTraceSummaries(functionName, benchmarkStartTime); 152 | const traceBatches = await fetchXRayTraceBatches(traceSummaries); 153 | memoryTraces.push({ 154 | memorySize, 155 | traces: traceBatches, 156 | }); 157 | const times = processXRayTraces(traceBatches); 158 | memoryTimes.push({ 159 | memorySize, 160 | times, 161 | }); 162 | } 163 | fs.writeFileSync("./benchmark/traces.json", JSON.stringify(memoryTraces)); 164 | } 165 | 166 | outputBenchmarkMarkdown(memoryTimes); 167 | outputBenchmarkChart(memoryTimes); 168 | }; 169 | 170 | /** 171 | * Sleep for the specified time. 172 | */ 173 | const sleep = (ms: number) => { 174 | return new Promise((resolve) => { 175 | setTimeout(resolve, ms); 176 | }); 177 | }; 178 | 179 | /** 180 | * Run the actual benchmark, performing a series of invocations to the Lambda. To ensure cold 181 | * starts, we trigger an update on the functions environment variables before invoking it. We 182 | * then invoke it a number of times afterwards to gather warm startup data as well. 183 | * 184 | * The `memorySize` configuration sets the Lambda memory size, allowing easy scale up and down. 185 | */ 186 | const invokeFunctions = async (functionName: string, memorySize: number) => { 187 | const lambdaClient = new LambdaClient({}); 188 | const baseConfiguration = await lambdaClient.send( 189 | new GetFunctionConfigurationCommand({ 190 | FunctionName: functionName, 191 | }) 192 | ); 193 | 194 | // We generate the update configuration on the fly to always provide a unique 195 | // benchmark run time. 
196 |   const updateConfiguration: () => UpdateFunctionConfigurationCommandInput = () => ({
197 |     FunctionName: functionName,
198 |     MemorySize: memorySize,
199 |     Environment: {
200 |       Variables: {
201 |         ...baseConfiguration.Environment?.Variables,
202 |         BENCHMARK_RUN_TIME: `${new Date().toISOString()}-${Math.random()}`,
203 |       },
204 |     },
205 |   });
206 |   const testPayload: () => InvokeCommandInput = () => {
207 |     // Dynamically replace placeholders in the payload, so we can generate unique
208 |     // test data per invocation.
209 |     const payload = JSON.stringify(benchmarkPayload)
210 |       .replace("##DATE##", new Date().toISOString())
211 |       .replace("##NUM##", `${Math.floor(Math.random() * 10000)}`);
212 |     return {
213 |       FunctionName: functionName,
214 |       Payload: Buffer.from(payload),
215 |     };
216 |   };
217 | 
218 |   for (let cI = 0; cI < COLD_STARTS; cI++) {
219 |     console.log("[BENCHMARK] Updating the function to ensure a cold start.");
220 |     await lambdaClient.send(new UpdateFunctionConfigurationCommand(updateConfiguration()));
221 |     const s = Date.now();
222 |     await lambdaClient.send(new InvokeCommand(testPayload()));
223 |     console.log(`[BENCHMARK] Invoked cold-start function: ${Date.now() - s}ms.`);
224 |     await sleep(500);
225 |   }
226 |   for (let wI = 0; wI < WARM_STARTS; wI++) {
227 |     const s = Date.now();
228 |     await lambdaClient.send(new InvokeCommand(testPayload()));
229 |     console.log(`[BENCHMARK] Invoked warm-start function: ${Date.now() - s}ms.`);
230 |     await sleep(500);
231 |   }
232 | };
233 | 
234 | /**
235 |  * Fetch all XRay trace summaries, which contain the trace IDs we will need to get the detailed information. We
236 |  * limit the information we search for by filtering on the `functionName` and on the service type AWS::Lambda. Additionally,
237 |  * we only look in the period between the `benchmarkStartTime` and the time that this function is called.
238 |  *
239 |  * Since XRay traces can take some time to appear, we also gracefully handle waiting if we don't see at least 80% of
240 |  * the traces in the results.
241 |  */
242 | const fetchXRayTraceSummaries = async (functionName: string, benchmarkStartTime: Date): Promise<TraceSummary[]> => {
243 |   const benchmarkEndTime = new Date();
244 |   const xRayClient = new XRayClient({});
245 | 
246 |   const traceSummaries: TraceSummary[] = [];
247 |   let nextTokenSummary: string | undefined;
248 |   let retries = 0;
249 |   let retry = true;
250 |   while (retry) {
251 |     const traceInput: GetTraceSummariesCommandInput = {
252 |       StartTime: benchmarkStartTime,
253 |       EndTime: benchmarkEndTime,
254 |       FilterExpression: `service(id(name: "${functionName}", type: "AWS::Lambda"))`,
255 |       NextToken: nextTokenSummary,
256 |     };
257 |     const traceSummariesRes = await xRayClient.send(new GetTraceSummariesCommand(traceInput));
258 |     nextTokenSummary = traceSummariesRes.NextToken;
259 |     traceSummaries.push(...(traceSummariesRes.TraceSummaries ?? []));
260 | 
261 |     // Make sure we've fetched all our traces. We only require 80% to have been gathered, since
262 |     // XRay is sampling our requests.
263 |     if ((traceSummariesRes.TraceSummaries?.length ?? 0) + traceSummaries.length < (COLD_STARTS + WARM_STARTS) * 0.8) {
264 |       if (retries >= 40) {
265 |         throw new Error(
266 |           `[TRACES] Failed to get all traces for the invocations, was only able to find '${traceSummariesRes.TraceSummaries?.length}' traces.`
267 |         );
268 |       }
269 |       console.log("[TRACES] Traces have still not appeared, waiting 1 second and trying again...");
270 |       await sleep(1000);
271 |       retries++;
272 |     } else {
273 |       retry = false;
274 |     }
275 | 
276 |     if (!retry && nextTokenSummary === undefined) {
277 |       break;
278 |     }
279 |   }
280 |   console.log("[TRACES] Fetched trace summaries, fetching detailed trace information.");
281 |   return traceSummaries;
282 | };
283 | 
284 | /**
285 |  * Split an array into chunks based on the `size`.
286 |  */
287 | const chunkArray = (arr: any[], size: number) => {
288 |   var results = [];
289 |   while (arr.length) {
290 |     results.push(arr.splice(0, size));
291 |   }
292 |   return results;
293 | };
294 | 
295 | /**
296 |  * Once we have a list of trace IDs, we can get the detailed trace information, which contains the breakdown
297 |  * of the different stages the Lambda function went through.
298 |  *
299 |  * Because the XRay API is limited to fetching 5 traces at a time, we divide all the trace IDs into chunks of
300 |  * 5. Additionally, we re-request traces if we detect any that are unprocessed.
301 |  *
302 |  * NOTE: To avoid leaking information, all traces are stripped down and only a whitelist of fields is saved.
303 |  */
304 | const fetchXRayTraceBatches = async (traceSummaries: Pick<TraceSummary, "Id">[]): Promise<MinimalTrace[]> => {
305 |   const xRayClient = new XRayClient({});
306 |   const batchTraces: MinimalTrace[] = [];
307 | 
308 |   // We can only request 5 traces at a time, so we split the summary IDs into chunks.
309 |   const batchSummaryChunks = chunkArray(traceSummaries, 5);
310 | 
311 |   let nextTokenBatch: string | undefined;
312 |   for (let i = 0; i < batchSummaryChunks.length; i++) {
313 |     const batchSummaryChunk = batchSummaryChunks[i];
314 |     while (true) {
315 |       const batchInput: BatchGetTracesCommandInput = {
316 |         TraceIds: [...new Set(batchSummaryChunk.filter((t) => t.Id).map((t) => t.Id!))],
317 |         NextToken: nextTokenBatch,
318 |       };
319 |       const batchTracesRes = await xRayClient.send(new BatchGetTracesCommand(batchInput));
320 | 
321 |       // Check if there are any unprocessed traces. If there are, we wait 1 second and retry the loop.
322 |       if ((batchTracesRes.UnprocessedTraceIds?.length ?? 0) > 0) {
323 |         console.log("[TRACES] Detailed traces are still being processed, waiting 1 second and trying again...");
324 |         await sleep(1000);
325 |         continue;
326 |       }
327 | 
328 |       // Only store the relevant parts of the traces, so we can't accidentally leak information.
329 |       const minimalTraces: MinimalTrace[] = (batchTracesRes.Traces ?? []).map((t) => ({
330 |         Id: t.Id,
331 |         Segments: (t.Segments ?? 
[]) 332 | .filter((s) => s.Document !== undefined) 333 | .map((s: any) => { 334 | const seg: SegmentDocument = JSON.parse(s.Document!); 335 | const cleanedSeg: SegmentDocument = { 336 | origin: seg.origin, 337 | end_time: seg.end_time, 338 | start_time: seg.start_time, 339 | subsegments: seg.subsegments?.map((subSeg) => ({ 340 | name: subSeg.name, 341 | end_time: subSeg.end_time, 342 | start_time: subSeg.start_time, 343 | })), 344 | }; 345 | return cleanedSeg; 346 | }), 347 | })); 348 | batchTraces.push(...minimalTraces); 349 | 350 | nextTokenBatch = batchTracesRes.NextToken; 351 | if (nextTokenBatch === undefined) { 352 | break; 353 | } 354 | } 355 | } 356 | 357 | return batchTraces; 358 | }; 359 | 360 | /** 361 | * Process a list of XRay detailed traces, extracting the timings for the various 362 | * segments, along with overall metrics. 363 | */ 364 | const processXRayTraces = (traces: MinimalTrace[]): BenchmarkResults => { 365 | console.log("[TRACES] Processing trace information."); 366 | // Gather overall metrics. 367 | let avgWarmMs: number | undefined; 368 | let avgColdMs: number | undefined; 369 | let fastestWarmMs: number | undefined; 370 | let fastestColdMs: number | undefined; 371 | let slowestWarmMs: number | undefined; 372 | let slowestColdMs: number | undefined; 373 | 374 | // Gather per-trace metrics. 375 | const traceTimes: BenchmarkSingleInvocationMetric[] = []; 376 | traces.map((trace) => { 377 | let totalTime: number | undefined; 378 | let initTime: number | undefined; 379 | let invocTime: number | undefined; 380 | let overheadTime: number | undefined; 381 | 382 | // Piece together the segment timings into one measurement. 383 | trace.Segments?.map((seg) => { 384 | if (seg.origin === "AWS::Lambda") { 385 | totalTime = seg.end_time - seg.start_time; 386 | } else if (seg.origin === "AWS::Lambda::Function") { 387 | seg.subsegments?.map((subSeg) => { 388 | if (subSeg.name === "Initialization") { 389 | initTime = subSeg.end_time - subSeg.start_time; 390 | } else if (subSeg.name === "Invocation") { 391 | invocTime = subSeg.end_time - subSeg.start_time; 392 | } else if (subSeg.name === "Overhead") { 393 | overheadTime = subSeg.end_time - subSeg.start_time; 394 | } 395 | }); 396 | } 397 | }); 398 | 399 | const isColdStart = initTime ? true : false; 400 | 401 | // XRay validation (see https://github.com/codetalkio/patterns-serverless-rust-minimal/issues/5 for context): 402 | // 1. XRay can sometimes hand us back invalid traces where the total time is less than the 403 | // sum of its elements. We discard these traces. 404 | const otherTime = (initTime || 0) + (invocTime || 0) + (overheadTime || 0); 405 | if (totalTime! < otherTime) { 406 | console.error( 407 | `[TRACES] Invalid trace with total time '${totalTime}' less than sum of other times '${otherTime}'. ID = ${trace.Id}.` 408 | ); 409 | return; 410 | } 411 | // 2. Similarly, XRay sometimes only catches the Lambda service part, but not the function metrics 412 | // themselves. We are then unable to tell if it was a cold start or not. 413 | if (!invocTime) { 414 | console.error(`[TRACES] Invalid trace with missing invocation time. ID = ${trace.Id}.`); 415 | return; 416 | } 417 | 418 | traceTimes.push({ 419 | id: trace.Id, 420 | totalTime, 421 | initTime, 422 | invocTime, 423 | overheadTime, 424 | }); 425 | 426 | // Keep track of overall metrics. 427 | if (!isColdStart) { 428 | avgWarmMs = !avgWarmMs ? totalTime : (avgWarmMs + totalTime!) / 2; 429 | fastestWarmMs = !fastestWarmMs || totalTime! < fastestWarmMs ? 
totalTime : fastestWarmMs; 430 | slowestWarmMs = !slowestWarmMs || totalTime! > slowestWarmMs ? totalTime : slowestWarmMs; 431 | } else if (isColdStart) { 432 | avgColdMs = !avgColdMs ? totalTime : (avgColdMs + totalTime!) / 2; 433 | fastestColdMs = !fastestColdMs || totalTime! < fastestColdMs ? totalTime : fastestColdMs; 434 | slowestColdMs = !slowestColdMs || totalTime! > slowestColdMs ? totalTime : slowestColdMs; 435 | } 436 | }); 437 | 438 | return { 439 | overallTimes: { 440 | avgWarmMs, 441 | avgColdMs, 442 | fastestWarmMs, 443 | slowestWarmMs, 444 | fastestColdMs, 445 | slowestColdMs, 446 | }, 447 | traceTimes, 448 | }; 449 | }; 450 | 451 | /** 452 | * Output the results to the `response-times.md` markdown file by manually piecing together the 453 | * markdown content. 454 | */ 455 | const outputBenchmarkMarkdown = async (memoryTimes: MemoryTimes[]) => { 456 | console.log("[OUTPUT] Saving benchmark times to 'response-times.md'."); 457 | 458 | // Generate the measurement tables for each memory configuration section. 459 | let benchmarkData = ""; 460 | memoryTimes.map(({ memorySize, times }) => { 461 | benchmarkData += ` 462 | 463 | ## Results for ${memorySize} MB`; 464 | benchmarkData += ` 465 | 466 | | Measurement (${memorySize} MB) | Time (ms) | 467 | |-------------|------| 468 | | Average warm start response time | ${Math.floor(times.overallTimes.avgWarmMs! * 10000) / 10} ms | 469 | | Average cold start response time | ${Math.floor(times.overallTimes.avgColdMs! * 10000) / 10} ms | 470 | | Fastest warm response time | ${Math.floor(times.overallTimes.fastestWarmMs! * 10000) / 10} ms | 471 | | Slowest warm response time | ${Math.floor(times.overallTimes.slowestWarmMs! * 10000) / 10} ms | 472 | | Fastest cold response time | ${Math.floor(times.overallTimes.fastestColdMs! * 10000) / 10} ms | 473 | | Slowest cold response time | ${Math.floor(times.overallTimes.slowestColdMs! * 10000) / 10} ms | 474 | `; 475 | 476 | benchmarkData += ` 477 | 478 | | Response time | Initialization | Invocation | Overhead | Cold/ Warm Start | Memory Size | Trace ID | 479 | |---------------|----------------|------------|----------|------------------|-------------|----------|`; 480 | times.traceTimes.map((time) => { 481 | const isColdStart = !!time.initTime; 482 | const totalTimeMs = time.totalTime ? `${Math.floor(time.totalTime * 10000) / 10} ms` : ""; 483 | const initTimeMs = time.initTime ? `${Math.floor(time.initTime * 10000) / 10} ms` : ""; 484 | const invocTimeMs = time.invocTime ? `${Math.floor(time.invocTime * 10000) / 10} ms` : ""; 485 | const overheadTimeMs = time.overheadTime ? `${Math.floor(time.overheadTime * 10000) / 10} ms` : ""; 486 | const coldOrWarmStart = isColdStart ? "🥶" : "🥵"; 487 | benchmarkData += ` 488 | | ${totalTimeMs} | ${initTimeMs} | ${invocTimeMs} | ${overheadTimeMs} | ${coldOrWarmStart} | ${memorySize} MB | ${time.id} |`; 489 | }); 490 | }); 491 | 492 | // Set up the page, including the generated charts. 493 | const header = ` 494 | # Benchmark: Response Times 495 | 496 | The following are the response time results from AWS XRay, generated after running \`npm run benchmark\`. 
497 | 498 | ![Average Cold/Warm Response Times](./response-times-average.svg) 499 | 500 | - 🔵: Average cold startup times 501 | - 🔴: Average warm startup times 502 | 503 | ![Fastest and Slowest Response Times](./response-times-extremes.svg) 504 | 505 | - 🔵: Fastest warm response time 506 | - 🔴: Slowest warm response time 507 | - 🟡: Fastest cold response time 508 | - 🟠: Slowest cold response time 509 | 510 | `; 511 | 512 | // Provide a table of contents for quick access to the different measurements. 513 | let tableOfContents = ` 514 | ## Overview 515 | 516 | `; 517 | memoryTimes.map(({ memorySize }) => { 518 | tableOfContents += ` 519 | - [Results for ${memorySize} MB](#results-for-${memorySize}-mb)`; 520 | }); 521 | 522 | // Include generic XRay trace examples in the bottom of the page. 523 | const footer = ` 524 | 525 | ## XRay Example of a Cold Start 526 | 527 | Screenshot 2020-10-07 at 23 01 40 528 | 529 | ## XRay Example of a Warm Start 530 | 531 | Screenshot 2020-10-07 at 23 01 23 532 | `; 533 | const markdown = [header, tableOfContents, benchmarkData, footer].join("\n"); 534 | fs.writeFileSync("./benchmark/response-times.md", markdown); 535 | }; 536 | 537 | /** 538 | * Output two charts based on the data, showing the behaviour and performance of the AWS Lambda: 539 | * - response-times-average.svg: Shows average cold start and warm starts, for each memory configuration. 540 | * - response-times-extremes.svg: Shows the fastest and slowests response times for both cold and warm 541 | * starts, for each memory configuration. 542 | */ 543 | const outputBenchmarkChart = async (memoryTimes: MemoryTimes[]) => { 544 | console.log("[OUTPUT] Charting benchmark times to 'response-times.svg'."); 545 | 546 | const opts = { 547 | options: { 548 | width: 700, 549 | height: 300, 550 | axisX: { 551 | showLabel: true, 552 | showGrid: false, 553 | }, 554 | axisY: { 555 | labelInterpolationFnc: function (value: any) { 556 | return value + "ms"; 557 | }, 558 | scaleMinSpace: 15, 559 | }, 560 | }, 561 | title: { 562 | height: 50, 563 | fill: "#4A5572", 564 | }, 565 | css: `.ct-series-a .ct-bar, .ct-series-a .ct-line, .ct-series-a .ct-point, .ct-series-a .ct-slice-donut{ 566 | stroke: #4A5572 567 | }`, 568 | }; 569 | 570 | const labels: string[] = []; 571 | const avgSeries: number[][] = [ 572 | [], // Avg. Cold 573 | [], // Avg. Warm 574 | ]; 575 | const extremesSeries: number[][] = [ 576 | [], // Fastest Warm 577 | [], // Slowest Warm 578 | [], // Fastest Cold 579 | [], // Slowest Cold 580 | ]; 581 | memoryTimes.map(({ memorySize, times }) => { 582 | labels.push(`${memorySize}MB`); 583 | // Add the average data to the first chart series. 584 | avgSeries[0].push(Math.floor(times.overallTimes.avgColdMs! * 10000) / 10); 585 | avgSeries[1].push(Math.floor(times.overallTimes.avgWarmMs! * 10000) / 10); 586 | // Add the extremes data to the second chart series. 587 | extremesSeries[0].push(Math.floor(times.overallTimes.fastestWarmMs! * 10000) / 10); 588 | extremesSeries[1].push(Math.floor(times.overallTimes.slowestWarmMs! * 10000) / 10); 589 | extremesSeries[2].push(Math.floor(times.overallTimes.fastestColdMs! * 10000) / 10); 590 | extremesSeries[3].push(Math.floor(times.overallTimes.slowestColdMs! 
* 10000) / 10);
591 |   });
592 | 
593 |   const avgSeriesData = {
594 |     title: "Average Cold/Warm Response Times Across Memory Configurations",
595 |     labels,
596 |     series: avgSeries,
597 |   };
598 |   const extremesSeriesData = {
599 |     title: "Fastest and Slowest Response Times Across Memory Configurations",
600 |     labels,
601 |     series: extremesSeries,
602 |   };
603 | 
604 |   chartistSvg("bar", avgSeriesData, opts).then((html: any) => {
605 |     fs.writeFileSync("./benchmark/response-times-average.svg", html);
606 |   });
607 |   chartistSvg("bar", extremesSeriesData, opts).then((html: any) => {
608 |     fs.writeFileSync("./benchmark/response-times-extremes.svg", html);
609 |   });
610 | };
611 | 
612 | (async () => {
613 |   const functionName = `${STACK_NAME}-main`;
614 |   console.log(`[SETUP] BENCHMARK_SUFFIX = ${BENCHMARK_SUFFIX}`);
615 |   console.log(`[SETUP] Function Name = ${functionName}`);
616 |   if (!BENCHMARK_SUFFIX) {
617 |     console.error("No 'BENCHMARK_SUFFIX' was set!");
618 |     process.exit(1);
619 |   }
620 | 
621 |   try {
622 |     await main(functionName);
623 |   } catch (err) {
624 |     console.error("[ERROR] Benchmark failed unexpectedly:", err);
625 |     process.exit(1);
626 |   }
627 | })();
628 | -------------------------------------------------------------------------------- /benchmark/response-times-average.svg: --------------------------------------------------------------------------------
1 | [SVG bar chart: "Average Cold/Warm Response Times Across Memory Configurations" (axis labels 128MB to 4096MB, 0ms to 112.5ms); SVG markup omitted from this dump.]
-------------------------------------------------------------------------------- /benchmark/response-times-extremes.svg: --------------------------------------------------------------------------------
1 | [SVG bar chart: "Fastest and Slowest Response Times Across Memory Configurations" (axis labels 128MB to 4096MB, 0ms to 225ms); SVG markup omitted from this dump.]
-------------------------------------------------------------------------------- /cdk.json: --------------------------------------------------------------------------------
1 | {
2 |   "app": "ts-node -r tsconfig-paths/register deployment/bin/stack.ts"
3 | }
4 | -------------------------------------------------------------------------------- /deployment/bin/stack.ts: --------------------------------------------------------------------------------
1 | import * as cdk from "@aws-cdk/core";
2 | 
3 | import { LambdaStack } from "../lib/lambda-stack";
4 | import * as pkg from "../../package.json";
5 | 
6 | // Allow appending a randomized benchmark number.
7 | const { BENCHMARK_SUFFIX } = process.env;
8 | const STACK_NAME = BENCHMARK_SUFFIX ? `${pkg.name}-${BENCHMARK_SUFFIX}` : pkg.name;
9 | 
10 | /**
11 |  * Construct for the Serverless Application.
12 |  *
13 |  * NOTE: We export our construct so that it's possible to stitch it into a larger deployment.
14 |  */
15 | export default class Stack {
16 |   public lambdaStack: LambdaStack;
17 | 
18 |   constructor(app: cdk.App) {
19 |     // Set up our Lambda Stack.
20 | this.lambdaStack = new LambdaStack(app, `${STACK_NAME}`, {}); 21 | } 22 | } 23 | 24 | const app = new cdk.App(); 25 | new Stack(app); 26 | -------------------------------------------------------------------------------- /deployment/lib/lambda-stack.ts: -------------------------------------------------------------------------------- 1 | import * as core from "@aws-cdk/core"; 2 | import * as lambda from "@aws-cdk/aws-lambda"; 3 | import * as s3 from "@aws-cdk/aws-s3"; 4 | import * as cdk from "@aws-cdk/core"; 5 | 6 | const { CDK_LOCAL } = process.env; 7 | 8 | interface Props {} 9 | 10 | export class LambdaStack extends core.Stack { 11 | constructor(scope: cdk.App, id: string, props: Props) { 12 | super(scope, id); 13 | 14 | const bootstrapLocation = `${__dirname}/../../target/cdk/release`; 15 | 16 | // Our Lambda function details. 17 | const entryId = "main"; 18 | const entryFnName = `${id}-${entryId}`; 19 | const entry = new lambda.Function(this, entryId, { 20 | functionName: entryFnName, 21 | description: "Rust serverless minimal microservice", 22 | runtime: lambda.Runtime.PROVIDED_AL2, 23 | handler: `${id}`, // The handler value syntax is `{cargo-package-name}.{bin-name}`. 24 | code: 25 | CDK_LOCAL !== "true" 26 | ? lambda.Code.fromAsset(bootstrapLocation) 27 | : lambda.Code.fromBucket(s3.Bucket.fromBucketName(this, `LocalBucket`, "__local__"), bootstrapLocation), 28 | memorySize: 256, 29 | timeout: cdk.Duration.seconds(10), 30 | tracing: lambda.Tracing.ACTIVE, 31 | }); 32 | 33 | // Our Lambda function environment variables. 34 | entry.addEnvironment("AWS_NODEJS_CONNECTION_REUSE_ENABLED", "1"); 35 | 36 | // Tag our resource. 37 | core.Aspects.of(entry).add(new cdk.Tag("service-type", "API")); 38 | core.Aspects.of(entry).add(new cdk.Tag("billing", `lambda-${entryFnName}`)); 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "2.1" 2 | 3 | services: 4 | localstack: 5 | container_name: "${LOCALSTACK_DOCKER_NAME-localstack_main}" 6 | # image: localstack/localstack:0.12.4 7 | image: localstack/localstack:latest 8 | network_mode: bridge 9 | ports: 10 | - "4566:4566" 11 | - "4571:4571" 12 | - "${PORT_WEB_UI-9888}:${PORT_WEB_UI-9888}" 13 | environment: 14 | - SERVICES=${SERVICES-serverless,cloudfront,cloudformation,iam,sts,sqs,ssm,s3,route53,acm,cloudwatch,cloudwatch-logs,lambda,dynamodb,apigateway} 15 | - DEFAULT_REGION=${DEFAULT_REGION-eu-west-1} 16 | - DEBUG=${DEBUG- } 17 | - DATA_DIR=${DATA_DIR- } 18 | - PORT_WEB_UI=${PORT_WEB_UI-9888} 19 | - LAMBDA_EXECUTOR=${LAMBDA_EXECUTOR- } 20 | - LAMBDA_REMOTE_DOCKER=${LAMBDA_REMOTE_DOCKER-false} 21 | - KINESIS_ERROR_PROBABILITY=${KINESIS_ERROR_PROBABILITY- } 22 | - DOCKER_HOST=unix:///var/run/docker.sock 23 | - HOST_TMP_FOLDER=${TMPDIR:-/tmp/localstack} 24 | volumes: 25 | - "${TMPDIR:-/tmp/localstack}:/tmp/localstack" 26 | - "/var/run/docker.sock:/var/run/docker.sock" 27 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "sls-rust-minimal", 3 | "description": "Deployment tools for the minimal Serverless Rust application.", 4 | "version": "1.0.0", 5 | "author": "Christian Kjaer ", 6 | "license": "MIT", 7 | "keywords": [ 8 | "serverless", 9 | "rust", 10 | "aws", 11 | "cdk" 12 | ], 13 | "scripts": { 14 | "build": "rustup target add 
x86_64-unknown-linux-musl && cargo build --release --target x86_64-unknown-linux-musl && mkdir -p ./target/cdk/release && cp ./target/x86_64-unknown-linux-musl/release/bootstrap ./target/cdk/release/bootstrap", 15 | "build:debug": "cargo build --target x86_64-unknown-linux-musl && mkdir -p ./target/cdk/release && cp ./target/x86_64-unknown-linux-musl/debug/bootstrap ./target/cdk/release/bootstrap", 16 | "build:archive": "(cd ./target/cdk/release/ && zip ./lambda.zip ./bootstrap) && mv ./target/cdk/release/lambda.zip ./lambda.zip", 17 | "build:clean": "rm -r ./target/cdk/release || echo '[build:clean] No existing release found.'", 18 | "deploy": "npm run build:clean && npm run build && npm run cdk:deploy", 19 | "benchmark": "export BENCHMARK_SUFFIX=${BENCHMARK_SUFFIX-$RANDOM}; npm run benchmark:setup && ts-node -- ./benchmark/benchmark.ts && npm run benchmark:destroy", 20 | "benchmark:setup": "[[ $DRY_RUN != 'true' ]] && CI=true npm run cdk:deploy || echo 'Dry run, skipping deploy.'", 21 | "benchmark:destroy": "[[ $BENCHMARK_SUFFIX != '' && $DRY_RUN != 'true' ]] && cdk destroy --force '*' || echo 'Dry run, skipping teardown.'", 22 | "cdk:synth": "cdk synth", 23 | "cdk:diff": "cdk diff", 24 | "cdk:deploy": "[[ $CI == 'true' ]] && export CDK_APPROVAL='never' || export CDK_APPROVAL='broadening'; cdk deploy --require-approval $CDK_APPROVAL '*'", 25 | "cdk:bootstrap": "cdk bootstrap aws://$(aws sts get-caller-identity | jq -r .Account)/$AWS_REGION", 26 | "cdklocal:start": "docker-compose up", 27 | "cdklocal:clear-cache": "(rm ~/.cdk/cache/accounts.json || true) && (rm ~/.cdk/cache/accounts_partitions.json || true)", 28 | "cdklocal:deploy": "npm run --silent cdklocal:clear-cache && CDK_LOCAL=true AWS_REGION=eu-west-1 cdklocal deploy --require-approval never '*'", 29 | "cdklocal:bootstrap": "npm run --silent cdklocal:clear-cache && CDK_LOCAL=true AWS_REGION=eu-west-1 cdklocal bootstrap aws://000000000000/eu-west-1", 30 | "ts-node": "ts-node" 31 | }, 32 | "dependencies": {}, 33 | "devDependencies": { 34 | "@aws-cdk/assert": "1.81.0", 35 | "@aws-cdk/aws-lambda": "1.81.0", 36 | "@aws-cdk/aws-s3": "1.81.0", 37 | "@aws-cdk/core": "1.81.0", 38 | "@aws-sdk/client-lambda": "^3.1.0", 39 | "@aws-sdk/client-xray": "^3.1.0", 40 | "@types/node": "14.11.2", 41 | "aws-cdk": "1.81.0", 42 | "aws-cdk-local": "1.65.2", 43 | "svg-chartist": "^1.0.1", 44 | "ts-node": "9.1.1", 45 | "tsconfig-paths": "3.9.0", 46 | "typescript": "4.1.3" 47 | }, 48 | "prettier": { 49 | "printWidth": 120 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /src/bin/bootstrap.rs: -------------------------------------------------------------------------------- 1 | use lambda::handler_fn; 2 | 3 | use ::lib::*; 4 | 5 | #[tokio::main] 6 | async fn main() -> Result<(), LambdaError> { 7 | // Attach our own handler function to the lambda rust runtime, and run it. 8 | let runtime_handler = handler_fn(handler); 9 | lambda::run(runtime_handler).await?; 10 | Ok(()) 11 | } 12 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | use lambda::Context; 2 | use serde_json::{json, Value}; 3 | 4 | pub type LambdaError = Box; 5 | 6 | /// Our handler processes the Lambda events and returns the response as JSON. 
7 | pub async fn handler(event: Value, _: Context) -> Result { 8 | let first_name = event["firstName"].as_str().unwrap_or("world"); 9 | 10 | Ok(json!({ "message": format!("Hello, {}!", first_name) })) 11 | } 12 | -------------------------------------------------------------------------------- /tests/main_test.rs: -------------------------------------------------------------------------------- 1 | use lambda::Context; 2 | use serde_json::json; 3 | 4 | #[tokio::test] 5 | async fn handler_handles_basic_event() { 6 | let event = json!({ 7 | "firstName": "Earth" 8 | }); 9 | let expected = json!({ 10 | "message": "Hello, Earth!" 11 | }); 12 | let result = lib::handler(event.clone(), Context::default()) 13 | .await 14 | .expect("expected Ok(_) value"); 15 | assert_eq!(result, expected) 16 | } 17 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2018", 4 | "module": "commonjs", 5 | "lib": ["es2016", "es2017.object", "es2017.string"], 6 | "resolveJsonModule": true, 7 | "declaration": true, 8 | "strict": true, 9 | "noImplicitAny": true, 10 | "strictNullChecks": true, 11 | "noImplicitThis": true, 12 | "alwaysStrict": true, 13 | "noUnusedLocals": false, 14 | "noUnusedParameters": false, 15 | "noImplicitReturns": true, 16 | "noFallthroughCasesInSwitch": true, 17 | "inlineSourceMap": true, 18 | "inlineSources": true, 19 | "experimentalDecorators": true, 20 | "strictPropertyInitialization": false, 21 | "typeRoots": ["./node_modules/@types"], 22 | "allowSyntheticDefaultImports": true, 23 | "baseUrl": "." 24 | }, 25 | "exclude": ["cdk.out"] 26 | } 27 | --------------------------------------------------------------------------------