├── .editorconfig ├── .eslintrc ├── .github ├── lifeomic-probot.yml └── workflows │ ├── code-scanning-2022-06-29.yml │ ├── pr-build.yaml │ └── release.yaml ├── .gitignore ├── CHANGELOG.md ├── LICENSE ├── README.md ├── bin ├── build.ts ├── get-host-addr.ts └── wait-for-localstack.ts ├── build-package.js ├── examples ├── compose │ ├── docker-compose.yaml │ ├── lambda.js │ ├── lambda.test.js │ ├── package.json │ └── setup.js └── dynamodb │ ├── dynamodb.test.js │ ├── package.json │ └── setup.js ├── package.json ├── release.config.js ├── src ├── Environment.ts ├── WriteBuffer.ts ├── docker.ts ├── dynamodb.ts ├── graphql.ts ├── handleWebpackResult.ts ├── index.ts ├── kinesis.ts ├── lambda.ts ├── localstack.ts ├── mockServerLambda.ts ├── patches │ ├── dns.js │ ├── index.ts │ └── lambda.js ├── utils │ ├── awsUtils.ts │ ├── config.ts │ ├── kinesisTools.ts │ └── logging.ts ├── webpack.ts └── zip.ts ├── test ├── Environment.test.js ├── WriteBuffer.test.js ├── crypto-browserify.test.js ├── docker │ ├── ensureImage.test.js │ ├── executeContainerCommand.test.js │ ├── get-host-addr.test.js │ └── pullImage.test.js ├── dynamodb │ ├── basic-parallel.test.js │ ├── basic-serial.test.js │ ├── custom-endpoint.test.js │ ├── dynamoDBTestHooks.test.ts │ ├── dynamodb.test.js │ ├── testHooks-error.test.js │ ├── testTypes.test.ts │ └── wait-for-ready.test.js ├── fixtures │ ├── async_iterators.js │ ├── async_test.js │ ├── async_with_arrow.js │ ├── bundled_service.js │ ├── bundled_service.js.map │ ├── bundled_service.zip │ ├── crypto-browserify.js │ ├── es_modules │ │ ├── es_module.mjs │ │ └── index.js │ ├── lambci-derivative │ │ └── Dockerfile │ ├── lambda-with-tsconfig │ │ ├── index.ts │ │ └── tsconfig.json │ ├── lambda_graphql.js │ ├── lambda_service.js │ ├── multi-lambdas │ │ ├── func1.js │ │ ├── func2.ts │ │ ├── func3 │ │ │ └── index.js │ │ ├── func4 │ │ │ └── index.ts │ │ ├── ignored.md │ │ ├── invalid │ │ │ └── index.js │ │ │ │ └── ignored │ │ └── unreadable │ │ │ └── index.js │ ├── 
runtime_callbacks.js │ ├── runtime_dns.js │ ├── runtime_events.js │ ├── runtime_promises.js │ ├── ts_lambda_kinesisHandler.ts │ └── ts_lambda_service.ts ├── graphql │ ├── assertSuccess-httpErrors.test.js │ ├── assertions.test.js │ ├── config.test.js │ ├── query.test.js │ ├── queryHooks.test.js │ └── urlOption.test.js ├── handleWebpackResult.test.js ├── helpers │ ├── createDefaultContainer.js │ └── lambda.js ├── index.test.ts ├── indexJs.test.js ├── kinesis │ ├── basic-parallel.test.js │ ├── basic-serial.test.js │ ├── custom-endpoint.test.js │ ├── kinesis.test.js │ ├── testHooks-error.test.js │ ├── useKinesis.test.js │ └── wait-for-ready.test.js ├── lambda │ ├── compose-container.test.js │ ├── compose-derivative-container.test.js │ ├── core.test.js │ ├── createLambdaExecutionEnvironment-errorHandling.test.js │ ├── createLambdaExecutionEnvironment.test.js │ ├── entrypoint.test.js │ ├── graphql.test.js │ ├── runtime-callbacks.test.js │ ├── runtime-dns.test.js │ ├── runtime-events.test.js │ ├── runtime-no-dns.test.js │ ├── runtime-promises.test.js │ ├── tools-compose-container-env.test.js │ ├── tools-container-compose-network.test.js │ ├── tools-container-custom-image.test.js │ ├── tools-container-env.test.js │ ├── tools-container-zipfile.test.js │ └── tools-container.test.js ├── localstack │ ├── externalDocker.test.ts │ ├── getConnection.test.js │ ├── lambdaSetup.test.js │ ├── localStackHooks.test.js │ ├── localstackNameTags.test.ts │ ├── useLocalStack.test.js │ ├── version10Status.test.js │ ├── version11Status.test.js │ ├── version12Status.test.js │ ├── version13Status.test.js │ └── version14Status.test.ts ├── mockServerLambda.test.js ├── utils │ ├── config.test.js │ ├── kinesisTools.test.js │ └── logging.test.js └── webpack-build.test.js ├── tsconfig.json └── yarn.lock /.editorconfig: -------------------------------------------------------------------------------- 1 | [*] 2 | end_of_line = lf 3 | insert_final_newline = true 4 | trim_trailing_whitespace = true 5 | 6 
| [*.{js,json,yml,ts}] 7 | charset = utf-8 8 | indent_size = 2 9 | indent_style = space 10 | -------------------------------------------------------------------------------- /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "overrides": [ 3 | { 4 | "files": ["**/*.js"], 5 | "extends": [ 6 | "plugin:@lifeomic/node/recommended" 7 | ] 8 | }, 9 | { 10 | "files": ["**/*.ts"], 11 | "extends": [ 12 | "eslint:recommended", 13 | "plugin:@typescript-eslint/eslint-recommended", 14 | "plugin:@typescript-eslint/recommended" 15 | ], 16 | "parser": "@typescript-eslint/parser", 17 | "plugins": ["@typescript-eslint"], 18 | "rules": { 19 | "@typescript-eslint/no-explicit-any": "off", 20 | "@typescript-eslint/ban-ts-ignore": "off", 21 | "@typescript-eslint/explicit-function-return-type": "off", 22 | "@typescript-eslint/no-non-null-assertion": "off", 23 | "@typescript-eslint/no-var-requires": "off" 24 | } 25 | } 26 | ] 27 | } 28 | -------------------------------------------------------------------------------- /.github/lifeomic-probot.yml: -------------------------------------------------------------------------------- 1 | enforceSemanticCommits: true 2 | -------------------------------------------------------------------------------- /.github/workflows/code-scanning-2022-06-29.yml: -------------------------------------------------------------------------------- 1 | # This workflow is inherited from our internal .github repo at https://github.com/lifeomic/.github/blob/master/workflow-templates/code-scanning-2022-06-29.yml 2 | # Setting up this workflow on the repository will perform a static scan for security issues using GitHub Code Scanning. 
3 | # Any findings for a repository can be found under the `Security` tab -> `Code Scanning Alerts` 4 | name: "CodeQL" 5 | 6 | on: 7 | push: 8 | branches: 9 | - main 10 | - master 11 | paths-ignore: 12 | - test 13 | - tests 14 | - '**/test' 15 | - '**/tests' 16 | - '**/*.test.js' 17 | - '**/*.test.ts' 18 | pull_request: 19 | branches: 20 | - main 21 | - master 22 | paths-ignore: 23 | - test 24 | - tests 25 | - '**/test' 26 | - '**/tests' 27 | - '**/*.test.js' 28 | - '**/*.test.ts' 29 | 30 | jobs: 31 | analyze: 32 | if: ${{ !contains(github.head_ref, 'dependabot') }} 33 | name: Analyze 34 | runs-on: ubuntu-latest 35 | 36 | strategy: 37 | fail-fast: false 38 | 39 | steps: 40 | - name: Checkout repository 41 | uses: actions/checkout@v3 42 | with: 43 | # We must fetch at least the immediate parents so that if this is 44 | # a pull request then we can checkout the head. 45 | fetch-depth: 2 46 | 47 | # Initializes the CodeQL tools for scanning. 48 | - name: Initialize CodeQL 49 | uses: github/codeql-action/init@v2 50 | with: 51 | config-file: lifeomic/.github/config-files/codeql-config.yml@master # uses our config file from the lifeomic/.github repo 52 | queries: +security-extended # This will run all queries at https://github.com/github/codeql/:language/ql/src/codeql-suites/:language-security-extended.qls 53 | 54 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 55 | # If this step fails, it should be removed and replaced with custom build steps. 
56 | - name: Autobuild 57 | uses: github/codeql-action/autobuild@v2 58 | 59 | - name: Perform CodeQL Analysis 60 | uses: github/codeql-action/analyze@v2 61 | 62 | -------------------------------------------------------------------------------- /.github/workflows/pr-build.yaml: -------------------------------------------------------------------------------- 1 | name: PR Build and Test 2 | 3 | on: pull_request 4 | 5 | jobs: 6 | test: 7 | runs-on: ubuntu-latest 8 | name: Node.JS ${{ matrix.node-version }} 9 | strategy: 10 | matrix: 11 | node-version: [14.x, 16.x] 12 | steps: 13 | - uses: actions/checkout@v2 14 | - uses: actions/setup-node@v1 15 | with: 16 | node-version: ${{ matrix.node-version }} 17 | - name: Install 18 | run: yarn install 19 | - name: Test 20 | env: 21 | LAMBDA_REMOTE_DOCKER: true 22 | run: yarn test 23 | - name: Build 24 | run: yarn build 25 | - name: Coverage 26 | run: yarn coverage 27 | - name: Coveralls 28 | uses: coverallsapp/github-action@v1.1.2 29 | with: 30 | github-token: ${{ secrets.GITHUB_TOKEN }} 31 | path-to-lcov: .nyc_output/lcov.info 32 | flag-name: run-${{ matrix.node }} 33 | parallel: true 34 | finish: 35 | needs: test 36 | runs-on: ubuntu-latest 37 | steps: 38 | - name: Coveralls Finished 39 | uses: coverallsapp/github-action@v1.1.2 40 | with: 41 | github-token: ${{ secrets.github_token }} 42 | parallel-finished: true 43 | -------------------------------------------------------------------------------- /.github/workflows/release.yaml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | push: 5 | branches: 6 | - "master" 7 | 8 | jobs: 9 | build: 10 | runs-on: ubuntu-latest 11 | environment: npm 12 | steps: 13 | - uses: actions/checkout@v2 14 | with: 15 | fetch-depth: 0 16 | - uses: actions/setup-node@v1 17 | with: 18 | node-version: 14 19 | registry-url: https://registry.npmjs.org 20 | - name: Install 21 | run: yarn install --frozen-lockfile 22 | - name: Test 23 | env: 
24 | LAMBDA_REMOTE_DOCKER: true 25 | run: yarn test 26 | - name: Build 27 | run: yarn build 28 | - name: Coverage # Only needed if the test command doesn't already output a report, like with Jest 29 | run: yarn coverage # the coverage command should output results to /.nyc_output/lcov.info 30 | - name: Coverage Report 31 | uses: coverallsapp/github-action@v1.1.2 32 | with: 33 | github-token: ${{ secrets.GITHUB_TOKEN }} 34 | path-to-lcov: .nyc_output/lcov.info 35 | flag-name: master 36 | - name: Publish 37 | env: 38 | NPM_TOKEN: ${{ secrets.LIFEOMIC_NPM_TOKEN }} 39 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 40 | run: | 41 | echo "//registry.npmjs.org/:_authToken=${NPM_TOKEN}" > .npmrc 42 | yarn semantic-release 43 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /.nyc_output/ 2 | node_modules/ 3 | /examples/**/yarn.lock 4 | /examples/compose/build/ 5 | /test/fixtures/build/ 6 | yarn-error.log 7 | coverage/ 8 | src/lambda.js 9 | src/localstack.js 10 | src/dynamodb.js 11 | CertificateAuthorityCertificate.pem 12 | dist -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | All notable changes to this project will be documented in this file. 3 | 4 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), 5 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 6 | 7 | ## [14.1.0] - 2021-01-11 8 | ### Allow specifying the dynamodb docker image 9 | - DynamoDB Input option to specify the docker dynamodb image and version 10 | 11 | ## [14.0.0] - 2021-01-07 12 | ### Webpack and node version updates 13 | - Removed async to generator for babel. 
14 | - Defaulted to version 0.12.4 localstack 15 | - Removed old node versions from tests 16 | 17 | ## [13.0.0] - 2020-12-07 18 | ### Typescript conversions 19 | - Updated the rest of the source files to typescript. This will break some commonJS 20 | imports if they are importing `src/WriteBuffer` or `src/Environment`. 21 | 22 | ## [12.0.0] - 2020-11-23 23 | ### Type Breaking 24 | - Converted localstack to typescript, and fixed some inputs 25 | - Updated deployment to use `github-actions` 26 | 27 | ## [11.2.2] - 2020-07-31 28 | ### Fixed 29 | - Add explicit peerDependency entry for `@lifeomic/alpha`. Previously, alpha 30 | was being used, but not declared as a dependency. 31 | 32 | ## [11.0.3] - 2020-02-28 33 | ### Fixed 34 | - Adding missing properties to WebpackOptions TS typing 35 | 36 | ## [10.0.0] - 2020-01-10 37 | ### Breaking 38 | - Remove support for the ENABLE_LAMBDA_LOGGING environment variable. Instead you can set DEBUG=lambda-tools:lambda 39 | 40 | ## [8.0.0] - 2019-06-07 41 | ### Breaking 42 | - Upgraded ava to 2.0 43 | - Drop support for node 6 44 | 45 | ## [7.2.0] - 2019-02-27 46 | ### New 47 | - A new `-t` options for the webpack build CLI that allows providing a 48 | `tsconfig.json` file for TypeScript compilation 49 | 50 | ## [7.1.0] - 2019-02-27 51 | ### New 52 | - A new `mountpointParent` option on `useNewContainer` to was added to control 53 | where `zipfile` arguments are unzippped. This new option can be used when 54 | trying to align paths both inside and outside docker which is needed when 55 | using `zipfile` from inside a compose environment. 56 | 57 | ## [7.0.0] - 2019-02-26 58 | ### New 59 | - A new `zipfile` option on `useNewContainer` to build containers straight from 60 | zip files. Testing from zip files can help validate final packaging. 
61 | 62 | ### Breaking 63 | - Dependent projects will need to make sure that MockServer 5.5 is used in 64 | docker-compose environments and npm dependencies 65 | - Dependnet projects need to upgrade to Ava 1.x 66 | 67 | 68 | [11.2.2]: https://github.com/lifeomic/lambda-tools/compare/v11.2.1...v11.2.2 69 | [11.0.3]: https://github.com/lifeomic/lambda-tools/compare/v10.0.0...v11.0.3 70 | [10.0.0]: https://github.com/lifeomic/lambda-tools/compare/v8.0.0...v10.0.0 71 | [8.0.0]: https://github.com/lifeomic/lambda-tools/compare/v7.2.0...v8.0.0 72 | [7.2.0]: https://github.com/lifeomic/lambda-tools/compare/v7.1.0...v7.2.0 73 | [7.1.0]: https://github.com/lifeomic/lambda-tools/compare/v7.0.0...v7.1.0 74 | [7.0.0]: https://github.com/lifeomic/lambda-tools/compare/v6.0.1...v7.0.0 75 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018 LifeOmic 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /bin/build.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | import path from 'path'; 3 | import chalk from 'chalk'; 4 | import yargs from 'yargs'; 5 | 6 | import build, {Config} from '../src/webpack'; 7 | 8 | const epilogue = ` 9 | Each entrypoint is a single source file that represents the top-level module for 10 | the bundle being produced. By default, the resulting bundle will use the 11 | basename of the entrypoint as the bundle name. If a :name suffix is provided 12 | then the name value will be used as the bundle name instead. For example, 13 | src/app.js:lambda.js would use src/app.js as the entrypoint and produce a bundle 14 | named lambda.js in the output directory. 
15 | `; 16 | 17 | const argv = yargs 18 | .usage('$0 [] ...') 19 | .option('d', { 20 | alias: 'dns-retry', 21 | describe: 'enable automatic retries for DNS lookups', 22 | type: 'boolean' 23 | }) 24 | .option('n', { 25 | alias: 'node-version', 26 | describe: 'the version of node that the bundle should be optimized for (default 12.13.0)', 27 | type: 'string' 28 | }) 29 | .option('enable-runtime-source-maps', { 30 | describe: 'enable support for runtime source maps', 31 | type: 'boolean', 32 | default: false 33 | }) 34 | .option('o', { 35 | alias: 'output-directory', 36 | describe: 'the path where the bundle will be produced (default: cwd)', 37 | type: 'string' 38 | }) 39 | .option('s', { 40 | alias: 'service-name', 41 | describe: 'the name of the service the bundle is for', 42 | type: 'string' 43 | }) 44 | .option('minify', { 45 | describe: 'enable minification of bundled code', 46 | type: 'boolean', 47 | default: false 48 | }) 49 | .option('w', { 50 | alias: 'webpack-transform', 51 | describe: 'a module that exports a function to transform the webpack configuration', 52 | type: 'string' 53 | }) 54 | .option('z', { 55 | alias: 'zip', 56 | describe: 'zip the JS bundle (default false)', 57 | type: 'boolean' 58 | }) 59 | .option('t', { 60 | alias: 'tsconfig', 61 | describe: 'relative path to a tsconfig.json file to compile typescript', 62 | type: 'string' 63 | }) 64 | .option('transpile-only', { 65 | describe: 'when using --tsconfig, disable typechecking in ts-loader', 66 | type: 'boolean' 67 | }) 68 | .options('enable-cache-directory', { 69 | describe: 'enables babel-loader cache directory', 70 | type: 'boolean' 71 | }) 72 | .demandCommand(1) 73 | .demandOption(['s']) 74 | .epilog(epilogue) 75 | .argv; 76 | 77 | const buildOptions: Config = { 78 | enableDnsRetry: argv.d, 79 | entrypoint: argv._ as string[], 80 | nodeVersion: argv.n, 81 | outputPath: argv.o, 82 | minify: argv.minify, 83 | serviceName: argv.s, 84 | zip: argv.z, 85 | tsconfig: argv.t, 86 | transpileOnly: 
argv['transpile-only'], 87 | enableRuntimeSourceMaps: argv['enable-runtime-source-maps'], 88 | cacheDirectory: argv['enable-cache-directory'] 89 | }; 90 | 91 | if (argv.t) { 92 | // assert typescript and ts-loader are installed 93 | ['typescript', 'ts-loader'].forEach(dependency => { 94 | try { 95 | require.resolve(dependency); 96 | } catch (_) { 97 | console.error(chalk.bold.red(`It looks like you're trying to use TypeScript but do not have '${chalk.bold( 98 | dependency 99 | )}' installed. Please install it or remove the tsconfig flag.`)); 100 | process.exit(1); 101 | } 102 | }); 103 | } 104 | 105 | if (argv.w) { 106 | // Ignore the non-literal module require because the module to load is 107 | // expected to come from the caller of the command 108 | // eslint-disable-next-line security/detect-non-literal-require 109 | const transformFunction = require(path.join(process.cwd(), argv.w)); 110 | const transformType = typeof transformFunction; 111 | if (transformType !== 'function') { 112 | throw new Error(`The webpack transform module should export a function, but the exported type was ${transformType}`); 113 | } 114 | buildOptions.configTransformer = transformFunction; 115 | } 116 | 117 | build(buildOptions) 118 | .catch((error) => { 119 | if (error.message === 'compilation_error') { 120 | console.error('An error occurred during compilation. 
See output above for more details.'); 121 | } else { 122 | console.error('Failed to build lambda package:', error); 123 | } 124 | process.exitCode = 1; 125 | }); 126 | -------------------------------------------------------------------------------- /bin/get-host-addr.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | import { getHostAddress } from '../src/docker'; 4 | 5 | getHostAddress() 6 | .then(console.log.bind(console)) 7 | .catch((error) => { 8 | console.error(error); 9 | process.exitCode = 1; 10 | }); 11 | -------------------------------------------------------------------------------- /bin/wait-for-localstack.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | import yargs from 'yargs'; 4 | import { dockerLocalstackReady } from '../src/localstack'; 5 | 6 | const { name, containerId, version } = yargs 7 | .usage('$0 []') 8 | .option('containerId', { 9 | alias: 'c', 10 | describe: 'the docker container ID to wait for', 11 | type: 'string', 12 | }) 13 | .option('name', { 14 | alias: 'n', 15 | describe: 'the name given to the container to wait for', 16 | type: 'string', 17 | }) 18 | .option('version', { 19 | alias: 'v', 20 | describe: 'the version of localstack to search active containers for', 21 | type: 'string', 22 | }) 23 | .demandCommand(1) 24 | .argv; 25 | 26 | // @ts-expect-error this is to satisfy plain javascript, where the compiler won't complain. 
27 | dockerLocalstackReady({ containerId, name, version }, {}) 28 | .then(() => process.exit(0), (err) => { 29 | console.error(err); 30 | process.exitCode = 1; 31 | }); 32 | -------------------------------------------------------------------------------- /build-package.js: -------------------------------------------------------------------------------- 1 | const { execSync } = require('child_process'); 2 | 3 | const run = (cmd) => execSync(cmd, { stdio: 'inherit' }); 4 | 5 | run('rm -rf dist/'); 6 | 7 | run('yarn tsc'); 8 | 9 | for (const file of ['package.json', 'LICENSE', 'CHANGELOG.md', 'README.md']) { 10 | run(`cp ${file} dist/`); 11 | } 12 | 13 | // Explicitly copy 'js' patch files because they are not 14 | // compiled + moved by typescript 15 | run('cp src/patches/*.js dist/src/patches'); 16 | 17 | console.log('✔️ Successfully built library to dist folder'); 18 | -------------------------------------------------------------------------------- /examples/compose/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: "3" 2 | services: 3 | dynamodb: 4 | image: cnadiminti/dynamodb-local:latest 5 | test: 6 | command: yarn run ava -v 7 | depends_on: 8 | - dynamodb 9 | environment: 10 | - AWS_ACCESS_KEY_ID=bogus 11 | - AWS_REGION=us-east-1 12 | - AWS_SECRET_ACCESS_KEY=bogus 13 | - COMPOSE_PROJECT_NAME 14 | - DYNAMODB_ENDPOINT=http://dynamodb:8000 15 | - DEBUG=lambda-tools:* 16 | - MOUNTPOINT=${PWD}/build/ 17 | image: node:8.9.4-alpine 18 | volumes: 19 | - ./:/opt/work 20 | - /var/run/docker.sock:/var/run/docker.sock 21 | working_dir: /opt/work 22 | 23 | networks: 24 | default: 25 | internal: true 26 | -------------------------------------------------------------------------------- /examples/compose/lambda.js: -------------------------------------------------------------------------------- 1 | const AWS = require('aws-sdk'); 2 | 3 | exports.handler = async (event, context, callback) => { 4 | try { 5 | const 
dynamodb = new AWS.DynamoDB({ 6 | endpoint: process.env.DYNAMODB_ENDPOINT 7 | }); 8 | 9 | const result = await dynamodb.listTables().promise(); 10 | callback(null, result.TableNames); 11 | } catch (error) { 12 | callback(error); 13 | } 14 | }; 15 | -------------------------------------------------------------------------------- /examples/compose/lambda.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | 3 | const { useDynamoDB } = require('@lifeomic/lambda-tools').dynamodb; 4 | const { useLambda } = require('@lifeomic/lambda-tools').lambda; 5 | 6 | useDynamoDB(test); 7 | useLambda(test); 8 | 9 | test.serial('The Lambda function can use the Dynamo instance', async (test) => { 10 | const tables = await test.context.lambda.raw(null, null); 11 | test.deepEqual(tables, [ 'testers' ]); 12 | }); 13 | -------------------------------------------------------------------------------- /examples/compose/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "compose", 3 | "version": "1.0.0", 4 | "scripts": { 5 | "pretest": "lambda-tools-build -o ./build ./lambda.js", 6 | "test": "export COMPOSE_PROJECT_NAME=${RANDOM}; docker-compose run test; EXIT_CODE=$?; docker-compose down; exit $EXIT_CODE" 7 | }, 8 | "license": "MIT", 9 | "ava": { 10 | "files": "*.test.js", 11 | "require": "./setup" 12 | }, 13 | "dependencies": { 14 | "ava": "^2.0.0", 15 | "aws-sdk": "^2.224.1", 16 | "@lifeomic/lambda-tools": "file:../../" 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /examples/compose/setup.js: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | 3 | const { tableSchema } = require('@lifeomic/lambda-tools').dynamodb; 4 | const { useNewContainer } = require('@lifeomic/lambda-tools').lambda; 5 | 6 | tableSchema([ 7 | { 8 | 
AttributeDefinitions: [ 9 | { 10 | AttributeName: 'name', 11 | AttributeType: 'S' 12 | }, 13 | { 14 | AttributeName: 'age', 15 | AttributeType: 'N' 16 | } 17 | ], 18 | KeySchema: [ 19 | { 20 | AttributeName: 'name', 21 | KeyType: 'HASH' 22 | }, 23 | { 24 | AttributeName: 'age', 25 | KeyType: 'RANGE' 26 | } 27 | ], 28 | ProvisionedThroughput: { 29 | ReadCapacityUnits: 1, 30 | WriteCapacityUnits: 1 31 | }, 32 | TableName: 'testers' 33 | } 34 | ]); 35 | 36 | useNewContainer({ 37 | environment: { 38 | DYNAMODB_ENDPOINT: process.env.DYNAMODB_ENDPOINT 39 | }, 40 | handler: 'lambda.handler', 41 | mountpoint: process.env.MOUNTPOINT, 42 | useComposeNetwork: true 43 | }); 44 | -------------------------------------------------------------------------------- /examples/dynamodb/dynamodb.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | 3 | const { useDynamoDB } = require('@lifeomic/lambda-tools').dynamodb; 4 | 5 | useDynamoDB(test); 6 | 7 | test.serial('A DynamoDB table is provisioned', async (test) => { 8 | const items = [ 9 | { name: 'alice', age: 35 }, 10 | { name: 'bob', age: 38 } 11 | ]; 12 | 13 | // Insert all items into the databes 14 | await Promise.all( 15 | items.map((item) => test.context.dynamodb.documentClient.put({ 16 | Item: item, 17 | TableName: 'testers' 18 | }).promise()) 19 | ); 20 | 21 | // Get all items in the database 22 | const results = await test.context.dynamodb.documentClient.scan({ TableName: 'testers' }).promise(); 23 | 24 | test.deepEqual(results.Items, items); 25 | }); 26 | -------------------------------------------------------------------------------- /examples/dynamodb/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "dynamodb", 3 | "version": "1.0.0", 4 | "scripts": { 5 | "test": "ava -v" 6 | }, 7 | "license": "MIT", 8 | "ava": { 9 | "files": "*.test.js", 10 | "require": "./setup.js" 11 | }, 12 | 
"dependencies": { 13 | "ava": "^0.25.0", 14 | "@lifeomic/lambda-tools": "file:../../" 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /examples/dynamodb/setup.js: -------------------------------------------------------------------------------- 1 | const { tableSchema } = require('@lifeomic/lambda-tools').dynamodb; 2 | 3 | tableSchema([ 4 | { 5 | AttributeDefinitions: [ 6 | { 7 | AttributeName: 'name', 8 | AttributeType: 'S' 9 | }, 10 | { 11 | AttributeName: 'age', 12 | AttributeType: 'N' 13 | } 14 | ], 15 | KeySchema: [ 16 | { 17 | AttributeName: 'name', 18 | KeyType: 'HASH' 19 | }, 20 | { 21 | AttributeName: 'age', 22 | KeyType: 'RANGE' 23 | } 24 | ], 25 | ProvisionedThroughput: { 26 | ReadCapacityUnits: 1, 27 | WriteCapacityUnits: 1 28 | }, 29 | TableName: 'testers' 30 | } 31 | ]); 32 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@lifeomic/lambda-tools", 3 | "version": "0.0.0", 4 | "description": "Common utilities for Lambda testing and development", 5 | "main": "src/index.js", 6 | "types": "src/index.d.ts", 7 | "files": [ 8 | "bin/**/*", 9 | "src/**/*" 10 | ], 11 | "bin": { 12 | "lambda-tools-build": "./bin/build.js", 13 | "lambda-tools-host-addr": "./bin/get-host-addr.js", 14 | "lambda-tools-wait-for-localstack": "./bin/wait-for-localstack.js" 15 | }, 16 | "scripts": { 17 | "build": "node build-package.js", 18 | "coverage": "nyc report --reporter=text-lcov > ./.nyc_output/lcov.info", 19 | "lint": "eslint . 
--ext .js,.ts -f codeframe", 20 | "pretest": "yarn lint && tsc --noEmit", 21 | "test": "nyc ava" 22 | }, 23 | "repository": { 24 | "type": "git", 25 | "url": "https://github.com/lifeomic/lambda-tools.git" 26 | }, 27 | "author": "LifeOmic ", 28 | "license": "MIT", 29 | "ava": { 30 | "verbose": true, 31 | "failWithoutAssertions": false, 32 | "timeout": "10m", 33 | "files": [ 34 | "test/**/*.test.*" 35 | ], 36 | "require": [ 37 | "ts-node/register", 38 | "source-map-support/register" 39 | ], 40 | "extensions": [ 41 | "ts", 42 | "js" 43 | ] 44 | }, 45 | "eslintIgnore": [ 46 | "examples/", 47 | "test/fixtures/bundled_*", 48 | "test/fixtures/build" 49 | ], 50 | "nyc": { 51 | "check-coverage": true, 52 | "all": true, 53 | "lines": 100, 54 | "statements": 100, 55 | "functions": 100, 56 | "branches": 100, 57 | "include": [ 58 | "src/**" 59 | ], 60 | "exclude": [ 61 | "src/patches/**", 62 | "src/**/*.d.ts" 63 | ] 64 | }, 65 | "optionalDependencies": { 66 | "ts-loader": "*", 67 | "typescript": "*" 68 | }, 69 | "devDependencies": { 70 | "@lifeomic/alpha": "^4.1.0", 71 | "@lifeomic/eslint-plugin-node": "^2.0.1", 72 | "@types/archiver": "^5.1.0", 73 | "@types/aws-lambda": "^8.10.64", 74 | "@types/debug": "^4.1.5", 75 | "@types/dockerode": "^2.5.20", 76 | "@types/fs-extra": "^8.1.0", 77 | "@types/koa": "^2.0.45", 78 | "@types/koa-router": "^7.0.28", 79 | "@types/lodash": "^4.14.150", 80 | "@types/nested-error-stacks": "^2.1.0", 81 | "@types/node": "^12.20.43", 82 | "@types/promise-retry": "^1.1.3", 83 | "@types/sinon": "^9.0.10", 84 | "@types/supertest": "^2.0.10", 85 | "@types/supports-color": "^7.2.0", 86 | "@types/terser-webpack-plugin": "^2.2.3", 87 | "@types/tmp": "^0.2.0", 88 | "@types/unzipper": "^0.10.3", 89 | "@types/uuid": "^7.0.3", 90 | "@types/webpack": "^4.41.25", 91 | "@types/yargs": "^15.0.11", 92 | "@typescript-eslint/eslint-plugin": "^2.33.0", 93 | "@typescript-eslint/parser": "^2.33.0", 94 | "apollo-server-koa": "^2.21.0", 95 | "aws-sdk-mock": "^5.1.0", 96 | 
"axios": "^0.27.2", 97 | "conventional-changelog-conventionalcommits": "^4.6.3", 98 | "coveralls": "^3.1.1", 99 | "crypto-browserify": "^3.12.0", 100 | "eslint": "^7.0.0", 101 | "graphql": "^14.0.2", 102 | "graphql-tools": "^4.0.0", 103 | "koa": "^2.5.0", 104 | "koa-router": "^7.4.0", 105 | "mockserver-client": "^5.11.2", 106 | "nyc": "^15.1.0", 107 | "proxyquire": "^2.1.3", 108 | "semantic-release": "^19.0.2", 109 | "serverless-http": "^2.7.0", 110 | "sinon": "^9.2.4", 111 | "ts-loader": "^8.0.17", 112 | "ts-node": "^9.1.1", 113 | "ts-sinon": "^2.0.1", 114 | "typescript": "^3.9.9" 115 | }, 116 | "dependencies": { 117 | "@babel/core": "^7.22.11", 118 | "@babel/plugin-proposal-async-generator-functions": "^7.12.1", 119 | "@babel/plugin-transform-async-to-generator": "^7.12.1", 120 | "@babel/polyfill": "^7.12.1", 121 | "@babel/preset-env": "^7.12.1", 122 | "@babel/preset-typescript": "^7.12.1", 123 | "@elastic/elasticsearch": "^7.3.0", 124 | "archiver": "^3.0.0", 125 | "ava": "^3.13.0", 126 | "aws-sdk": "^2.1001.0", 127 | "babel-loader": "^8.0.2", 128 | "chalk": "^3.0.0", 129 | "debug": "^4.3.4", 130 | "dockerode": "^2.5.3", 131 | "fs-extra": "^8.1.0", 132 | "glob": "^7.1.2", 133 | "jszip": "^3.1.5", 134 | "lodash": "^4.17.5", 135 | "nested-error-stacks": "^2.0.0", 136 | "p-queue": "^6.4.0", 137 | "promise-retry": "^1.1.1", 138 | "source-map-support": "^0.5.19", 139 | "string-replace-loader": "^2.1.1", 140 | "supertest": "^4.0.1", 141 | "supports-color": "^7.0.0", 142 | "terser-webpack-plugin": "^2.3.8", 143 | "tmp-promise": "^2.0.1", 144 | "unzipper": "^0.10.0", 145 | "uuid": "^3.3.2", 146 | "webpack": "^4.41.2", 147 | "webpack-babel-env-deps": "^1.5.0", 148 | "wrapper-webpack-plugin": "^2.1.0", 149 | "yargs": "^16.1.1", 150 | "zip-webpack-plugin": "^3.0.0" 151 | }, 152 | "publishConfig": { 153 | "access": "public" 154 | }, 155 | "peerDependencies": { 156 | "@lifeomic/alpha": "^4.1.0 || ^5.0.1", 157 | "mockserver-client": "^5.11.2" 158 | } 159 | } 160 | 
-------------------------------------------------------------------------------- /release.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | branches: ['master'], 3 | plugins: [ 4 | ['@semantic-release/commit-analyzer', { preset: 'conventionalcommits' }], 5 | ['@semantic-release/npm', { pkgRoot: 'dist/' }], 6 | [ 7 | '@semantic-release/github', 8 | { 9 | // Setting this to false disables the default behavior 10 | // of opening a GitHub issue when a release fails. 11 | // We have other methods of tracking these failures. 12 | failComment: false 13 | } 14 | ] 15 | ] 16 | }; 17 | -------------------------------------------------------------------------------- /src/Environment.ts: -------------------------------------------------------------------------------- 1 | // Snapshot-and-restore helper for process.env, used by test hooks to set 1 | // temporary AWS/endpoint variables and undo them afterwards. 1 | export class Environment { 2 | private _backup: Record = {} 3 | 4 | // Restores every variable touched by set() to its original value; variables 4 | // that did not exist before are deleted rather than set to 'undefined'. 4 | restore () { 5 | for (const [ name, value ] of Object.entries(this._backup)) { 6 | if (value === undefined) { 7 | delete process.env[name]; 8 | } else { 9 | process.env[name] = value; 10 | } 11 | } 12 | } 13 | 14 | // Sets an environment variable, remembering the pre-existing value for restore(). 14 | set (name: string, value: any) { 15 | // Back up only the first value seen for a given name; previously the backup 15 | // was overwritten on every call, so setting the same variable twice made 15 | // restore() reinstate the first test value instead of the true original. 15 | if (!(name in this._backup)) { this._backup[name] = process.env[name]; } 16 | process.env[name] = value; 17 | } 18 | } 19 | 20 | export default Environment; 21 | -------------------------------------------------------------------------------- /src/WriteBuffer.ts: -------------------------------------------------------------------------------- 1 | import {Writable, WritableOptions} from 'stream'; 2 | 3 | type WriteParams = Parameters; 4 | 5 | export class WriteBuffer extends Writable { 6 | private _buffer: Buffer[] = [] 7 | constructor (options?: WritableOptions) { 8 | super(options); 9 | } 10 | 11 | reset () { 12 | this._buffer = []; 13 | } 14 | 15 | toString (encoding?: BufferEncoding) { 16 | return this._buffer.map((chunk) => chunk.toString(encoding)).join(''); 17 | } 18 | 19 | _write (chunk: WriteParams[0], encoding: WriteParams[1], callback: 
WriteParams[2]) { 20 | this._buffer.push(Buffer.from(chunk, encoding as BufferEncoding)); 21 | callback(); 22 | } 23 | } 24 | 25 | export default WriteBuffer; 26 | -------------------------------------------------------------------------------- /src/docker.ts: -------------------------------------------------------------------------------- 1 | import assert from 'assert'; 2 | import Docker, { Exec, ExecCreateOptions } from 'dockerode'; 3 | import os from 'os'; 4 | import { WriteBuffer } from './WriteBuffer'; 5 | import map from 'lodash/map'; 6 | import flatten from 'lodash/flatten'; 7 | import { getLogger } from './utils/logging'; 8 | import { EventEmitter } from 'events' 9 | 10 | const DEFAULT_IMAGE = 'alpine:3.6'; 11 | const DEFAULT_ROUTE_PATTERN = /^default\b.*$/m; 12 | const INTERFACE_ADDRESS_PATTERN = /\binet addr:\d{1,3}\.\d{1,3}.\d{1,3}\.\d{1,3}\b/m; 13 | 14 | const logger = getLogger('docker'); 15 | 16 | export interface ExecuteCommandConfig { 17 | container: Docker.Container; 18 | command: string[]; 19 | environment?: string[]; 20 | stdin?: string; 21 | } 22 | 23 | type DockerExec = Exec & { 24 | output: EventEmitter & { end: (...args: any[]) => any }; 25 | } 26 | 27 | export const executeContainerCommand = async ({ container, command, environment, stdin }: ExecuteCommandConfig) => { 28 | const options: ExecCreateOptions = { 29 | AttachStderr: true, 30 | AttachStdout: true, 31 | Cmd: command 32 | }; 33 | 34 | const usingStdin = stdin !== undefined; 35 | 36 | if (environment) { 37 | options.Env = environment; 38 | } 39 | 40 | if (usingStdin) { 41 | options.AttachStdin = true; 42 | } 43 | 44 | const exec = await container.exec(options) as DockerExec; 45 | 46 | const stderr = new WriteBuffer(); 47 | const stdout = new WriteBuffer(); 48 | await exec.start(usingStdin ? 
{ stdin: true, hijack: true } : {}); 49 | if (usingStdin) { 50 | exec.output.end(Buffer.from(stdin!)); 51 | } 52 | container.modem.demuxStream(exec.output, stdout, stderr); 53 | await new Promise((resolve, reject) => { 54 | exec.output.once('end', resolve); 55 | exec.output.once('error', reject); 56 | }); 57 | const inspectOutput = await exec.inspect(); 58 | return { stderr, stdout, inspectOutput }; 59 | }; 60 | 61 | const getDefaultInterface = (routeTable: string) => { 62 | const route = routeTable.match(DEFAULT_ROUTE_PATTERN); 63 | assert(route && route.length, 'Failed to parse route table for host'); 64 | 65 | const columns = route![0].split(/\s+/); 66 | assert(columns.length > 7, 'Failed to parse default route'); 67 | return columns[7]; 68 | }; 69 | 70 | const getInterfaceAddress = (ifconfig: string) => { 71 | const match = ifconfig.match(INTERFACE_ADDRESS_PATTERN); 72 | assert(match && match.length, 'Failed to parse interface configuration'); 73 | return match![0].split(':')[1]; 74 | }; 75 | 76 | const buildAuthForDocker = () => { 77 | const dockerUser = process.env.DOCKER_HUB_USER; 78 | const dockerPass = process.env.DOCKER_HUB_PASS; 79 | if (dockerUser && dockerPass) { 80 | logger.debug(`Pulling image as ${dockerUser}`); 81 | return { 82 | authconfig: { 83 | username: dockerUser, 84 | password: dockerPass, 85 | } 86 | } 87 | } 88 | 89 | logger.debug('Pulling image as anon'); 90 | return {}; 91 | } 92 | 93 | export const pullImage = async (docker: Docker, image: string) => { 94 | const stream = await docker.pull(image, buildAuthForDocker()); 95 | await new Promise((resolve) => { 96 | docker.modem.followProgress(stream, resolve, (progress: {status: string; progress?: string}) => { 97 | logger.debug(`${image}: ${progress.status}${progress.progress ? 
` ${progress.progress}` : ''}`); 98 | }); 99 | }); 100 | }; 101 | 102 | export const imageExists = async (docker: Docker, image: string) => { 103 | const images = await docker.listImages(); 104 | const imageTags = flatten(map(images, 'RepoTags')); 105 | return imageTags.includes(image); 106 | }; 107 | 108 | export const ensureImage = async (docker: Docker, image: string) => { 109 | if (!await imageExists(docker, image)) { 110 | await pullImage(docker, image); 111 | } 112 | }; 113 | 114 | export const getHostAddress = async () => { 115 | if (process.env.DOCKER_HOST_ADDR) { 116 | return process.env.DOCKER_HOST_ADDR; 117 | } 118 | 119 | // Docker on Mac runs in a VM. This makes the networking messy... We really 120 | // only need the host address for the builds. 121 | if (os.type() === 'Darwin') { 122 | return '127.0.0.1'; 123 | } 124 | 125 | const docker = new Docker(); 126 | await ensureImage(docker, DEFAULT_IMAGE); 127 | 128 | const container = await docker.createContainer({ 129 | Entrypoint: 'sh', 130 | HostConfig: { 131 | AutoRemove: true, 132 | NetworkMode: 'host', 133 | UsernsMode: 'host' 134 | }, 135 | Image: DEFAULT_IMAGE, 136 | OpenStdin: true, 137 | }); 138 | 139 | await container.start(); 140 | logger.debug(`Started container ${container.id}`); 141 | 142 | try { 143 | const { stdout: routeTable } = await executeContainerCommand({ container, command: ['route'] }); 144 | const defaultInterface = getDefaultInterface(routeTable.toString('utf8')); 145 | 146 | const { stdout: ifconfig } = await executeContainerCommand({ container, command: ['ifconfig', defaultInterface] }); 147 | return getInterfaceAddress(ifconfig.toString('utf8')); 148 | } finally { 149 | await container.stop(); 150 | logger.debug(`Stopped container ${container.id}`); 151 | } 152 | }; 153 | -------------------------------------------------------------------------------- /src/graphql.ts: -------------------------------------------------------------------------------- 1 | import assert from 
'assert'; 2 | import map from 'lodash/map'; 3 | import isString from 'lodash/isString'; 4 | import supertest, { Test } from 'supertest'; 5 | import { ExecutionContext, TestInterface } from 'ava'; 6 | import Koa from 'koa'; 7 | 8 | export type SetupContextGraphQl = (context: Context) => Koa; 9 | export type SetupGraphQL = (test: ExecutionContext) => Koa; 10 | 11 | export interface GraphQLErrorLocation { 12 | line: number; 13 | column: number; 14 | } 15 | 16 | export interface GraphQLError { 17 | message: string; 18 | path: (string | number)[]; 19 | locations: GraphQLErrorLocation[]; 20 | } 21 | 22 | export interface GraphQlResponse { 23 | statusCode: number; 24 | error: any; 25 | body: { 26 | errors: GraphQLError[]; 27 | }; 28 | } 29 | 30 | let setupGraphQLFunc: SetupGraphQL = () => { 31 | throw new Error('A test GraphQL endpoint has not been configured!'); 32 | }; 33 | 34 | /** 35 | * Assert response contains error path/message 36 | * @param {Object} response http graphql response object 37 | * @param {String} path '.' deliminated path to graphql resolver 38 | * @param {String|Function} messageTest test to be applied to error 39 | * message. If string, exact match. If function, apply test function to 40 | * error message. 41 | */ 42 | export const assertError = (response: GraphQlResponse, path: string | undefined, messageTest: string | ((message: string) => boolean)) => { 43 | assert(response.body.errors, 'Expected error but none found'); 44 | 45 | // path isn't defined on schema type errors. 
Get first error in that case 46 | let error; 47 | if (path) { 48 | error = response.body.errors.find((error) => 49 | (error.path || []).join('.') === path); 50 | } else { 51 | error = response.body.errors.find((error) => 52 | error.path === undefined); 53 | } 54 | 55 | const errorPaths = map(response.body.errors, function (error) { 56 | if (error.path) { 57 | return error.path.join('.'); 58 | } else { 59 | return ''; 60 | } 61 | }); 62 | 63 | assert(error, `No error found with path '${path}'. The paths with errors were: ${errorPaths.join(',')}`); 64 | if (isString(messageTest)) { 65 | assert.strictEqual(error!.message, messageTest); 66 | } else { 67 | assert(messageTest(error!.message), 'message did not match'); 68 | } 69 | }; 70 | 71 | export const assertSuccess = (response: GraphQlResponse) => { 72 | const status = response.statusCode; 73 | assert(status >= 200 && status < 300, 74 | `Did not succeed. HTTP status code was ${status}` + 75 | ` and error was ${JSON.stringify(response.error, null, 2)}`); 76 | 77 | const errors = map(response.body.errors, err => { 78 | return { 79 | message: err.message, 80 | path: err.path && err.path.join('.') 81 | }; 82 | }); 83 | assert(!response.body.errors, 'Did not succeed. 
Errors were ' + 84 | `${JSON.stringify(errors, null, 2)}`); 85 | }; 86 | 87 | export const setupGraphQL = (func: SetupGraphQL) => { 88 | setupGraphQLFunc = func; 89 | }; 90 | 91 | export interface GraphQLTestContext { 92 | graphql: (query: string, variables?: Record) => Test; 93 | } 94 | 95 | export interface GraphQlHooksOptions { 96 | getApp: SetupContextGraphQl; 97 | context: Context; 98 | url?: string; 99 | } 100 | 101 | export function graphqlHooks ( 102 | { 103 | getApp, 104 | context, 105 | url = '/graphql' 106 | }: GraphQlHooksOptions 107 | ) { 108 | return { 109 | beforeEach() { 110 | const app = getApp(context); 111 | assert(app, 'GraphQL setup must return a Koa application'); 112 | const request = supertest(app.callback()); 113 | 114 | context.graphql = (query, variables) => { 115 | if (Array.isArray(query)) { 116 | return request.post(url) 117 | .send(query); 118 | } 119 | return request.post(url) 120 | .send({ query, variables }); 121 | }; 122 | } 123 | } 124 | } 125 | 126 | 127 | export interface GraphQlOptions { 128 | url?: string; 129 | } 130 | 131 | export const useGraphQL = ( 132 | anyTest: TestInterface, 133 | { 134 | url = '/graphql' 135 | }: GraphQlOptions = {} 136 | ) => { 137 | const test = anyTest as TestInterface; 138 | test.serial.beforeEach((t) => { 139 | const app = setupGraphQLFunc(t); 140 | assert(app, 'GraphQL setup must return a Koa application'); 141 | const request = supertest(app.callback()); 142 | 143 | t.context.graphql = (query, variables) => { 144 | if (Array.isArray(query)) { 145 | return request.post(url) 146 | .send(query); 147 | } 148 | return request.post(url) 149 | .send({ query, variables }); 150 | }; 151 | }); 152 | }; 153 | -------------------------------------------------------------------------------- /src/handleWebpackResult.ts: -------------------------------------------------------------------------------- 1 | import supportsColor from 'supports-color'; 2 | import { getLogger } from './utils/logging'; 3 | import 
webpack from "webpack"; 4 | const logger = getLogger('webpack'); 5 | 6 | export const handleWebpackResults = (webpackResult: webpack.Stats) => { 7 | logger.info('Webpacking compilation result:\n', webpackResult.toString({ 8 | colors: !!supportsColor.stdout, 9 | // hide excessive chunking output 10 | chunks: false, 11 | // hide other built modules 12 | maxModules: 0, 13 | // hide warning traces 14 | moduleTrace: false 15 | })); 16 | 17 | if (webpackResult.hasErrors()) { 18 | throw new Error('compilation_error'); 19 | } 20 | }; 21 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | import * as docker from './docker'; 2 | import * as dynamodb from './dynamodb'; 3 | import Environment from './Environment'; 4 | import * as graphql from './graphql'; 5 | import * as lambda from './lambda'; 6 | import * as localStack from './localstack'; 7 | import * as kinesis from './kinesis'; 8 | import * as mockServerLambda from './mockServerLambda'; 9 | import WriteBuffer from './WriteBuffer'; 10 | 11 | export { 12 | docker, 13 | dynamodb, 14 | Environment, 15 | graphql, 16 | lambda, 17 | localStack, 18 | kinesis, 19 | mockServerLambda, 20 | WriteBuffer 21 | }; 22 | -------------------------------------------------------------------------------- /src/kinesis.ts: -------------------------------------------------------------------------------- 1 | import AWS from 'aws-sdk'; 2 | import { v4 as uuid } from 'uuid'; 3 | import Docker from 'dockerode'; 4 | import cloneDeep from 'lodash/cloneDeep'; 5 | import fromPairs from 'lodash/fromPairs'; 6 | 7 | import * as tools from './utils/kinesisTools'; 8 | 9 | import { getHostAddress, ensureImage } from './docker'; 10 | import { Environment } from './Environment'; 11 | import { AwsUtilsConnection, buildConnectionAndConfig, ConfigurationOptions, waitForReady } from './utils/awsUtils'; 12 | import { localstackReady } 
from './localstack'; 13 | import { getLogger } from './utils/logging'; 14 | import { pQueue } from './utils/config'; 15 | import { TestInterface } from 'ava'; 16 | 17 | const logger = getLogger('kinesis'); 18 | 19 | const KINESIS_IMAGE = 'localstack/localstack:0.12.2'; 20 | 21 | export { tools }; 22 | 23 | export type MappedStreamNames = {[Key in KeyArray[number]]: string} 24 | 25 | export interface KinesisContext { 26 | kinesisClient: AWS.Kinesis; 27 | config: ConfigurationOptions; 28 | streamNames: MappedStreamNames; 29 | uniqueIdentifier: string; 30 | } 31 | 32 | export interface KinesisTestContext { 33 | kinesis: KinesisContext; 34 | } 35 | 36 | export interface UseKinesisContext { 37 | kinesis: AWS.Kinesis; 38 | } 39 | 40 | const kinesisStreams: { StreamName: string; ShardCount: number }[] = []; 41 | 42 | export function streams (streams: string[]) { 43 | kinesisStreams.length = 0; 44 | kinesisStreams.push(...streams.map(StreamName => ({ 45 | StreamName, 46 | ShardCount: 1 47 | }))); 48 | } 49 | 50 | function getStreamName (streamName: string, uniqueIdentifier: string) { 51 | return uniqueIdentifier ? 
`${streamName}-${uniqueIdentifier}` : streamName; 52 | } 53 | 54 | export async function destroyStreams (kinesisClient: AWS.Kinesis, uniqueIdentifier: string): Promise { 55 | const failedDeletions: string[] = []; 56 | const { StreamNames } = await kinesisClient.listStreams().promise(); 57 | const streamNames = kinesisStreams 58 | .map(({ StreamName }) => getStreamName(StreamName, uniqueIdentifier)); 59 | const streamsToDestroy = StreamNames 60 | .filter(name => streamNames.includes(name)); 61 | 62 | await pQueue.addAll( 63 | streamsToDestroy 64 | .map(StreamName => async () => { 65 | try { 66 | await kinesisClient.deleteStream({ StreamName }).promise(); 67 | await kinesisClient.waitFor('streamNotExists', { StreamName }).promise(); 68 | } catch (err) { 69 | failedDeletions.push(StreamName); 70 | logger.error(`Failed to destroy stream "${StreamName}"`, err); 71 | } 72 | }) 73 | ); 74 | 75 | if (failedDeletions.length) { 76 | throw new Error(`Failed to destroy streams: ${failedDeletions.join(', ')}`); 77 | } 78 | } 79 | 80 | export async function createStreams (kinesisClient: AWS.Kinesis, uniqueIdentifier: string): Promise { 81 | const failedProvisons: string[] = []; 82 | await pQueue.addAll( 83 | kinesisStreams.map(stream => async () => { 84 | const newStream = cloneDeep(stream); 85 | const StreamName = getStreamName(newStream.StreamName, uniqueIdentifier); 86 | newStream.StreamName = StreamName; 87 | 88 | try { 89 | await kinesisClient.createStream(newStream).promise(); 90 | await kinesisClient.waitFor('streamExists', { StreamName }).promise(); 91 | } catch (err) { 92 | failedProvisons.push(StreamName); 93 | logger.error(`Failed to create stream "${StreamName}"`, err); 94 | } 95 | }) 96 | ); 97 | 98 | if (failedProvisons.length) { 99 | try { 100 | await destroyStreams(kinesisClient, uniqueIdentifier); 101 | } catch (err) { 102 | logger.error('Failed to destroy streams after create failed', err); 103 | } 104 | throw new Error(`Failed to create streams: 
${failedProvisons.join(', ')}`); 105 | } 106 | } 107 | 108 | function buildStreamNameMapping (uniqueIdentifier: string) { 109 | return fromPairs(kinesisStreams.map(({ StreamName }) => { 110 | return [StreamName, getStreamName(StreamName, uniqueIdentifier)]; 111 | })); 112 | } 113 | 114 | export async function getConnection () { 115 | if (process.env.KINESIS_ENDPOINT) { 116 | return buildConnectionAndConfig({ url: process.env.KINESIS_ENDPOINT }); 117 | } 118 | 119 | const docker = new Docker(); 120 | const environment = new Environment(); 121 | 122 | await ensureImage(docker, KINESIS_IMAGE); 123 | 124 | const localstackPort = `${process.env.LAMBDA_TOOLS_LOCALSTACK_PORT || 4566}`; 125 | 126 | const container = await docker.createContainer({ 127 | HostConfig: { 128 | AutoRemove: true, 129 | PublishAllPorts: true 130 | }, 131 | ExposedPorts: { [`${localstackPort}/tcp`]: {} }, 132 | Image: KINESIS_IMAGE, 133 | Env: [ 134 | 'SERVICES=kinesis' 135 | ] 136 | }); 137 | 138 | await container.start(); 139 | const promise = localstackReady(container); 140 | 141 | const containerData = await container.inspect(); 142 | const host = await getHostAddress(); 143 | const port = containerData.NetworkSettings.Ports[`${localstackPort}/tcp`][0].HostPort; 144 | const url = `http://${host}:${port}`; 145 | 146 | environment.set('AWS_ACCESS_KEY_ID', 'bogus'); 147 | environment.set('AWS_SECRET_ACCESS_KEY', 'bogus'); 148 | environment.set('AWS_REGION', 'us-east-1'); 149 | environment.set('KINESIS_ENDPOINT', url); 150 | 151 | const { config, connection } = buildConnectionAndConfig({ 152 | cleanup: () => { 153 | environment.restore(); 154 | return container.stop(); 155 | }, 156 | url 157 | }); 158 | 159 | await promise; 160 | const kinesisClient = new AWS.Kinesis(config); 161 | await waitForReady('Kinesis', async () => kinesisClient.listStreams().promise()); 162 | 163 | return { connection, config }; 164 | } 165 | 166 | export function kinesisTestHooks (useUniqueStreams?: boolean) { 167 | let 
connection: AwsUtilsConnection; 168 | let config: ConfigurationOptions; 169 | 170 | async function beforeAll () { 171 | const result = await getConnection(); 172 | connection = result.connection; 173 | config = result.config; 174 | } 175 | 176 | async function beforeEach () { 177 | const uniqueIdentifier = useUniqueStreams ? uuid() : ''; 178 | const service = new AWS.Kinesis(config); 179 | await createStreams(service, uniqueIdentifier); 180 | return { 181 | kinesisClient: service, 182 | config, 183 | streamNames: buildStreamNameMapping(uniqueIdentifier), 184 | uniqueIdentifier 185 | }; 186 | } 187 | 188 | async function afterEach (context: KinesisContext) { 189 | if (!context) { 190 | return; 191 | } 192 | const { kinesisClient, uniqueIdentifier } = context; 193 | await destroyStreams(kinesisClient, uniqueIdentifier); 194 | } 195 | 196 | async function afterAll () { 197 | // If the beforeAll block executed long enough to set a connection, 198 | // then it should be cleaned up 199 | if (connection) { 200 | await connection.cleanup(); 201 | } 202 | } 203 | 204 | return { 205 | beforeAll, beforeEach, afterEach, afterAll 206 | }; 207 | } 208 | 209 | export function useKinesisDocker (anyTest: TestInterface, useUniqueStreams?: boolean) { 210 | const test = anyTest as TestInterface>; 211 | const testHooks = kinesisTestHooks(useUniqueStreams); 212 | 213 | test.serial.before(testHooks.beforeAll); 214 | 215 | test.serial.beforeEach(async (t) => { 216 | t.context.kinesis = await testHooks.beforeEach(); 217 | }); 218 | 219 | test.serial.afterEach.always(async t => { 220 | await testHooks.afterEach(t.context.kinesis); 221 | }); 222 | 223 | test.serial.after.always(testHooks.afterAll); 224 | } 225 | 226 | export function useKinesis (anyTest: TestInterface, streamName: string) { 227 | // The base ava test doesn't have context, and has to be cast. 228 | // This allows clients to send in the default ava export, and they can cast later or before. 
229 | const test = anyTest as TestInterface; 230 | const kinesis = new AWS.Kinesis({ 231 | endpoint: process.env.KINESIS_ENDPOINT 232 | }); 233 | 234 | test.serial.before(async () => { 235 | await kinesis.createStream({ 236 | ShardCount: 1, 237 | StreamName: streamName 238 | }).promise(); 239 | }); 240 | 241 | test.serial.beforeEach(t => { 242 | t.context.kinesis = kinesis; 243 | }); 244 | 245 | test.serial.after(async () => { 246 | await kinesis.deleteStream({ 247 | StreamName: streamName 248 | }); 249 | }); 250 | } 251 | -------------------------------------------------------------------------------- /src/mockServerLambda.ts: -------------------------------------------------------------------------------- 1 | import {MockServerClient} from 'mockserver-client/mockServerClient' 2 | import {Expectation, HttpRequest} from "mockserver-client/mockServer"; 3 | 4 | export async function mockInvocation( 5 | mockServerClient: MockServerClient, 6 | functionName: string, 7 | responseBody: Record, 8 | requestBody?: Record, 9 | times?: number, 10 | ): Promise { 11 | const httpRequest: HttpRequest = { 12 | method: 'POST', 13 | path: `/lambda/2015-03-31/functions/${functionName}/invocations` 14 | } 15 | if (requestBody) { 16 | httpRequest.body = { 17 | type: 'JSON', 18 | json: JSON.stringify(requestBody), 19 | }; 20 | } 21 | 22 | const options: Expectation = { 23 | httpRequest, 24 | httpResponse: { 25 | statusCode: 200, 26 | body: JSON.stringify(responseBody) 27 | } 28 | }; 29 | if (times) { 30 | options.times = { 31 | remainingTimes: times, 32 | unlimited: false 33 | }; 34 | } else { 35 | options.times = { 36 | unlimited: true 37 | }; 38 | } 39 | 40 | await mockServerClient.mockAnyResponse(options); 41 | } 42 | 43 | export function verifyInvocation ( 44 | mockServerClient: MockServerClient, 45 | functionName: string, 46 | requestBody: Record, 47 | times?: number 48 | ): Promise { 49 | return mockServerClient.verify({ 50 | method: 'POST', 51 | path: 
`/lambda/2015-03-31/functions/${functionName}/invocations`, 52 | body: { 53 | type: 'JSON', 54 | json: JSON.stringify(requestBody), 55 | } 56 | }, times, times); 57 | } 58 | -------------------------------------------------------------------------------- /src/patches/dns.js: -------------------------------------------------------------------------------- 1 | // Lambdas running inside a VPC rely on ENIs. On a cold start attaching the ENI 2 | // can be quite slow and can mean that some services, like DNS, are not yet 3 | // functional when the Lambda function begins to execute for the first time. 4 | // Inserting DNS retry logic gives the function a chance to recover before 5 | // failing completely. 6 | // 7 | // See https://docs.aws.amazon.com/lambda/latest/dg/vpc.html#vpc-configuring 8 | (function () { 9 | const dns = require('dns'); 10 | 11 | const DELAY = 1000; 12 | const TRIES = 5; 13 | 14 | dns._raw = { lookup: dns.lookup }; 15 | 16 | dns.lookup = function dnsLookupWrapper (hostname, options, callback) { 17 | let remaining = TRIES; 18 | 19 | function dnsLookupWrapperResponse (error, address, family) { 20 | if (error && error.code === dns.NOTFOUND && --remaining > 0) { 21 | // Using a logger other than the console would be ideal. 
Since this 22 | // code is injected as a patch, it is hard to get access to a better 23 | // logger 24 | console.error(`DNS lookup of ${hostname} failed and will be retried ${remaining} more times`); 25 | setTimeout( 26 | () => dns._raw.lookup(hostname, options, dnsLookupWrapperResponse), 27 | DELAY 28 | ); 29 | return; 30 | } 31 | 32 | callback(error, address, family); 33 | } 34 | 35 | if (typeof options === 'function') { 36 | callback = options; 37 | options = {}; 38 | } 39 | 40 | return dns._raw.lookup(hostname, options, dnsLookupWrapperResponse); 41 | }; 42 | })(); 43 | -------------------------------------------------------------------------------- /src/patches/index.ts: -------------------------------------------------------------------------------- 1 | import assert from 'assert'; 2 | import fs from 'fs-extra'; 3 | import get from 'lodash/get'; 4 | import path from 'path'; 5 | const WrapperPlugin = require('wrapper-webpack-plugin'); 6 | 7 | const patches = { 8 | dns: { 9 | file: path.resolve(__dirname, 'dns.js'), 10 | // DNS patches need to be inserted at the beginning of the bundle so that 11 | // the DNS module can be updated before any other module loads it. 
12 | placement: 'header' 13 | }, 14 | 15 | lambda: { 16 | file: path.resolve(__dirname, 'lambda.js'), 17 | placement: 'footer' 18 | } 19 | }; 20 | 21 | export const loadPatch = async (name: string) => { 22 | const patch = get(patches, name); 23 | assert(patch, `No patch found for '${name}'`); 24 | 25 | return new WrapperPlugin({ 26 | test: /\.js$/, 27 | // eslint-disable-next-line security/detect-non-literal-fs-filename 28 | [patch.placement]: await fs.readFile(patch.file, { encoding: 'utf8' }) 29 | }); 30 | }; 31 | -------------------------------------------------------------------------------- /src/patches/lambda.js: -------------------------------------------------------------------------------- 1 | // Wrap the patch implementation in a closure in order to avoid polluting the 2 | // Lambda module's namespace. 3 | (function () { 4 | const tag = '[lambda-tools]'; 5 | 6 | const addAllEventHandlers = (handlers) => { 7 | for (const [ event, handler ] of Object.entries(handlers)) { 8 | process.prependListener(event, handler); 9 | } 10 | }; 11 | 12 | const getHandlerName = () => process.env._HANDLER.split('.')[1]; 13 | const log = (...message) => console.error(tag, ...message); 14 | 15 | const removeAllEventHandlers = (handlers) => { 16 | for (const [ event, handler ] of Object.entries(handlers)) { 17 | process.removeListener(event, handler); 18 | } 19 | }; 20 | 21 | const wrapHandler = (handler) => (event, context, done) => { 22 | // Bind the logging to the current request ID. This prevents AWS log patches 23 | // from changing the ID if a new request starts while backgrounded tasks 24 | // are executing. 25 | const requestId = context.awsRequestId; 26 | const logWithContext = (...message) => log(`RequestId: ${requestId}`, ...message); 27 | 28 | const eventHandlers = { 29 | beforeExit: (code) => { 30 | logWithContext(`Received 'beforeExit' with code ${code} before the handler completed. 
This usually means the handler never called back.`); 31 | // without this event handlers will pile up with each new event. 32 | // Eventually the maximum listener count will be hit (the default is 10). 33 | removeAllEventHandlers(eventHandlers); 34 | }, 35 | // process should terminate. no cleanup neeed. 36 | uncaughtException: (error) => logWithContext('Uncaught exception', error), 37 | // if the unhandled rejection is on the critical path for the handler then 38 | // the event loop should drain leading to the 'beforeExit' case 39 | unhandledRejection: (reason, promise) => logWithContext('Unhandled rejection at:', promise, 'reason:', reason) 40 | }; 41 | 42 | const completions = []; 43 | 44 | const finish = (...args) => { 45 | if (completions.length) { 46 | logWithContext('The Lambda function called back multiple times. Note that promise resolutions are equivalent to callback invocations.'); 47 | logWithContext('The callback was invoked with the following sets of arguments:\n', completions); 48 | } 49 | 50 | completions.push(args); 51 | removeAllEventHandlers(eventHandlers); 52 | done(...args); 53 | }; 54 | 55 | let returned = null; 56 | 57 | try { 58 | addAllEventHandlers(eventHandlers); 59 | returned = handler(event, context, finish); 60 | } catch (error) { 61 | finish(error); 62 | return; 63 | } 64 | 65 | if (returned && typeof returned.then === 'function') { 66 | returned.then( 67 | function (result) { finish(null, result); }, 68 | function (error) { finish(error); } 69 | ); 70 | } 71 | }; 72 | 73 | const handlerName = getHandlerName(); 74 | if (typeof handlerName !== 'string') { 75 | log('Could not determine the handler name. Not patching handler invocation.'); 76 | return; 77 | } 78 | 79 | // The handler name value comes from the Lambda runtime 80 | // eslint-disable-next-line security/detect-object-injection 81 | const handler = module.exports[handlerName]; 82 | if (typeof handler !== 'function') { 83 | log('Handler is not a function. 
Not patching handler invocation.'); 84 | return; 85 | } 86 | 87 | // The handler name value comes from the Lambda runtime 88 | // eslint-disable-next-line security/detect-object-injection 89 | module.exports[handlerName] = wrapHandler(handler); 90 | })(); 91 | -------------------------------------------------------------------------------- /src/utils/awsUtils.ts: -------------------------------------------------------------------------------- 1 | import AWS from 'aws-sdk'; 2 | import NestedError from 'nested-error-stacks'; 3 | import promiseRetry from 'promise-retry'; 4 | import {ServiceConfigurationOptions} from 'aws-sdk/lib/service'; 5 | import { getLogger } from './logging'; 6 | 7 | const logger = getLogger('awsUtils'); 8 | 9 | export interface AwsUtilsConnection { 10 | url: string; 11 | cleanup: () => any; 12 | region: string; 13 | accessKey: string; 14 | secretAccessKey: string; 15 | } 16 | 17 | export type ConfigurationOptions = Pick; 18 | 19 | export function buildConfigFromConnection (connection: AwsUtilsConnection): ConfigurationOptions { 20 | return { 21 | credentials: new AWS.Credentials(connection.accessKey, connection.secretAccessKey), 22 | endpoint: connection.url, 23 | region: connection.region, 24 | maxRetries: 10 25 | }; 26 | } 27 | 28 | export interface BuildConnectionAndConfigOptions { 29 | url: string; 30 | cleanup?: () => any; 31 | } 32 | 33 | export interface ConnectionAndConfig { 34 | connection: AwsUtilsConnection; 35 | config: ConfigurationOptions; 36 | } 37 | 38 | export function buildConnectionAndConfig ({ 39 | url, 40 | cleanup = () => undefined 41 | }: BuildConnectionAndConfigOptions): ConnectionAndConfig { 42 | const connection: AwsUtilsConnection = { 43 | url, 44 | cleanup, 45 | region: process.env.AWS_REGION || 'us-east-1', 46 | accessKey: process.env.AWS_ACCESS_KEY_ID!, 47 | secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY! 
48 | }; 49 | const config = buildConfigFromConnection(connection); 50 | return { connection, config }; 51 | } 52 | 53 | export async function waitForReady (awsType: string, retryFunc: () => Promise) { 54 | const start = Date.now(); 55 | await promiseRetry(async function (retry, retryNumber) { 56 | try { 57 | await retryFunc(); 58 | } catch (error) { 59 | const message = `${awsType} is still not ready after ${retryNumber} connection attempts. Running for ${Date.now() - start}` 60 | logger.debug(message, error); 61 | retry(new NestedError(message, error)); 62 | } 63 | }, { maxTimeout: 1000, retries: 20 }); 64 | } 65 | -------------------------------------------------------------------------------- /src/utils/config.ts: -------------------------------------------------------------------------------- 1 | import PQueue from 'p-queue'; 2 | 3 | const concurrency = process.env.LAMBDA_TOOLS_CONCURRENCY 4 | ? Number.parseInt(process.env.LAMBDA_TOOLS_CONCURRENCY, 10) 5 | : Number.POSITIVE_INFINITY; 6 | 7 | export const pQueue = new PQueue({ concurrency }); 8 | -------------------------------------------------------------------------------- /src/utils/kinesisTools.ts: -------------------------------------------------------------------------------- 1 | import assert from 'assert'; 2 | import Kinesis from "aws-sdk/clients/kinesis"; 3 | import { 4 | Callback, 5 | Context, 6 | KinesisStreamHandler, 7 | KinesisStreamRecord 8 | } from "aws-lambda"; 9 | 10 | export interface BasicKinesisConfig { 11 | kinesisClient: Kinesis; 12 | streamName: string; 13 | } 14 | 15 | export interface LambdaTriggerConfig{ 16 | lambdaHandler: KinesisStreamHandler; 17 | kinesisIterator: KinesisIterator; 18 | limit?: number; 19 | context?: Context; 20 | callback?: Callback; 21 | } 22 | 23 | export class KinesisIterator { 24 | static async newIterator (config: BasicKinesisConfig): Promise { 25 | const iterator = new KinesisIterator(config); 26 | await iterator.init(); 27 | return iterator; 28 | } 29 | 30 | 
private _kinesis: Kinesis; 31 | private _streamName: string; 32 | private _shardIterator: string | undefined; 33 | private _getRecordsResponse: Kinesis.GetRecordsOutput | undefined; 34 | 35 | constructor ( 36 | { 37 | kinesisClient, 38 | streamName, 39 | }: BasicKinesisConfig 40 | ) { 41 | assert.ok(kinesisClient, 'kinesisClient client needs to be provided'); 42 | assert.ok(!!kinesisClient.getRecords && !!kinesisClient.describeStream && !!kinesisClient.getShardIterator, 'kinesisClient client needs to be of type AWS.Kinesis'); 43 | assert.ok(typeof streamName === 'string', 'streamName needs to be defined and a string'); 44 | 45 | this._kinesis = kinesisClient; 46 | this._streamName = streamName; 47 | } 48 | 49 | async init (): Promise { 50 | const describeStreamResult = await this._kinesis.describeStream({ 51 | StreamName: this._streamName 52 | }).promise(); 53 | 54 | const getShardIteratorResult = await this._kinesis.getShardIterator({ 55 | ShardId: describeStreamResult.StreamDescription.Shards[0].ShardId, 56 | ShardIteratorType: 'TRIM_HORIZON', 57 | StreamName: this._streamName 58 | }).promise(); 59 | 60 | this._shardIterator = getShardIteratorResult.ShardIterator; 61 | return this; 62 | } 63 | 64 | async next ( 65 | Limit?: Kinesis.GetRecordsInputLimit 66 | ): Promise { 67 | if (!this._shardIterator) { 68 | await this.init(); 69 | } 70 | this._getRecordsResponse = await this._kinesis.getRecords({ 71 | ShardIterator: this._shardIterator!, 72 | Limit 73 | }).promise(); 74 | this._shardIterator = this._getRecordsResponse.NextShardIterator; 75 | return this; 76 | } 77 | 78 | get records (): Kinesis.GetRecordsOutput['Records'] { 79 | return this._getRecordsResponse!.Records; 80 | } 81 | 82 | get response (): Kinesis.GetRecordsOutput | undefined { 83 | return this._getRecordsResponse; 84 | } 85 | } 86 | 87 | export async function getStreamRecords (config: BasicKinesisConfig): Promise { 88 | const kinesisIterator = await KinesisIterator.newIterator(config); 89 | await 
kinesisIterator.next(); 90 | return kinesisIterator.records; 91 | } 92 | 93 | export function createLambdaEvent (records: Kinesis.RecordList): KinesisStreamRecord[] { 94 | return records.map(record => ({ 95 | eventID: `shardId-000000000000:${record.SequenceNumber}`, 96 | eventVersion: '1.0', 97 | kinesis: { 98 | approximateArrivalTimestamp: Date.now(), /* NOTE(review): real Kinesis payloads report approximateArrivalTimestamp in epoch seconds; Date.now() yields milliseconds — confirm downstream handlers don't depend on the unit */ 99 | partitionKey: record.PartitionKey, 100 | data: record.Data.toString('base64'), 101 | kinesisSchemaVersion: '1.0', 102 | sequenceNumber: record.SequenceNumber 103 | }, 104 | invokeIdentityArn: 'some-arn', 105 | eventName: 'aws:kinesis:record', 106 | eventSourceARN: 'some-arn', 107 | eventSource: 'aws:kinesis', 108 | awsRegion: 'us-east-1' 109 | })); 110 | } 111 | 112 | /** 113 | * @param lambdaHandler A {function} used to interact with the lambda instance; 114 | * @param kinesisIterator A {KinesisIterator} to get records from the stream. 115 | * @param context A {Context} to pass to the lambda handler. 116 | * @param callback A {CallBack} function to pass to the lambda handler. 117 | * @param limit An optional limit to the number of records in each iterator batch.
118 | * @returns {Promise<{processedRecordCount}>} 119 | */ 120 | export async function kinesisLambdaTrigger ({ 121 | lambdaHandler, 122 | kinesisIterator, 123 | limit, 124 | context, 125 | callback 126 | }: LambdaTriggerConfig): Promise<{ 127 | processedRecordCount: number; 128 | }> { 129 | assert.ok(lambdaHandler, 'No lambdaHandler provided'); 130 | assert.ok(typeof lambdaHandler === 'function', 'lambdaHandler needs to be a function'); 131 | assert.ok(typeof kinesisIterator.next === 'function', 'kinesisIterator needs to be of type KinesisIterator'); 132 | 133 | let processedRecordCount = 0; 134 | 135 | let hadRecords = true; 136 | while (hadRecords) { 137 | hadRecords = false; 138 | const eventRecords = (await kinesisIterator.next(limit)).records!; 139 | processedRecordCount += eventRecords.length; 140 | if (eventRecords.length > 0) { 141 | hadRecords = true; 142 | const Records = createLambdaEvent(eventRecords); 143 | await lambdaHandler({ Records }, context!, callback!); 144 | } 145 | } 146 | return { 147 | processedRecordCount 148 | }; 149 | } 150 | -------------------------------------------------------------------------------- /src/utils/logging.ts: -------------------------------------------------------------------------------- 1 | import createDebug, { Debugger, debug } from 'debug'; 2 | 3 | const libName = 'lambda-tools'; 4 | 5 | const loggers: Record<string, Logger> = {}; 6 | const consoleLog = console.log.bind(console); 7 | 8 | export interface Logger { 9 | info: Debugger; 10 | error: Debugger; 11 | warn: Debugger; 12 | debug: Debugger; 13 | child(name: string): Logger; 14 | } 15 | 16 | interface LoggerExtension { 17 | logger: Debugger; 18 | name: string; 19 | enabled?: boolean; 20 | log?: typeof console.log; 21 | } 22 | 23 | function extendLogger ({ logger, name, enabled, log }: LoggerExtension): Debugger { 24 | const childLogger: Debugger = logger.extend(name); 25 | childLogger.log = log || logger.log; 26 | if (enabled || logger.enabled) { 27 | debug.names.push(new
RegExp(`^${childLogger.namespace}$`)); 28 | } 29 | 30 | return childLogger; 31 | } 32 | 33 | /* Builds a Logger whose level methods are namespaced `debug` instances; info/debug log to stdout, error/warn to debug's default (stderr). */ function createChildLogger (name: string, root: Debugger): Logger { 34 | return { 35 | info: extendLogger({ logger: root, name: 'info', log: consoleLog, enabled: true }), 36 | error: extendLogger({ logger: root, name: 'error', enabled: true }), 37 | warn: extendLogger({ logger: root, name: 'warn', enabled: true }), 38 | debug: extendLogger({ logger: root, name: 'debug', log: consoleLog, enabled: root.enabled }), 39 | child: (name: string) => { 40 | const child = root.extend(name); 41 | if (root.enabled) { 42 | debug.names.push(new RegExp(`^${child.namespace}$`)); 43 | } 44 | 45 | return createChildLogger(name, child); 46 | } 47 | }; 48 | } 49 | 50 | export function getLogger (name: string): Logger { 51 | const fullName = `${libName}:${name}`; 52 | if (!loggers[fullName]) { 53 | loggers[fullName] = createChildLogger(fullName, createDebug(fullName)); 54 | } 55 | return loggers[fullName]; 56 | } 57 | -------------------------------------------------------------------------------- /src/zip.ts: -------------------------------------------------------------------------------- 1 | import archiver from 'archiver'; 2 | import fs from 'fs-extra'; 3 | import path from 'path'; 4 | 5 | export interface Entry { 6 | file: string; 7 | name: string; 8 | } 9 | 10 | export async function zip(zipFile: string, entries: Entry[]) { 11 | await fs.mkdirp(path.dirname(zipFile)); 12 | 13 | return new Promise((resolve, reject) => { 14 | // eslint-disable-next-line security/detect-non-literal-fs-filename 15 | const outStream = fs.createWriteStream(zipFile); 16 | const archive = archiver('zip', { 17 | zlib: { level: 9 } // Sets the compression level.
18 | }); 19 | 20 | outStream.on('finish', resolve); outStream.on('error', reject); /* without this, a failed write (e.g. disk full, permission error) would leave the promise pending forever */ 21 | 22 | // good practice to catch warnings (ie stat failures and other non-blocking errors) 23 | archive.on('warning', reject); 24 | archive.on('error', reject); 25 | 26 | for (const entry of entries) { 27 | archive.file(entry.file, { name: entry.name }); 28 | } 29 | 30 | // pipe archive data to the file 31 | archive.pipe(outStream); 32 | 33 | archive.finalize(); 34 | }); 35 | } 36 | -------------------------------------------------------------------------------- /test/Environment.test.js: -------------------------------------------------------------------------------- 1 | const { Environment } = require('../src/Environment'); 2 | const test = require('ava'); 3 | 4 | test.serial('Setting an environment variable modifies the process state', async (test) => { 5 | const environment = new Environment(); 6 | environment.set('foo', 'bar'); 7 | 8 | try { 9 | test.is(process.env.foo, 'bar'); 10 | } finally { 11 | delete process.env.foo; 12 | } 13 | }); 14 | 15 | test.serial('Restoring a new environment variable unsets the variable', async (test) => { 16 | const environment = new Environment(); 17 | environment.set('foo', 'bar'); 18 | 19 | try { 20 | environment.restore(); 21 | test.is(process.env.foo, undefined); 22 | } finally { 23 | delete process.env.foo; 24 | } 25 | }); 26 | 27 | test.serial('Restoring an existing variable resets the variable', async (test) => { 28 | const environment = new Environment(); 29 | process.env.foo = 'bar'; 30 | 31 | try { 32 | environment.set('foo', 'baz'); 33 | environment.restore(); 34 | test.is(process.env.foo, 'bar'); 35 | } finally { 36 | delete process.env.foo; 37 | } 38 | }); 39 | -------------------------------------------------------------------------------- /test/WriteBuffer.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | const { WriteBuffer } = require('../src/WriteBuffer'); 3 | 4 | const { Writable
} = require('stream'); 5 | 6 | test('A buffer is a writable stream', async (test) => { 7 | const buffer = new WriteBuffer(); 8 | test.true(buffer instanceof Writable); 9 | }); 10 | 11 | test('A buffer can encode the collected data as a string', async (test) => { 12 | const buffer = new WriteBuffer(); 13 | buffer.write('aGVsbG8=', 'base64'); 14 | buffer.write('20', 'hex'); 15 | buffer.write('world'); 16 | 17 | test.is(buffer.toString('utf8'), 'hello world'); 18 | }); 19 | 20 | test('the buffer can be reset', async (test) => { 21 | const buffer = new WriteBuffer(); 22 | buffer.write('aGVsbG8=', 'base64'); 23 | buffer.write('20', 'hex'); 24 | buffer.reset(); 25 | buffer.write('world'); 26 | 27 | test.is(buffer.toString('utf8'), 'world'); 28 | }); 29 | -------------------------------------------------------------------------------- /test/crypto-browserify.test.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs-extra'); 2 | const path = require('path'); 3 | const test = require('ava'); 4 | const { v4: uuid } = require('uuid'); 5 | const { 6 | build, 7 | createLambdaExecutionEnvironment, 8 | destroyLambdaExecutionEnvironment, 9 | LambdaRunner 10 | } = require('../src/lambda'); 11 | 12 | const { FIXTURES_DIRECTORY } = require('./helpers/lambda'); 13 | 14 | const BUILD_DIRECTORY = path.join(FIXTURES_DIRECTORY, 'build', uuid()); 15 | 16 | test.after.always(async (test) => fs.remove(BUILD_DIRECTORY)); 17 | 18 | [ 19 | '10.23.0', 20 | '12.20.0' 21 | ].forEach((nodeVersion) => { 22 | const outputPath = path.join(BUILD_DIRECTORY, uuid()); 23 | const entrypoint = path.join(FIXTURES_DIRECTORY, `crypto-browserify.js`); 24 | test(`${nodeVersion}: crypto is substituted for crypto-browserify `, async (test) => { 25 | const result = await build({ 26 | nodeVersion, 27 | entrypoint, 28 | outputPath, 29 | serviceName: 'test-service' 30 | }); 31 | test.false(result.hasErrors()); 32 | 33 | const executionEnvironment = await 
createLambdaExecutionEnvironment({ 34 | image: `lambci/lambda:nodejs${nodeVersion.split('.')[0]}.x`, 35 | mountpoint: outputPath 36 | }); 37 | try { 38 | const runner = new LambdaRunner(executionEnvironment.container.id, null, 'crypto-browserify.handler'); 39 | const result = await runner.invoke({}); 40 | test.deepEqual(result, 'crypto === crypto-browserify'); 41 | } finally { 42 | await destroyLambdaExecutionEnvironment(executionEnvironment); 43 | } 44 | }); 45 | }); 46 | -------------------------------------------------------------------------------- /test/docker/ensureImage.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | const Docker = require('dockerode'); 3 | const { ensureImage } = require('../../src/docker'); 4 | const sinon = require('sinon'); 5 | 6 | const TEST_IMAGE = 'alpine:3.6'; 7 | 8 | test.before(async function () { 9 | const docker = new Docker(); 10 | await ensureImage(docker, TEST_IMAGE); 11 | }); 12 | 13 | test('does not call pullImage if listImages includes the image already', async function (test) { 14 | // Create a Docker instance and watch the pull method 15 | const docker = new Docker(); 16 | sinon.spy(docker, 'pull'); 17 | 18 | // Call ensureImage on an image that is known to exist, because of the 19 | // beforeEach and make sure that pull is not called 20 | await ensureImage(docker, TEST_IMAGE); 21 | sinon.assert.notCalled(docker.pull); 22 | }); 23 | 24 | function newImmediatelyEndingStream () { 25 | return { 26 | pipe: function (stream) { 27 | stream.emit('end'); 28 | } 29 | }; 30 | } 31 | 32 | test('calls pullImage if listImages does not include the image', async function (test) { 33 | // Create a Docker instance and watch the pull method 34 | const docker = new Docker(); 35 | sinon.stub(docker, 'pull') 36 | .returns(newImmediatelyEndingStream()); 37 | 38 | // Call ensureImage on an image that does not exist 39 | // make sure that pull is not called with that 
image name 40 | const IMAGE_NEEDING_PULL = 'needs-pull:latest'; 41 | await ensureImage(docker, IMAGE_NEEDING_PULL); 42 | sinon.assert.calledWithExactly(docker.pull, IMAGE_NEEDING_PULL, {}); 43 | }); 44 | -------------------------------------------------------------------------------- /test/docker/executeContainerCommand.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | const { createDefaultContainer } = require('../helpers/createDefaultContainer'); 3 | const { executeContainerCommand } = require('../../src/docker'); 4 | 5 | test('response includes stdout, stderr, and inspectOutput with ExitCode 0', async (t) => { 6 | const container = await createDefaultContainer(); 7 | console.log(`Started container ${container.id}`); 8 | try { 9 | await container.start(); 10 | const command = ['pwd']; 11 | const { inspectOutput, stderr, stdout } = await executeContainerCommand({ container, command }); 12 | t.is(stdout.toString('utf8'), '/\n'); 13 | t.is(stderr.toString('utf8'), ''); 14 | t.is(inspectOutput.ExitCode, 0); 15 | } finally { 16 | await container.stop(); 17 | console.log(`Stopped container ${container.id}`); 18 | } 19 | }); 20 | -------------------------------------------------------------------------------- /test/docker/get-host-addr.test.js: -------------------------------------------------------------------------------- 1 | const Docker = require('dockerode'); 2 | const proxyquire = require('proxyquire').noPreserveCache(); 3 | const sinon = require('sinon'); 4 | const test = require('ava'); 5 | const { v4: uuid } = require('uuid'); 6 | 7 | test.beforeEach((test) => { 8 | test.context.createContainer = sinon.spy(Docker.prototype, 'createContainer'); 9 | }); 10 | 11 | test.afterEach.always((test) => { 12 | test.context.createContainer.restore(); 13 | }); 14 | 15 | test.serial('If DOCKER_HOST_ADDR is set it is returned', async (test) => { 16 | const { getHostAddress } = 
require('../../src/docker'); 17 | const expected = process.env.DOCKER_HOST_ADDR = uuid(); 18 | 19 | try { 20 | const address = await getHostAddress(); 21 | test.is(address, expected); 22 | } finally { 23 | delete process.env.DOCKER_HOST_ADDR; 24 | } 25 | 26 | sinon.assert.notCalled(test.context.createContainer); 27 | }); 28 | 29 | test.serial('On Mac 127.0.0.1 is always returned', async (test) => { 30 | const { getHostAddress } = proxyquire( 31 | '../../src/docker', 32 | { 33 | os: { 34 | type: () => 'Darwin' 35 | } 36 | } 37 | ); 38 | 39 | const address = await getHostAddress(); 40 | test.is(address, '127.0.0.1'); 41 | sinon.assert.notCalled(test.context.createContainer); 42 | }); 43 | 44 | test.serial('On other platforms a "real" address is returned', async (test) => { 45 | const { getHostAddress } = proxyquire( 46 | '../../src/docker', 47 | { 48 | os: { 49 | type: () => 'Linux' 50 | } 51 | } 52 | ); 53 | 54 | const address = await getHostAddress(); 55 | test.regex(address, /\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}/); 56 | 57 | sinon.assert.calledOnce(test.context.createContainer); 58 | sinon.assert.calledWithExactly(test.context.createContainer, sinon.match({ 59 | HostConfig: sinon.match({ NetworkMode: 'host' }) 60 | })); 61 | }); 62 | -------------------------------------------------------------------------------- /test/docker/pullImage.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | const Docker = require('dockerode'); 3 | const sinon = require('sinon'); 4 | const { getLogger } = require('../../src/utils/logging'); 5 | const { PassThrough } = require('stream'); 6 | 7 | const { pullImage, imageExists } = require('../../src/docker'); 8 | const TEST_IMAGE = 'alpine:3.5'; 9 | 10 | test.beforeEach(t => { 11 | const logger = getLogger('docker'); 12 | 13 | Object.assign(t.context, { logger }); 14 | }); 15 | 16 | test.afterEach(t => { 17 | const { logger } = t.context; 18 | if 
(logger.debug.restore) { 19 | logger.debug.restore(); 20 | } 21 | }); 22 | 23 | test.serial('will debug the progress of pulling an image', async (test) => { 24 | const { logger } = test.context; 25 | const logSpy = sinon.spy(logger, 'debug'); 26 | 27 | const docker = new Docker(); 28 | if (await imageExists(docker, TEST_IMAGE)) { 29 | const image = await docker.getImage(TEST_IMAGE); 30 | await image.remove(); 31 | } 32 | 33 | await pullImage(docker, TEST_IMAGE); 34 | sinon.assert.called(logSpy); 35 | sinon.assert.calledWithExactly(logSpy, `${TEST_IMAGE}: Status: Downloaded newer image for ${TEST_IMAGE}`); 36 | }); 37 | 38 | test.serial('will provide empty credentials if no docker env variables exist when pulling an image from docker hub', async (test) => { 39 | const { logger } = test.context; 40 | const docker = new Docker(); 41 | const logSpy = sinon.spy(logger, 'debug'); 42 | const dockerSpy = sinon.spy(docker, 'pull'); 43 | 44 | if (await imageExists(docker, TEST_IMAGE)) { 45 | const image = await docker.getImage(TEST_IMAGE); 46 | await image.remove(); 47 | } 48 | 49 | await pullImage(docker, TEST_IMAGE); 50 | sinon.assert.called(logSpy); 51 | sinon.assert.called(dockerSpy); 52 | sinon.assert.calledWithExactly(logSpy, `${TEST_IMAGE}: Status: Downloaded newer image for ${TEST_IMAGE}`); 53 | sinon.assert.calledWithExactly(dockerSpy, TEST_IMAGE, { }); 54 | }); 55 | 56 | test.serial('will provide credentials from env variables when pulling an image from docker hub', async (test) => { 57 | process.env.DOCKER_HUB_USER = 'docker_user'; 58 | process.env.DOCKER_HUB_PASS = 'docker_pass'; 59 | try { 60 | const { logger } = test.context; 61 | const docker = new Docker(); 62 | const logSpy = sinon.spy(logger, 'debug'); 63 | const dockerStub = sinon.stub(docker, 'pull'); 64 | const stream = new PassThrough(); 65 | dockerStub.resolves(stream); 66 | stream.push(JSON.stringify({ status: `Status: Downloaded newer image for ${TEST_IMAGE}` })); 67 | stream.push(null); 68 | 69 | 
await pullImage(docker, TEST_IMAGE); 70 | sinon.assert.called(logSpy); 71 | sinon.assert.called(dockerStub); 72 | sinon.assert.calledWithExactly(logSpy, `${TEST_IMAGE}: Status: Downloaded newer image for ${TEST_IMAGE}`); 73 | sinon.assert.calledWithExactly(dockerStub, TEST_IMAGE, { 74 | authconfig: { 75 | username: 'docker_user', 76 | password: 'docker_pass' 77 | } 78 | }); 79 | } finally { 80 | delete process.env.DOCKER_HUB_USER; 81 | delete process.env.DOCKER_HUB_PASS; 82 | } 83 | }); 84 | -------------------------------------------------------------------------------- /test/dynamodb/basic-parallel.test.js: -------------------------------------------------------------------------------- 1 | const AWS = require('aws-sdk'); 2 | const test = require('ava'); 3 | 4 | const { tableSchema, useDynamoDB } = require('../../src/dynamodb'); 5 | 6 | useDynamoDB(test, true); 7 | 8 | test.before(() => { 9 | tableSchema([ 10 | { 11 | AttributeDefinitions: [ 12 | { 13 | AttributeName: 'id', 14 | AttributeType: 'S' 15 | } 16 | ], 17 | KeySchema: [ 18 | { 19 | AttributeName: 'id', 20 | KeyType: 'HASH' 21 | } 22 | ], 23 | ProvisionedThroughput: { 24 | ReadCapacityUnits: 1, 25 | WriteCapacityUnits: 1 26 | }, 27 | TableName: 'test-table' 28 | } 29 | ]); 30 | }); 31 | 32 | test.after(() => { 33 | tableSchema([]); 34 | }); 35 | 36 | test('The helper provides database clients and tables', async (test) => { 37 | const { tableNames, dynamoClient, documentClient } = test.context.dynamodb; 38 | test.true(dynamoClient instanceof AWS.DynamoDB); 39 | test.true(documentClient instanceof AWS.DynamoDB.DocumentClient); 40 | 41 | const tables = await dynamoClient.listTables().promise(); 42 | const tableName = tableNames['test-table']; 43 | test.true(tables.TableNames.includes(tableName)); 44 | 45 | const item = { 46 | id: 'test', 47 | message: 'hello' 48 | }; 49 | 50 | await documentClient.put({ 51 | Item: item, 52 | TableName: tableName 53 | }).promise(); 54 | 55 | const result = await 
documentClient.get({ 56 | Key: { id: 'test' }, 57 | TableName: tableName 58 | }).promise(); 59 | 60 | test.deepEqual(result.Item, item); 61 | }); 62 | 63 | test('The helper includes a unique identifier in the table names', async (test) => { 64 | const { tableNames, uniqueIdentifier } = test.context.dynamodb; 65 | const tableName = tableNames['test-table']; 66 | 67 | test.true(typeof uniqueIdentifier === 'string'); 68 | test.true(uniqueIdentifier.length > 0); 69 | test.is(tableName, `test-table-${uniqueIdentifier}`); 70 | }); 71 | 72 | test('The helper sets default configuration environment variables', async (test) => { 73 | test.truthy(process.env.AWS_ACCESS_KEY_ID); 74 | test.truthy(process.env.AWS_SECRET_ACCESS_KEY); 75 | test.truthy(process.env.AWS_REGION); 76 | test.truthy(process.env.DYNAMODB_ENDPOINT); 77 | }); 78 | 79 | test('The helper provides a config object', async (test) => { 80 | const { config } = test.context.dynamodb; 81 | 82 | test.true(config.credentials instanceof AWS.Credentials); 83 | test.is(typeof config.endpoint, 'string', config.endpoint); 84 | test.truthy(config.region); 85 | }); 86 | -------------------------------------------------------------------------------- /test/dynamodb/basic-serial.test.js: -------------------------------------------------------------------------------- 1 | const AWS = require('aws-sdk'); 2 | const test = require('ava'); 3 | 4 | const { tableSchema, useDynamoDB } = require('../../src/dynamodb'); 5 | 6 | useDynamoDB(test); 7 | 8 | test.before(() => { 9 | tableSchema([ 10 | { 11 | AttributeDefinitions: [ 12 | { 13 | AttributeName: 'id', 14 | AttributeType: 'S' 15 | } 16 | ], 17 | KeySchema: [ 18 | { 19 | AttributeName: 'id', 20 | KeyType: 'HASH' 21 | } 22 | ], 23 | ProvisionedThroughput: { 24 | ReadCapacityUnits: 1, 25 | WriteCapacityUnits: 1 26 | }, 27 | TableName: 'test-table' 28 | } 29 | ]); 30 | }); 31 | 32 | test.after(() => { 33 | tableSchema([]); 34 | }); 35 | 36 | // no uuid in table name (old way, basic 
regression test to ensure forward 37 | // compatibility) 38 | test.serial('The helper provides database clients and tables', async (test) => { 39 | const { dynamoClient, documentClient } = test.context.dynamodb; 40 | test.true(dynamoClient instanceof AWS.DynamoDB); 41 | test.true(documentClient instanceof AWS.DynamoDB.DocumentClient); 42 | 43 | const tables = await dynamoClient.listTables().promise(); 44 | const tableName = 'test-table'; // no uuid/table name lookup 45 | test.true(tables.TableNames.includes(tableName)); 46 | 47 | const item = { 48 | id: 'test', 49 | message: 'hello' 50 | }; 51 | 52 | await documentClient.put({ 53 | Item: item, 54 | TableName: tableName 55 | }).promise(); 56 | 57 | const result = await documentClient.get({ 58 | Key: { id: 'test' }, 59 | TableName: tableName 60 | }).promise(); 61 | 62 | test.deepEqual(result.Item, item); 63 | }); 64 | 65 | test.serial('The helper does not include a unique identifier in the table names', async (test) => { 66 | const { tableNames, uniqueIdentifier } = test.context.dynamodb; 67 | const tableName = tableNames['test-table']; 68 | 69 | test.true(typeof uniqueIdentifier === 'string'); 70 | test.true(uniqueIdentifier.length === 0); 71 | test.is(tableName, 'test-table'); 72 | }); 73 | -------------------------------------------------------------------------------- /test/dynamodb/custom-endpoint.test.js: -------------------------------------------------------------------------------- 1 | const { Environment } = require('../../src/Environment'); 2 | const test = require('ava'); 3 | const sinon = require('sinon'); 4 | const AWS = require('aws-sdk'); 5 | const AWSMock = require('aws-sdk-mock'); 6 | AWSMock.setSDKInstance(AWS); 7 | 8 | // Mock listTables for `beforeAll` and `afterEach` hooks 9 | const listTablesMock = sinon.stub() 10 | .resolves({ TableNames: [] }, []); 11 | 12 | AWSMock.mock('DynamoDB', 'listTables', listTablesMock); 13 | 14 | const { useDynamoDB } = require('../../src/dynamodb'); 15 | const 
environment = new Environment(); 16 | 17 | test.before(() => { 18 | environment.set('AWS_ACCESS_KEY_ID', 'test-access-key'); 19 | environment.set('AWS_SECRET_ACCESS_KEY', 'test-secret-key'); 20 | environment.set('DYNAMODB_ENDPOINT', 'dynamodb://localhost'); 21 | }); 22 | 23 | test.after(() => environment.restore()); 24 | 25 | useDynamoDB(test); 26 | 27 | test.serial('When DYNAMODB_ENDPOINT is set configuration environment variables are not set', (test) => { 28 | test.is(process.env.AWS_ACCESS_KEY_ID, 'test-access-key'); 29 | test.is(process.env.AWS_SECRET_ACCESS_KEY, 'test-secret-key'); 30 | test.is(process.env.DYNAMODB_ENDPOINT, 'dynamodb://localhost'); 31 | }); 32 | -------------------------------------------------------------------------------- /test/dynamodb/dynamoDBTestHooks.test.ts: -------------------------------------------------------------------------------- 1 | import test from 'ava'; 2 | import { v4 as uuid } from 'uuid'; 3 | import { DynamoDB } from 'aws-sdk'; 4 | 5 | import { dynamoDBTestHooks, tableSchema } from '../../src/dynamodb'; 6 | 7 | function createTableSchema(): DynamoDB.CreateTableInput { 8 | const AttributeName = uuid(); 9 | return { 10 | TableName: uuid(), 11 | AttributeDefinitions: [{ 12 | AttributeName, 13 | AttributeType: 'S', 14 | }], 15 | KeySchema: [{ 16 | AttributeName, 17 | KeyType: 'HASH', 18 | }], 19 | ProvisionedThroughput: { 20 | WriteCapacityUnits: 1, 21 | ReadCapacityUnits: 1, 22 | } 23 | }; 24 | } 25 | 26 | test('can define tables in config', async t => { 27 | const badSchema = createTableSchema(); 28 | const expectedSchema = createTableSchema(); 29 | tableSchema([badSchema]); 30 | const { beforeAll, afterEach, beforeEach, afterAll } = dynamoDBTestHooks<['testTable']>(false, { 31 | tableSchemas: [expectedSchema] 32 | }); 33 | await beforeAll(); 34 | let context; 35 | try { 36 | context = await beforeEach(); 37 | try { 38 | const {TableNames} = await context.dynamoClient.listTables().promise(); 39 | t.deepEqual(TableNames, 
[expectedSchema.TableName]); 40 | } finally { 41 | await afterEach(context); 42 | } 43 | } finally { 44 | await afterAll(); 45 | } 46 | 47 | 48 | }); 49 | -------------------------------------------------------------------------------- /test/dynamodb/testHooks-error.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | const sinon = require('sinon'); 3 | 4 | test.serial('The afterAll hook handles errors in the beforeAll hook gracefully', async (test) => { 5 | // Stub the docker module to throw errors when fetching images. 6 | // This needs to happen before the dynamodb helper module is imported 7 | const docker = require('../../src/docker'); 8 | const error = new Error('Stubbed failure'); 9 | const ensureStub = sinon.stub(docker, 'ensureImage') 10 | .rejects(error); 11 | 12 | const { dynamoDBTestHooks } = require('../../src/dynamodb'); 13 | const { afterAll, beforeAll } = dynamoDBTestHooks(false); 14 | 15 | try { 16 | await test.throwsAsync(beforeAll, { instanceOf: Error, message: error.message }); 17 | await afterAll(); 18 | } finally { 19 | ensureStub.restore(); 20 | } 21 | }); 22 | 23 | // Some test runners (like Jest) continue to process hooks and tests even when 24 | // hooks fail. This can create invalid AWS clients in some cases. The clients 25 | // are sometimes instantiated with the default configuration which means that 26 | // DynamoDB test cases may execute against real tables and cause data corruption 27 | // or destruction. 28 | test.serial('the beforeEach hook does not create clients when beforeAll fails', async (test) => { 29 | // Stub the docker module to throw errors when fetching images. 
30 | // This needs to happen before the dynamodb helper module is imported 31 | const docker = require('../../src/docker'); 32 | const error = new Error('Stubbed failure'); 33 | const ensureStub = sinon.stub(docker, 'ensureImage') 34 | .rejects(error); 35 | 36 | const { dynamoDBTestHooks } = require('../../src/dynamodb'); 37 | const { beforeAll, beforeEach } = dynamoDBTestHooks(false); 38 | 39 | try { 40 | await test.throwsAsync(beforeAll, { instanceOf: Error, message: error.message }); 41 | await test.throwsAsync(beforeEach, { message: 'Invalid DynamoDB test configuration.' }); 42 | } finally { 43 | ensureStub.restore(); 44 | } 45 | }); 46 | 47 | test('The afterEach hook will ignore a missing context', async t => { 48 | const { dynamoDBTestHooks } = require('../../src/dynamodb'); 49 | const { afterEach } = dynamoDBTestHooks(false); 50 | await t.notThrowsAsync(afterEach(undefined)); 51 | }); 52 | -------------------------------------------------------------------------------- /test/dynamodb/testTypes.test.ts: -------------------------------------------------------------------------------- 1 | import anyTest, {TestInterface} from 'ava'; 2 | import {DynamoDB} from "aws-sdk"; 3 | import {DynamoDBTestContext, tableSchema, useDynamoDB} from '../../src/dynamodb' 4 | 5 | const test = anyTest as TestInterface>; 6 | 7 | const tableSchemas: DynamoDB.CreateTableInput[] = [ 8 | { 9 | TableName: 'test1', 10 | AttributeDefinitions: [{ 11 | AttributeName: 'key', 12 | AttributeType: 'S' 13 | }], 14 | KeySchema: [{ 15 | AttributeName: 'key', 16 | KeyType: 'HASH' 17 | }], 18 | ProvisionedThroughput: { 19 | ReadCapacityUnits: 1, 20 | WriteCapacityUnits: 1 21 | } 22 | }, 23 | { 24 | TableName: 'test2', 25 | AttributeDefinitions: [{ 26 | AttributeName: 'key', 27 | AttributeType: 'S' 28 | }], 29 | KeySchema: [{ 30 | AttributeName: 'key', 31 | KeyType: 'HASH' 32 | }], 33 | ProvisionedThroughput: { 34 | ReadCapacityUnits: 1, 35 | WriteCapacityUnits: 1 36 | } 37 | }, 38 | ]; 39 | 40 | 
tableSchema(tableSchemas); 41 | 42 | useDynamoDB(anyTest); 43 | 44 | test('testTypes', t => { 45 | const {dynamodb: { 46 | tableNames 47 | }} = t.context; 48 | 49 | t.is(tableNames.test1, 'test1'); 50 | t.is(tableNames.test2, 'test2'); 51 | 52 | // @ts-expect-error 53 | t.is(tableNames.test3, undefined); 54 | }); 55 | -------------------------------------------------------------------------------- /test/dynamodb/wait-for-ready.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | const sinon = require('sinon'); 3 | 4 | const AWS = require('aws-sdk'); 5 | const AWSMock = require('aws-sdk-mock'); 6 | AWSMock.setSDKInstance(AWS); 7 | 8 | // Setup a mock listTables call that will fail once 9 | // to force a retry and then success to allow the setup 10 | // to continue 11 | const listTablesMock = sinon.stub() 12 | .resolves({ TableNames: [] }, []) 13 | .onFirstCall().rejects(new Error('First error')); 14 | 15 | AWSMock.mock('DynamoDB', 'listTables', listTablesMock); 16 | 17 | const { useDynamoDB } = require('../../src/dynamodb'); 18 | 19 | useDynamoDB(test); 20 | 21 | test.serial('The helper provides database clients and tables', async (test) => { 22 | // Listing the tables should be called twice. 
Once for the failure and the 23 | // second for success to allow the `before` block to complete 24 | sinon.assert.calledTwice(listTablesMock); 25 | }); 26 | -------------------------------------------------------------------------------- /test/fixtures/async_iterators.js: -------------------------------------------------------------------------------- 1 | async function* numbers (limit) { 2 | let count = 0; 3 | while (count <= limit) { 4 | yield count++; 5 | } 6 | } 7 | 8 | async function sumCount (limit) { 9 | let sum = 0; 10 | const nums = numbers(limit); 11 | for await (const num of nums) { 12 | sum += num; 13 | } 14 | return sum; 15 | } 16 | 17 | // Execute the code to make sure it executes without an error after transpiling 18 | exports.handle = async (event, context, callback) => { 19 | const sum = await sumCount(5); 20 | callback(null, sum); 21 | }; 22 | -------------------------------------------------------------------------------- /test/fixtures/async_test.js: -------------------------------------------------------------------------------- 1 | async function test (handler) { 2 | const obj = { 3 | one: 1, 4 | two: 2 5 | }; 6 | 7 | for (const entry of obj) { 8 | await handler(entry); 9 | } 10 | } 11 | 12 | module.exports = test; 13 | -------------------------------------------------------------------------------- /test/fixtures/async_with_arrow.js: -------------------------------------------------------------------------------- 1 | class Foo { 2 | async foo (bar) { 3 | (() => { 4 | this.bar = 'baz'; 5 | })(); 6 | } 7 | } 8 | 9 | // Execute the code to make sure it executes without an error after transpiling 10 | exports.handle = function (event, context, callback) { 11 | new Foo().foo(); 12 | callback(null, {}); 13 | }; 14 | -------------------------------------------------------------------------------- /test/fixtures/bundled_service.zip: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/lifeomic/lambda-tools/e517014e2f079b3541762e68214c2c45037d3b67/test/fixtures/bundled_service.zip -------------------------------------------------------------------------------- /test/fixtures/crypto-browserify.js: -------------------------------------------------------------------------------- 1 | // The function will not work correctly if this is not swapped out when the 2 | // function is built. 3 | // eslint-disable-next-line import/no-extraneous-dependencies 4 | const cryptoBrowserify = require('crypto-browserify'); 5 | const nodeCrypto = require('crypto'); 6 | const assert = require('assert'); 7 | 8 | exports.handler = async (event, context, callback) => { 9 | assert.deepStrictEqual(cryptoBrowserify, nodeCrypto, 'crypto != crypto-browserify'); 10 | return 'crypto === crypto-browserify'; 11 | }; 12 | -------------------------------------------------------------------------------- /test/fixtures/es_modules/es_module.mjs: -------------------------------------------------------------------------------- 1 | export default 'hi'; 2 | -------------------------------------------------------------------------------- /test/fixtures/es_modules/index.js: -------------------------------------------------------------------------------- 1 | require('./es_module'); 2 | -------------------------------------------------------------------------------- /test/fixtures/lambci-derivative/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM lambci/lambda:nodejs10.x 2 | 3 | RUN echo "Any command is fine" 4 | -------------------------------------------------------------------------------- /test/fixtures/lambda-with-tsconfig/index.ts: -------------------------------------------------------------------------------- 1 | const Koa = require('koa'); 2 | const Router = require('koa-router'); 3 | const serverless = require('serverless-http'); 4 | 5 | const app = new Koa(); 6 | const router = new Router(); 7 | 8 
| router.get('/', async (context, next) => { 9 | context.response.body = { 10 | service: 'lambda-test', 11 | parameter: process.env.TEST_PARAMETER 12 | }; 13 | await next(); 14 | }); 15 | 16 | app.use(router.routes()); 17 | export const handler = serverless(app); 18 | -------------------------------------------------------------------------------- /test/fixtures/lambda-with-tsconfig/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "module": "commonjs", 4 | "target": "es6", 5 | "skipLibCheck": true 6 | }, 7 | "include": ["**/*.ts"], 8 | "exclude": [ 9 | "node_modules" 10 | ] 11 | } 12 | -------------------------------------------------------------------------------- /test/fixtures/lambda_graphql.js: -------------------------------------------------------------------------------- 1 | const Koa = require('koa'); 2 | const serverless = require('serverless-http'); 3 | 4 | const { ApolloServer, gql } = require('apollo-server-koa'); 5 | 6 | const app = new Koa(); 7 | 8 | const graphql = new ApolloServer({ 9 | context: ({ ctx }) => ({ 10 | header: ctx.request.get('test-header') 11 | }), 12 | resolvers: { 13 | Query: { 14 | value: (obj, args, context, info) => args.prompt + ': ' + context.header 15 | } 16 | }, 17 | typeDefs: gql` 18 | type Query { 19 | value (prompt: String!): String! 
20 | } 21 | ` 22 | }); 23 | 24 | graphql.applyMiddleware({ app, path: '/' }); 25 | module.exports.handler = serverless(app); 26 | -------------------------------------------------------------------------------- /test/fixtures/lambda_service.js: -------------------------------------------------------------------------------- 1 | const Koa = require('koa'); 2 | const Router = require('koa-router'); 3 | const serverless = require('serverless-http'); 4 | 5 | const app = new Koa(); 6 | const router = new Router(); 7 | 8 | router.get('/', async (context, next) => { 9 | context.response.body = { 10 | service: 'lambda-test', 11 | parameter: process.env.TEST_PARAMETER 12 | }; 13 | await next(); 14 | }); 15 | 16 | app.use(router.routes()); 17 | module.exports.handler = serverless(app); 18 | -------------------------------------------------------------------------------- /test/fixtures/multi-lambdas/func1.js: -------------------------------------------------------------------------------- 1 | console.log('func1'); 2 | -------------------------------------------------------------------------------- /test/fixtures/multi-lambdas/func2.ts: -------------------------------------------------------------------------------- 1 | console.log('func2'); 2 | -------------------------------------------------------------------------------- /test/fixtures/multi-lambdas/func3/index.js: -------------------------------------------------------------------------------- 1 | console.log('func3'); 2 | -------------------------------------------------------------------------------- /test/fixtures/multi-lambdas/func4/index.ts: -------------------------------------------------------------------------------- 1 | console.log('func4'); 2 | -------------------------------------------------------------------------------- /test/fixtures/multi-lambdas/ignored.md: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/lifeomic/lambda-tools/e517014e2f079b3541762e68214c2c45037d3b67/test/fixtures/multi-lambdas/ignored.md -------------------------------------------------------------------------------- /test/fixtures/multi-lambdas/invalid/index.js/ignored: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lifeomic/lambda-tools/e517014e2f079b3541762e68214c2c45037d3b67/test/fixtures/multi-lambdas/invalid/index.js/ignored -------------------------------------------------------------------------------- /test/fixtures/multi-lambdas/unreadable/index.js: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lifeomic/lambda-tools/e517014e2f079b3541762e68214c2c45037d3b67/test/fixtures/multi-lambdas/unreadable/index.js -------------------------------------------------------------------------------- /test/fixtures/runtime_callbacks.js: -------------------------------------------------------------------------------- 1 | exports.handler = async (event, context, callback) => { 2 | callback(null, 'one'); 3 | // Returning from an async function is equivalent to invoking the callback. 
4 | return 'two'; 5 | }; 6 | -------------------------------------------------------------------------------- /test/fixtures/runtime_dns.js: -------------------------------------------------------------------------------- 1 | const assert = require('assert'); 2 | const dns = require('dns'); 3 | const sinon = require('sinon'); 4 | 5 | exports.handler = (event, context, callback) => { 6 | const failure = new Error('simulated failure'); 7 | failure.code = dns.NOTFOUND; 8 | 9 | // Simulate an extra polyfill/shim 10 | dns.lookup = dns.lookup.bind(dns); 11 | 12 | const lookup = sinon.stub(dns._raw, 'lookup') 13 | .callsArgWith(2, failure) 14 | .withArgs('example.com', sinon.match.object, sinon.match.func) 15 | .onFirstCall().callsArgWith(2, failure) 16 | .onSecondCall().callsArgWith(2, failure) 17 | .onThirdCall().callsArgWith(2, null, '127.0.0.1', 4) 18 | 19 | const consoleSpy = sinon.stub(console, 'error'); 20 | 21 | dns.lookup('example.com', (error, hostname, family) => { 22 | try { 23 | assert.ifError(error); 24 | assert.strictEqual(hostname, '127.0.0.1'); 25 | assert.strictEqual(family, 4); 26 | sinon.assert.callCount(lookup, 3); 27 | 28 | sinon.assert.callCount(consoleSpy, 2); 29 | for (const attempt of [4, 3]) { 30 | sinon.assert.calledWith(consoleSpy, `DNS lookup of example.com failed and will be retried ${attempt} more times`); 31 | } 32 | } catch (error) { 33 | callback(error); 34 | return; 35 | } 36 | callback(null, 'success!'); 37 | }); 38 | }; 39 | -------------------------------------------------------------------------------- /test/fixtures/runtime_events.js: -------------------------------------------------------------------------------- 1 | const assert = require('assert'); 2 | 3 | exports.handler = (event, context, callback) => { 4 | const listeners = process.listeners('beforeExit'); 5 | assert(listeners.length === 2, `unexpected number of listeners ${listeners.length}`); 6 | 7 | // Need to remove the AWS listener so that we don't terminate early 8 | 
process.removeListener('beforeExit', listeners[1]); 9 | process.emit('beforeExit'); 10 | // Re-add the listener to ensure that lambci/lambda output doesn't get messed up 11 | process.addListener('beforeExit', listeners[1]); 12 | 13 | const listenerCount = process.listenerCount('beforeExit'); 14 | assert(listenerCount === 1, `listener cleanup failed ${listenerCount}`); 15 | }; 16 | -------------------------------------------------------------------------------- /test/fixtures/runtime_promises.js: -------------------------------------------------------------------------------- 1 | exports.handler = async (event, context) => { 2 | await new Promise((resolve) => setImmediate(resolve)); 3 | return 'hello from the promised land!'; 4 | }; 5 | -------------------------------------------------------------------------------- /test/fixtures/ts_lambda_kinesisHandler.ts: -------------------------------------------------------------------------------- 1 | import {Kinesis} from 'aws-sdk'; 2 | import {KinesisStreamEvent} from 'aws-lambda'; 3 | 4 | export async function handler (event: KinesisStreamEvent) { 5 | const kinesis = new Kinesis({ endpoint: process.env.KINESIS_ENDPOINT }); 6 | console.log(`Handling ${event.Records.length} records`); 7 | const records: Kinesis.PutRecordsRequestEntryList = event.Records.map(({kinesis: {data, partitionKey}}) => ({ 8 | Data: Buffer.from(Buffer.from(data, 'base64').toString()), 9 | PartitionKey: partitionKey 10 | })); 11 | await kinesis.putRecords({ 12 | StreamName: process.env.NEXT_KINESIS_STREAM_NAME!, 13 | Records: records, 14 | }).promise(); 15 | } 16 | -------------------------------------------------------------------------------- /test/fixtures/ts_lambda_service.ts: -------------------------------------------------------------------------------- 1 | import Koa from 'koa'; 2 | import Router from 'koa-router'; 3 | import serverless from 'serverless-http'; 4 | 5 | const app = new Koa(); 6 | const router = new Router(); 7 | 8 | 
router.get('/', async (context, next) => { 9 | context.response.body = { 10 | service: 'lambda-test', 11 | parameter: process.env.TEST_PARAMETER 12 | }; 13 | await next(); 14 | }); 15 | 16 | app.use(router.routes()); 17 | module.exports.handler = serverless(app); 18 | -------------------------------------------------------------------------------- /test/graphql/assertSuccess-httpErrors.test.js: -------------------------------------------------------------------------------- 1 | const Koa = require('koa'); 2 | const Router = require('koa-router'); 3 | const test = require('ava'); 4 | 5 | const { assertSuccess, setupGraphQL, useGraphQL } = require('../../src/graphql'); 6 | 7 | useGraphQL(test); 8 | 9 | test.before(() => { 10 | setupGraphQL(() => { 11 | const app = new Koa(); 12 | const router = new Router(); 13 | 14 | app.use(router.routes()); 15 | 16 | return app; 17 | }); 18 | }); 19 | 20 | test('assertSuccess throws on an HTTP error', async (test) => { 21 | const query = '{ error }'; 22 | const response = await test.context.graphql(query); 23 | const expectedErrorMessage = `Did not succeed. 
HTTP status code was 404 and error was { 24 | "status": 404, 25 | "text": "Not Found", 26 | "method": "POST", 27 | "path": "/graphql" 28 | }`; 29 | test.throws(() => assertSuccess(response), { message: expectedErrorMessage }); 30 | }); 31 | -------------------------------------------------------------------------------- /test/graphql/assertions.test.js: -------------------------------------------------------------------------------- 1 | const Koa = require('koa'); 2 | const test = require('ava'); 3 | const assert = require('assert'); 4 | 5 | const { assertError, assertSuccess, setupGraphQL, useGraphQL } = require('../../src/graphql'); 6 | const { ApolloServer, gql } = require('apollo-server-koa'); 7 | 8 | const graphql = new ApolloServer({ 9 | resolvers: { 10 | Query: { 11 | error: (obj, args, context, info) => { throw new Error('boom!'); }, 12 | success: (obj, args, context, info) => 'success!' 13 | } 14 | }, 15 | typeDefs: gql` 16 | type Query { 17 | error: String! 18 | success: String! 19 | } 20 | ` 21 | }); 22 | 23 | useGraphQL(test); 24 | 25 | test.before(() => { 26 | setupGraphQL(() => { 27 | const app = new Koa(); 28 | graphql.applyMiddleware({ app }); 29 | return app; 30 | }); 31 | }); 32 | 33 | // This is a work-around for a bug in ava. This should be removed when the fix 34 | // is released in ava 1.0.0. 
35 | // See https://github.com/avajs/ava/pull/1885 36 | test.beforeEach((test) => { 37 | test.context.isTTY = process.stdout.isTTY; 38 | process.stdout.isTTY = false; 39 | }); 40 | 41 | test.afterEach((test) => { 42 | process.stdout.isTTY = test.context.isTTY; 43 | }); 44 | 45 | test('assertSuccess does not throw on a successful response', async (test) => { 46 | const query = '{ success }'; 47 | const response = await test.context.graphql(query); 48 | assertSuccess(response); 49 | }); 50 | 51 | test('assertSuccess throws on a graphql error', async (test) => { 52 | const query = '{ error }'; 53 | const response = await test.context.graphql(query); 54 | const expectedErrorMessage = `Did not succeed. Errors were [ 55 | { 56 | "message": "boom!", 57 | "path": "error" 58 | } 59 | ]`; 60 | 61 | test.throws(() => assertSuccess(response), { message: expectedErrorMessage }); 62 | }); 63 | 64 | test('assertError throws on a successful response', async (test) => { 65 | const query = '{ success }'; 66 | const response = await test.context.graphql(query); 67 | test.throws(() => assertError(response), { message: 'Expected error but none found' }); 68 | }); 69 | 70 | test('assertError does not throw on a failed response', async (test) => { 71 | const query = '{ error }'; 72 | const response = await test.context.graphql(query); 73 | assertError(response, 'error', 'boom!'); 74 | }); 75 | 76 | test('assertError throws if no error matches the path', async (test) => { 77 | const query = '{ error }'; 78 | const response = await test.context.graphql(query); 79 | test.throws(() => assertError(response, 'some.other.path', 'boom!'), { message: `No error found with path 'some.other.path'. 
The paths with errors were: error` }); 80 | }); 81 | 82 | test('assertError throws if the error does not match the message', async (test) => { 83 | const query = '{ error }'; 84 | const response = await test.context.graphql(query); 85 | 86 | // The message generated from AssertionErrors is different depending on 87 | // Node version, so a hardcoded message cannot be used for the assertion. 88 | let expectedMessage; 89 | try { 90 | assert.strictEqual('boom!', 'some other message'); 91 | expectedMessage = 'Failed to generate expected failure'; 92 | } catch (e) { 93 | expectedMessage = e.message; 94 | } 95 | 96 | test.throws(() => assertError(response, 'error', 'some other message'), { message: expectedMessage }); 97 | }); 98 | 99 | test('assertError throws if the path is undefined and no error has undefined path', test => { 100 | const response = { 101 | body: { 102 | errors: [{ message: 'foo', path: ['some', 'path'] }] 103 | } 104 | }; 105 | test.throws(() => assertError(response, undefined, 'something'), { message: `No error found with path 'undefined'. 
The paths with errors were: some.path` }); 106 | }); 107 | 108 | test('assertError doesn\'t throw on mixed path/no-path errors', test => { 109 | const response = { 110 | body: { 111 | errors: [ 112 | { message: 'bar', path: undefined }, 113 | { message: 'foo', path: ['some', 'path'] } 114 | ] 115 | } 116 | }; 117 | assertError(response, 'some.path', 'foo'); 118 | }); 119 | 120 | test('assertError can be called with a matcher function', async (test) => { 121 | const query = '{ error }'; 122 | const response = await test.context.graphql(query); 123 | assertError(response, 'error', (msg) => msg.includes('boom')); 124 | }); 125 | -------------------------------------------------------------------------------- /test/graphql/config.test.js: -------------------------------------------------------------------------------- 1 | const sinon = require('sinon'); 2 | const test = require('ava'); 3 | 4 | function resetGraphqlHelper () { 5 | const path = require.resolve('../../src/graphql'); 6 | // eslint-disable-next-line security/detect-object-injection 7 | delete require.cache[path]; 8 | } 9 | 10 | test.before(resetGraphqlHelper); 11 | 12 | test.beforeEach((test) => { 13 | const { useGraphQL } = require('../../src/graphql'); 14 | 15 | test.context.mock = { 16 | serial: { 17 | beforeEach: sinon.stub() 18 | } 19 | }; 20 | 21 | test.context.useGraphQL = useGraphQL; 22 | }); 23 | 24 | test.afterEach.always(resetGraphqlHelper); 25 | 26 | test('When a test endpoint has not been configured an error is thrown', async (test) => { 27 | test.context.useGraphQL(test.context.mock); 28 | 29 | sinon.assert.calledOnce(test.context.mock.serial.beforeEach); 30 | test.throws( 31 | test.context.mock.serial.beforeEach.firstCall.args[0], 32 | { message: 'A test GraphQL endpoint has not been configured!' 
} 33 | ); 34 | }); 35 | -------------------------------------------------------------------------------- /test/graphql/query.test.js: -------------------------------------------------------------------------------- 1 | const Koa = require('koa'); 2 | const sinon = require('sinon'); 3 | const test = require('ava'); 4 | 5 | const { setupGraphQL, useGraphQL } = require('../../src/graphql'); 6 | const { ApolloServer, gql } = require('apollo-server-koa'); 7 | 8 | useGraphQL(test); 9 | 10 | test.before(() => { 11 | setupGraphQL((test) => { 12 | const app = new Koa(); 13 | 14 | test.context.defaultValue = sinon.stub().returns('default'); 15 | 16 | const graphql = new ApolloServer({ 17 | context: ({ ctx }) => ({ 18 | defaultValue: test.context.defaultValue, 19 | header: ctx.get('test-header') 20 | }), 21 | resolvers: { 22 | Query: { 23 | value: (obj, args, context, info) => args.prompt + ': ' + (context.header || context.defaultValue()) 24 | } 25 | }, 26 | typeDefs: gql` 27 | type Query { 28 | value(prompt: String!): String! 29 | } 30 | ` 31 | }); 32 | 33 | graphql.applyMiddleware({ app }); 34 | return app; 35 | }); 36 | }); 37 | 38 | test('Making a GraphQL query invokes the app', async (test) => { 39 | const query = ` 40 | query GetValue ($prompt: String!) 
{ 41 | value(prompt: $prompt) 42 | } 43 | `; 44 | 45 | const variables = { prompt: 'value' }; 46 | const response = await test.context.graphql(query, variables); 47 | 48 | test.is(response.status, 200); 49 | test.is(response.type, 'application/json'); 50 | test.falsy(response.body.errors); 51 | test.truthy(response.body.data); 52 | test.is(response.body.data.value, 'value: default'); 53 | }); 54 | 55 | test('The GraphQL helper allows making batch requests', async (test) => { 56 | const query = [ 57 | { 58 | query: 'query GetValue{ value(prompt: "prompt1") }' 59 | }, 60 | { 61 | query: 'query GetValue{ value(prompt: "prompt2") }' 62 | } 63 | ]; 64 | 65 | // const variables = { prompt: 'value' }; 66 | const response = await test.context.graphql(query); 67 | 68 | test.is(response.status, 200); 69 | test.is(response.type, 'application/json'); 70 | test.falsy(response.body[0].errors); 71 | test.falsy(response.body[1].errors); 72 | test.truthy(response.body[0].data); 73 | test.truthy(response.body[1].data); 74 | test.is(response.body[0].data.value, 'prompt1: default'); 75 | test.is(response.body[1].data.value, 'prompt2: default'); 76 | }); 77 | 78 | test('A GraphQL request can be customized', async (test) => { 79 | const query = ` 80 | query GetValue ($prompt: String!) { 81 | value(prompt: $prompt) 82 | } 83 | `; 84 | 85 | const variables = { prompt: 'value' }; 86 | const request = test.context.graphql(query, variables); 87 | request.set('test-header', 'test value'); 88 | 89 | const response = await request; 90 | 91 | test.is(response.status, 200); 92 | test.is(response.type, 'application/json'); 93 | test.falsy(response.body.errors); 94 | test.truthy(response.body.data); 95 | test.is(response.body.data.value, 'value: test value'); 96 | }); 97 | 98 | test('The GraphQL helper allows the test context to be customized', async (test) => { 99 | const query = ` 100 | query GetValue ($prompt: String!) 
{ 101 | value(prompt: $prompt) 102 | } 103 | `; 104 | 105 | const variables = { prompt: 'value' }; 106 | 107 | test.context.defaultValue.returns('custom value'); 108 | const response = await test.context.graphql(query, variables); 109 | 110 | test.is(response.status, 200); 111 | test.is(response.type, 'application/json'); 112 | test.falsy(response.body.errors); 113 | test.truthy(response.body.data); 114 | test.is(response.body.data.value, 'value: custom value'); 115 | sinon.assert.calledOnce(test.context.defaultValue); 116 | }); 117 | -------------------------------------------------------------------------------- /test/graphql/queryHooks.test.js: -------------------------------------------------------------------------------- 1 | const Koa = require('koa'); 2 | const sinon = require('sinon'); 3 | const test = require('ava'); 4 | 5 | const { graphqlHooks } = require('../../src/graphql'); 6 | const { ApolloServer, gql } = require('apollo-server-koa'); 7 | 8 | const getApp = (context) => { 9 | const app = new Koa(); 10 | 11 | context.defaultValue = sinon.stub().returns('default'); 12 | 13 | const graphql = new ApolloServer({ 14 | context: ({ ctx }) => ({ 15 | defaultValue: context.defaultValue, 16 | header: ctx.get('test-header') 17 | }), 18 | resolvers: { 19 | Query: { 20 | value: (obj, args, context, info) => args.prompt + ': ' + (context.header || context.defaultValue()) 21 | } 22 | }, 23 | typeDefs: gql` 24 | type Query { 25 | value(prompt: String!): String! 26 | } 27 | ` 28 | }); 29 | 30 | graphql.applyMiddleware({ app }); 31 | return app; 32 | }; 33 | 34 | test.beforeEach((t) => { 35 | const hooks = graphqlHooks({ 36 | context: t.context, 37 | getApp 38 | }); 39 | hooks.beforeEach(); 40 | }); 41 | 42 | test('Making a GraphQL query invokes the app', async (test) => { 43 | const query = ` 44 | query GetValue ($prompt: String!) 
{ 45 | value(prompt: $prompt) 46 | } 47 | `; 48 | 49 | const variables = { prompt: 'value' }; 50 | const response = await test.context.graphql(query, variables); 51 | 52 | test.is(response.status, 200); 53 | test.is(response.type, 'application/json'); 54 | test.falsy(response.body.errors); 55 | test.truthy(response.body.data); 56 | test.is(response.body.data.value, 'value: default'); 57 | }); 58 | 59 | test('The GraphQL helper allows making batch requests', async (test) => { 60 | const query = [ 61 | { 62 | query: 'query GetValue{ value(prompt: "prompt1") }' 63 | }, 64 | { 65 | query: 'query GetValue{ value(prompt: "prompt2") }' 66 | } 67 | ]; 68 | 69 | // const variables = { prompt: 'value' }; 70 | const response = await test.context.graphql(query); 71 | 72 | test.is(response.status, 200); 73 | test.is(response.type, 'application/json'); 74 | test.falsy(response.body[0].errors); 75 | test.falsy(response.body[1].errors); 76 | test.truthy(response.body[0].data); 77 | test.truthy(response.body[1].data); 78 | test.is(response.body[0].data.value, 'prompt1: default'); 79 | test.is(response.body[1].data.value, 'prompt2: default'); 80 | }); 81 | 82 | test('A GraphQL request can be customized', async (test) => { 83 | const query = ` 84 | query GetValue ($prompt: String!) { 85 | value(prompt: $prompt) 86 | } 87 | `; 88 | 89 | const variables = { prompt: 'value' }; 90 | const request = test.context.graphql(query, variables); 91 | request.set('test-header', 'test value'); 92 | 93 | const response = await request; 94 | 95 | test.is(response.status, 200); 96 | test.is(response.type, 'application/json'); 97 | test.falsy(response.body.errors); 98 | test.truthy(response.body.data); 99 | test.is(response.body.data.value, 'value: test value'); 100 | }); 101 | 102 | test('The GraphQL helper allows the test context to be customized', async (test) => { 103 | const query = ` 104 | query GetValue ($prompt: String!) 
{ 105 | value(prompt: $prompt) 106 | } 107 | `; 108 | 109 | const variables = { prompt: 'value' }; 110 | 111 | test.context.defaultValue.returns('custom value'); 112 | const response = await test.context.graphql(query, variables); 113 | 114 | test.is(response.status, 200); 115 | test.is(response.type, 'application/json'); 116 | test.falsy(response.body.errors); 117 | test.truthy(response.body.data); 118 | test.is(response.body.data.value, 'value: custom value'); 119 | sinon.assert.calledOnce(test.context.defaultValue); 120 | }); 121 | -------------------------------------------------------------------------------- /test/graphql/urlOption.test.js: -------------------------------------------------------------------------------- 1 | const Koa = require('koa'); 2 | const test = require('ava'); 3 | 4 | const { setupGraphQL, useGraphQL } = require('../../src/graphql'); 5 | const { ApolloServer, gql } = require('apollo-server-koa'); 6 | 7 | const graphql = new ApolloServer({ 8 | resolvers: { 9 | Query: { 10 | value: () => '' 11 | } 12 | }, 13 | typeDefs: gql` 14 | type Query { 15 | value: String! 
16 | } 17 | ` 18 | }); 19 | 20 | const alternateUrl = '/graphql-alt'; 21 | useGraphQL(test, { url: alternateUrl }); 22 | 23 | test.before(() => { 24 | setupGraphQL((test) => { 25 | const app = new Koa(); 26 | graphql.applyMiddleware({ app, path: alternateUrl }); 27 | return app; 28 | }); 29 | }); 30 | 31 | test('the useGraphQL url option can be used to change the path used in the tests', async (test) => { 32 | const query = '{ value }'; 33 | 34 | const response = await test.context.graphql(query); 35 | 36 | test.is(response.status, 200); 37 | test.is(response.type, 'application/json'); 38 | test.falsy(response.body.errors); 39 | test.truthy(response.body.data); 40 | test.is(response.body.data.value, ''); 41 | }); 42 | -------------------------------------------------------------------------------- /test/handleWebpackResult.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | const { handleWebpackResults } = require('../src/handleWebpackResult'); 3 | 4 | test('Throw error if webpack result hasErrors() returns true', (test) => { 5 | const err = test.throws(() => { 6 | handleWebpackResults({ 7 | hasErrors: () => { 8 | return true; 9 | } 10 | }); 11 | }); 12 | 13 | test.is(err.message, 'compilation_error'); 14 | }); 15 | -------------------------------------------------------------------------------- /test/helpers/createDefaultContainer.js: -------------------------------------------------------------------------------- 1 | const Docker = require('dockerode'); 2 | const { ensureImage } = require('../../src/docker'); 3 | const DEFAULT_IMAGE = 'alpine:3.6'; 4 | 5 | async function createDefaultContainer () { 6 | const docker = new Docker(); 7 | await ensureImage(docker, DEFAULT_IMAGE); 8 | 9 | return docker.createContainer({ 10 | Entrypoint: 'sh', 11 | HostConfig: { 12 | AutoRemove: true, 13 | NetworkMode: 'host', 14 | UsernsMode: 'host' 15 | }, 16 | Image: DEFAULT_IMAGE, 17 | OpenStdin: true 18 | 
}); 19 | } 20 | 21 | module.exports = { 22 | createDefaultContainer 23 | }; 24 | -------------------------------------------------------------------------------- /test/helpers/lambda.js: -------------------------------------------------------------------------------- 1 | const Docker = require('dockerode'); 2 | const fs = require('fs-extra'); 3 | const path = require('path'); 4 | const { v4: uuid } = require('uuid'); 5 | 6 | const { build, useComposeContainer, useLambda } = require('../../src/lambda'); 7 | const { promisify } = require('util'); 8 | 9 | const FIXTURES_DIRECTORY = path.join(__dirname, '../fixtures'); 10 | 11 | function hasTag (tagName) { 12 | return function (image) { 13 | return image.RepoTags && image.RepoTags.includes(tagName); 14 | }; 15 | } 16 | 17 | async function buildLambda (bundlePath, handlerName, options) { 18 | const buildResults = await build({ 19 | entrypoint: path.join(FIXTURES_DIRECTORY, `${handlerName}`), 20 | outputPath: bundlePath, 21 | serviceName: `test-service-${handlerName}`, 22 | ...options 23 | }); 24 | 25 | if (buildResults.hasErrors()) { 26 | console.error(buildResults.toJson().errors); 27 | throw new Error('Lambda build failed!'); 28 | } 29 | return buildResults; 30 | } 31 | 32 | function useLambdaContainer (test, imageName, options = {}) { 33 | const bundlePath = path.join(FIXTURES_DIRECTORY, 'build', uuid()); 34 | const { containerConfig = {}, handlerName = 'lambda_service' } = options; 35 | let container; 36 | 37 | useLambda(test); 38 | 39 | test.before(async () => { 40 | await buildLambda(bundlePath, `${handlerName}.js`); 41 | 42 | imageName = typeof imageName === 'string' ? 
imageName : await imageName(); 43 | 44 | const containerName = 'container'; 45 | const containerPrefix = process.env.COMPOSE_PROJECT_NAME = uuid(); 46 | container = await createContainer(imageName, `${containerPrefix}_${containerName}_1`, bundlePath); 47 | useComposeContainer({ ...containerConfig, service: containerName, handler: `${handlerName}.handler` }); 48 | }); 49 | 50 | test.beforeEach(function (t) { 51 | t.context.container = container; 52 | }); 53 | 54 | test.after.always(async () => { 55 | delete process.env.COMPOSE_PROJECT_NAME; 56 | await fs.remove(bundlePath); 57 | 58 | // Ensure that the container is always stopped 59 | try { 60 | await container.stop(); 61 | } catch (error) { 62 | console.warn(error); 63 | // swallow errors... 64 | } 65 | }); 66 | } 67 | 68 | async function createContainer (image, name, mountpoint) { 69 | const docker = new Docker(); 70 | const followProgress = promisify(docker.modem.followProgress); 71 | 72 | const qualifiedImage = /:[^:]*/.test(image) ? 
image : `${image}:latest`; 73 | 74 | const images = await docker.listImages(); 75 | if (!images.find(hasTag(qualifiedImage))) { 76 | await followProgress(await docker.pull(image)); 77 | } 78 | 79 | const container = await docker.createContainer({ 80 | Entrypoint: '/bin/sh', 81 | HostConfig: { 82 | AutoRemove: true, 83 | Binds: [ 84 | `${mountpoint}:/var/task` 85 | ] 86 | }, 87 | Image: image, 88 | name, 89 | OpenStdin: true, 90 | Volumes: { 91 | '/var/task': {} 92 | } 93 | }); 94 | 95 | await container.start(); 96 | console.log(`Created container ${container.id}`); 97 | return container; 98 | } 99 | 100 | module.exports = { 101 | createContainer, 102 | useLambdaContainer, 103 | FIXTURES_DIRECTORY, 104 | buildLambda 105 | }; 106 | -------------------------------------------------------------------------------- /test/index.test.ts: -------------------------------------------------------------------------------- 1 | import test from 'ava'; 2 | 3 | import * as index from '../src'; 4 | 5 | import * as docker from '../src/docker'; 6 | import * as dynamodb from '../src/dynamodb'; 7 | import Environment from '../src/Environment'; 8 | import * as graphql from '../src/graphql'; 9 | import * as lambda from '../src/lambda'; 10 | import * as localStack from '../src/localstack'; 11 | import * as kinesis from '../src/kinesis'; 12 | import * as mockServerLambda from '../src/mockServerLambda'; 13 | import WriteBuffer from '../src/WriteBuffer'; 14 | 15 | test('exports match expected', t => { 16 | t.deepEqual(index, { 17 | docker, 18 | dynamodb, 19 | Environment, 20 | graphql, 21 | lambda, 22 | localStack, 23 | kinesis, 24 | mockServerLambda, 25 | WriteBuffer 26 | }); 27 | }) 28 | -------------------------------------------------------------------------------- /test/indexJs.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | 3 | const index = require('../src'); 4 | const docker = require('../src/docker'); 5 | 
const dynamodb = require('../src/dynamodb'); 6 | const Environment = require('../src/Environment').default; 7 | const graphql = require('../src/graphql'); 8 | const lambda = require('../src/lambda'); 9 | const localStack = require('../src/localstack'); 10 | const kinesis = require('../src/kinesis'); 11 | const mockServerLambda = require('../src/mockServerLambda'); 12 | const WriteBuffer = require('../src/WriteBuffer').default; 13 | 14 | test('exports match expected', t => { 15 | t.deepEqual(index, { 16 | docker, 17 | dynamodb, 18 | Environment, 19 | graphql, 20 | lambda, 21 | localStack, 22 | kinesis, 23 | mockServerLambda, 24 | WriteBuffer 25 | }); 26 | }); 27 | -------------------------------------------------------------------------------- /test/kinesis/basic-parallel.test.js: -------------------------------------------------------------------------------- 1 | const AWS = require('aws-sdk'); 2 | const test = require('ava'); 3 | const { v4: uuid } = require('uuid'); 4 | 5 | const { streams, useKinesisDocker } = require('../../src/kinesis'); 6 | 7 | useKinesisDocker(test, true); 8 | 9 | test.before(() => { 10 | streams(['test-stream']); 11 | }); 12 | 13 | test.after(() => { 14 | streams([]); 15 | }); 16 | 17 | test('The helper provides kinesis client and streams', async (test) => { 18 | const { streamNames, kinesisClient } = test.context.kinesis; 19 | test.true(kinesisClient instanceof AWS.Kinesis); 20 | 21 | const listStreamsResponse = await kinesisClient.listStreams().promise(); 22 | const streamName = streamNames['test-stream']; 23 | test.true(listStreamsResponse.StreamNames.includes(streamName)); 24 | 25 | const item = { 26 | id: 'test', 27 | message: 'hello' 28 | }; 29 | 30 | await kinesisClient.putRecord({ 31 | Data: JSON.stringify(item), 32 | StreamName: streamName, 33 | PartitionKey: uuid() 34 | }).promise(); 35 | 36 | const describeStream = await kinesisClient.describeStream({ 37 | StreamName: streamName 38 | }).promise(); 39 | 40 | const iterator = await 
kinesisClient.getShardIterator({ 41 | ShardId: describeStream.StreamDescription.Shards[0].ShardId, 42 | ShardIteratorType: 'TRIM_HORIZON', 43 | StreamName: streamName, 44 | Timestamp: Date.now() 45 | }).promise(); 46 | 47 | const results = await kinesisClient.getRecords({ 48 | ShardIterator: iterator.ShardIterator, 49 | Limit: 10e3 50 | }).promise(); 51 | 52 | test.is(results.Records.length, 1); 53 | const payload = Buffer.from(results.Records[0].Data, 'base64').toString( 54 | 'utf8' 55 | ); 56 | 57 | test.deepEqual(JSON.parse(payload), item); 58 | }); 59 | 60 | test('The helper includes a unique identifier in the stream names', async (test) => { 61 | const { streamNames, uniqueIdentifier } = test.context.kinesis; 62 | const streamName = streamNames['test-stream']; 63 | 64 | test.true(typeof uniqueIdentifier === 'string'); 65 | test.true(uniqueIdentifier.length > 0); 66 | test.is(streamName, `test-stream-${uniqueIdentifier}`); 67 | }); 68 | 69 | test('The helper sets default configuration environment variables', async (test) => { 70 | test.truthy(process.env.AWS_ACCESS_KEY_ID); 71 | test.truthy(process.env.AWS_SECRET_ACCESS_KEY); 72 | test.truthy(process.env.AWS_REGION); 73 | test.truthy(process.env.KINESIS_ENDPOINT); 74 | }); 75 | 76 | test('The helper provides a config object', async (test) => { 77 | const { config } = test.context.kinesis; 78 | 79 | test.true(config.credentials instanceof AWS.Credentials); 80 | test.is(typeof config.endpoint, 'string', config.endpoint); 81 | test.truthy(config.region); 82 | }); 83 | -------------------------------------------------------------------------------- /test/kinesis/basic-serial.test.js: -------------------------------------------------------------------------------- 1 | const AWS = require('aws-sdk'); 2 | const test = require('ava'); 3 | const { v4: uuid } = require('uuid'); 4 | 5 | const { streams, useKinesisDocker } = require('../../src/kinesis'); 6 | 7 | useKinesisDocker(test); 8 | 9 | test.before(() => { 10 | 
streams(['test-stream']); 11 | }); 12 | 13 | test.after(() => { 14 | streams([]); 15 | }); 16 | 17 | // no uuid in stream name (old way, basic regression test to ensure forward 18 | // compatibility) 19 | test.serial('The helper provides kinesis clients and streams', async (test) => { 20 | const { kinesisClient } = test.context.kinesis; 21 | test.true(kinesisClient instanceof AWS.Kinesis); 22 | 23 | const listStreamsResponse = await kinesisClient.listStreams().promise(); 24 | const streamName = 'test-stream'; // no uuid-stream name lookup 25 | test.true(listStreamsResponse.StreamNames.includes(streamName)); 26 | 27 | const item = { 28 | id: 'test', 29 | message: 'hello' 30 | }; 31 | 32 | await kinesisClient.putRecord({ 33 | Data: JSON.stringify(item), 34 | StreamName: streamName, 35 | PartitionKey: uuid() 36 | }).promise(); 37 | 38 | const describeStream = await kinesisClient.describeStream({ 39 | StreamName: streamName 40 | }).promise(); 41 | 42 | const iterator = await kinesisClient.getShardIterator({ 43 | ShardId: describeStream.StreamDescription.Shards[0].ShardId, 44 | ShardIteratorType: 'TRIM_HORIZON', 45 | StreamName: streamName, 46 | Timestamp: Date.now() 47 | }).promise(); 48 | 49 | const results = await kinesisClient.getRecords({ 50 | ShardIterator: iterator.ShardIterator, 51 | Limit: 10e3 52 | }).promise(); 53 | 54 | test.is(results.Records.length, 1); 55 | const payload = Buffer.from(results.Records[0].Data, 'base64').toString( 56 | 'utf8' 57 | ); 58 | 59 | test.deepEqual(JSON.parse(payload), item); 60 | }); 61 | 62 | test.serial('The helper does not include a unique identifier in the stream names', async (test) => { 63 | const { streamNames, uniqueIdentifier } = test.context.kinesis; 64 | const streamName = streamNames['test-stream']; 65 | 66 | test.true(typeof uniqueIdentifier === 'string'); 67 | test.true(uniqueIdentifier.length === 0); 68 | test.is(streamName, 'test-stream'); 69 | }); 70 | 
-------------------------------------------------------------------------------- /test/kinesis/custom-endpoint.test.js: -------------------------------------------------------------------------------- 1 | const { Environment } = require('../../src/Environment'); 2 | const test = require('ava'); 3 | const sinon = require('sinon'); 4 | const AWS = require('aws-sdk'); 5 | const AWSMock = require('aws-sdk-mock'); 6 | AWSMock.setSDKInstance(AWS); 7 | const docker = require('../../src/docker'); 8 | const ensureImageSpy = sinon.spy(docker, 'ensureImage'); 9 | 10 | // Mock listStreams for `beforeAll` and `afterEach` hooks 11 | const listStreams = sinon.stub() 12 | .resolves({ StreamNames: [] }, []); 13 | 14 | AWSMock.mock('Kinesis', 'listStreams', listStreams); 15 | 16 | const { useKinesisDocker } = require('../../src/kinesis'); 17 | const environment = new Environment(); 18 | 19 | test.before(() => { 20 | environment.set('AWS_ACCESS_KEY_ID', 'test-access-key'); 21 | environment.set('AWS_SECRET_ACCESS_KEY', 'test-secret-key'); 22 | environment.set('KINESIS_ENDPOINT', 'kinesis://localhost'); 23 | }); 24 | 25 | test.after(() => environment.restore()); 26 | 27 | useKinesisDocker(test); 28 | 29 | test.serial('When KINESIS_ENDPOINT is set configuration environment variables are not set', (test) => { 30 | test.is(process.env.AWS_ACCESS_KEY_ID, 'test-access-key'); 31 | test.is(process.env.AWS_SECRET_ACCESS_KEY, 'test-secret-key'); 32 | test.is(process.env.KINESIS_ENDPOINT, 'kinesis://localhost'); 33 | }); 34 | 35 | test.serial('When KINESIS_ENDPOINT is set configuration the Docker image is not started', (test) => { 36 | sinon.assert.notCalled(ensureImageSpy); 37 | docker.ensureImage.restore(); 38 | }); 39 | -------------------------------------------------------------------------------- /test/kinesis/kinesis.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | const AWS = require('aws-sdk'); 3 | const sinon 
= require('sinon'); 4 | const { v4: uuid } = require('uuid'); 5 | 6 | const { streams, getConnection, createStreams, destroyStreams } = require('../../src/kinesis'); 7 | 8 | function throwTestError () { 9 | throw new Error('test'); 10 | } 11 | 12 | test.before(async t => { 13 | Object.assign(t.context, await getConnection()); 14 | }); 15 | 16 | test.beforeEach((t) => { 17 | const { config } = t.context; 18 | streams(['test-stream']); 19 | const kinesisClient = new AWS.Kinesis(config); 20 | Object.assign(t.context, { kinesisClient }); 21 | }); 22 | 23 | test.afterEach(() => { 24 | streams([]); 25 | }); 26 | 27 | test.after(async t => { 28 | const { connection } = t.context; 29 | await connection.cleanup(); 30 | }); 31 | 32 | async function assertStreamsPresent (t, client, expected, message) { 33 | const response = await client.listStreams().promise(); 34 | t.false(response.HasMoreStreams); 35 | t.deepEqual( 36 | response.StreamNames, 37 | expected, 38 | message 39 | ); 40 | } 41 | 42 | test.serial('createStreams creates streams according to specified schemas', async (t) => { 43 | const { kinesisClient } = t.context; 44 | 45 | await createStreams(kinesisClient); 46 | await assertStreamsPresent( 47 | t, 48 | kinesisClient, 49 | ['test-stream'], 50 | 'createStream should have added "test-stream"' 51 | ); 52 | }); 53 | 54 | test.serial('throws when createStreams fails', async t => { 55 | const { kinesisClient } = t.context; 56 | 57 | sinon.stub(kinesisClient, 'createStream').onFirstCall().callsFake(throwTestError); 58 | const { message } = await t.throwsAsync(createStreams(kinesisClient)); 59 | t.is(message, 'Failed to create streams: test-stream'); 60 | }); 61 | 62 | test.serial('deletes created streams when createStreams fails', async t => { 63 | const { kinesisClient } = t.context; 64 | 65 | streams([ 66 | 'test-stream-not-created', 67 | 'test-stream-created' 68 | ]); 69 | 70 | sinon.stub(kinesisClient, 'createStream') 71 | .callThrough() 72 | 
.onFirstCall().callsFake(throwTestError); 73 | const deleteStream = sinon.spy(kinesisClient, 'deleteStream'); 74 | 75 | const { message } = await t.throwsAsync(createStreams(kinesisClient)); 76 | t.is(message, 'Failed to create streams: test-stream-not-created'); 77 | const { StreamNames } = await kinesisClient.listStreams().promise(); 78 | t.deepEqual(StreamNames, []); 79 | sinon.assert.calledOnce(deleteStream); 80 | sinon.assert.calledWithExactly(deleteStream, { StreamName: 'test-stream-created' }); 81 | }); 82 | 83 | test.serial('throws when createStream fails, logs if destroy fails', async t => { 84 | const { kinesisClient } = t.context; 85 | 86 | const StreamName = 'test-stream-2'; 87 | 88 | streams([ 89 | StreamName, 90 | 'test-stream' 91 | ]); 92 | 93 | sinon.stub(kinesisClient, 'createStream') 94 | .callThrough() 95 | .onSecondCall().callsFake(throwTestError); 96 | const deleteStream = sinon.stub(kinesisClient, 'deleteStream') 97 | .callThrough() 98 | .onFirstCall().callsFake(throwTestError); 99 | 100 | const { message } = await t.throwsAsync(createStreams(kinesisClient)); 101 | 102 | t.is(message, 'Failed to create streams: test-stream'); 103 | sinon.assert.calledOnce(deleteStream); 104 | sinon.assert.calledWithExactly(deleteStream, { StreamName }); 105 | 106 | await kinesisClient.deleteStream({ StreamName }).promise(); 107 | }); 108 | 109 | test.serial('throws when destroyStreams fails', async t => { 110 | const { kinesisClient } = t.context; 111 | 112 | sinon.stub(kinesisClient, 'listStreams').onFirstCall().returns({ 113 | promise: () => Promise.resolve({ 114 | StreamNames: ['test-stream'] 115 | }) 116 | }); 117 | sinon.stub(kinesisClient, 'deleteStream').onFirstCall().callsFake(throwTestError); 118 | const { message } = await t.throwsAsync(destroyStreams(kinesisClient)); 119 | t.is(message, 'Failed to destroy streams: test-stream'); 120 | }); 121 | 122 | async function destroyStreamTest (t, useUniqueStreams) { 123 | const { kinesisClient } = t.context;
124 | const uniqueIdentifier = useUniqueStreams ? uuid() : ''; 125 | const streamName = useUniqueStreams 126 | ? `test-stream-${uniqueIdentifier}` : 'test-stream'; 127 | 128 | await createStreams(kinesisClient, uniqueIdentifier); 129 | 130 | await assertStreamsPresent( 131 | t, 132 | kinesisClient, 133 | [streamName], 134 | `createStreams should have added "${streamName}"` 135 | ); 136 | 137 | await destroyStreams(kinesisClient, uniqueIdentifier); 138 | await assertStreamsPresent( 139 | t, 140 | kinesisClient, 141 | [], 142 | `createStreams should have destroyed "${streamName}"` 143 | ); 144 | } 145 | 146 | test.serial('destroyStreams destroys created stream', async t => { 147 | await destroyStreamTest(t, false); 148 | }); 149 | 150 | test.serial('destroyStreams destroys created stream when uniqueIdentifier is used', async t => { 151 | await destroyStreamTest(t, true); 152 | }); 153 | -------------------------------------------------------------------------------- /test/kinesis/testHooks-error.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | const sinon = require('sinon'); 3 | 4 | test('The afterAll hook handles errors in the beforeAll hook gracefully', async (t) => { 5 | // Stub the docker module to throw errors when fetching images. 
6 | // This needs to happen before the kinesis helper module is imported 7 | const docker = require('../../src/docker'); 8 | const error = new Error('Stubbed failure'); 9 | const ensureStub = sinon.stub(docker, 'ensureImage') 10 | .rejects(error); 11 | 12 | const { kinesisTestHooks } = require('../../src/kinesis'); 13 | const { afterAll, beforeAll } = kinesisTestHooks(false); 14 | 15 | try { 16 | await t.throwsAsync(beforeAll, { instanceOf: Error, message: error.message }); 17 | await t.notThrowsAsync(afterAll()); 18 | } finally { 19 | ensureStub.restore(); 20 | } 21 | }); 22 | 23 | test('The afterEach hook will ignore a missing context', async t => { 24 | const { kinesisTestHooks } = require('../../src/kinesis'); 25 | const { afterEach } = kinesisTestHooks(false); 26 | await t.notThrowsAsync(afterEach(undefined)); 27 | }); 28 | -------------------------------------------------------------------------------- /test/kinesis/useKinesis.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | const { useKinesis } = require('../../src/kinesis'); 3 | const sinon = require('sinon'); 4 | 5 | const AWS = require('aws-sdk'); 6 | const AWSMock = require('aws-sdk-mock'); 7 | AWSMock.setSDKInstance(AWS); 8 | 9 | AWS.config.region = 'us-east-1'; 10 | 11 | const createStreamStub = sinon.stub().yields(); 12 | const deleteStreamStub = sinon.stub().yields(); 13 | AWSMock.mock('Kinesis', 'createStream', createStreamStub); 14 | AWSMock.mock('Kinesis', 'deleteStream', deleteStreamStub); 15 | 16 | const TEST_STREAM_NAME = 'stream-name'; 17 | 18 | useKinesis(test, TEST_STREAM_NAME); 19 | 20 | test.after(function () { 21 | sinon.assert.calledOnce(deleteStreamStub); 22 | sinon.assert.calledWith(deleteStreamStub, { 23 | StreamName: TEST_STREAM_NAME 24 | }); 25 | }); 26 | 27 | test('provides a kinesis client to the tests', (test) => { 28 | test.truthy(test.context.kinesis); 29 | test.is(typeof
test.context.kinesis.putRecord, 'function'); 30 | }); 31 | 32 | test('calls createStream to create the stream', (test) => { 33 | sinon.assert.calledOnce(createStreamStub); 34 | sinon.assert.calledWith(createStreamStub, { 35 | StreamName: TEST_STREAM_NAME, 36 | ShardCount: 1 37 | }); 38 | }); 39 | -------------------------------------------------------------------------------- /test/kinesis/wait-for-ready.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | const sinon = require('sinon'); 3 | 4 | const AWS = require('aws-sdk'); 5 | const AWSMock = require('aws-sdk-mock'); 6 | AWSMock.setSDKInstance(AWS); 7 | 8 | // Setup a mock listStreams call that will fail once 9 | // to force a retry and then success to allow the setup 10 | // to continue 11 | const listStreamsMock = sinon.stub() 12 | .onFirstCall().rejects(new Error('First error')) 13 | .resolves({ StreamNames: [] }, []); 14 | 15 | AWSMock.mock('Kinesis', 'listStreams', listStreamsMock); 16 | 17 | const { useKinesisDocker } = require('../../src/kinesis'); 18 | 19 | useKinesisDocker(test); 20 | 21 | test.serial('The helper provides database clients and streams', async (test) => { 22 | // Listing the Streams should be called twice. 
Once for the failure and the 23 | // second for success to allow the `before` block to complete 24 | sinon.assert.calledTwice(listStreamsMock); 25 | }); 26 | -------------------------------------------------------------------------------- /test/lambda/compose-container.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | const debug = require('debug'); 3 | const { WriteBuffer } = require('../../src/WriteBuffer'); 4 | 5 | const { useLambdaContainer } = require('../helpers/lambda'); 6 | 7 | const LAMBDA_IMAGE = 'lambci/lambda:nodejs12.x'; 8 | 9 | useLambdaContainer(test, LAMBDA_IMAGE); 10 | 11 | test.serial('The helper client can invoke compose containers', async (test) => { 12 | const response = await test.context.lambda.get('/'); 13 | test.is(response.status, 200); 14 | test.is(response.data.service, 'lambda-test'); 15 | }); 16 | 17 | test.serial('The helper client can invoke the container with a raw event and context', async (test) => { 18 | const result = await test.context.lambda.raw({ path: '/foo' }, {}); 19 | test.is(result.statusCode, 404); 20 | }); 21 | 22 | test.serial('The helper can log Lambda execution output', async (test) => { 23 | const write = process.stdout.write; 24 | const buffer = new WriteBuffer(); 25 | process.stdout.write = buffer.write.bind(buffer); 26 | debug.enable('lambda-tools:*'); 27 | 28 | try { 29 | await test.context.lambda.get('/'); 30 | } finally { 31 | process.stdout.write = write; 32 | } 33 | 34 | test.regex(buffer.toString(), /container output was:/); 35 | }); 36 | 37 | // We rely on test ordering to ensure that this test case does not interfere 38 | // with the others in this suite (this test case stops the container) 39 | test.serial('The helper reports invocation errors', async (test) => { 40 | const { container } = test.context; 41 | await container.stop(); 42 | // The format of this error message seems to differ based on timing of the 43 | //
container cleanup. We'll just settle for getting an error... 44 | await test.throwsAsync(() => test.context.lambda.get('/')); 45 | }); 46 | -------------------------------------------------------------------------------- /test/lambda/compose-derivative-container.test.js: -------------------------------------------------------------------------------- 1 | const Docker = require('dockerode'); 2 | const path = require('path'); 3 | const test = require('ava'); 4 | const { v4: uuid } = require('uuid'); 5 | 6 | const { promisify } = require('util'); 7 | const { useLambdaContainer, FIXTURES_DIRECTORY } = require('../helpers/lambda'); 8 | 9 | async function buildDerivativeImage () { 10 | const docker = new Docker(); 11 | const followProgress = promisify(docker.modem.followProgress); 12 | const name = `lambci-derivative-${uuid()}`; 13 | 14 | await followProgress( 15 | await docker.buildImage( 16 | { 17 | context: path.join(FIXTURES_DIRECTORY, 'lambci-derivative'), 18 | src: [ 'Dockerfile' ] 19 | }, 20 | { t: name } 21 | ) 22 | ); 23 | 24 | return name; 25 | } 26 | 27 | useLambdaContainer(test, buildDerivativeImage); 28 | 29 | test('The helper client can invoke lambdaci derivative containers', async (test) => { 30 | const response = await test.context.lambda.get('/'); 31 | test.is(response.status, 200); 32 | test.is(response.data.service, 'lambda-test'); 33 | }); 34 | -------------------------------------------------------------------------------- /test/lambda/core.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | const { v4: uuid } = require('uuid'); 3 | const { destroyLambdaExecutionEnvironment, useNewContainer, getGlobalOptions } = require('../../src/lambda'); 4 | 5 | test('will not crash if no execution environment is provided', async test => { 6 | await test.notThrowsAsync(destroyLambdaExecutionEnvironment()); 7 | }); 8 | 9 | test('will not create a network id when not useComposeNetwork', t => 
{ 10 | process.env.COMPOSE_PROJECT_NAME = uuid(); 11 | useNewContainer({ }); 12 | const options = getGlobalOptions(); 13 | t.is(options.network, undefined); 14 | }); 15 | 16 | test('will create a network id for the compose network', t => { 17 | const composeProjectName = uuid(); 18 | process.env.COMPOSE_PROJECT_NAME = composeProjectName; 19 | useNewContainer({ useComposeNetwork: true }); 20 | const options = getGlobalOptions(); 21 | t.is(options.network, `${composeProjectName}_default`); 22 | }); 23 | -------------------------------------------------------------------------------- /test/lambda/createLambdaExecutionEnvironment-errorHandling.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | const sinon = require('sinon'); 3 | const Docker = require('dockerode'); 4 | const path = require('path'); 5 | const lambda = require('../../src/lambda'); 6 | const assert = require('assert'); 7 | const fs = require('fs-extra'); 8 | const tmp = require('tmp-promise'); 9 | const { FIXTURES_DIRECTORY } = require('../helpers/lambda'); 10 | 11 | test.beforeEach((test) => { 12 | test.context.sandbox = sinon.createSandbox(); 13 | }); 14 | test.afterEach((test) => { 15 | test.context.sandbox.restore(); 16 | }); 17 | 18 | test.serial('Cleans up the network and container on failure after start', async function (test) { 19 | const error = new Error('Failure to start'); 20 | const originalStart = Docker.Container.prototype.start; 21 | test.context.sandbox.stub(Docker.Container.prototype, 'start').callsFake(async function () { 22 | await originalStart.call(this, arguments); 23 | throw error; 24 | }); 25 | const containerStopSpy = test.context.sandbox.spy(Docker.Container.prototype, 'stop'); 26 | const networkRemoveSpy = test.context.sandbox.spy(Docker.Network.prototype, 'remove'); 27 | 28 | const create = lambda.createLambdaExecutionEnvironment({ 29 | mountpoint: path.join(FIXTURES_DIRECTORY, 'build') 30 | }); 31 | 
32 | await test.throwsAsync(() => create, { message: error.message }); 33 | 34 | sinon.assert.calledOnce(containerStopSpy); 35 | sinon.assert.calledOnce(networkRemoveSpy); 36 | }); 37 | 38 | test.serial('Sends AWS_XRAY_CONTEXT_MISSING var to createContainer with no value when it is null (removing env vars)', async function (test) { 39 | const createSpy = test.context.sandbox.spy(Docker.prototype, 'createContainer'); 40 | 41 | const env = await lambda.createLambdaExecutionEnvironment({ 42 | environment: { AWS_XRAY_CONTEXT_MISSING: null }, 43 | mountpoint: path.join(FIXTURES_DIRECTORY, 'build') 44 | }); 45 | 46 | try { 47 | sinon.assert.calledWithMatch(createSpy, sinon.match((arg) => { 48 | assert.deepStrictEqual(arg.Env, ['AWS_XRAY_CONTEXT_MISSING']); 49 | return true; 50 | })); 51 | } finally { 52 | await lambda.destroyLambdaExecutionEnvironment(env); 53 | } 54 | }); 55 | 56 | test.serial('Cleanups up temp directory when unzipping fails', async (test) => { 57 | // Create a read stream that immediately yields an error 58 | const origReadStream = fs.createReadStream; 59 | test.context.sandbox.stub(fs, 'createReadStream').callsFake(function (...args) { 60 | const stream = origReadStream.call(this, ...args); 61 | test.context.sandbox.stub(stream, 'on') 62 | .withArgs('error') 63 | .yields(new Error('Failure')); 64 | return stream; 65 | }); 66 | const emptyDirSpy = test.context.sandbox.spy(fs, 'emptyDir'); 67 | const tempDirSpy = test.context.sandbox.spy(tmp, 'dir'); 68 | 69 | const failingCreate = lambda.createLambdaExecutionEnvironment({ 70 | environment: { AWS_XRAY_CONTEXT_MISSING: null }, 71 | zipfile: path.join(FIXTURES_DIRECTORY, 'bundled_service.zip') 72 | }); 73 | 74 | await test.throwsAsync(() => failingCreate); 75 | 76 | sinon.assert.calledOnce(tempDirSpy); 77 | 78 | const { path: tempDirPath } = await tempDirSpy.returnValues[0]; 79 | sinon.assert.calledOnce(emptyDirSpy); 80 | sinon.assert.calledWithExactly(emptyDirSpy, tempDirPath); 81 | }); 82 | 
-------------------------------------------------------------------------------- /test/lambda/createLambdaExecutionEnvironment.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | const sinon = require('sinon'); 3 | const { v4: uuid } = require('uuid'); 4 | const docker = require('dockerode'); 5 | 6 | const { createLambdaExecutionEnvironment } = require('../../src/lambda'); 7 | const { getLogger } = require('../../src/utils/logging'); 8 | 9 | test.beforeEach(t => { 10 | const logger = getLogger('lambda'); 11 | const errorSpy = sinon.spy(logger, 'error'); 12 | 13 | Object.assign(t.context, { logger, stubbedMethods: [errorSpy], errorSpy }); 14 | }); 15 | 16 | test.afterEach.always(t => { 17 | t.context.stubbedMethods.forEach(method => method.restore()); 18 | }); 19 | 20 | test.serial('An error is thrown if both zipfile and mountpoint arguments are provided', async (test) => { 21 | await test.throwsAsync(() => 22 | createLambdaExecutionEnvironment({ 23 | environment: { AWS_XRAY_CONTEXT_MISSING: null }, 24 | mountpoint: 'someMountPoint/', 25 | zipfile: 'some.zip' 26 | }) 27 | , { message: 'Only one of mountpoint or zipfile can be provided' }); 28 | }); 29 | 30 | test.serial('Will throw an error if the image can\'t be fetched', async test => { 31 | const { errorSpy } = test.context; 32 | const error = await test.throwsAsync(createLambdaExecutionEnvironment({ 33 | mountpoint: 'someMountPoint/', 34 | image: `junkity-junky/junk:${uuid()}` 35 | }), { message: '(HTTP code 404) unexpected - pull access denied for junkity-junky/junk, repository does not exist or may require \'docker login\': denied: requested access to the resource is denied ' }); 36 | sinon.assert.calledOnce(errorSpy); 37 | sinon.assert.calledWithExactly(errorSpy, 'Unable to get image', JSON.stringify({ error }, null, 2)); 38 | }); 39 | 40 | test.serial('Will throw an error if the network can\'t be created', async test => { 41 | const { 
errorSpy } = test.context; 42 | const networkError = sinon.stub(docker.prototype, 'createNetwork'); 43 | test.context.stubbedMethods.push(networkError); 44 | const errorMessage = `A new error: ${uuid()}`; 45 | 46 | networkError.rejects(errorMessage); 47 | 48 | const error = await test.throwsAsync(createLambdaExecutionEnvironment({ 49 | mountpoint: 'invalidCharacters/' 50 | })); 51 | sinon.assert.calledOnce(errorSpy); 52 | sinon.assert.calledWithExactly(errorSpy, 'Unable to create network', JSON.stringify({ error }, null, 2)); 53 | }); 54 | 55 | test.serial('Will throw an error if the container can\'t be created', async test => { 56 | const { errorSpy } = test.context; 57 | const error = await test.throwsAsync(createLambdaExecutionEnvironment({ 58 | mountpoint: 'invalidCharacters/', 59 | network: uuid() 60 | })); 61 | sinon.assert.calledOnce(errorSpy); 62 | sinon.assert.calledWithExactly(errorSpy, 'Unable to create container', JSON.stringify({ error }, null, 2)); 63 | }); 64 | 65 | test.serial('Will throw an error if the container can\'t start', async test => { 66 | const { errorSpy } = test.context; 67 | const createContainerStub = sinon.stub(docker.prototype, 'createContainer'); 68 | test.context.stubbedMethods.push(createContainerStub); 69 | const errorMessage = `A new error: ${uuid()}`; 70 | createContainerStub.resolves({ 71 | start: () => { 72 | throw new Error(errorMessage); 73 | } 74 | }); 75 | 76 | const error = await test.throwsAsync(createLambdaExecutionEnvironment({ 77 | mountpoint: 'invalidCharacters/', 78 | network: uuid() 79 | })); 80 | sinon.assert.calledOnce(errorSpy); 81 | sinon.assert.calledWithExactly(errorSpy, 'Unable to start container', JSON.stringify({ error }, null, 2)); 82 | }); 83 | -------------------------------------------------------------------------------- /test/lambda/entrypoint.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | const sinon = 
require('sinon'); 3 | const Dockerode = require('dockerode'); 4 | const { getEntrypoint } = require('../../src/lambda'); 5 | 6 | test.afterEach(() => { 7 | sinon.restore(); 8 | }); 9 | 10 | test.serial('should fail to get entrypoint', async (test) => { 11 | const imageName = 'image'; 12 | const errorMessage = `The image ${imageName} has no entrypoint and no parent image`; 13 | const containerStub = sinon.createStubInstance(Dockerode.Container, { 14 | inspect: sinon.stub().resolves({ Image: imageName }) 15 | }); 16 | const imageStub = sinon.createStubInstance(Dockerode.Image, { 17 | inspect: sinon.stub().resolves({ 18 | Config: {}, 19 | ContainerConfig: {} 20 | }) 21 | }); 22 | const dockerStub = sinon.createStubInstance(Dockerode, { 23 | getContainer: sinon.stub().resolves(containerStub), 24 | getImage: sinon.stub().returns(imageStub) 25 | }); 26 | 27 | const error = await test.throwsAsync(getEntrypoint(dockerStub, imageName)); 28 | 29 | test.is(error.message, errorMessage); 30 | }); 31 | 32 | test.serial('should not fail to get entrypoint', async (test) => { 33 | const imageName = 'image'; 34 | const containerStub = sinon.createStubInstance(Dockerode.Container, { 35 | inspect: sinon.stub().resolves({ Image: imageName }) 36 | }); 37 | const imageInspectStub = sinon.stub(); 38 | imageInspectStub.onFirstCall().resolves({ 39 | Config: {}, 40 | ContainerConfig: {}, 41 | Parent: `parent-${imageName}` 42 | }); 43 | imageInspectStub.onSecondCall().resolves({ 44 | Config: { 45 | Entrypoint: ['hello', 'entrypoint'] 46 | } 47 | }); 48 | 49 | const imageStub = sinon.createStubInstance(Dockerode.Image, { 50 | inspect: imageInspectStub 51 | }); 52 | const dockerStub = sinon.createStubInstance(Dockerode, { 53 | getContainer: sinon.stub().resolves(containerStub), 54 | getImage: sinon.stub().returns(imageStub) 55 | }); 56 | 57 | await test.notThrowsAsync(getEntrypoint(dockerStub, imageName)); 58 | }); 59 | 60 | test.serial('should get entrypoint from ContainerConfig over Config', 
async (test) => { 61 | const imageName = 'image'; 62 | const containerStub = sinon.createStubInstance(Dockerode.Container, { 63 | inspect: sinon.stub().resolves({ Image: imageName }) 64 | }); 65 | const imageInspectStub = sinon.stub().resolves({ 66 | Config: { 67 | Entrypoint: ['bad', 'entrypoint'] 68 | }, 69 | ContainerConfig: { 70 | Entrypoint: ['good', 'entrypoint'] 71 | } 72 | }); 73 | 74 | const imageStub = sinon.createStubInstance(Dockerode.Image, { 75 | inspect: imageInspectStub 76 | }); 77 | const dockerStub = sinon.createStubInstance(Dockerode, { 78 | getContainer: sinon.stub().resolves(containerStub), 79 | getImage: sinon.stub().returns(imageStub) 80 | }); 81 | 82 | const result = await getEntrypoint(dockerStub, imageName); 83 | 84 | test.deepEqual(result, ['good', 'entrypoint']); 85 | }); 86 | -------------------------------------------------------------------------------- /test/lambda/graphql.test.js: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | const test = require('ava'); 3 | const fs = require('fs-extra'); 4 | const { v4: uuid } = require('uuid'); 5 | 6 | const { build, useComposeContainer, useLambda } = require('../../src/lambda'); 7 | const { createContainer, FIXTURES_DIRECTORY } = require('../helpers/lambda'); 8 | 9 | const LAMBDA_IMAGE = 'lambci/lambda:nodejs12.x'; 10 | 11 | const buildDirectory = path.join(FIXTURES_DIRECTORY, 'build', uuid()); 12 | let container = null; 13 | 14 | useLambda(test); 15 | 16 | test.before(async () => { 17 | const buildResults = await build({ 18 | entrypoint: path.join(FIXTURES_DIRECTORY, 'lambda_graphql.js'), 19 | outputPath: buildDirectory 20 | }); 21 | 22 | if (buildResults.hasErrors()) { 23 | console.error(buildResults.toJson().errors); 24 | throw new Error('Lambda build failed!'); 25 | } 26 | 27 | const containerName = 'container'; 28 | const containerPrefix = process.env.COMPOSE_PROJECT_NAME = uuid(); 29 | container = await 
createContainer(LAMBDA_IMAGE, `${containerPrefix}_${containerName}_1`, buildDirectory); 30 | useComposeContainer({ service: containerName, handler: 'lambda_graphql.handler' }); 31 | }); 32 | 33 | test.after.always(async () => { 34 | delete process.env.COMPOSE_PROJECT_NAME; 35 | await fs.remove(buildDirectory); 36 | try { 37 | if (container) { 38 | await container.stop(); 39 | console.log(`Stopped container ${container.id}`); 40 | } 41 | } catch (error) { 42 | console.error(error); 43 | } 44 | }); 45 | 46 | test('The helper client can invoke graphql lambda services', async (test) => { 47 | const config = { 48 | headers: { 'test-header': 'test value' } 49 | }; 50 | 51 | const query = ` 52 | query TestQuery ($prompt: String!) { 53 | value(prompt: $prompt) 54 | } 55 | `; 56 | 57 | const variables = { 58 | prompt: 'value' 59 | }; 60 | 61 | const response = await test.context.lambda.graphql('/', query, variables, config); 62 | test.is(response.status, 200); 63 | test.is(response.data.data.value, 'value: test value'); 64 | }); 65 | -------------------------------------------------------------------------------- /test/lambda/runtime-callbacks.test.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs-extra'); 2 | const path = require('path'); 3 | const test = require('ava'); 4 | const { v4: uuid } = require('uuid'); 5 | const debug = require('debug'); 6 | const { WriteBuffer } = require('../../src/WriteBuffer'); 7 | 8 | const { build, useNewContainer, useLambda } = require('../../src/lambda'); 9 | const { FIXTURES_DIRECTORY } = require('../helpers/lambda'); 10 | 11 | const BUILD_DIRECTORY = path.join(FIXTURES_DIRECTORY, 'build', uuid()); 12 | 13 | // Ava's `serial` hook decorator needs to be used so that `useNewContainer` is 14 | // executed before the useLambda hooks are executed 15 | test.serial.before(async () => { 16 | const buildResults = await build({ 17 | entrypoint: path.join(FIXTURES_DIRECTORY, 
'runtime_callbacks.js'), 18 | outputPath: BUILD_DIRECTORY, 19 | serviceName: 'runtime-callbacks' 20 | }); 21 | 22 | if (buildResults.hasErrors()) { 23 | console.error(buildResults.toJson().errors); 24 | throw new Error('Lambda build failed!'); 25 | } 26 | 27 | useNewContainer({ 28 | handler: 'runtime_callbacks.handler', 29 | image: 'lambci/lambda:nodejs12.x', 30 | mountpoint: BUILD_DIRECTORY 31 | }); 32 | }); 33 | 34 | useLambda(test); 35 | 36 | test.after.always(async (test) => fs.remove(BUILD_DIRECTORY)); 37 | 38 | test.serial(`The lambda function logs multiple callback invocations`, async (test) => { 39 | const write = process.stdout.write; 40 | const buffer = new WriteBuffer(); 41 | process.stdout.write = buffer.write.bind(buffer); 42 | 43 | const context = {}; 44 | const event = {}; 45 | debug.enable('lambda-tools:*'); 46 | 47 | try { 48 | await test.context.lambda.raw(event, context); 49 | } finally { 50 | process.stdout.write = write; 51 | } 52 | 53 | test.regex(buffer.toString(), /called back multiple times/); 54 | }); 55 | -------------------------------------------------------------------------------- /test/lambda/runtime-dns.test.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs-extra'); 2 | const path = require('path'); 3 | const test = require('ava'); 4 | const { v4: uuid } = require('uuid'); 5 | 6 | const { build, useNewContainer, useLambda } = require('../../src/lambda'); 7 | const { FIXTURES_DIRECTORY } = require('../helpers/lambda'); 8 | 9 | const BUILD_DIRECTORY = path.join(FIXTURES_DIRECTORY, 'build', uuid()); 10 | 11 | // Ava's `serial` hook decorator needs to be used so that `useNewContainer` is 12 | // executed before the useLambda hooks are executed 13 | test.serial.before(async () => { 14 | const buildResults = await build({ 15 | enableDnsRetry: true, 16 | entrypoint: path.join(FIXTURES_DIRECTORY, 'runtime_dns.js'), 17 | outputPath: BUILD_DIRECTORY, 18 | serviceName: 'runtime-dns' 
19 | }); 20 | 21 | if (buildResults.hasErrors()) { 22 | console.error(buildResults.toJson().errors); 23 | throw new Error('Lambda build failed!'); 24 | } 25 | 26 | useNewContainer({ 27 | handler: 'runtime_dns.handler', 28 | image: 'lambci/lambda:nodejs12.x', 29 | mountpoint: BUILD_DIRECTORY 30 | }); 31 | }); 32 | 33 | useLambda(test); 34 | 35 | test.after.always(async (test) => fs.remove(BUILD_DIRECTORY)); 36 | 37 | test.serial('DNS lookups can be automatically retried', async (test) => { 38 | // The test assertions are part of the lambda fixture 39 | const result = await test.context.lambda.raw({}, {}); 40 | test.is(result, 'success!'); 41 | }); 42 | -------------------------------------------------------------------------------- /test/lambda/runtime-events.test.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs-extra'); 2 | const path = require('path'); 3 | const test = require('ava'); 4 | const { v4: uuid } = require('uuid'); 5 | const crypto = require('crypto'); 6 | const debug = require('debug'); 7 | 8 | const { WriteBuffer } = require('../../src/WriteBuffer'); 9 | 10 | const { build, useNewContainer, useLambda } = require('../../src/lambda'); 11 | const { FIXTURES_DIRECTORY } = require('../helpers/lambda'); 12 | 13 | const BUILD_DIRECTORY = path.join(FIXTURES_DIRECTORY, 'build', uuid()); 14 | 15 | // Ava's `serial` hook decorator needs to be used so that `useNewContainer` is 16 | // executed before the useLambda hooks are executed 17 | test.serial.before(async () => { 18 | const buildResults = await build({ 19 | entrypoint: path.join(FIXTURES_DIRECTORY, 'runtime_events.js'), 20 | outputPath: BUILD_DIRECTORY, 21 | serviceName: 'runtime-events' 22 | }); 23 | 24 | if (buildResults.hasErrors()) { 25 | console.error(buildResults.toJson().errors); 26 | throw new Error('Lambda build failed!'); 27 | } 28 | 29 | useNewContainer({ 30 | handler: 'runtime_events.handler', 31 | image: 'lambci/lambda:nodejs12.x', 
32 | mountpoint: BUILD_DIRECTORY 33 | }); 34 | }); 35 | 36 | useLambda(test); 37 | 38 | test.after.always(async (test) => fs.remove(BUILD_DIRECTORY)); 39 | 40 | async function testEventExecution (test, event) { 41 | const write = process.stdout.write; 42 | const buffer = new WriteBuffer(); 43 | process.stdout.write = buffer.write.bind(buffer); 44 | debug.enable('lambda-tools:*'); 45 | 46 | const context = {}; 47 | 48 | try { 49 | await test.context.lambda.raw(event, context); 50 | } finally { 51 | process.stdout.write = write; 52 | } 53 | 54 | test.notRegex(buffer.toString(), /AssertionError/); 55 | test.regex(buffer.toString(), /'beforeExit'/); 56 | } 57 | 58 | test.serial(`The lambda function logs process events`, async (test) => { 59 | await testEventExecution(test, {}); 60 | }); 61 | 62 | test.serial(`The lambda function logs process string events`, async (test) => { 63 | await testEventExecution(test, '{}'); 64 | }); 65 | 66 | test.serial(`Returns results when event is undefined`, async (test) => { 67 | const write = process.stdout.write; 68 | const buffer = new WriteBuffer(); 69 | process.stdout.write = buffer.write.bind(buffer); 70 | 71 | const context = {}; 72 | debug.enable('lambda-tools:*'); 73 | 74 | try { 75 | await test.context.lambda.raw(undefined, context); 76 | } finally { 77 | process.stdout.write = write; 78 | } 79 | 80 | test.notRegex(buffer.toString(), /AssertionError/); 81 | test.regex(buffer.toString(), /Unexpected token/); 82 | }); 83 | 84 | test.serial(`The lambda function can be invoked with a large event`, async test => { 85 | const write = process.stdout.write; 86 | const buffer = new WriteBuffer(); 87 | process.stdout.write = buffer.write.bind(buffer); 88 | 89 | const context = {}; 90 | const event = { 91 | someLargeValue: crypto.randomBytes(12584038).toString('base64') // 12584038 * 1.333 = 16777216 which is the max size. trying to send message larger than max (16780812 vs. 
16777216) 92 | }; 93 | debug.enable('lambda-tools:*'); 94 | 95 | try { 96 | await test.context.lambda.raw(event, context); 97 | } finally { 98 | process.stdout.write = write; 99 | } 100 | 101 | test.notRegex(buffer.toString(), /AssertionError/); 102 | test.regex(buffer.toString(), /'beforeExit'/); 103 | }); 104 | -------------------------------------------------------------------------------- /test/lambda/runtime-no-dns.test.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs-extra'); 2 | const path = require('path'); 3 | const test = require('ava'); 4 | const { v4: uuid } = require('uuid'); 5 | 6 | const { build, useNewContainer, useLambda } = require('../../src/lambda'); 7 | const { FIXTURES_DIRECTORY } = require('../helpers/lambda'); 8 | 9 | const BUILD_DIRECTORY = path.join(FIXTURES_DIRECTORY, 'build', uuid()); 10 | 11 | // Ava's `serial` hook decorator needs to be used so that `useNewContainer` is 12 | // executed before the useLambda hooks are executed 13 | test.serial.before(async () => { 14 | const buildResults = await build({ 15 | entrypoint: path.join(FIXTURES_DIRECTORY, 'runtime_dns.js'), 16 | outputPath: BUILD_DIRECTORY, 17 | serviceName: 'runtime-dns' 18 | }); 19 | 20 | if (buildResults.hasErrors()) { 21 | console.error(buildResults.toJson().errors); 22 | throw new Error('Lambda build failed!'); 23 | } 24 | 25 | useNewContainer({ 26 | handler: 'runtime_dns.handler', 27 | image: 'lambci/lambda:nodejs12.x', 28 | mountpoint: BUILD_DIRECTORY 29 | }); 30 | }); 31 | 32 | useLambda(test); 33 | 34 | test.after.always(async (test) => fs.remove(BUILD_DIRECTORY)); 35 | 36 | test.serial('DNS lookups are not retried by default', async (test) => { 37 | // The test assertions are part of the lambda fixture 38 | const result = await test.context.lambda.raw({}, {}); 39 | test.not(result, 'success!'); 40 | }); 41 | -------------------------------------------------------------------------------- 
/test/lambda/runtime-promises.test.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs-extra'); 2 | const path = require('path'); 3 | const test = require('ava'); 4 | const { v4: uuid } = require('uuid'); 5 | 6 | const { build, useNewContainer, useLambda } = require('../../src/lambda'); 7 | const { FIXTURES_DIRECTORY } = require('../helpers/lambda'); 8 | 9 | const BUILD_DIRECTORY = path.join(FIXTURES_DIRECTORY, 'build', uuid()); 10 | 11 | // Ava's `serial` hook decorator needs to be used so that `useNewContainer` is 12 | // executed before the useLambda hooks are executed 13 | test.serial.before(async () => { 14 | const buildResults = await build({ 15 | entrypoint: path.join(FIXTURES_DIRECTORY, 'runtime_promises.js'), 16 | outputPath: BUILD_DIRECTORY, 17 | serviceName: 'runtime-promises' 18 | }); 19 | 20 | if (buildResults.hasErrors()) { 21 | console.error(buildResults.toJson().errors); 22 | throw new Error('Lambda build failed!'); 23 | } 24 | 25 | useNewContainer({ 26 | handler: 'runtime_promises.handler', 27 | // Using Node 12.x gives a more thorough test since this isn't normally 28 | // supported. 
29 | image: 'lambci/lambda:nodejs12.x', 30 | mountpoint: BUILD_DIRECTORY 31 | }); 32 | }); 33 | 34 | useLambda(test); 35 | 36 | test.after.always(async (test) => fs.remove(BUILD_DIRECTORY)); 37 | 38 | test.serial(`A lambda handler can return a promise`, async (test) => { 39 | const result = await test.context.lambda.raw({}, {}); 40 | test.is(result, 'hello from the promised land!'); 41 | }); 42 | -------------------------------------------------------------------------------- /test/lambda/tools-compose-container-env.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | 3 | const { useLambdaContainer } = require('../helpers/lambda'); 4 | 5 | const containerConfig = { 6 | environment: { 7 | TEST_PARAMETER: 'a test value' 8 | } 9 | }; 10 | 11 | const LAMBDA_IMAGE = 'lambci/lambda:nodejs12.x'; 12 | 13 | useLambdaContainer(test, LAMBDA_IMAGE, { containerConfig }); 14 | 15 | test('Compose containers can use a custom environment', async (test) => { 16 | const response = await test.context.lambda.get('/'); 17 | test.is(response.status, 200); 18 | test.is(response.data.parameter, 'a test value'); 19 | }); 20 | -------------------------------------------------------------------------------- /test/lambda/tools-container-compose-network.test.js: -------------------------------------------------------------------------------- 1 | const Docker = require('dockerode'); 2 | const sinon = require('sinon'); 3 | const test = require('ava'); 4 | const { v4: uuid } = require('uuid'); 5 | 6 | const { useNewContainer, useLambda } = require('../../src/lambda'); 7 | const { FIXTURES_DIRECTORY } = require('../helpers/lambda'); 8 | 9 | const prefix = process.env.COMPOSE_PROJECT_NAME = uuid(); 10 | const networkName = `${prefix}_default`; 11 | 12 | test.serial.before(async t => { 13 | const docker = new Docker(); 14 | 15 | const createContainer = sinon.spy(Docker.prototype, 'createContainer'); 16 | 17 | const network = 
await docker.createNetwork({ 18 | Internal: true, 19 | Name: networkName 20 | }); 21 | Object.assign(t.context, { 22 | createContainer, 23 | network 24 | }); 25 | }); 26 | 27 | useLambda(test); 28 | 29 | useNewContainer({ 30 | handler: 'bundled_service.handler', 31 | mountpoint: FIXTURES_DIRECTORY, 32 | useComposeNetwork: true 33 | }); 34 | 35 | test.serial.after.always(async (test) => { 36 | const { createContainer, network } = test; 37 | if (createContainer) { 38 | createContainer.restore(); 39 | } 40 | if (network) { 41 | await network.remove(); 42 | } 43 | }); 44 | 45 | test('Managed containers can use a compose network', async (test) => { 46 | const { createContainer } = test.context; 47 | const response = await test.context.lambda.get('/'); 48 | test.is(response.status, 200); 49 | test.is(response.data.service, 'lambda-test'); 50 | 51 | sinon.assert.calledWithExactly( 52 | createContainer, 53 | sinon.match({ 54 | HostConfig: sinon.match({ 55 | NetworkMode: networkName 56 | }) 57 | }) 58 | ); 59 | }); 60 | -------------------------------------------------------------------------------- /test/lambda/tools-container-custom-image.test.js: -------------------------------------------------------------------------------- 1 | const Docker = require('dockerode'); 2 | const sinon = require('sinon'); 3 | const test = require('ava'); 4 | 5 | const { useNewContainer, useLambda } = require('../../src/lambda'); 6 | const { FIXTURES_DIRECTORY } = require('../helpers/lambda'); 7 | 8 | let createContainer = null; 9 | 10 | test.before((test) => { 11 | createContainer = sinon.spy(Docker.prototype, 'createContainer'); 12 | }); 13 | 14 | useLambda(test); 15 | 16 | useNewContainer({ 17 | handler: 'bundled_service.handler', 18 | image: 'lambci/lambda:nodejs12.x', 19 | mountpoint: FIXTURES_DIRECTORY 20 | }); 21 | 22 | test.after.always((test) => { 23 | createContainer.restore(); 24 | }); 25 | 26 | test('Managed containers can use a custom image', async (test) => { 27 | const response 
= await test.context.lambda.get('/'); 28 | test.is(response.status, 200); 29 | test.is(response.data.service, 'lambda-test'); 30 | 31 | sinon.assert.calledWithExactly( 32 | createContainer, 33 | sinon.match({ 34 | Image: 'lambci/lambda:nodejs12.x' 35 | }) 36 | ); 37 | }); 38 | -------------------------------------------------------------------------------- /test/lambda/tools-container-env.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | 3 | const { useNewContainer, useLambda } = require('../../src/lambda'); 4 | const { FIXTURES_DIRECTORY } = require('../helpers/lambda'); 5 | 6 | useLambda(test); 7 | 8 | useNewContainer({ 9 | environment: { 10 | 'OTHER_VARIABLE': 2, 11 | 'TEST_PARAMETER': 'test value' 12 | }, 13 | handler: 'bundled_service.handler', 14 | mountpoint: FIXTURES_DIRECTORY 15 | }); 16 | 17 | test('Managed containers can use a custom environment', async (test) => { 18 | const response = await test.context.lambda.get('/'); 19 | test.is(response.status, 200); 20 | test.is(response.data.parameter, 'test value'); 21 | }); 22 | -------------------------------------------------------------------------------- /test/lambda/tools-container-zipfile.test.js: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | const test = require('ava'); 3 | const unzip = require('unzipper'); 4 | const tmp = require('tmp-promise'); 5 | const sinon = require('sinon'); 6 | 7 | const { useNewContainer, useLambda, createLambdaExecutionEnvironment, destroyLambdaExecutionEnvironment } = require('../../src/lambda'); 8 | const { FIXTURES_DIRECTORY } = require('../helpers/lambda'); 9 | 10 | useLambda(test); 11 | 12 | useNewContainer({ 13 | handler: 'bundled_service.handler', 14 | zipfile: path.join(FIXTURES_DIRECTORY, 'bundled_service.zip') 15 | }); 16 | 17 | test('The helper client can create a new container', async (test) => { 18 | const response 
= await test.context.lambda.get('/'); 19 | test.is(response.status, 200); 20 | test.is(response.data.service, 'lambda-test'); 21 | }); 22 | 23 | test('will use mountpointParent as the directory for unzipping if provided', async (test) => { 24 | const tempWork = await tmp.dir({ dir: process.cwd(), prefix: '.mountpointParent-test-' }); 25 | try { 26 | // Spy on unzipper to make sure the temp path is used 27 | const extractSpy = sinon.spy(unzip, 'Extract'); 28 | 29 | const env = await createLambdaExecutionEnvironment({ 30 | environment: { AWS_XRAY_CONTEXT_MISSING: null }, 31 | zipfile: path.join(FIXTURES_DIRECTORY, 'bundled_service.zip'), 32 | mountpointParent: tempWork.path 33 | }); 34 | 35 | await destroyLambdaExecutionEnvironment(env); 36 | 37 | sinon.assert.calledWith(extractSpy, sinon.match({ 38 | path: sinon.match((path) => path.startsWith(tempWork.path)) 39 | })); 40 | } finally { 41 | await tempWork.cleanup(); 42 | } 43 | }); 44 | -------------------------------------------------------------------------------- /test/lambda/tools-container.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | 3 | const { useNewContainer, useLambda } = require('../../src/lambda'); 4 | 5 | const { FIXTURES_DIRECTORY } = require('../helpers/lambda'); 6 | 7 | useLambda(test); 8 | 9 | useNewContainer({ 10 | handler: 'bundled_service.handler', 11 | mountpoint: FIXTURES_DIRECTORY 12 | }); 13 | 14 | test('The helper client can create a new container', async (test) => { 15 | const response = await test.context.lambda.get('/'); 16 | test.is(response.status, 200); 17 | test.is(response.data.service, 'lambda-test'); 18 | }); 19 | -------------------------------------------------------------------------------- /test/localstack/externalDocker.test.ts: -------------------------------------------------------------------------------- 1 | import test from 'ava'; 2 | import Docker, { Container, ContainerInspectInfo } from 
'dockerode'; 3 | import { dockerLocalstackReady } from '../../src/localstack'; 4 | import { ensureImage } from '../../src/docker'; 5 | import { v4 as uuid } from 'uuid'; 6 | 7 | const LOCALSTACK_IMAGE = 'localstack/localstack'; 8 | 9 | const versions = [ 10 | '0.10.9', 11 | '0.11.6', 12 | '0.12.20', 13 | '0.13.2', 14 | '0.14.0' 15 | ] as const; 16 | const docker = new Docker(); 17 | 18 | // Keyed by localstack version tag; a bare `Record` is invalid TS (it requires two 19 | // type arguments) — the value shape matches the assignment in test.before below. 20 | const containers = {} as Record<typeof versions[number], { info: ContainerInspectInfo; container: Container }>; 21 | 22 | test.before(async () => { 23 | await Promise.all(versions.map(async (versionTag) => { 24 | const image = `${LOCALSTACK_IMAGE}:${versionTag}`; 25 | 26 | await ensureImage(docker, image); 27 | 28 | const container = await docker.createContainer({ 29 | HostConfig: { 30 | AutoRemove: true, 31 | PublishAllPorts: true, 32 | Binds: [ '/var/run/docker.sock:/var/run/docker.sock' ] 33 | }, 34 | Image: image, 35 | Env: [ 36 | `SERVICES=s3`, 37 | ] 38 | }); 39 | 40 | await container.start(); 41 | 42 | const info = await container.inspect(); 43 | containers[versionTag] = { info, container }; 44 | })); 45 | }); 46 | 47 | test.after(async () => { 48 | await Promise.all(versions.map(async (versionTag) => { 49 | const { container } = containers[versionTag]; 50 | if (container) { 51 | await container.stop(); 52 | } 53 | })); 54 | }); 55 | 56 | versions.forEach((versionTag) => { 57 | test.serial(`dockerLocalstackReady ${versionTag} by containerId`, async (t) => { 58 | const { container } = containers[versionTag]; 59 | await t.notThrowsAsync(dockerLocalstackReady({ containerId: container.id })); 60 | }); 61 | 62 | test.serial(`dockerLocalstackReady ${versionTag} by name`, async (t) => { 63 | const { info } = containers[versionTag]; 64 | await t.notThrowsAsync(dockerLocalstackReady({ name: info.Name })); 65 | }); 66 | 67 | test.serial(`dockerLocalstackReady ${versionTag} by image`, async (t) => { 68 | await t.notThrowsAsync(dockerLocalstackReady({ version: versionTag })); 69 | }); 70 | }); 71 | 72 | test.serial(`dockerLocalstackReady no matching images
provided`, async (t) => { 71 | await t.notThrowsAsync(dockerLocalstackReady({ name: uuid() })); 72 | }); 73 | 74 | test(`dockerLocalstackReady will throw an exception if missing parameters`, async (t) => { 75 | // @ts-expect-error this is to satisfy plain javascript, where the compiler won't complain. 76 | await t.throwsAsync(dockerLocalstackReady({}), { 77 | message: '\'containerId\', \'name\' or \'version\' is required' 78 | }); 79 | }); 80 | -------------------------------------------------------------------------------- /test/localstack/getConnection.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | const sinon = require('sinon'); 3 | const { v4: uuid } = require('uuid'); 4 | const random = require('lodash/random'); 5 | const proxyquire = require('proxyquire'); 6 | 7 | const { getLogger } = require('../../src/utils/logging'); 8 | const { getConnection, getService } = require('../../src/localstack'); 9 | 10 | test.beforeEach(t => { 11 | const logger = getLogger('localstack'); 12 | 13 | Object.assign(t.context, { logger }); 14 | }); 15 | 16 | test.afterEach(t => { 17 | const { logger } = t.context; 18 | if (logger.debug.restore) { 19 | logger.debug.restore(); 20 | } 21 | }); 22 | 23 | test('getConnection defaults to a default version', async t => { 24 | // Stub the docker module to throw errors when fetching images. 
25 | // This needs to happen before the localstack helper module is imported 26 | const docker = proxyquire('../../src/docker', {}); 27 | const error = new Error('Stubbed failure'); 28 | const ensureStub = sinon.stub(docker, 'ensureImage').rejects(error); 29 | 30 | const { getConnection, LOCALSTACK_SERVICES } = proxyquire('../../src/localstack', { 31 | './docker': docker 32 | }); 33 | 34 | const services = Object.keys(LOCALSTACK_SERVICES); 35 | const idx = random(0, services.length - 1); 36 | const serviceName = services[idx]; 37 | 38 | await t.throwsAsync(getConnection({ services: [serviceName] }), { instanceOf: Error, message: error.message }); 39 | sinon.assert.calledOnce(ensureStub); 40 | sinon.assert.calledWithExactly(ensureStub, sinon.match.any, 'localstack/localstack:0.12.4'); 41 | }); 42 | 43 | test('getConnection allows specifying the localstack version', async t => { 44 | // Stub the docker module to throw errors when fetching images. 45 | // This needs to happen before the localstack helper module is imported 46 | const docker = proxyquire('../../src/docker', {}); 47 | const error = new Error('Stubbed failure'); 48 | const ensureStub = sinon.stub(docker, 'ensureImage').rejects(error); 49 | 50 | const { getConnection, LOCALSTACK_SERVICES } = proxyquire('../../src/localstack', { 51 | './docker': docker 52 | }); 53 | const services = Object.keys(LOCALSTACK_SERVICES); 54 | const idx = random(0, services.length - 1); 55 | const serviceName = services[idx]; 56 | const versionTag = uuid(); 57 | 58 | await t.throwsAsync(getConnection({ versionTag, services: [serviceName] }), { instanceOf: Error, message: error.message }); 59 | sinon.assert.calledOnce(ensureStub); 60 | sinon.assert.calledWithExactly(ensureStub, sinon.match.any, `localstack/localstack:${versionTag}`); 61 | }); 62 | 63 | test('getConnection throws when missing services', async t => { 64 | const { getConnection } = require('../../src/localstack'); 65 | await t.throwsAsync(getConnection()); 66 | 
await t.throwsAsync(getConnection({ services: [] }), { message: 'No services provided' }); 67 | }); 68 | 69 | test('getConnection throws when specifying the latest tag', async t => { 70 | const { getConnection } = require('../../src/localstack'); 71 | await t.throwsAsync(getConnection({ versionTag: 'latest', services: ['sqs'] }), { message: 'We refuse to try to work with the latest tag' }); 72 | }); 73 | 74 | test('getConnection throws when invalid services are requested', async t => { 75 | const serviceName = uuid(); 76 | const { getConnection } = require('../../src/localstack'); 77 | await t.throwsAsync(getConnection({ services: [serviceName] }), { message: `Unknown service ${serviceName}` }); 78 | }); 79 | 80 | test.serial('will create a child log and debug the localstack setup', async t => { 81 | const { logger } = t.context; 82 | const logSpy = sinon.stub(logger, 'child'); 83 | let debugSpy; 84 | logSpy.callsFake(function ({ container } = {}) { 85 | t.not(container, null); 86 | const child = logSpy.wrappedMethod.apply(this, arguments); 87 | debugSpy = sinon.spy(child, 'debug'); 88 | return child; 89 | }); 90 | const { cleanup } = await getConnection({ services: [ 'lambda' ], versionTag: '0.10.6' }); 91 | await cleanup(); 92 | sinon.assert.called(logSpy); 93 | sinon.assert.called(debugSpy); 94 | sinon.assert.calledWith(debugSpy, sinon.match(/Ready\./)); 95 | }); 96 | 97 | test('will throw an exception when extracting a service that is unknown', t => { 98 | const serviceName = uuid(); 99 | t.throws(() => getService(serviceName), { message: `Unknown service ${serviceName}` }); 100 | }); 101 | -------------------------------------------------------------------------------- /test/localstack/lambdaSetup.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | const path = require('path'); 3 | const { v4: uuid } = require('uuid'); 4 | const fs = require('fs-extra'); 5 | const sinon = 
require('sinon'); 6 | 7 | const { KinesisIterator } = require('../../src/utils/kinesisTools'); 8 | 9 | const { createStreams, destroyStreams, streams } = require('../../src/kinesis'); 10 | const { useLocalStack } = require('../../src/localstack'); 11 | const { FIXTURES_DIRECTORY, buildLambda } = require('../helpers/lambda'); 12 | const streamNames = ['first-stream', 'second-stream']; 13 | streams(streamNames); 14 | 15 | useLocalStack(test, { services: ['lambda', 'kinesis'], versionTag: '0.12.4' }); 16 | const handlerName = 'ts_lambda_kinesisHandler'; 17 | 18 | const BUILD_DIRECTORY = path.join(FIXTURES_DIRECTORY, 'build', uuid()); 19 | 20 | const sleep = seconds => new Promise(resolve => setTimeout(resolve, seconds * 1000)); 21 | 22 | test.before(async () => { 23 | await buildLambda(BUILD_DIRECTORY, `${handlerName}.ts`, { zip: true }); 24 | }); 25 | 26 | test.serial.beforeEach(async t => { 27 | const { localStack: { services: { lambda, kinesis } } } = t.context; 28 | 29 | await createStreams(kinesis.client); 30 | 31 | await lambda.client.createFunction({ 32 | Code: { 33 | // eslint-disable-next-line security/detect-non-literal-fs-filename 34 | ZipFile: fs.readFileSync(path.join(BUILD_DIRECTORY, `${handlerName}.js.zip`)) 35 | }, 36 | FunctionName: handlerName, 37 | Runtime: 'nodejs12.x', 38 | Handler: `${handlerName}.handler`, 39 | MemorySize: 1024, 40 | Role: 'arn:aws:iam::123456789012:role/service-role/role-name', 41 | Publish: true, 42 | Environment: { 43 | Variables: { 44 | NEXT_KINESIS_STREAM_NAME: streamNames[1], 45 | KINESIS_ENDPOINT: kinesis.connection.url, 46 | AWS_SECRET_ACCESS_KEY: uuid(), 47 | AWS_ACCESS_KEY_ID: uuid() 48 | } 49 | } 50 | }).promise(); 51 | 52 | const { StreamDescription: { StreamARN } } = await kinesis.client.describeStream({ StreamName: streamNames[0] }).promise(); 53 | 54 | await lambda.client.createEventSourceMapping({ 55 | EventSourceArn: StreamARN, 56 | FunctionName: handlerName, 57 | BatchSize: 10, 58 | Enabled: true, 59 | 
StartingPosition: 'TRIM_HORIZON' 60 | }).promise(); 61 | }); 62 | 63 | test.afterEach(async t => { 64 | const { localStack: { services: { lambda, kinesis } } } = t.context; 65 | await destroyStreams(kinesis.client); 66 | await lambda.client.deleteFunction({ FunctionName: handlerName }).promise(); 67 | }); 68 | 69 | test.after.always(async t => { 70 | await fs.remove(BUILD_DIRECTORY); 71 | }); 72 | 73 | function formatRecords (StreamName, records) { 74 | return { 75 | Records: records.map(record => ({ 76 | Data: Buffer.from(JSON.stringify(record)), 77 | PartitionKey: uuid() 78 | })), 79 | StreamName 80 | }; 81 | } 82 | 83 | test.serial('can iterate through stream to handler', async t => { 84 | const { localStack: { services: { kinesis: { client: kinesisClient } } } } = t.context; 85 | const expected = [...Array(20)].map(() => ({ key: uuid() })); 86 | 87 | await kinesisClient.putRecords(formatRecords(streamNames[0], expected)).promise(); 88 | const secondIterator = await KinesisIterator.newIterator({ kinesisClient, streamName: streamNames[1] }); 89 | 90 | let records = []; 91 | let attempts = 0; 92 | while (records.length === 0 && attempts++ < 20) { 93 | await secondIterator.next(); 94 | records = secondIterator.records; 95 | await sleep(1); 96 | } 97 | 98 | const actual = await records.map(({ Data }) => { 99 | const base64 = Buffer.from(Data, 'base64'); 100 | const utf8 = base64.toString('utf8'); 101 | return JSON.parse(utf8); 102 | }); 103 | sinon.assert.match(actual, expected); 104 | }); 105 | -------------------------------------------------------------------------------- /test/localstack/localStackHooks.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | const sinon = require('sinon'); 3 | 4 | test('The afterAll hook handles errors in the beforeAll hook gracefully', async t => { 5 | // Stub the docker module to throw errors when fetching images. 
6 | // This needs to happen before the localstack helper module is imported 7 | const docker = require('../../src/docker'); 8 | const error = new Error('Stubbed failure'); 9 | const ensureStub = sinon.stub(docker, 'ensureImage') 10 | .rejects(error); 11 | 12 | const { localStackHooks } = require('../../src/localstack'); 13 | const { afterAll, beforeAll } = localStackHooks({ services: ['es'] }); 14 | 15 | try { 16 | await t.throwsAsync(beforeAll, { instanceOf: Error, message: error.message }); 17 | await t.notThrowsAsync(afterAll()); 18 | } finally { 19 | ensureStub.restore(); 20 | } 21 | }); 22 | 23 | test('localStackHooks throws when missing services', t => { 24 | const { localStackHooks } = require('../../src/localstack'); 25 | t.throws(localStackHooks); 26 | }); 27 | -------------------------------------------------------------------------------- /test/localstack/localstackNameTags.test.ts: -------------------------------------------------------------------------------- 1 | import anyTest, { TestInterface } from 'ava'; 2 | import { getConnection } from '../../src/localstack'; 3 | 4 | const test = anyTest as TestInterface<{ cleanup?: () => Promise<void> }>; 5 | 6 | test.afterEach(async (t) => { 7 | if (t.context.cleanup) { 8 | await t.context.cleanup(); 9 | } 10 | }); 11 | 12 | [ 13 | 'light', 14 | 'full', 15 | undefined, 16 | ].forEach((nameExtension) => { 17 | const nameTag = nameExtension as 'full' | 'light' | undefined; 18 | test.serial(`will use docker tag localstack/localstack${nameTag ? 
`-${nameTag}` : ''}:0.14.0`, async (t) => { 19 | const { mappedServices, cleanup } = await getConnection({ services: ['s3'], versionTag: '0.14.0', nameTag }); 20 | const { isReady, client } = mappedServices.s3; 21 | t.context.cleanup = cleanup; 22 | await t.notThrowsAsync(isReady(client)); 23 | }); 24 | }); 25 | 26 | -------------------------------------------------------------------------------- /test/localstack/useLocalStack.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | 3 | const { useLocalStack, LOCALSTACK_SERVICES } = require('../../src/localstack'); 4 | 5 | const services = Object.keys(LOCALSTACK_SERVICES); 6 | const serviceName = 'lambda'; 7 | 8 | useLocalStack(test, { services: [serviceName] }); 9 | 10 | test(`${serviceName} should be available`, async t => { 11 | const { localStack: { services } } = t.context; 12 | const service = services[serviceName]; 13 | await t.notThrowsAsync(service.isReady(service.client)); 14 | }); 15 | 16 | services.forEach(nextServiceName => { 17 | if (nextServiceName === serviceName) { 18 | return; 19 | } 20 | 21 | test(`${nextServiceName} should not be listed in the services`, t => { 22 | const { localStack: { services } } = t.context; 23 | const service = services[nextServiceName]; 24 | t.is(service, undefined); 25 | }); 26 | }); 27 | 28 | test('will error when missing services', t => { 29 | t.throws(() => useLocalStack(test, {})); 30 | t.throws(() => useLocalStack(test)); 31 | }); 32 | 33 | test.serial('will return the output from localstack', t => { 34 | const { localStack: { getOutput } } = t.context; 35 | t.true(getOutput().includes('\nReady.')); 36 | }); 37 | 38 | test.serial('can reset the logs', async t => { 39 | const { localStack: { getOutput, clearOutput } } = t.context; 40 | t.not(getOutput().length, 0); 41 | clearOutput(); 42 | t.is(getOutput().length, 0); 43 | }); 44 | 
-------------------------------------------------------------------------------- /test/localstack/version10Status.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | 3 | const { LOCALSTACK_SERVICES, getConnection } = require('../../src/localstack'); 4 | const services = Object.keys(LOCALSTACK_SERVICES).filter((service) => service !== 'elasticsearch'); 5 | 6 | test.before(async t => { 7 | const { mappedServices, cleanup } = await getConnection({ services, versionTag: '0.10.9' }); 8 | Object.assign(t.context, { mappedServices, cleanup }); 9 | }); 10 | 11 | test.after.always(async t => { 12 | const { cleanup } = t.context; 13 | if (cleanup) { 14 | await cleanup(); 15 | } 16 | }); 17 | 18 | services.forEach(serviceName => { 19 | if (serviceName === 'events') { 20 | return; 21 | } 22 | test(`${serviceName} should be available`, async t => { 23 | const { mappedServices } = t.context; 24 | const service = mappedServices[serviceName]; 25 | await t.notThrowsAsync(service.isReady(service.client)); 26 | }); 27 | 28 | // It appears this is necessary to get code coverage. 
29 | test(`${serviceName} can configure a valid client`, async t => { 30 | const { mappedServices } = t.context; 31 | const { config, connection } = mappedServices[serviceName]; 32 | const client = LOCALSTACK_SERVICES[serviceName].getClient({ config, connection }); 33 | await t.notThrowsAsync(LOCALSTACK_SERVICES[serviceName].isReady(client)); 34 | }); 35 | }); 36 | -------------------------------------------------------------------------------- /test/localstack/version11Status.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | 3 | const { LOCALSTACK_SERVICES, getConnection } = require('../../src/localstack'); 4 | const services = Object.keys(LOCALSTACK_SERVICES); 5 | 6 | test.before(async t => { 7 | const { mappedServices, cleanup } = await getConnection({ services, versionTag: '0.11.6' }); 8 | Object.assign(t.context, { mappedServices, cleanup }); 9 | }); 10 | 11 | test.after.always(async t => { 12 | const { cleanup } = t.context; 13 | if (cleanup) { 14 | await cleanup(); 15 | } 16 | }); 17 | 18 | services.forEach(serviceName => { 19 | test(`${serviceName} should be available`, async t => { 20 | const { mappedServices } = t.context; 21 | const service = mappedServices[serviceName]; 22 | await t.notThrowsAsync(service.isReady(service.client)); 23 | }); 24 | 25 | // It appears this is necessary to get code coverage. 
26 | test(`${serviceName} can configure a valid client`, async t => { 27 | const { mappedServices } = t.context; 28 | const { config, connection } = mappedServices[serviceName]; 29 | const client = LOCALSTACK_SERVICES[serviceName].getClient({ config, connection }); 30 | await t.notThrowsAsync(LOCALSTACK_SERVICES[serviceName].isReady(client)); 31 | }); 32 | }); 33 | -------------------------------------------------------------------------------- /test/localstack/version12Status.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | 3 | const { LOCALSTACK_SERVICES, getConnection, waitForServicesToBeReady } = require('../../src/localstack'); 4 | const services = Object.keys(LOCALSTACK_SERVICES); 5 | 6 | test.before(async t => { 7 | const { mappedServices, cleanup } = await getConnection({ services, versionTag: '0.12.20' }); 8 | Object.assign(t.context, { mappedServices, cleanup }); 9 | }); 10 | 11 | test.after.always(async t => { 12 | const { cleanup } = t.context; 13 | if (cleanup) { 14 | await cleanup(); 15 | } 16 | }); 17 | 18 | services.forEach(serviceName => { 19 | test(`${serviceName} should be available`, async t => { 20 | const { mappedServices } = t.context; 21 | const service = mappedServices[serviceName]; 22 | await t.notThrowsAsync(service.isReady(service.client)); 23 | }); 24 | 25 | // It appears this is necessary to get code coverage. 
26 | test(`${serviceName} can configure a valid client`, async t => { 27 | const { mappedServices } = t.context; 28 | const { config, connection } = mappedServices[serviceName]; 29 | const client = LOCALSTACK_SERVICES[serviceName].getClient({ config, connection }); 30 | await t.notThrowsAsync(LOCALSTACK_SERVICES[serviceName].isReady(client)); 31 | }); 32 | }); 33 | 34 | test.serial('waitForServicesToBeReady', async t => { 35 | const { mappedServices } = t.context; 36 | const servicesConfigs = Object.keys(mappedServices).reduce((acc, serviceName) => ({ 37 | ...acc, 38 | [serviceName]: { 39 | url: mappedServices[serviceName].connection.url 40 | } 41 | }), {}); 42 | await t.notThrowsAsync(waitForServicesToBeReady(servicesConfigs)); 43 | }); 44 | -------------------------------------------------------------------------------- /test/localstack/version13Status.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | 3 | const { 4 | LOCALSTACK_SERVICES, 5 | getConnection, 6 | waitForServicesToBeReady 7 | } = require('../../src/localstack'); 8 | const services = Object.keys(LOCALSTACK_SERVICES); 9 | 10 | test.before(async t => { 11 | const { mappedServices, cleanup } = await getConnection({ services, versionTag: '0.13.3' }); 12 | Object.assign(t.context, { mappedServices, cleanup }); 13 | }); 14 | 15 | test.after.always(async t => { 16 | const { cleanup } = t.context; 17 | if (cleanup) { 18 | await cleanup(); 19 | } 20 | }); 21 | 22 | services.forEach(serviceName => { 23 | test.serial(`${serviceName} should be available`, async t => { 24 | const { mappedServices } = t.context; 25 | const service = mappedServices[serviceName]; 26 | await t.notThrowsAsync(service.isReady(service.client)); 27 | }); 28 | 29 | // It appears this is necessary to get code coverage. 
30 | test.serial(`${serviceName} can configure a valid client`, async t => { 31 | const { mappedServices } = t.context; 32 | const { config, connection } = mappedServices[serviceName]; 33 | const client = LOCALSTACK_SERVICES[serviceName].getClient({ config, connection }); 34 | await t.notThrowsAsync(LOCALSTACK_SERVICES[serviceName].isReady(client)); 35 | }); 36 | }); 37 | 38 | test.serial('waitForServicesToBeReady', async t => { 39 | const { mappedServices } = t.context; 40 | const servicesConfigs = Object.keys(mappedServices).reduce((acc, serviceName) => ({ 41 | ...acc, 42 | [serviceName]: { 43 | url: mappedServices[serviceName].connection.url 44 | } 45 | }), {}); 46 | await t.notThrowsAsync(waitForServicesToBeReady(servicesConfigs)); 47 | }); 48 | -------------------------------------------------------------------------------- /test/localstack/version14Status.test.ts: -------------------------------------------------------------------------------- 1 | import anyTest, { TestInterface } from 'ava'; 2 | 3 | import { 4 | LOCALSTACK_SERVICES, 5 | getConnection, 6 | waitForServicesToBeReady, 7 | LocalStackServices, 8 | LocalStackService, 9 | } from '../../src/localstack'; 10 | const services = Object.keys(LOCALSTACK_SERVICES) as (keyof LocalStackServices)[]; 11 | 12 | const test = anyTest as TestInterface<{ cleanup?: () => Promise<void>; mappedServices: Record<keyof LocalStackServices, LocalStackService> }> 13 | 14 | test.before(async t => { 15 | const { mappedServices, cleanup } = await getConnection({ services, versionTag: '0.14.0' }); 16 | Object.assign(t.context, { mappedServices, cleanup }); 17 | }); 18 | 19 | test.after.always(async t => { 20 | const { cleanup } = t.context; 21 | if (cleanup) { 22 | await cleanup(); 23 | } 24 | }); 25 | 26 | services.forEach((serviceName) => { 27 | test(`${serviceName} should be available`, async (t) => { 28 | const { mappedServices } = t.context; 29 | const service = mappedServices[serviceName]; 30 | await t.notThrowsAsync(service.isReady(service.client)); 31 | }); 32 | 33 | // 
It appears this is necessary to get code coverage. 34 | test(`${serviceName} can configure a valid client`, async (t) => { 35 | const { mappedServices } = t.context; 36 | const { config, connection } = mappedServices[serviceName]; 37 | const client = LOCALSTACK_SERVICES[serviceName].getClient({ config, connection }); 38 | await t.notThrowsAsync(LOCALSTACK_SERVICES[serviceName].isReady(client)); 39 | }); 40 | }); 41 | 42 | test.serial('waitForServicesToBeReady', async (t) => { 43 | const { mappedServices } = t.context; 44 | const servicesConfigs = Object.keys(mappedServices).reduce((acc, serviceName) => ({ 45 | ...acc, 46 | [serviceName]: { 47 | url: mappedServices[serviceName as keyof LocalStackServices].connection.url 48 | } 49 | }), {}); 50 | await t.notThrowsAsync(waitForServicesToBeReady(servicesConfigs)); 51 | }); 52 | -------------------------------------------------------------------------------- /test/mockServerLambda.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | const Docker = require('dockerode'); 3 | const AWS = require('aws-sdk'); 4 | const promiseRetry = require('promise-retry'); 5 | const NestedError = require('nested-error-stacks'); 6 | const { v4: uuid } = require('uuid'); 7 | 8 | const { mockInvocation, verifyInvocation } = require('../src').mockServerLambda; 9 | 10 | const { mockServerClient } = require('mockserver-client'); 11 | 12 | const { getHostAddress, ensureImage } = require('../src/docker'); 13 | 14 | const MOCKSERVER_IMAGE = 'jamesdbloom/mockserver:mockserver-5.5.1'; 15 | 16 | async function waitForMockServerToBeReady (mockServerClient) { 17 | await promiseRetry(async function (retry, retryNumber) { 18 | try { 19 | await mockServerClient.retrieveActiveExpectations() 20 | .then( 21 | (success) => Promise.resolve(success), 22 | (error) => { throw error; } 23 | ); 24 | } catch (error) { 25 | retry(new NestedError(`MockServer is still not ready after 
${retryNumber} connection attempts`, error)); 26 | } 27 | }, { factor: 1, minTimeout: 100, retries: 1000 }); 28 | } 29 | 30 | let moduleContext; 31 | 32 | test.before(async () => { 33 | const docker = new Docker(); 34 | 35 | await ensureImage(docker, MOCKSERVER_IMAGE); 36 | 37 | const exposedPort = '1080/tcp'; 38 | const container = await docker.createContainer({ 39 | HostConfig: { 40 | AutoRemove: true, 41 | PublishAllPorts: true 42 | }, 43 | ExposedPorts: { [exposedPort]: {} }, 44 | Image: MOCKSERVER_IMAGE 45 | }); 46 | 47 | await container.start(); 48 | 49 | const containerData = await container.inspect(); 50 | const host = await getHostAddress(); 51 | 52 | // The `exposedPort` value is a constant in this function. That's not 53 | // a security risk 54 | // eslint-disable-next-line security/detect-object-injection 55 | const port = containerData.NetworkSettings.Ports[exposedPort][0].HostPort; 56 | 57 | const msClient = mockServerClient(host, port); 58 | await waitForMockServerToBeReady(msClient); 59 | 60 | moduleContext = { host, port, mockServerClient: msClient, container }; 61 | }); 62 | 63 | test.beforeEach(async (test) => { 64 | const { host, port } = moduleContext; 65 | const lambda = new AWS.Lambda({ 66 | credentials: new AWS.Credentials('dummy-access-key', 'dummy-key-secret'), 67 | region: 'us-east-1', 68 | endpoint: `http://${host}:${port}/lambda` 69 | }); 70 | 71 | test.context = { 72 | lambda, 73 | mockServerClient: moduleContext.mockServerClient 74 | }; 75 | }); 76 | 77 | test.after.always(async (test) => { 78 | if (moduleContext) { 79 | await moduleContext.container.stop(); 80 | } 81 | }); 82 | 83 | test('Lambda function invocations can be mocked', async (test) => { 84 | const { lambda, mockServerClient } = test.context; 85 | 86 | const functionName = `test-${uuid()}`; 87 | const expectedResponse = { response: 'result' }; 88 | const expectedRequestBody = { test: 'value' }; 89 | 90 | // Verify that invocations fail before mocking 91 | const 
preMockInvoke = lambda.invoke({ 92 | FunctionName: functionName, 93 | Payload: JSON.stringify(expectedRequestBody) 94 | }).promise(); 95 | await test.throwsAsync(() => preMockInvoke); 96 | 97 | await mockInvocation(mockServerClient, functionName, expectedResponse, expectedRequestBody); 98 | 99 | // Verify that invocations succeed after mocking 100 | const response = await lambda.invoke({ 101 | FunctionName: functionName, 102 | Payload: JSON.stringify(expectedRequestBody) 103 | }).promise(); 104 | 105 | test.deepEqual(response, { 106 | StatusCode: 200, 107 | Payload: JSON.stringify(expectedResponse) 108 | }); 109 | }); 110 | 111 | test('Lambda function invocations mocking supports times argument (times 1)', async (test) => { 112 | const { lambda, mockServerClient } = test.context; 113 | 114 | const functionName = `test-${uuid()}`; 115 | const expectedResponse = { response: 'result' }; 116 | const expectedRequestBody = { test: 'value' }; 117 | 118 | // Verify that invocations fail before mocking 119 | const preMockInvoke = lambda.invoke({ 120 | FunctionName: functionName, 121 | Payload: JSON.stringify(expectedRequestBody) 122 | }).promise(); 123 | await test.throwsAsync(() => preMockInvoke); 124 | 125 | await mockInvocation(mockServerClient, functionName, expectedResponse, expectedRequestBody, 1); 126 | 127 | // Verify that first invocation succeeds after mocking 128 | const response = await lambda.invoke({ 129 | FunctionName: functionName, 130 | Payload: JSON.stringify(expectedRequestBody) 131 | }).promise(); 132 | 133 | test.deepEqual(response, { 134 | StatusCode: 200, 135 | Payload: JSON.stringify(expectedResponse) 136 | }); 137 | 138 | // Verify that subsequent invocations fail 139 | const postMockInvoke = lambda.invoke({ 140 | FunctionName: functionName, 141 | Payload: JSON.stringify(expectedRequestBody) 142 | }).promise(); 143 | await test.throwsAsync(() => postMockInvoke); 144 | }); 145 | 146 | test('Lambda function invocations mocking supports times argument 
(unlimited times)', async (test) => { 147 | const { lambda, mockServerClient } = test.context; 148 | 149 | const functionName = `test-${uuid()}`; 150 | const expectedResponse = { response: 'result' }; 151 | const expectedRequestBody = { test: 'value' }; 152 | 153 | // Verify that invocations fail before mocking 154 | const preMockInvoke = lambda.invoke({ 155 | FunctionName: functionName, 156 | Payload: JSON.stringify(expectedRequestBody) 157 | }).promise(); 158 | await test.throwsAsync(() => preMockInvoke); 159 | 160 | await mockInvocation(mockServerClient, functionName, expectedResponse, expectedRequestBody); 161 | // verify that multiple invocations don't fail if times argument is not provided when mocking 162 | await lambda.invoke({ 163 | FunctionName: functionName, 164 | Payload: JSON.stringify(expectedRequestBody) 165 | }).promise(); 166 | const response = await lambda.invoke({ 167 | FunctionName: functionName, 168 | Payload: JSON.stringify(expectedRequestBody) 169 | }).promise(); 170 | 171 | test.deepEqual(response, { 172 | StatusCode: 200, 173 | Payload: JSON.stringify(expectedResponse) 174 | }); 175 | }); 176 | 177 | test('Lambda function invocations can be mocked without specifying the request body', async (test) => { 178 | const { lambda, mockServerClient } = test.context; 179 | 180 | const functionName = `test-${uuid()}`; 181 | const expectedResponse = { response: 'result' }; 182 | const expectedRequestBody = { test: 'value' }; 183 | 184 | await mockInvocation(mockServerClient, functionName, expectedResponse); 185 | 186 | // Verify that invocations succeed after mocking 187 | const response = await lambda.invoke({ 188 | FunctionName: functionName, 189 | Payload: JSON.stringify(expectedRequestBody) 190 | }).promise(); 191 | 192 | test.deepEqual(response, { 193 | StatusCode: 200, 194 | Payload: JSON.stringify(expectedResponse) 195 | }); 196 | }); 197 | 198 | test('Lambda function invocations can be verified', async (test) => { 199 | const { lambda, 
mockServerClient } = test.context; 200 | 201 | const functionName = `test-${uuid()}`; 202 | const expectedResponse = { verifyResponse: 'result' }; 203 | const expectedRequestBody = { verifyRequest: 'value' }; 204 | 205 | await mockInvocation(mockServerClient, functionName, expectedResponse, expectedRequestBody); 206 | 207 | // Verifying no invocations should succeed 208 | await verifyInvocation(mockServerClient, functionName, expectedRequestBody, 0); 209 | // Verifying one invocation should fail 210 | try { 211 | await verifyInvocation(mockServerClient, functionName, expectedRequestBody, 1); 212 | throw new Error('Verification should have thrown an error'); 213 | } catch (error) { 214 | // mockClientServer throws `string` type errors instead of real Errors. 215 | // Ava's `throwsAsync` will fail if the function throws a non-Error, so the 216 | // contents needs to be manually asserted. 217 | test.regex(error, /Request not found exactly once/); 218 | } 219 | 220 | // Invoke so that we can retest verify after invocation 221 | await lambda.invoke({ 222 | FunctionName: functionName, 223 | Payload: JSON.stringify(expectedRequestBody) 224 | }).promise(); 225 | 226 | // Verifying one invocation should succeed now 227 | await verifyInvocation(mockServerClient, functionName, expectedRequestBody, 1); 228 | }); 229 | -------------------------------------------------------------------------------- /test/utils/config.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | const random = require('lodash/random'); 3 | const proxyquire = require('proxyquire').noPreserveCache(); 4 | 5 | test.serial('will default to max concurrency', t => { 6 | const { pQueue } = proxyquire('../../src/utils/config', {}); 7 | t.is(pQueue._concurrency, Number.POSITIVE_INFINITY); 8 | }); 9 | 10 | test.serial('can set the concurrency', t => { 11 | const concurrency = random(1, Number.MAX_SAFE_INTEGER); 12 | 
process.env.LAMBDA_TOOLS_CONCURRENCY = `${concurrency}`; 13 | const { pQueue } = proxyquire('../../src/utils/config', {}); 14 | t.is(pQueue._concurrency, concurrency); 15 | }); 16 | -------------------------------------------------------------------------------- /test/utils/kinesisTools.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | const path = require('path'); 3 | const { v4: uuid } = require('uuid'); 4 | const fs = require('fs-extra'); 5 | const sinon = require('sinon'); 6 | 7 | const { kinesisLambdaTrigger, KinesisIterator, getStreamRecords } = require('../../src/utils/kinesisTools'); 8 | const { useKinesisDocker, streams } = require('../../src/kinesis'); 9 | const { useLocalStack } = require('../../src/localstack'); 10 | const { FIXTURES_DIRECTORY, buildLambda } = require('../helpers/lambda'); 11 | 12 | streams(['first-stream', 'second-stream']); 13 | useKinesisDocker(test); 14 | useLocalStack(test, { services: ['lambda'], versionTag: '0.12.4' }); 15 | 16 | const handlerName = 'ts_lambda_kinesisHandler'; 17 | const BUILD_DIRECTORY = path.join(FIXTURES_DIRECTORY, 'build', uuid()); 18 | 19 | // Ava's `serial` hook decorator needs to be used so that `useNewContainer` is 20 | // executed before the useLambda hooks are executed 21 | test.serial.before(async () => { 22 | await buildLambda(BUILD_DIRECTORY, `${handlerName}.ts`, { zip: true }); 23 | }); 24 | 25 | test.serial.beforeEach(async t => { 26 | const { kinesis: { streamNames }, localStack: { services: { lambda } } } = t.context; 27 | const secondStream = streamNames['second-stream']; 28 | 29 | await lambda.client.createFunction({ 30 | Code: { 31 | // eslint-disable-next-line security/detect-non-literal-fs-filename 32 | ZipFile: fs.readFileSync(path.join(BUILD_DIRECTORY, `${handlerName}.js.zip`)) 33 | }, 34 | FunctionName: handlerName, 35 | Runtime: 'nodejs12.x', 36 | Handler: `${handlerName}.handler`, 37 | MemorySize: 1024, 38 | 
Role: 'arn:aws:iam::123456789012:role/service-role/role-name', 39 | Publish: true, 40 | Environment: { 41 | Variables: { 42 | NEXT_KINESIS_STREAM_NAME: secondStream, 43 | KINESIS_ENDPOINT: process.env.KINESIS_ENDPOINT, 44 | AWS_SECRET_ACCESS_KEY: uuid(), 45 | AWS_ACCESS_KEY_ID: uuid() 46 | } 47 | } 48 | }).promise(); 49 | 50 | Object.assign(t.context, { 51 | firstStream: streamNames['first-stream'], 52 | secondStream 53 | }); 54 | }); 55 | 56 | test.afterEach.always(async t => { 57 | const { localStack: { services: { lambda } } } = t.context; 58 | await lambda.client.deleteFunction({ FunctionName: handlerName }).promise(); 59 | }); 60 | 61 | test.after.always(async t => { 62 | await fs.remove(BUILD_DIRECTORY); 63 | }); 64 | 65 | function formatRecords (StreamName, records) { 66 | return { 67 | Records: records.map(record => ({ 68 | Data: Buffer.from(JSON.stringify(record)), 69 | PartitionKey: uuid() 70 | })), 71 | StreamName 72 | }; 73 | } 74 | 75 | test.serial('can iterate through stream to handler', async t => { 76 | const { kinesis: { kinesisClient }, firstStream, secondStream, localStack: { services: { lambda: { client } } } } = t.context; 77 | const expected = [...Array(20)].map(() => ({ key: uuid() })); 78 | 79 | await kinesisClient.putRecords(formatRecords(firstStream, expected)).promise(); 80 | const firstIterator = await KinesisIterator.newIterator({ kinesisClient, streamName: firstStream }); 81 | const secondIterator = new KinesisIterator({ kinesisClient, streamName: secondStream }); 82 | 83 | await kinesisLambdaTrigger({ 84 | kinesisIterator: firstIterator, 85 | lambdaHandler: async (event) => client.invoke({ 86 | FunctionName: handlerName, 87 | InvocationType: 'RequestResponse', 88 | Payload: Buffer.from(JSON.stringify(event)) 89 | }).promise() 90 | }); 91 | await secondIterator.next(); 92 | const actual = await secondIterator.records.map(({ Data }) => { 93 | const base64 = Buffer.from(Data, 'base64'); 94 | const utf8 = base64.toString('utf8'); 95 | 
return JSON.parse(utf8); 96 | }); 97 | sinon.assert.match(actual, expected); 98 | }); 99 | 100 | test.serial('can get stream records using getStreamRecords function', async t => { 101 | const { kinesis: { kinesisClient }, firstStream } = t.context; 102 | const expected = [...Array(20)].map(() => ({ key: uuid() })); 103 | 104 | await kinesisClient.putRecords(formatRecords(firstStream, expected)).promise(); 105 | const records = await getStreamRecords({ kinesisClient, streamName: firstStream }); 106 | 107 | const actual = records.map(({ Data }) => { 108 | const base64 = Buffer.from(Data, 'base64'); 109 | const utf8 = base64.toString('utf8'); 110 | return JSON.parse(utf8); 111 | }); 112 | sinon.assert.match(actual, expected); 113 | }); 114 | 115 | test.serial('can access the response from getRecords', async t => { 116 | const { kinesis: { kinesisClient }, firstStream } = t.context; 117 | const firstIterator = await KinesisIterator.newIterator({ kinesisClient, streamName: firstStream }); 118 | await firstIterator.next(); 119 | t.not(firstIterator.response, undefined); 120 | }); 121 | -------------------------------------------------------------------------------- /test/utils/logging.test.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | const proxyquire = require('proxyquire').noPreserveCache(); 3 | const debug = require('debug'); 4 | 5 | function validateLogLevelsEnabled (t, logger, debugEnabled = false) { 6 | t.is(logger.info.enabled, true); 7 | t.is(logger.error.enabled, true); 8 | t.is(logger.warn.enabled, true); 9 | t.is(logger.debug.enabled, debugEnabled); 10 | } 11 | 12 | function requireLogging (debugSetting = '') { 13 | process.env.DEBUG = debugSetting; 14 | debug.enable(debug.load()); 15 | return proxyquire('../../src/utils/logging', {}); 16 | } 17 | 18 | test.serial('will default debug to disabled', t => { 19 | const name = 'testLib'; 20 | const { getLogger } = requireLogging(); 21 | 
const logger = getLogger(name); 22 | validateLogLevelsEnabled(t, logger); 23 | }); 24 | 25 | test.serial('Multiple calls will return the same object', t => { 26 | const name = 'testLib'; 27 | const { getLogger } = requireLogging(); 28 | const logger = getLogger(name); 29 | validateLogLevelsEnabled(t, logger); 30 | t.is(getLogger(name), logger); 31 | }); 32 | 33 | test.serial('can enable debug logs', t => { 34 | const name = 'testLib'; 35 | const { getLogger } = requireLogging(`lambda-tools:${name}`); 36 | const logger = getLogger(name); 37 | validateLogLevelsEnabled(t, logger, true); 38 | }); 39 | 40 | test.serial('can get child logger', t => { 41 | const name = 'testLib'; 42 | const { getLogger } = requireLogging(`lambda-tools:${name}`); 43 | const logger = getLogger(name); 44 | const aChild = logger.child('aChild'); 45 | validateLogLevelsEnabled(t, aChild, true); 46 | t.is(aChild.info.namespace, `lambda-tools:${name}:aChild:info`); 47 | 48 | const bChild = aChild.child('bChild'); 49 | validateLogLevelsEnabled(t, bChild, true); 50 | t.is(bChild.info.namespace, `lambda-tools:${name}:aChild:bChild:info`); 51 | }); 52 | 53 | [ 54 | 'lambda-tools:*', 55 | '*' 56 | ].forEach(debugValue => { 57 | test.serial(`can all log levels to debug using ${debugValue}`, t => { 58 | const name = 'testLib'; 59 | const { getLogger } = requireLogging(debugValue); 60 | const logger = getLogger(name); 61 | validateLogLevelsEnabled(t, logger, true); 62 | }); 63 | }); 64 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "module": "commonjs", 4 | "noUnusedLocals": true, 5 | "allowSyntheticDefaultImports": true, 6 | "esModuleInterop": true, 7 | "noImplicitAny": true, 8 | "noImplicitThis": true, 9 | "alwaysStrict": true, 10 | "strictNullChecks": true, 11 | "strictFunctionTypes": true, 12 | "moduleResolution": "node", 13 | 
"target": "es2017", 14 | "lib": ["es2020", "esnext.asynciterable"], 15 | "pretty": true, 16 | "inlineSourceMap": true, 17 | "inlineSources": true, 18 | "resolveJsonModule": true, 19 | "rootDir": ".", 20 | "skipLibCheck": true, 21 | "declaration": true, 22 | "outDir": "./dist", 23 | "allowJs": true, 24 | }, 25 | "include": ["src/**/*", "bin/*"], 26 | "exclude": ["node_modules", "src/patches/*.js"] 27 | } 28 | --------------------------------------------------------------------------------