├── .gitignore ├── index.js ├── bin ├── postgres-16.3 │ ├── pg_dump │ └── libpq.so.5 └── makezip.sh ├── .editorconfig ├── test ├── .eslintrc ├── utils.js ├── iam.js ├── encryption.js ├── secrets-manager.js ├── pgdump.js └── handler.js ├── lib ├── config.js ├── iam.js ├── utils.js ├── parseDatabaseNames.js ├── upload-s3.js ├── encryption.js ├── secrets-manager.js ├── handler.js └── pgdump.js ├── .github └── workflows │ └── node.js.yml ├── .eslintrc ├── LICENSE ├── package.json └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | node_modules 3 | dist 4 | 5 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | const handler = require('./lib/handler') 2 | 3 | module.exports.handler = handler 4 | -------------------------------------------------------------------------------- /bin/postgres-16.3/pg_dump: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jameshy/pgdump-aws-lambda/HEAD/bin/postgres-16.3/pg_dump -------------------------------------------------------------------------------- /bin/postgres-16.3/libpq.so.5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jameshy/pgdump-aws-lambda/HEAD/bin/postgres-16.3/libpq.so.5 -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | indent_style = space 5 | indent_size = 4 6 | charset = utf-8 7 | trim_trailing_whitespace = true 8 | insert_final_newline = true 9 | end_of_line = lf 10 | -------------------------------------------------------------------------------- /test/.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../.eslintrc", 3 | "rules": {}, 4 | "globals": { 5 | "describe": true, 6 | "it": true 7 | }, 8 | "env": { 9 | "mocha": true 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /lib/config.js: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | 3 | // default config that is overridden by the Lambda event 4 | module.exports = { 5 | S3_REGION: 'eu-west-1', 6 | PGDUMP_PATH: path.join(__dirname, '../bin/postgres-16.3'), 7 | // maximum time allowed to connect to postgres before a timeout occurs 8 | PGCONNECT_TIMEOUT: 15, 9 | USE_IAM_AUTH: false, 10 | S3_STORAGE_CLASS: 'STANDARD', 11 | S3_PART_SIZE: 5242880 // 5mb its the default part size 12 | } 13 | -------------------------------------------------------------------------------- /lib/iam.js: -------------------------------------------------------------------------------- 1 | const AWS = require('aws-sdk') 2 | 3 | function decorateWithIamToken(baseConfig) { 4 | const rdsSigner = new AWS.RDS.Signer() 5 | const token = rdsSigner.getAuthToken({ 6 | hostname: baseConfig.PGHOST, 7 | port: baseConfig.PGPORT != null ? 
baseConfig.PGPORT : 5432, 8 | region: baseConfig.S3_REGION, 9 | username: baseConfig.PGUSER 10 | }) 11 | return { ...baseConfig, PGPASSWORD: token } 12 | } 13 | 14 | module.exports = decorateWithIamToken 15 | -------------------------------------------------------------------------------- /lib/utils.js: -------------------------------------------------------------------------------- 1 | const moment = require('moment') 2 | const path = require('path') 3 | 4 | module.exports = { 5 | generateBackupPath(databaseName, rootPath, now = null) { 6 | now = now || moment().utc() 7 | const timestamp = moment(now).format('DD-MM-YYYY_HH-mm-ss') 8 | const day = moment(now).format('YYYY-MM-DD') 9 | const filename = `${databaseName}-${timestamp}.backup` 10 | const key = path.join(rootPath || '', day, filename) 11 | return key 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /lib/parseDatabaseNames.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Parse database names from config.PGDATABASE 3 | */ 4 | function parseDatabaseNames(config) { 5 | if (!config.PGDATABASE) { 6 | throw new Error("PGDATABASE was not provided") 7 | } 8 | // we support two types of string: 9 | // a) single database name e.g. "dbone" 10 | // b) multiple database names e.g. "dbone, dbtwo" 11 | const dbnames = config.PGDATABASE.split(",").map(s => s.trim()).filter(s => s) 12 | return Array.from(new Set(dbnames)) 13 | } 14 | 15 | module.exports = parseDatabaseNames -------------------------------------------------------------------------------- /.github/workflows/node.js.yml: -------------------------------------------------------------------------------- 1 | name: Test 2 | 3 | on: 4 | push: 5 | branches: [ "master" ] 6 | pull_request: 7 | branches: [ "master" ] 8 | 9 | jobs: 10 | build: 11 | runs-on: ubuntu-latest 12 | strategy: 13 | matrix: 14 | node-version: [20.x] 15 | steps: 16 | - uses: actions/checkout@v4 17 | - name: Use Node.js ${{ matrix.node-version }} 18 | uses: actions/setup-node@v4 19 | with: 20 | node-version: ${{ matrix.node-version }} 21 | cache: 'npm' 22 | - run: npm ci 23 | - run: npm test 24 | -------------------------------------------------------------------------------- /test/utils.js: -------------------------------------------------------------------------------- 1 | const { expect } = require('chai') 2 | const moment = require('moment') 3 | const utils = require('../lib/utils') 4 | 5 | 6 | describe('Utils', () => { 7 | describe('generateBackupPath', () => { 8 | it('should generate a correct path', () => { 9 | const databaseName = 'test-db' 10 | const now = moment('2017-04-22 15:01:02') 11 | const expected = '2017-04-22/test-db-22-04-2017_15-01-02.backup' 12 | const result = utils.generateBackupPath(databaseName, null, now) 13 | expect(result).to.equal(expected) 14 | }) 15 | }) 16 | }) 17 | -------------------------------------------------------------------------------- /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "airbnb-base", 3 | "rules": { 4 | "func-names": ["error", "never"], 5 | "indent": ["error", 4], 6 | "semi": ["error", "never"], 7 | "brace-style": ["error", "stroustrup"], 8 | "no-restricted-syntax": [ 9 | "error", 10 | "ForInStatement", 11 | "LabeledStatement", 12 | "WithStatement" 13 | ], 14 | "comma-dangle": ["error", "never"], 15 | "no-unused-expressions": 0, 16 | "class-methods-use-this": 0, 17 | "import/no-extraneous-dependencies": 
[2, {}], 18 | "no-param-reassign": 0, 19 | "prefer-template": 0, 20 | "no-console": 0, 21 | "arrow-parens": 0, 22 | "arrow-body-style": 0 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /lib/upload-s3.js: -------------------------------------------------------------------------------- 1 | const AWS = require('aws-sdk') 2 | 3 | // configure AWS to log to stdout 4 | AWS.config.update({ 5 | logger: process.stdout 6 | }) 7 | 8 | async function uploadS3(stream, config, key) { 9 | const s3 = new AWS.S3({ 10 | region: config.S3_REGION 11 | }) 12 | const result = await s3.upload( 13 | { 14 | Key: key, 15 | Bucket: config.S3_BUCKET, 16 | Body: stream, 17 | StorageClass: config.S3_STORAGE_CLASS 18 | }, 19 | { 20 | partSize: config.S3_PART_SIZE, 21 | queueSize: 1 22 | } 23 | ).promise() 24 | 25 | console.log('Uploaded to', result.Location) 26 | return result.Location 27 | } 28 | 29 | module.exports = uploadS3 30 | -------------------------------------------------------------------------------- /lib/encryption.js: -------------------------------------------------------------------------------- 1 | const crypto = require('crypto') 2 | 3 | 4 | const ALGORITHM = 'aes-256-cbc' 5 | 6 | module.exports = { 7 | encrypt(readableStream, key, iv) { 8 | this.validateKey(key) 9 | if (iv.length !== 16) { 10 | throw new Error(`encrypt iv must be exactly 16 bytes, but received ${iv.length}`) 11 | } 12 | const cipher = crypto.createCipheriv(ALGORITHM, Buffer.from(key, 'hex'), iv) 13 | readableStream.pipe(cipher) 14 | return cipher 15 | }, 16 | decrypt(readableStream, key, iv) { 17 | this.validateKey(key) 18 | const decipher = crypto.createDecipheriv(ALGORITHM, Buffer.from(key, 'hex'), iv) 19 | readableStream.pipe(decipher) 20 | return decipher 21 | }, 22 | validateKey(key) { 23 | const bytes = Buffer.from(key, 'hex') 24 | if (bytes.length !== 32) { 25 | throw new Error('encrypt key must be a 32 byte hex string') 26 | } 27 | return true 28 | }, 29 | generateIv() { 30 | return crypto.randomBytes(16) 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /bin/makezip.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | FILENAME="pgdump-aws-lambda.zip" 5 | 6 | command_exists () { 7 | type "$1" &> /dev/null ; 8 | } 9 | 10 | if ! command_exists zip ; then 11 | echo "zip command not found, try: sudo apt-get install zip" 12 | exit 1 13 | fi 14 | if [ ! -f ./package.json ]; then 15 | echo "command must be run from the project root directory" 16 | exit 1 17 | fi 18 | 19 | 20 | # create a temp directory for our bundle 21 | BUNDLE_DIR=$(mktemp -d) 22 | # copy entire project into BUNDLE_DIR 23 | cp -R * $BUNDLE_DIR/ 24 | 25 | # remove unnecessary things 26 | pushd $BUNDLE_DIR > /dev/null 27 | echo "cleaning.." 28 | rm -rf node_modules/* 29 | npm install --omit=dev --no-progress > /dev/null 30 | rm -rf dist coverage test 31 | 32 | # create zip of bundle/ 33 | echo "zipping.." 
34 | zip -q -r $FILENAME * 35 | 36 | # return to project dir 37 | popd > /dev/null 38 | 39 | # copy the zip 40 | mkdir -p ./dist 41 | cp $BUNDLE_DIR/$FILENAME ./dist/$FILENAME 42 | 43 | echo "successfully created dist/$FILENAME" 44 | 45 | # remove bundle/ 46 | rm -rf $BUNDLE_DIR 47 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2017 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /test/iam.js: -------------------------------------------------------------------------------- 1 | /* eslint no-underscore-dangle: 0 */ 2 | const { expect } = require('chai') 3 | const rewire = require('rewire') 4 | const chai = require('chai') 5 | const chaiAsPromised = require('chai-as-promised') 6 | const AWSMOCK = require('aws-sdk-mock') 7 | const AWS = require('aws-sdk') 8 | 9 | chai.should() 10 | chai.use(chaiAsPromised) 11 | 12 | AWSMOCK.setSDKInstance(AWS) 13 | 14 | 15 | const decorateWithIamToken = rewire('../lib/iam') 16 | 17 | describe('iam-based auth', () => { 18 | it('should set the postgres default if no PGPORT is set', async () => { 19 | const mockEvent = { USE_IAM_AUTH: true } 20 | const token = 'foo' 21 | AWSMOCK.mock('RDS.Signer', 'getAuthToken', (options) => { 22 | expect(options.port).to.equal(5432) 23 | return token 24 | }) 25 | decorateWithIamToken(mockEvent) 26 | }) 27 | 28 | it('should apply PGPORT to the auth-token request', async () => { 29 | const mockEvent = { USE_IAM_AUTH: true, PGPORT: 2345 } 30 | const token = 'foo' 31 | AWSMOCK.mock('RDS.Signer', 'getAuthToken', (options) => { 32 | expect(options.port).to.equal(mockEvent.PGPORT) 33 | return token 34 | }) 35 | decorateWithIamToken(mockEvent) 36 | }) 37 | 38 | afterEach(() => { 39 | AWSMOCK.restore('RDS.Signer') 40 | }) 41 | }) 42 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "pgdump-aws-lambda", 3 | "version": "2.0.0", 4 | "description": "Lambda function for executing pg_dump and streaming the output to s3.", 5 | "main": "index.js", 6 | "dependencies": { 7 | "aws-sdk": "2.1658.0", 8 | "moment": "2.30.1" 9 | }, 10 | "devDependencies": { 11 | "aws-sdk-mock": "6.0.4", 12 | "chai": " 4.4.1", 13 | 
"chai-as-promised": "7.1.2", 14 | "coveralls": "3.1.1", 15 | "eslint": "8.26.0", 16 | "eslint-config-airbnb": "19.0.4", 17 | "eslint-config-airbnb-base": "15.0.0", 18 | "mocha": "10.6.0", 19 | "mock-spawn": "0.2.6", 20 | "mockdate": "3.0.5", 21 | "nyc": "17.0.0", 22 | "rewire": "7.0.0", 23 | "sinon": "18.0.0", 24 | "tmp": "0.2.3" 25 | }, 26 | "scripts": { 27 | "test": "NODE_ENV=test mocha test", 28 | "test:watch": "NODE_ENV=test mocha test -w", 29 | "coverage": "NODE_ENV=test nyc --reporter=text mocha test", 30 | "coveralls": "NODE_ENV=test nyc --reporter=text-lcov mocha test | coveralls", 31 | "coverage-html": "NODE_ENV=test nyc --reporter=html --reporter=text mocha test", 32 | "makezip": "bin/makezip.sh" 33 | }, 34 | "repository": { 35 | "type": "git", 36 | "url": "git+https://github.com/jameshy/pgdump-aws-lambda.git" 37 | }, 38 | "keywords": [ 39 | "lambda", 40 | "s3", 41 | "pg_dump", 42 | "postgresql", 43 | "backup" 44 | ], 45 | "author": "James Hutchby", 46 | "license": "MIT", 47 | "bugs": { 48 | "url": "https://github.com/jameshy/pgdump-aws-lambda/issues" 49 | }, 50 | "homepage": "https://github.com/jameshy/pgdump-aws-lambda#readme" 51 | } 52 | -------------------------------------------------------------------------------- /lib/secrets-manager.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable brace-style */ 2 | const AWS = require('aws-sdk') 3 | 4 | // configure AWS to log to stdout 5 | AWS.config.update({ 6 | logger: process.stdout 7 | }) 8 | 9 | async function getDbCredentials(config) { 10 | const secretsManager = new AWS.SecretsManager({ 11 | region: config.S3_REGION 12 | }) 13 | 14 | const params = { 15 | SecretId: config.SECRETS_MANAGER_SECRET_ID 16 | } 17 | 18 | return new Promise((resolve, reject) => { 19 | secretsManager.getSecretValue(params, (err, data) => { 20 | if (err) { 21 | console.log('Error while getting secret value:', err) 22 | reject(err) 23 | } else { 24 | const credentials = JSON.parse(data.SecretString) 25 | resolve(credentials) 26 | } 27 | }) 28 | }) 29 | } 30 | 31 | async function decorateWithSecretsManagerCredentials(baseConfig) { 32 | try { 33 | const credentials = await getDbCredentials(baseConfig) 34 | 35 | const credsFromSecret = {} 36 | 37 | if (credentials.username) credsFromSecret.PGUSER = credentials.username 38 | if (credentials.password) credsFromSecret.PGPASSWORD = credentials.password 39 | if (credentials.dbname) credsFromSecret.PGDATABASE = credentials.dbname 40 | if (credentials.host) credsFromSecret.PGHOST = credentials.host 41 | if (credentials.port) credsFromSecret.PGPORT = credentials.port 42 | 43 | return { 44 | ...credsFromSecret, 45 | ...baseConfig 46 | } 47 | } catch (error) { 48 | console.log(error) 49 | return baseConfig 50 | } 51 | } 52 | 53 | module.exports = decorateWithSecretsManagerCredentials 54 | -------------------------------------------------------------------------------- /test/encryption.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs') 2 | const tmp = require('tmp') 3 | const { expect } = require('chai') 4 | const encryption = require('../lib/encryption') 5 | 6 | 7 | function waitForStream(stream) { 8 | return new Promise(fulfill => stream.on('finish', fulfill)) 9 | } 10 | 11 | // in real world usage we use streams from S3 12 | // but here in tests we use streams from fs 13 | describe('encryption', () => { 14 | const ENCRYPT_KEY = '4141414141414141414141414141414141414141414141414141414141414141' 15 | 
const IV = encryption.generateIv() 16 | 17 | it('should encrypt and decrypt', async () => { 18 | // create a temporary, unencrypted file 19 | const unencryptedPath = tmp.tmpNameSync() 20 | fs.writeFileSync(unencryptedPath, 'some-unencrypted-data') 21 | 22 | // encrypt 23 | const encryptedStream = encryption.encrypt( 24 | fs.createReadStream(unencryptedPath), 25 | ENCRYPT_KEY, 26 | IV 27 | ) 28 | const encryptedPath = tmp.tmpNameSync() 29 | let writeStream = fs.createWriteStream(encryptedPath) 30 | encryptedStream.pipe(writeStream) 31 | await waitForStream(writeStream) 32 | 33 | const contents = fs.readFileSync(encryptedPath) 34 | expect(contents).to.have.length(32) 35 | expect(contents.includes('some-unencrypted-data')).to.be.false 36 | 37 | // decrypt 38 | const decryptedStream = encryption.decrypt( 39 | fs.createReadStream(encryptedPath), 40 | ENCRYPT_KEY, 41 | IV 42 | ) 43 | const decryptedPath = tmp.tmpNameSync() 44 | writeStream = fs.createWriteStream(decryptedPath) 45 | decryptedStream.pipe(writeStream) 46 | await waitForStream(writeStream) 47 | 48 | // verify decrypt was successful 49 | expect( 50 | fs.readFileSync(decryptedPath).toString('utf8') 51 | ).to.equal('some-unencrypted-data') 52 | }) 53 | 54 | it('should throw an error for an invalid key', () => { 55 | expect(() => encryption.encrypt( 56 | undefined, 57 | 'bad-key', 58 | 'bad-IV' 59 | )).to.throw('encrypt key must be a 32 byte hex string') 60 | }) 61 | 62 | it('should throw an error for an invalid iv', () => { 63 | expect(() => encryption.encrypt( 64 | undefined, 65 | ENCRYPT_KEY, 66 | 'bad-IV' 67 | )).to.throw('encrypt iv must be exactly 16 bytes, but received 6') 68 | }) 69 | }) 70 | -------------------------------------------------------------------------------- /lib/handler.js: -------------------------------------------------------------------------------- 1 | const utils = require('./utils') 2 | const uploadS3 = require('./upload-s3') 3 | const pgdump = require('./pgdump') 4 | const decorateWithIamToken = require('./iam') 5 | const decorateWithSecretsManagerCredentials = require('./secrets-manager') 6 | const parseDatabaseNames = require('./parseDatabaseNames') 7 | const encryption = require('./encryption') 8 | 9 | const DEFAULT_CONFIG = require('./config') 10 | 11 | async function backup(config) { 12 | if (!config.PGDATABASE) { 13 | throw new Error('PGDATABASE not provided in the event data') 14 | } 15 | if (!config.S3_BUCKET) { 16 | throw new Error('S3_BUCKET not provided in the event data') 17 | } 18 | 19 | const key = utils.generateBackupPath( 20 | config.PGDATABASE, 21 | config.ROOT 22 | ) 23 | 24 | // spawn the pg_dump process 25 | let stream = await pgdump(config) 26 | if (config.ENCRYPT_KEY && encryption.validateKey(config.ENCRYPT_KEY)) { 27 | // if encryption is enabled, we generate an IV and store it in a separate file 28 | const iv = encryption.generateIv() 29 | const ivKey = key + '.iv' 30 | 31 | await uploadS3(iv.toString('hex'), config, ivKey) 32 | stream = encryption.encrypt(stream, config.ENCRYPT_KEY, iv) 33 | } 34 | // stream the backup to S3 35 | return uploadS3(stream, config, key) 36 | } 37 | 38 | async function handler(event) { 39 | let results = [] 40 | const baseConfig = { ...DEFAULT_CONFIG, ...event } 41 | let decoratedConfig 42 | 43 | if (event.USE_IAM_AUTH === true) { 44 | decoratedConfig = decorateWithIamToken(baseConfig) 45 | } 46 | else if (event.SECRETS_MANAGER_SECRET_ID) { 47 | decoratedConfig = await decorateWithSecretsManagerCredentials(baseConfig) 48 | } 49 | else { 50 | 
decoratedConfig = baseConfig 51 | } 52 | 53 | const dbnames = parseDatabaseNames(decoratedConfig) 54 | if (!dbnames || !dbnames.length) { 55 | throw new Error("PGDATABASE does not contain a database name") 56 | } 57 | 58 | // sequentially backup the configured database names (1 or more) 59 | for (const dbname of dbnames) { 60 | try { 61 | const dbconfig = { 62 | ...decoratedConfig, 63 | PGDATABASE: dbname 64 | } 65 | results.push(await backup(dbconfig)) 66 | } 67 | catch (error) { 68 | // log the error and rethrow for Lambda 69 | if (process.env.NODE_ENV !== 'test') { 70 | console.error(error) 71 | } 72 | throw error 73 | } 74 | } 75 | 76 | return results.length > 1 ? results : results[0] 77 | } 78 | 79 | module.exports = handler 80 | -------------------------------------------------------------------------------- /lib/pgdump.js: -------------------------------------------------------------------------------- 1 | const { spawn } = require('child_process') 2 | const path = require('path') 3 | const fs = require('fs') 4 | const { Transform } = require('stream') 5 | 6 | function spawnPgDump(pgdumpDir, args, env) { 7 | const pgDumpPath = path.join( 8 | pgdumpDir, 9 | 'pg_dump' 10 | ) 11 | if (!fs.existsSync(pgDumpPath)) { 12 | throw new Error('pg_dump not found at ' + pgDumpPath) 13 | } 14 | 15 | return spawn(pgDumpPath, args, { 16 | env 17 | }) 18 | } 19 | 20 | function buildArgs(config) { 21 | let args = ['-Fc', '-Z1'] 22 | const extraArgs = config.PGDUMP_ARGS 23 | 24 | if (typeof extraArgs === 'string') { 25 | const splitArgs = extraArgs.split(' ') 26 | args = args.concat(splitArgs) 27 | } 28 | else if (Array.isArray(extraArgs)) { 29 | args = args.concat(extraArgs) 30 | } 31 | 32 | return args 33 | } 34 | 35 | function pgdump(config, pgDumpSpawnFn = spawnPgDump) { 36 | return new Promise((resolve, reject) => { 37 | let headerChecked = false 38 | let stderr = '' 39 | 40 | // spawn pg_dump process 41 | const args = buildArgs(config) 42 | const env = { ...config, LD_LIBRARY_PATH: config.PGDUMP_PATH } 43 | const process = pgDumpSpawnFn(config.PGDUMP_PATH, args, env) 44 | 45 | // hook into the process 46 | process.stderr.on('data', data => { 47 | stderr += data.toString('utf8') 48 | }) 49 | 50 | process.on('close', code => { 51 | // reject our promise if pg_dump had a non-zero exit 52 | if (code !== 0) { 53 | return reject( 54 | new Error('pg_dump process failed: ' + stderr) 55 | ) 56 | } 57 | // check that pgdump actually gave us some data 58 | if (!headerChecked) { 59 | return reject( 60 | new Error('pg_dump gave us an unexpected response') 61 | ) 62 | } 63 | return null 64 | }) 65 | 66 | // watch the pg_dump stdout stream so we can check it's valid 67 | const transformer = new Transform({ 68 | transform(chunk, enc, callback) { 69 | this.push(chunk) 70 | // if stdout begins with 'PGDMP' then the backup has begun 71 | // otherwise, we abort 72 | if (!headerChecked) { 73 | headerChecked = true 74 | if (chunk.toString('utf8').startsWith('PGDMP')) { 75 | resolve(transformer) 76 | } 77 | else { 78 | reject( 79 | new Error('pg_dump gave us an unexpected response') 80 | ) 81 | } 82 | } 83 | callback() 84 | } 85 | }) 86 | 87 | // pipe pg_dump to transformer 88 | process.stdout.pipe(transformer) 89 | }) 90 | } 91 | module.exports = pgdump 92 | -------------------------------------------------------------------------------- /test/secrets-manager.js: -------------------------------------------------------------------------------- 1 | /* eslint no-underscore-dangle: 0 */ 2 | const { expect } = 
require('chai') 3 | const rewire = require('rewire') 4 | const chai = require('chai') 5 | const chaiAsPromised = require('chai-as-promised') 6 | const AWSMOCK = require('aws-sdk-mock') 7 | const AWS = require('aws-sdk') 8 | 9 | chai.should() 10 | chai.use(chaiAsPromised) 11 | 12 | AWSMOCK.setSDKInstance(AWS) 13 | 14 | const decorateWithSecretsManagerCredentials = rewire('../lib/secrets-manager') 15 | 16 | describe('secrets-manager-based auth', () => { 17 | const parsedSecretValue = { 18 | dbname: 'somedatabase', 19 | username: 'someuser', 20 | password: 'somepassword', 21 | host: 'somehost', 22 | port: '2345' 23 | } 24 | const secretValue = { 25 | SecretString: JSON.stringify(parsedSecretValue) 26 | } 27 | 28 | const keyMappings = [ 29 | { secretKey: 'username', pgKey: 'PGUSER' }, 30 | { secretKey: 'password', pgKey: 'PGPASSWORD' }, 31 | { secretKey: 'dbname', pgKey: 'PGDATABASE' }, 32 | { secretKey: 'host', pgKey: 'PGHOST' }, 33 | { secretKey: 'port', pgKey: 'PGPORT' } 34 | ] 35 | 36 | keyMappings.forEach((map) => { 37 | it(`should set ${map.pgKey} from the secret ${map.secretKey}`, async () => { 38 | const mockEvent = { SECRETS_MANAGER_SECRET_ID: 'my-secret-id' } 39 | 40 | AWSMOCK.mock('SecretsManager', 'getSecretValue', (params, callback) => { 41 | expect(params.SecretId).to.eql(mockEvent.SECRETS_MANAGER_SECRET_ID) 42 | 43 | callback(null, secretValue) 44 | }) 45 | 46 | const config = await decorateWithSecretsManagerCredentials(mockEvent) 47 | 48 | expect(config[map.pgKey]).to.eql(parsedSecretValue[map.secretKey]) 49 | }) 50 | 51 | context(`when the event contains an override for ${map.pgKey}`, () => { 52 | it(`should set ${map.pgKey} from the event params`, async () => { 53 | const mockEvent = { 54 | SECRETS_MANAGER_SECRET_ID: 'my-secret-id', 55 | [map.pgKey]: 'some-value-override' 56 | } 57 | 58 | AWSMOCK.mock('SecretsManager', 'getSecretValue', (params, callback) => { 59 | expect(params.SecretId).to.eql(mockEvent.SECRETS_MANAGER_SECRET_ID) 60 | 61 | callback(null, secretValue) 62 | }) 63 | 64 | const config = await decorateWithSecretsManagerCredentials(mockEvent) 65 | 66 | expect(config[map.pgKey]).to.eql(mockEvent[map.pgKey]) 67 | }) 68 | }) 69 | }) 70 | 71 | context('when there is an error getting the secret value', () => { 72 | it('should return the given base config', async () => { 73 | const mockEvent = { SECRETS_MANAGER_SECRET_ID: 'my-secret-id' } 74 | 75 | AWSMOCK.mock('SecretsManager', 'getSecretValue', (params, callback) => { 76 | expect(params.SecretId).to.eql(mockEvent.SECRETS_MANAGER_SECRET_ID) 77 | 78 | callback('some error', secretValue) 79 | }) 80 | 81 | const config = await decorateWithSecretsManagerCredentials(mockEvent) 82 | 83 | expect(config).to.eql(mockEvent) 84 | }) 85 | }) 86 | 87 | afterEach(() => { 88 | AWSMOCK.restore('SecretsManager') 89 | }) 90 | }) 91 | -------------------------------------------------------------------------------- /test/pgdump.js: -------------------------------------------------------------------------------- 1 | const path = require('path') 2 | const fs = require('fs') 3 | const mockSpawn = require('mock-spawn') 4 | const chai = require('chai') 5 | const chaiAsPromised = require('chai-as-promised') 6 | const sinon = require('sinon') 7 | 8 | chai.use(chaiAsPromised) 9 | const { expect } = chai 10 | 11 | const pgdump = require('../lib/pgdump') 12 | const defaultConfig = require('../lib/config') 13 | 14 | describe('pgdump', () => { 15 | it('should export a function', () => { 16 | return expect(pgdump).to.be.a('function') 17 | }) 18 | 19 
| it('should throw an error when pg_dump sends invalid data', () => { 20 | const pgdumpProcess = mockSpawn()() 21 | const pgDumpFn = () => pgdumpProcess 22 | const config = {} 23 | const p = pgdump(config, pgDumpFn) 24 | pgdumpProcess.stdout.write('asdfasdf') 25 | pgdumpProcess.emit('close', 0) 26 | return expect(p).to.eventually.be.rejectedWith( 27 | 'pg_dump gave us an unexpected response' 28 | ) 29 | }) 30 | 31 | it('should call pg_dump with some default args', async () => { 32 | const pgdumpProcess = mockSpawn()() 33 | const pgDumpFn = sinon.fake.returns(pgdumpProcess) 34 | const config = {} 35 | const p = pgdump(config, pgDumpFn) 36 | pgdumpProcess.stdout.write('PGDMP - data - data') 37 | pgdumpProcess.emit('close', 0) 38 | await p 39 | 40 | expect(pgDumpFn.calledOnce).to.be.true 41 | const pgDumpArgs = pgDumpFn.getCall(0).args[1] 42 | expect(pgDumpArgs).to.deep.equal(['-Fc', '-Z1']) 43 | }) 44 | 45 | it('should call pg_dump with provided extra arguments as array', async () => { 46 | const pgdumpProcess = mockSpawn()() 47 | const pgDumpFn = sinon.fake.returns(pgdumpProcess) 48 | const config = { 49 | PGDUMP_ARGS: ['--exclude-table=ignored-table', '-N', 'public'] 50 | } 51 | const p = pgdump(config, pgDumpFn) 52 | pgdumpProcess.stdout.write('PGDMP - data - data') 53 | pgdumpProcess.emit('close', 0) 54 | await p 55 | 56 | expect(pgDumpFn.calledOnce).to.be.true 57 | const pgDumpArgs = pgDumpFn.getCall(0).args[1] 58 | 59 | expect( 60 | pgDumpArgs 61 | ).to.deep.equal(['-Fc', '-Z1', '--exclude-table=ignored-table', '-N', 'public']) 62 | }) 63 | 64 | it('should call pg_dump with provided extra arguments as string', async () => { 65 | const pgdumpProcess = mockSpawn()() 66 | const pgDumpFn = sinon.fake.returns(pgdumpProcess) 67 | const config = { 68 | PGDUMP_ARGS: '--exclude-table=ignored-table -N public' 69 | } 70 | 71 | const p = pgdump(config, pgDumpFn) 72 | pgdumpProcess.stdout.write('PGDMP - data - data') 73 | pgdumpProcess.emit('close', 0) 74 | await p 75 | 76 | expect(pgDumpFn.calledOnce).to.be.true 77 | const pgDumpArgs = pgDumpFn.getCall(0).args[1] 78 | 79 | expect( 80 | pgDumpArgs 81 | ).to.deep.equal(['-Fc', '-Z1', '--exclude-table=ignored-table', '-N', 'public']) 82 | }) 83 | 84 | it('should stream correctly', async () => { 85 | const pgdumpProcess = mockSpawn()() 86 | 87 | const pgDumpFn = () => pgdumpProcess 88 | const config = {} 89 | const p = pgdump(config, pgDumpFn) 90 | pgdumpProcess.stdout.write('PGDMP - data - data') 91 | pgdumpProcess.emit('close', 0) 92 | 93 | const buffer = await p 94 | 95 | expect(buffer.read().toString('utf8')).to.equal('PGDMP - data - data') 96 | }) 97 | 98 | it('should throw an error when the pg_dump binary does not exist', () => { 99 | const config = { 100 | PGDUMP_PATH: '/some/non-existant/path/pg_dump' 101 | } 102 | const p = pgdump(config) 103 | return expect(p).to.eventually.be.rejectedWith( 104 | 'pg_dump not found at /some/non-existant/path/pg_dump/pg_dump' 105 | ) 106 | }) 107 | 108 | describe('default pg_dump binary', () => { 109 | const binaryPath = path.join(defaultConfig.PGDUMP_PATH, 'pg_dump') 110 | it('should exist', () => { 111 | if (!fs.existsSync(binaryPath)) { 112 | throw new Error('failed to find pg_dump at ', binaryPath) 113 | } 114 | }) 115 | it('should be +x', () => { 116 | const fd = fs.openSync(binaryPath, 'r') 117 | const stat = fs.fstatSync(fd) 118 | 119 | // eslint-disable-next-line no-bitwise 120 | const permString = '0' + (stat.mode & 0o777).toString(8) 121 | if (permString !== '0755' && permString !== '0775') { 122 | 
throw new Error('binary ' + binaryPath + ' is not executable') 123 | } 124 | }) 125 | }) 126 | }) 127 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # pgdump-aws-lambda 2 | 3 | ![ci status](https://github.com/jameshy/pgdump-aws-lambda/actions/workflows/node.js.yml/badge.svg) 4 | [![Coverage Status](https://coveralls.io/repos/github/jameshy/pgdump-aws-lambda/badge.svg?branch=master)](https://coveralls.io/github/jameshy/pgdump-aws-lambda?branch=master) 5 | 6 | An AWS Lambda function that runs pg_dump and streams the output to S3. 7 | 8 | It can be configured to run periodically using CloudWatch events. 9 | 10 | ## Quick start 11 | 12 | 1. Create an AWS lambda function: 13 | - Author from scratch 14 | - Runtime: Node.js 20.x 15 | - Architecture: x86_64 16 | 2. tab "Code" -> "Upload from" -> ".zip file": 17 | - Upload ([pgdump-aws-lambda.zip](https://github.com/jameshy/pgdump-aws-lambda/releases/latest)) 18 | - tab "Configuration" -> "General Configuration" -> "Edit" 19 | - Timeout: 15 minutes 20 | - Edit the role and attach the policy "AmazonS3FullAccess" 21 | - Save 22 | 3. Give your lambda permission to write to S3: 23 | - tab "Configuration" -> "Permissions" 24 | - click the existing Execution role 25 | - "Add permissions" -> "Attach policies" 26 | - select "AmazonS3FullAccess" and click "Add Permissions" 27 | 28 | 4. Test 29 | 30 | - Create new test event, e.g.: 31 | 32 | ```json 33 | { 34 | "PGDATABASE": "dbname", 35 | "PGUSER": "postgres", 36 | "PGPASSWORD": "password", 37 | "PGHOST": "host", 38 | "S3_BUCKET": "db-backups", 39 | "ROOT": "hourly-backups" 40 | } 41 | ``` 42 | 43 | - _Test_ and check the output 44 | 45 | 5. Create a CloudWatch rule: 46 | - Event Source: Schedule -> Fixed rate of 1 hour 47 | - Targets: Lambda Function (the one created in step #1) 48 | - Configure input -> Constant (JSON text) and paste your config (as per previous step) 49 | 50 | #### File Naming 51 | 52 | This function will store your backup with the following S3 key: 53 | 54 | s3://${S3_BUCKET}/${ROOT}/YYYY-MM-DD/${PGDATABASE}-DD-MM-YYYY_HH-mm-ss.backup 55 | 56 | #### AWS Firewall 57 | 58 | - If you run the Lambda function outside a VPC, you must enable public access to your database instance, because a non-VPC Lambda function executes on the public internet. 59 | - If you run the Lambda function inside a VPC, you must allow access from the Lambda Security Group to your database instance. Also you must either add a NAT gateway ([chargeable](https://aws.amazon.com/vpc/pricing/)) to your VPC so the Lambda can connect to S3 over the Internet, or add an [S3 VPC endpoint (free)](https://docs.aws.amazon.com/vpc/latest/privatelink/vpc-endpoints-s3.html) and allow traffic to the appropriate S3 prefixlist. 60 | 61 | #### Encryption 62 | 63 | You can add an encryption key to your event, e.g. 64 | 65 | ```json 66 | { 67 | "PGDATABASE": "dbname", 68 | "PGUSER": "postgres", 69 | "PGPASSWORD": "password", 70 | "PGHOST": "host", 71 | "S3_BUCKET": "db-backups", 72 | "ROOT": "hourly-backups", 73 | "ENCRYPT_KEY": "c0d71d7ae094bdde1ef60db8503079ce615e71644133dc22e9686dc7216de8d0" 74 | } 75 | ``` 76 | 77 | The key should be exactly 64 hex characters (32 bytes). 78 | 79 | When this key is present, the function will do streaming encryption directly from pg_dump -> S3. 80 | 81 | It uses the aes-256-cbc encryption algorithm with a random IV for each backup file. 82 | The IV is stored alongside the backup in a separate file with the .iv extension. 83 | 84 | You can decrypt such a backup with the following bash command: 85 | 86 | ```bash 87 | openssl enc -aes-256-cbc -d \ 88 | -in postgres-27-12-2019_13-19-13.backup \ 89 | -out postgres-27-12-2019_13-19-13.unencrypted.backup \ 90 | -K c0d71d7ae094bdde1ef60db8503079ce615e71644133dc22e9686dc7216de8d0 \ 91 | -iv $(< postgres-27-12-2019_13-19-13.backup.iv) 92 | ``` 93 |
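If you prefer to decrypt in Node.js rather than with openssl, the repository's own `lib/encryption.js` can be reused. Below is a minimal sketch (a hypothetical local helper, not shipped with this repo), assuming the encrypted backup and its `.iv` file have already been downloaded from S3; the file names and the `ENCRYPT_KEY` environment variable are placeholders:

```js
// decrypt-backup.js - hypothetical local helper, not part of the Lambda itself
const fs = require('fs')
const encryption = require('./lib/encryption')

// the same 64-character hex key that was passed as ENCRYPT_KEY in the event
const key = process.env.ENCRYPT_KEY

// the .iv file stored next to the backup contains the IV as a hex string
const ivHex = fs.readFileSync('dbname-02-05-2017_01-33-11.backup.iv', 'utf8').trim()
const iv = Buffer.from(ivHex, 'hex')

// decrypt() pipes the input through an aes-256-cbc decipher and returns that stream
const input = fs.createReadStream('dbname-02-05-2017_01-33-11.backup')
const output = fs.createWriteStream('dbname-02-05-2017_01-33-11.unencrypted.backup')
encryption.decrypt(input, key, iv).pipe(output)

output.on('finish', () => console.log('decrypted backup written'))
```

This mirrors what the handler does on upload: the backup stream is passed through `encryption.encrypt()` and the generated IV is uploaded alongside it as `<key>.iv`.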
94 | #### S3 Upload Part Size 95 | 96 | If you experience Lambda timeouts while uploading file parts to S3, you can try increasing the part size of each file chunk (you might also need to increase the Lambda's resources). For instance, a 2GB file with the default part size of 5MB results in ~400 parts; pushing all of these parts exceeded the 15-minute Lambda timeout, while increasing the part size to 1GB reduced the transmit time to ~3 minutes. 97 | 98 | ```json 99 | { 100 | "S3_PART_SIZE": 1073741824 101 | } 102 | ``` 103 | 104 | #### IAM-based Postgres authentication 105 | 106 | Your context may require that you use IAM-based authentication to log into the Postgres service. 107 | Support for this can be enabled by making your CloudWatch event look like this: 108 | 109 | ```json 110 | { 111 | "PGDATABASE": "dbname", 112 | "PGUSER": "postgres", 113 | "PGHOST": "host", 114 | "S3_BUCKET": "db-backups", 115 | "ROOT": "hourly-backups", 116 | "USE_IAM_AUTH": true 117 | } 118 | ``` 119 | 120 | If you supply `USE_IAM_AUTH` with a value of `true`, the `PGPASSWORD` var may be omitted in the CloudWatch event. 121 | If you still provide it, it will be ignored. 122 | 123 | #### SecretsManager-based Postgres authentication 124 | 125 | If you prefer not to send DB details/credentials in the event parameters, you can store them in SecretsManager and just provide the SecretId; the function will then fetch your DB details/credentials from the secret value. 126 | 127 | NOTE: the execution role for the Lambda function must have access to GetSecretValue for the given secret. 128 | 129 | Support for this can be enabled by setting the SECRETS_MANAGER_SECRET_ID, so your CloudWatch event looks like this: 130 | 131 | ```json 132 | { 133 | "SECRETS_MANAGER_SECRET_ID": "my/secret/id", 134 | "S3_BUCKET": "db-backups", 135 | "ROOT": "hourly-backups" 136 | } 137 | ``` 138 | 139 | If you supply `SECRETS_MANAGER_SECRET_ID`, you can omit the 'PG\*' keys, and they will be fetched from your SecretsManager secret value instead with the following mapping: 140 | 141 | | Secret Value | PG-Key | 142 | | ------------ | ---------- | 143 | | username | PGUSER | 144 | | password | PGPASSWORD | 145 | | dbname | PGDATABASE | 146 | | host | PGHOST | 147 | | port | PGPORT | 148 | 149 | You can provide overrides to any PG\* keys in your event, as event parameters take precedence over secret values. 150 | 151 | #### Multiple databases 152 | 153 | If you'd like to export multiple databases in a single event, you can add a comma-separated list of database names to the PGDATABASE setting. The results will be returned in a list. 154 | 155 | ```json 156 | { 157 | "PGDATABASE": "dbname1,dbname2,dbname3", 158 | "PGUSER": "postgres", 159 | "PGPASSWORD": "password", 160 | "PGHOST": "host", 161 | "S3_BUCKET": "db-backups", 162 | "ROOT": "hourly-backups" 163 | } 164 | ``` 165 | 166 | NOTE: The 15-minute Lambda timeout still applies. 167 | 168 | ## Developer 169 | 170 | #### Bundling a new `pg_dump` binary 171 | 172 | 1. 
Launch an EC2 instance with the Amazon Linux 2023 AMI (ami-0649bea3443ede307) 173 | 2. Connect via SSH and: 174 | 175 | ```bash 176 | # install packages required for building 177 | sudo dnf install make automake gcc gcc-c++ readline-devel zlib-devel openssl-devel libicu-devel 178 | # build and install postgres from source 179 | wget https://ftp.postgresql.org/pub/source/v16.3/postgresql-16.3.tar.gz 180 | tar zxf postgresql-16.3.tar.gz 181 | cd postgresql-16.3 182 | ./configure --with-ssl=openssl 183 | make 184 | sudo make install 185 | exit 186 | ``` 187 | 188 | #### Download the binaries 189 | 190 | ```bash 191 | mkdir bin/postgres-16.3 192 | scp ec2-user@your-ec2-server:/usr/local/pgsql/bin/pg_dump ./bin/postgres-16.3/pg_dump 193 | scp ec2-user@your-ec2-server:/usr/local/pgsql/lib/libpq.so.5 ./bin/postgres-16.3/libpq.so.5 194 | ``` 195 | 196 | 3. To use the new postgres binary pass PGDUMP_PATH in the event: 197 | 198 | ```json 199 | { 200 | "PGDUMP_PATH": "bin/postgres-16.3" 201 | } 202 | ``` 203 | 204 | #### Creating a new function zip 205 | 206 | `npm run makezip` 207 | 208 | #### Contributing 209 | 210 | Please submit issues and PRs. 211 | -------------------------------------------------------------------------------- /test/handler.js: -------------------------------------------------------------------------------- 1 | /* eslint no-underscore-dangle: 0 */ 2 | const { expect } = require('chai') 3 | const rewire = require('rewire') 4 | const sinon = require('sinon') 5 | const mockDate = require('mockdate') 6 | const mockSpawn = require('mock-spawn') 7 | const chai = require('chai') 8 | const chaiAsPromised = require('chai-as-promised') 9 | const AWSMOCK = require('aws-sdk-mock') 10 | const AWS = require('aws-sdk') 11 | 12 | AWSMOCK.setSDKInstance(AWS) 13 | chai.should() 14 | chai.use(chaiAsPromised) 15 | 16 | const handler = rewire('../lib/handler') 17 | const pgdump = require('../lib/pgdump') 18 | 19 | describe('Handler', () => { 20 | function mockPgDumpSuccess() { 21 | const pgdumpProcess = mockSpawn()() 22 | pgdumpProcess.stdout.write('asdfasdf') 23 | pgdumpProcess.emit('close', 0) 24 | return Promise.resolve(pgdumpProcess.stdout) 25 | } 26 | function mockS3UploadSuccess(stream, config, key) { 27 | return Promise.resolve('mock-uploaded/' + key) 28 | } 29 | 30 | function makeMockHandler({ mockPgdump, mockS3upload } = {}) { 31 | mockPgdump = mockPgdump || mockPgDumpSuccess 32 | mockS3upload = mockS3upload || mockS3UploadSuccess 33 | const s3Spy = sinon.spy(mockS3upload) 34 | const pgSpy = sinon.spy(mockPgdump) 35 | handler.__set__('pgdump', pgSpy) 36 | handler.__set__('uploadS3', s3Spy) 37 | return { 38 | s3Spy, 39 | pgSpy 40 | } 41 | } 42 | 43 | const mockEvent = { 44 | PGDATABASE: 'dbname', 45 | S3_BUCKET: 's3bucket' 46 | } 47 | // mock dates, so we can test the backup file name 48 | mockDate.set('2017-05-02T01:33:11Z') 49 | 50 | it('should upload a backup', async () => { 51 | const { s3Spy, pgSpy } = makeMockHandler() 52 | 53 | const result = await handler(mockEvent) 54 | 55 | // handler should have called pgSpy with correct arguments 56 | expect(pgSpy.calledOnce).to.be.true 57 | expect(pgSpy.firstCall.args).to.have.length(1) 58 | const [arg0] = pgSpy.firstCall.args 59 | expect(arg0.S3_BUCKET).to.equal(mockEvent.S3_BUCKET) 60 | expect(arg0.PGDATABASE).to.equal(mockEvent.PGDATABASE) 61 | 62 | // handler should have called s3spy with correct arguments 63 | expect(s3Spy.calledOnce).to.be.true 64 | expect(s3Spy.firstCall.args).to.have.length(3) 65 | const [stream, config, key] = 
s3Spy.firstCall.args 66 | expect(stream).to.be.ok 67 | expect(config.S3_BUCKET).to.equal(mockEvent.S3_BUCKET) 68 | expect(config.PGDATABASE).to.equal(mockEvent.PGDATABASE) 69 | expect(key).to.equal('2017-05-02/dbname-02-05-2017_01-33-11.backup') 70 | expect(result).to.equal( 71 | 'mock-uploaded/2017-05-02/dbname-02-05-2017_01-33-11.backup' 72 | ) 73 | }) 74 | 75 | it('should support multiple database names', async () => { 76 | const mockEvent = { 77 | PGDATABASE: 'dbone, dbtwo', 78 | S3_BUCKET: 's3bucket' 79 | } 80 | const { s3Spy, pgSpy } = makeMockHandler() 81 | 82 | const result = await handler(mockEvent) 83 | 84 | // handler should have called pgSpy twice (both databases, with correct arguments 85 | expect(pgSpy.calledTwice).to.be.true 86 | { 87 | // first call 88 | expect(pgSpy.firstCall.args).to.have.length(1) 89 | const [event] = pgSpy.firstCall.args 90 | expect(event.S3_BUCKET).to.equal(mockEvent.S3_BUCKET) 91 | expect(event.PGDATABASE).to.equal('dbone') 92 | } 93 | { 94 | // second call 95 | expect(pgSpy.secondCall.args).to.have.length(1) 96 | const [event] = pgSpy.secondCall.args 97 | expect(event.S3_BUCKET).to.equal(mockEvent.S3_BUCKET) 98 | expect(event.PGDATABASE).to.equal('dbtwo') 99 | } 100 | 101 | // handler should have called s3spy twice (both databases, with correct arguments 102 | expect(s3Spy.calledTwice).to.be.true 103 | { 104 | // first call 105 | expect(s3Spy.firstCall.args).to.have.length(3) 106 | const [stream, config, key] = s3Spy.firstCall.args 107 | expect(stream).to.be.ok 108 | expect(config.S3_BUCKET).to.equal(mockEvent.S3_BUCKET) 109 | expect(config.PGDATABASE).to.equal('dbone') 110 | expect(key).to.equal('2017-05-02/dbone-02-05-2017_01-33-11.backup') 111 | } 112 | { 113 | // second call 114 | expect(s3Spy.secondCall.args).to.have.length(3) 115 | const [stream, config, key] = s3Spy.secondCall.args 116 | expect(stream).to.be.ok 117 | expect(config.S3_BUCKET).to.equal(mockEvent.S3_BUCKET) 118 | expect(config.PGDATABASE).to.equal('dbtwo') 119 | expect(key).to.equal('2017-05-02/dbtwo-02-05-2017_01-33-11.backup') 120 | } 121 | 122 | // result should be an array with two backup paths 123 | expect(result).deep.to.equal([ 124 | 'mock-uploaded/2017-05-02/dbone-02-05-2017_01-33-11.backup', 125 | 'mock-uploaded/2017-05-02/dbtwo-02-05-2017_01-33-11.backup' 126 | ]) 127 | }) 128 | 129 | it('should be able to authenticate via IAM ', async () => { 130 | const { s3Spy, pgSpy } = makeMockHandler() 131 | 132 | const iamMockEvent = { ...mockEvent, USE_IAM_AUTH: true } 133 | const token = 'foo' 134 | AWSMOCK.mock('RDS.Signer', 'getAuthToken', token) 135 | await handler(iamMockEvent) 136 | // handler should have called pgSpy with correct arguments 137 | expect(pgSpy.calledOnce).to.be.true 138 | expect(s3Spy.calledOnce).to.be.true 139 | expect(s3Spy.firstCall.args).to.have.length(3) 140 | const config = s3Spy.firstCall.args[1] 141 | // production code is synchronous, so this is annoying 142 | expect(await config.PGPASSWORD.promise()).to.equal(token) 143 | AWSMOCK.restore('RDS.Signer') 144 | }) 145 | 146 | it('should be able to authenticate via SecretsManager', async () => { 147 | const { s3Spy, pgSpy } = makeMockHandler() 148 | 149 | const secretsManagerMockEvent = { ...mockEvent, SECRETS_MANAGER_SECRET_ID: 'my-secret-id' } 150 | const username = 'myuser' 151 | const password = 'mypassword' 152 | const secretValue = { 153 | SecretString: JSON.stringify({ username, password }) 154 | } 155 | 156 | AWSMOCK.mock('SecretsManager', 'getSecretValue', (params, callback) => { 157 | 
expect(params.SecretId).to.eql(secretsManagerMockEvent.SECRETS_MANAGER_SECRET_ID) 158 | callback(null, secretValue) 159 | }) 160 | 161 | await handler(secretsManagerMockEvent) 162 | // handler should have called pgSpy with correct arguments 163 | expect(pgSpy.calledOnce).to.be.true 164 | expect(s3Spy.calledOnce).to.be.true 165 | expect(s3Spy.firstCall.args).to.have.length(3) 166 | const config = s3Spy.firstCall.args[1] 167 | // production code is synchronous, so this is annoying 168 | expect(config.PGUSER).to.equal(username) 169 | expect(config.PGPASSWORD).to.equal(password) 170 | 171 | AWSMOCK.restore('SecretsManager') 172 | }) 173 | 174 | it('should upload the backup file and an iv file', async () => { 175 | const { s3Spy } = makeMockHandler() 176 | 177 | const event = { 178 | ...mockEvent, 179 | ENCRYPT_KEY: 180 | '4141414141414141414141414141414141414141414141414141414141414141' 181 | } 182 | 183 | const result = await handler(event) 184 | 185 | // handler should have called s3spy with correct arguments 186 | expect(s3Spy.calledTwice).to.be.true 187 | expect(s3Spy.firstCall.args).to.have.length(3) 188 | 189 | // first call is the IV 190 | const [stream, config, key] = s3Spy.firstCall.args 191 | expect(stream).to.have.length(32) 192 | expect(config.S3_BUCKET).to.equal(mockEvent.S3_BUCKET) 193 | expect(config.PGDATABASE).to.equal(mockEvent.PGDATABASE) 194 | expect(key).to.be.a.string 195 | expect(key).to.not.be.empty 196 | expect(key).to.equal('2017-05-02/dbname-02-05-2017_01-33-11.backup.iv') 197 | 198 | // second call is the backup 199 | const [stream2, config2, key2] = s3Spy.secondCall.args 200 | expect(stream2).to.be.ok 201 | expect(config2.S3_BUCKET).to.equal(mockEvent.S3_BUCKET) 202 | expect(config2.PGDATABASE).to.equal(mockEvent.PGDATABASE) 203 | expect(key2).to.equal('2017-05-02/dbname-02-05-2017_01-33-11.backup') 204 | 205 | // handler should return the backup path 206 | expect(result).to.equal( 207 | 'mock-uploaded/2017-05-02/dbname-02-05-2017_01-33-11.backup' 208 | ) 209 | }) 210 | 211 | it('should throw an error when PGDATABASE is not provided', () => { 212 | makeMockHandler() 213 | const event = { ...mockEvent } 214 | event.PGDATABASE = undefined 215 | 216 | return handler(event) 217 | .should.be.rejectedWith( 218 | /PGDATABASE was not provided/ 219 | ) 220 | }) 221 | 222 | it('should throw an error when PGDATABASE is an empty array', () => { 223 | makeMockHandler() 224 | const event = { ...mockEvent } 225 | event.PGDATABASE = "," 226 | 227 | return handler(event) 228 | .should.be.rejectedWith( 229 | /PGDATABASE does not contain a database/ 230 | ) 231 | }) 232 | 233 | it('should throw an error when S3_BUCKET is not provided', () => { 234 | makeMockHandler() 235 | const event = { ...mockEvent } 236 | event.S3_BUCKET = undefined 237 | 238 | return handler(event) 239 | .should.be.rejectedWith( 240 | /S3_BUCKET not provided in the event data/ 241 | ) 242 | }) 243 | 244 | it('should handle pgdump errors correctly', () => { 245 | const pgdumpWithErrors = () => { 246 | const pgdumpProcess = mockSpawn()() 247 | pgdumpProcess.stderr.write('-error') 248 | pgdumpProcess.emit('close', 1) 249 | return pgdumpProcess 250 | } 251 | 252 | makeMockHandler({ 253 | mockPgdump: () => pgdump(mockEvent, pgdumpWithErrors) 254 | }) 255 | 256 | return handler(mockEvent) 257 | .should.be.rejectedWith( 258 | /pg_dump gave us an unexpected response/ 259 | ) 260 | }) 261 | }) 262 | --------------------------------------------------------------------------------