├── test ├── integration │ ├── _each.js │ ├── .gitignore │ ├── db │ │ ├── integration-db.js │ │ ├── connect.js │ │ ├── integration-url.js │ │ ├── get-tables.js │ │ ├── list.js │ │ └── reset-entire-database.js │ ├── app │ │ ├── knexapp │ │ │ ├── package.json │ │ │ └── migrations │ │ │ │ ├── 20191124305523_add_image_url.js │ │ │ │ ├── 20191124214437_init.js │ │ │ │ └── 20191124331980_data.js │ │ ├── cwd.js │ │ └── dotfiles.js │ ├── util │ │ ├── reset-metrics.js │ │ ├── matcher.js │ │ ├── read-metrics.js │ │ ├── explode-url.js │ │ ├── stream-utils.js │ │ ├── write-dotfiles.js │ │ ├── find-pgsh.js │ │ ├── exec-pgsh.js │ │ └── context.js │ ├── _teardown.js │ ├── config.test.js │ ├── credentials.test.js │ ├── list.test.js │ ├── clone.test.js │ ├── _setup.js │ ├── env.test.js │ ├── basic.test.js │ ├── migrate.test.js │ ├── init.test.js │ └── metrics.test.js └── unit │ └── build-url.spec.js ├── docs ├── pgsh-intro.gif ├── pgsh-intro-620.gif ├── issue_template.md ├── pull-requests.md └── branching.md ├── src ├── global │ ├── README.md │ ├── keys.js │ └── index.js ├── pgshrc │ ├── exists.js │ ├── create.js │ ├── read.js │ ├── update-existing.js │ └── default.js ├── util │ ├── README.md │ ├── filter-keys.js │ ├── random-string.js │ ├── stringify-env.js │ ├── add-all.js │ ├── build-map.js │ ├── confirm-prompt.js │ ├── quick-hash.js │ ├── find-project-root.js │ ├── print-table.js │ ├── dotenv.js │ ├── find-dir.js │ ├── prompt-for-input.js │ ├── prompt-for-vars.js │ ├── build-url.js │ └── wait-for.js ├── cmd │ ├── migrate │ │ ├── util │ │ │ ├── detect.js │ │ │ └── delegate.js │ │ ├── knex │ │ │ ├── util │ │ │ │ ├── read-migrations.js │ │ │ │ ├── delete-migration.js │ │ │ │ ├── parse-migration-name.js │ │ │ │ ├── get-applied-migrations.js │ │ │ │ ├── print-latest-migration.js │ │ │ │ └── choose-migration-index.js │ │ │ ├── detect.js │ │ │ ├── up.js │ │ │ ├── validate.js │ │ │ ├── force-up.js │ │ │ ├── down.js │ │ │ └── force-down.js │ │ ├── up.js │ │ ├── force-up.js │ │ ├── down.js │ │ ├── force-down.js │ │ └── validate.js │ ├── url.js │ ├── current.js │ ├── switch.js │ ├── dump.js │ ├── restore.js │ ├── metrics.js │ ├── psql.js │ ├── create.js │ ├── destroy.js │ ├── list.js │ ├── clone.js │ └── init.js ├── metrics │ ├── timer.js │ ├── constants.js │ ├── command-line.js │ ├── cpu.js │ ├── opt-in.js │ ├── store.js │ ├── record.js │ ├── README.md │ └── send.js ├── config.js ├── env │ ├── parse.js │ ├── create.js │ └── update-existing.js ├── start.js ├── task │ ├── connection-count.js │ ├── is-privileged.js │ ├── clone.js │ ├── create.js │ └── choose-db.js ├── index.js ├── end-program.js └── db.js ├── .gitignore ├── .eslintrc ├── .env.example ├── docker-compose.yaml ├── .vscode └── launch.json ├── LICENSE ├── .circleci └── config.yml ├── package.json ├── CODE_OF_CONDUCT.md └── README.md /test/integration/_each.js: -------------------------------------------------------------------------------- 1 | jest.setTimeout(10000); 2 | -------------------------------------------------------------------------------- /docs/pgsh-intro.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sastraxi/pgsh/HEAD/docs/pgsh-intro.gif -------------------------------------------------------------------------------- /docs/pgsh-intro-620.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sastraxi/pgsh/HEAD/docs/pgsh-intro-620.gif 
--------------------------------------------------------------------------------
/src/global/README.md:
--------------------------------------------------------------------------------
1 | The integration test suite uses items from this folder -- these should be moved out into a common library.
--------------------------------------------------------------------------------
/src/pgshrc/exists.js:
--------------------------------------------------------------------------------
1 | const findConfig = require('find-config');
2 | 
3 | module.exports = !!findConfig('.pgshrc');
4 | 
--------------------------------------------------------------------------------
/test/integration/.gitignore:
--------------------------------------------------------------------------------
1 | # ignore .env and config files (in this folder, they're generated)
2 | .env
3 | .pgshrc
4 | 
--------------------------------------------------------------------------------
/test/integration/db/integration-db.js:
--------------------------------------------------------------------------------
1 | const { env } = process;
2 | 
3 | module.exports = env.DANGER_INTEGRATION_DATABASE;
4 | 
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | **/node_modules/**
2 | node_modules/**
3 | .env
4 | 
5 | # testing / development files
6 | *.sql
7 | yarn-error.log
8 | 
--------------------------------------------------------------------------------
/src/util/README.md:
--------------------------------------------------------------------------------
1 | Please note that the integration test suite uses tools in this directory.
2 | Avoid writing code with complex (any!) business logic.
3 | 
--------------------------------------------------------------------------------
/src/cmd/migrate/util/detect.js:
--------------------------------------------------------------------------------
1 | const knex = require('../knex/detect');
2 | 
3 | module.exports = async () => {
4 |   if (await knex()) return 'knex';
5 |   return undefined;
6 | };
7 | 
--------------------------------------------------------------------------------
/src/metrics/timer.js:
--------------------------------------------------------------------------------
1 | const moment = require('moment');
2 | 
3 | let time = null;
4 | 
5 | module.exports = {
6 |   start: () => {
7 |     time = +moment();
8 |   },
9 |   get: () => time,
10 | };
11 | 
--------------------------------------------------------------------------------
/src/config.js:
--------------------------------------------------------------------------------
1 | const config = require('./pgshrc/read');
2 | 
3 | module.exports = {
4 |   ...config,
5 |   // FIXME: metrics are currently broken and could cause the app to hang.
6 |   force_disable_metrics: true,
7 | };
8 | 
--------------------------------------------------------------------------------
/test/integration/db/connect.js:
--------------------------------------------------------------------------------
1 | const knex = require('knex');
2 | const explodeUrl = require('../util/explode-url');
3 | 
4 | module.exports = (url) =>
5 |   knex({
6 |     client: 'pg',
7 |     connection: explodeUrl(url),
8 |   });
9 | 
--------------------------------------------------------------------------------
/src/util/filter-keys.js:
--------------------------------------------------------------------------------
1 | module.exports = (obj, predicate) => {
2 |   const output = {};
3 |   Object.keys(obj).forEach((k) => {
4 |     if (predicate(k)) {
5 |       output[k] = obj[k];
6 |     }
7 |   });
8 |   return output;
9 | };
10 | 
--------------------------------------------------------------------------------
/test/integration/app/knexapp/package.json:
--------------------------------------------------------------------------------
1 | {
2 |   "name": "knexapp",
3 |   "version": "0",
4 |   "description": "integration testing sample app",
5 |   "main": "index.js",
6 |   "license": "MIT",
7 |   "dependencies": {
8 |     "knex": "^0.20.2"
9 |   }
10 | }
11 | 
--------------------------------------------------------------------------------
/src/util/random-string.js:
--------------------------------------------------------------------------------
1 | /* eslint-disable no-bitwise */
2 | 
3 | // from https://gist.github.com/6174/6062387
4 | const randomString = (len = 16) =>
5 |   [...Array(len)].map(() =>
6 |     (~~(Math.random() * 36)).toString(36)).join('');
7 | 
8 | module.exports = randomString;
9 | 
--------------------------------------------------------------------------------
/test/integration/app/knexapp/migrations/20191124305523_add_image_url.js:
--------------------------------------------------------------------------------
1 | exports.up = knex => knex.raw(`
2 |   alter table product add column image_url text default null;
3 | `);
4 | 
5 | exports.down = knex => knex.raw(`
6 |   alter table product drop column image_url;
7 | `);
8 | 
--------------------------------------------------------------------------------
/test/integration/db/integration-url.js:
--------------------------------------------------------------------------------
1 | const { env } = process;
2 | 
3 | module.exports = `postgres://${env.DANGER_INTEGRATION_USER}:${env.DANGER_INTEGRATION_PASSWORD}`
4 |   + `@${env.DANGER_INTEGRATION_HOST}:${env.DANGER_INTEGRATION_PORT}/${env.DANGER_INTEGRATION_DATABASE}`;
5 | 
--------------------------------------------------------------------------------
/.eslintrc:
--------------------------------------------------------------------------------
1 | {
2 |   "extends": "airbnb-base",
3 |   "rules": {
4 |     "no-console": "off",
5 |     "implicit-arrow-linebreak": "off",
6 |     "global-require": "off",
7 |     "no-multi-spaces": "off",
8 |     "arrow-parens": "off"
9 |   },
10 |   "env": {
11 |     "jest": true
12 |   }
13 | }
14 | 
--------------------------------------------------------------------------------
/test/integration/db/get-tables.js:
--------------------------------------------------------------------------------
1 | // NOTE: completed from a commented-out stub; the pg_tables query and the
2 | // built-in schema filter are assumptions (the stub never defined `names`
3 | // or `excludingBuiltins`), modelled on the pattern used in ./list.js.
4 | const BUILT_IN_SCHEMAS = ['pg_catalog', 'information_schema'];
5 | 
6 | const getTables = async (ctx, showBuiltIn = false) => {
7 |   const knex = ctx.connect();
8 |   const { rows } = await knex.raw(`
9 |     select schemaname, tablename from pg_catalog.pg_tables
10 |   `);
11 |   const names = rows
12 |     .filter(t => showBuiltIn || !BUILT_IN_SCHEMAS.includes(t.schemaname))
13 |     .map(t => t.tablename);
14 |   return new Promise(resolve =>
15 |     knex.destroy(() => resolve(names)));
16 | };
17 | 
18 | module.exports = getTables;
19 | 
--------------------------------------------------------------------------------
/src/util/stringify-env.js:
--------------------------------------------------------------------------------
1 | /**
2 |  * Turn key-value pairs into the dotenv file format.
3 |  */
4 | module.exports = keyValuePairs =>
5 |   Object.keys(keyValuePairs)
6 |     .map((key) => {
7 |       const value = keyValuePairs[key];
8 |       return `${key}=${value}`;
9 |     })
10 |     .join('\n');
11 | 
--------------------------------------------------------------------------------
/test/integration/app/cwd.js:
--------------------------------------------------------------------------------
1 | const path = require('path');
2 | const fs = require('fs');
3 | 
4 | module.exports = (app) => {
5 |   const dir = path.join(__dirname, app);
6 |   if (!fs.lstatSync(dir).isDirectory()) {
7 |     throw new Error(`Unknown app ${app}!`);
8 |   }
9 | 
10 |   return dir;
11 | };
12 | 
--------------------------------------------------------------------------------
/test/integration/app/knexapp/migrations/20191124214437_init.js:
--------------------------------------------------------------------------------
1 | exports.up = knex => knex.raw(`
2 |   create table product (
3 |     id serial primary key,
4 |     upc char(20) not null,
5 |     name text not null
6 |   );
7 | `);
8 | 
9 | exports.down = knex => knex.raw(`
10 |   drop table product;
11 | `);
12 | 
--------------------------------------------------------------------------------
/test/integration/util/reset-metrics.js:
--------------------------------------------------------------------------------
1 | const fs = require('fs');
2 | 
3 | const { STORE_PATH } = require('../../../src/metrics/constants');
4 | 
5 | const resetMetrics = () => {
6 |   if (fs.existsSync(STORE_PATH)) {
7 |     fs.unlinkSync(STORE_PATH);
8 |   }
9 | };
10 | 
11 | module.exports = resetMetrics;
12 | 
--------------------------------------------------------------------------------
/.env.example:
--------------------------------------------------------------------------------
1 | NODE_ENV=development
2 | 
3 | DANGER_INTEGRATION_HOST=localhost
4 | DANGER_INTEGRATION_PORT=45432
5 | DANGER_INTEGRATION_USER=integration_test
6 | DANGER_INTEGRATION_PASSWORD=integration_test
7 | DANGER_INTEGRATION_DATABASE=integration_test
8 | DANGER_INTEGRATION_RESET=nuke
9 | DANGER_INTEGRATION_PROXY_PORT=26662
10 | 
--------------------------------------------------------------------------------
/src/util/add-all.js:
--------------------------------------------------------------------------------
1 | /* eslint-disable no-param-reassign */
2 | 
3 | /**
4 |  * Like mergeOptions, but modifies an existing target.
5 | */ 6 | const addAll = (target, ...mergeInObjects) => 7 | mergeInObjects.forEach(source => 8 | Object.keys(source).forEach((key) => { 9 | target[key] = source[key]; 10 | })); 11 | 12 | module.exports = addAll; 13 | -------------------------------------------------------------------------------- /test/integration/util/matcher.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Escapes all regex special characters from s; the returned 3 | * value can then be used as a literal in a regular expression 4 | */ 5 | const escapeRegex = s => 6 | s.replace(/[-\\^$*+?.()|[\]{}]/g, '\\$&'); 7 | 8 | module.exports = { 9 | 10 | startsWith: s => 11 | new RegExp(`^${escapeRegex(s)}`), 12 | 13 | }; 14 | -------------------------------------------------------------------------------- /docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: '3.6' 2 | services: 3 | postgres: 4 | image: postgres:11 5 | ports: 6 | - "${DANGER_INTEGRATION_PORT}:5432" 7 | restart: always 8 | environment: 9 | POSTGRES_USER: "${DANGER_INTEGRATION_USER}" 10 | POSTGRES_PASSWORD: "${DANGER_INTEGRATION_PASSWORD}" 11 | POSTGRES_DB: "${DANGER_INTEGRATION_DATABASE}" 12 | -------------------------------------------------------------------------------- /test/integration/util/read-metrics.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | 3 | const { STORE_PATH } = require('../../../src/metrics/constants'); 4 | 5 | const readMetrics = () => { 6 | const body = fs.readFileSync(STORE_PATH, { encoding: 'utf8' }); 7 | const metrics = body.split('\n') 8 | .filter(x => x.trim() !== '') 9 | .map(x => JSON.parse(x)); 10 | return metrics; 11 | }; 12 | 13 | module.exports = readMetrics; 14 | -------------------------------------------------------------------------------- /src/cmd/url.js: -------------------------------------------------------------------------------- 1 | const { set: setCommandLine } = require('../metrics/command-line'); 2 | const endProgram = require('../end-program'); 3 | 4 | exports.command = 'url'; 5 | exports.desc = 'prints your current connection string'; 6 | 7 | exports.builder = {}; 8 | 9 | exports.handler = () => { 10 | setCommandLine(); 11 | 12 | const db = require('../db')(); 13 | console.log(db.thisUrl()); 14 | 15 | endProgram(0); 16 | }; 17 | -------------------------------------------------------------------------------- /test/integration/util/explode-url.js: -------------------------------------------------------------------------------- 1 | const { parse: parseUrl } = require('pg-connection-string'); 2 | 3 | const explodeUrl = (databaseUrl) => { 4 | const parsed = parseUrl(databaseUrl); 5 | Object.keys(parsed).forEach((key) => { 6 | if (parsed[key] === null) { 7 | parsed[key] = undefined; // nulls get coerced to "null" by psql 8 | } 9 | }); 10 | return parsed; 11 | }; 12 | 13 | module.exports = explodeUrl; 14 | -------------------------------------------------------------------------------- /src/cmd/migrate/knex/util/read-migrations.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | const parseMigrationName = require('./parse-migration-name'); 3 | 4 | module.exports = (migrationsPath) => { 5 | if (!fs.existsSync(migrationsPath) 6 | || !fs.lstatSync(migrationsPath).isDirectory()) { 7 | return []; 8 | } 9 | 10 | return fs.readdirSync(migrationsPath) 11 | .map(filename => 
parseMigrationName(migrationsPath, filename)); 12 | }; 13 | -------------------------------------------------------------------------------- /test/integration/app/knexapp/migrations/20191124331980_data.js: -------------------------------------------------------------------------------- 1 | exports.up = knex => knex.raw(` 2 | insert into product (name, upc, image_url) 3 | values 4 | ('Widget', 'FFCD4', 'http://image1'), 5 | ('Bill Murray', 'A0221', 'http://image2'), 6 | ('Fidget Spinner', 'E1BCC', 'http://image3'), 7 | ('Parsnip', '62E83', 'http://image4'); 8 | 9 | `); 10 | 11 | exports.down = knex => knex.raw(` 12 | truncate table product; 13 | `); 14 | -------------------------------------------------------------------------------- /src/cmd/current.js: -------------------------------------------------------------------------------- 1 | const { set: setCommandLine } = require('../metrics/command-line'); 2 | const endProgram = require('../end-program'); 3 | 4 | exports.command = 'current'; 5 | exports.desc = 'prints the name of the database that your connection string refers to right now'; 6 | 7 | exports.builder = {}; 8 | 9 | exports.handler = () => { 10 | const db = require('../db')(); 11 | console.log(db.thisDb()); 12 | 13 | setCommandLine(); 14 | return endProgram(0); 15 | }; 16 | -------------------------------------------------------------------------------- /src/util/build-map.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Builds a mapping from values of keyMap to values of valueMap 3 | * for entries with the same key. 4 | * Only keys in keyMap will be considered. 5 | * 6 | * @param {object} keyMap 7 | * @param {object} valueMap 8 | */ 9 | const buildMap = (keyMap, valueMap) => { 10 | const output = {}; 11 | Object.keys(keyMap).forEach((key) => { 12 | output[keyMap[key]] = valueMap[key]; 13 | }); 14 | return output; 15 | }; 16 | 17 | module.exports = buildMap; 18 | -------------------------------------------------------------------------------- /src/util/confirm-prompt.js: -------------------------------------------------------------------------------- 1 | const readline = require('readline'); 2 | 3 | module.exports = (prompt, expectedAnswer) => { 4 | const rl = readline.createInterface({ 5 | input: process.stdin, 6 | output: process.stdout, 7 | }); 8 | 9 | return new Promise((resolve, reject) => 10 | rl.question(prompt, (answer) => { 11 | rl.close(); 12 | if (answer === expectedAnswer) { 13 | resolve(); 14 | } else { 15 | reject(); 16 | } 17 | })); 18 | }; 19 | -------------------------------------------------------------------------------- /src/pgshrc/create.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | const path = require('path'); 3 | const findConfig = require('find-config'); 4 | 5 | module.exports = (config) => { 6 | const envPath = findConfig('.env'); 7 | if (!envPath) { 8 | throw new Error('Cannot create a .pgshrc without a .env!'); 9 | } 10 | 11 | const configPath = path.join(envPath, '../.pgshrc'); 12 | fs.writeFileSync( 13 | configPath, 14 | `${JSON.stringify(config, null, 2)}\n`, 15 | { encoding: 'utf8' }, 16 | ); 17 | return configPath; 18 | }; 19 | -------------------------------------------------------------------------------- /src/env/parse.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | const path = require('path'); 3 | const dotenv = require('dotenv'); 4 | const 
findProjectRoot = require('../util/find-project-root'); 5 | 6 | const config = require('../config'); 7 | 8 | module.exports = () => { 9 | const projectRoot = findProjectRoot(); 10 | const dotenvPath = path.join(projectRoot, '.env'); 11 | 12 | if (!fs.existsSync(dotenvPath)) return null; 13 | 14 | return dotenv.parse(fs.readFileSync(dotenvPath), { 15 | encoding: config.dotenv_encoding, 16 | }); 17 | }; 18 | -------------------------------------------------------------------------------- /src/global/keys.js: -------------------------------------------------------------------------------- 1 | const METRICS_ENABLED = 'metrics_enabled'; 2 | const METRICS_UPLOAD_PERIOD_SEC = 'metrics_upload_period_sec'; 3 | const METRICS_UPLOAD_USE_HTTPS = 'metrics_upload_use_https'; 4 | const METRICS_IN_PROGRESS = 'metrics_lock'; 5 | const METRICS_LAST_SENT = 'metrics_last_sent'; 6 | 7 | const QUICK_HASH_KEY = 'quick_hash_key'; 8 | 9 | 10 | module.exports = { 11 | METRICS_LAST_SENT, 12 | METRICS_ENABLED, 13 | METRICS_IN_PROGRESS, 14 | METRICS_UPLOAD_PERIOD_SEC, 15 | METRICS_UPLOAD_USE_HTTPS, 16 | QUICK_HASH_KEY, 17 | }; 18 | -------------------------------------------------------------------------------- /docs/issue_template.md: -------------------------------------------------------------------------------- 1 | Please include the following (data or output of given commands). Delete this paragraph before submission. 2 | 3 | > Operating System and Version 4 | 5 | > `pgsh --version` 6 | 7 | > The command you are running 8 | 9 | > Your `.pgshrc` file (if any) and the relevant part of your `.env`, obfuscated as necessary 10 | 11 | > Your expected output 12 | 13 | > The actual output on your system with `DEBUG=pgsh:*` in your environment (no passwords or hostnames are logged) 14 | 15 | > Any additional information you think may help 16 | 17 | -------------------------------------------------------------------------------- /src/cmd/migrate/up.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable import/no-dynamic-require */ 2 | const config = require('../../config'); 3 | const delegate = require('./util/delegate'); 4 | 5 | if (config.migrations.backend) { 6 | const { backend } = config.migrations; 7 | module.exports = require(`./${backend}/up`); 8 | } else { 9 | exports.command = 'up'; 10 | exports.desc = 'migrates the current database to the latest version found in your migration directory'; 11 | exports.builder = yargs => yargs; 12 | exports.handler = delegate('up', { setConfig: true }); 13 | } 14 | -------------------------------------------------------------------------------- /src/cmd/migrate/force-up.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable import/no-dynamic-require */ 2 | const config = require('../../config'); 3 | const delegate = require('./util/delegate'); 4 | 5 | if (config.migrations.backend) { 6 | const { backend } = config.migrations; 7 | module.exports = require(`./${backend}/force-up`); 8 | } else { 9 | exports.command = 'force-up'; 10 | exports.desc = 're-writes the migrations record entirely based on your migrations directory'; 11 | exports.builder = yargs => yargs; 12 | exports.handler = delegate('force-up', { setConfig: true }); 13 | } 14 | -------------------------------------------------------------------------------- /src/cmd/migrate/knex/util/delete-migration.js: -------------------------------------------------------------------------------- 1 | const debug = 
require('debug')('pgsh:validate');
2 | const config = require('../../../../config');
3 | 
4 | const deleteMigration = async (knex, id) => {
5 |   const SCHEMA = config.migrations.schema || 'public';
6 |   const TABLE = config.migrations.table || 'knex_migrations';
7 |   try {
8 |     await knex.raw(`delete from ${SCHEMA}.${TABLE} where id = ?`, [id]);
9 |     return true;
10 |   } catch (err) {
11 |     debug('could not delete migration', err);
12 |     return false;
13 |   }
14 | };
15 | 
16 | module.exports = deleteMigration;
17 | 
--------------------------------------------------------------------------------
/src/metrics/constants.js:
--------------------------------------------------------------------------------
1 | const fs = require('fs');
2 | const path = require('path');
3 | const xdg = require('@folder/xdg');
4 | 
5 | const dirs = xdg();
6 | fs.mkdirSync(dirs.data, { recursive: true });
7 | 
8 | const SERVER_URL_HTTPS = 'https://pgsh-metrics.herokuapp.com';
9 | const SERVER_URL_HTTP = 'http://pgsh-metrics.herokuapp.com';
10 | 
11 | const MAX_SAMPLES_PER_SEND = 500;
12 | const STORE_PATH = path.join(dirs.data, 'pgsh_metrics_store.ndjson');
13 | 
14 | module.exports = {
15 |   SERVER_URL_HTTPS,
16 |   SERVER_URL_HTTP,
17 |   MAX_SAMPLES_PER_SEND,
18 |   STORE_PATH,
19 | };
20 | 
--------------------------------------------------------------------------------
/src/util/quick-hash.js:
--------------------------------------------------------------------------------
1 | const crypto = require('crypto');
2 | 
3 | const randomString = require('./random-string');
4 | const global = require('../global');
5 | const { QUICK_HASH_KEY } = require('../global/keys');
6 | 
7 | let key = global.get(QUICK_HASH_KEY);
8 | if (!key) {
9 |   key = randomString(48);
10 |   global.set(QUICK_HASH_KEY, key);
11 | }
12 | 
13 | const quickHash = (str) => {
14 |   const hexString = crypto
15 |     .createHmac('sha256', key)
16 |     .update(str)
17 |     .digest('hex')
18 |     .substr(0, 6);
19 |   return `<${hexString}>`;
20 | };
21 | 
22 | module.exports = quickHash;
23 | 
--------------------------------------------------------------------------------
/src/cmd/migrate/knex/util/parse-migration-name.js:
--------------------------------------------------------------------------------
1 | const path = require('path');
2 | 
3 | const MIGRATION_FILENAME_REGEX = new RegExp(
4 |   '(0*)([A-Za-z0-9]+)[_](.+)',
5 |   'i',
6 | );
7 | 
8 | module.exports = (dir, filename) => {
9 |   const match = MIGRATION_FILENAME_REGEX.exec(filename);
10 |   if (!match) {
11 |     return console.warn(`Skipping non-migration ${filename}`);
12 |   }
13 |   const [_full, zeroes, textualNumber, suffix] = match; // eslint-disable-line no-unused-vars
14 |   return {
15 |     id: textualNumber,
16 |     name: filename,
17 |     prefix: `${zeroes}${textualNumber}`,
18 |     fullPath: path.join(dir, filename),
19 |     suffix,
20 |   };
21 | };
22 | 
--------------------------------------------------------------------------------
/src/util/find-project-root.js:
--------------------------------------------------------------------------------
1 | const path = require('path');
2 | const findConfig = require('find-config');
3 | 
4 | /**
5 |  * Searches ancestor directories for a file or directory
6 |  * matching name, and returns the directory that contains it.
7 |  * Returns null if the given name cannot be found.
8 |  */
9 | const dirOf = (name) => {
10 |   const configPath = findConfig(name, { });
11 |   return configPath ? path.join(configPath, '..') : null;
12 | };
13 | 
14 | module.exports = () =>
15 |   dirOf('.pgshrc')
16 |   || dirOf('package.json')
17 |   || dirOf('build.gradle')
18 |   || dirOf('pom.xml')
19 |   || dirOf('manage.py')
20 |   || dirOf('.env')
21 |   || process.cwd();
22 | 
--------------------------------------------------------------------------------
/src/cmd/migrate/down.js:
--------------------------------------------------------------------------------
1 | /* eslint-disable import/no-dynamic-require */
2 | const config = require('../../config');
3 | const delegate = require('./util/delegate');
4 | 
5 | if (config.migrations.backend) {
6 |   const { backend } = config.migrations;
7 |   module.exports = require(`./${backend}/down`);
8 | } else {
9 |   exports.command = 'down <ver>';
10 |   exports.desc = 'down-migrates the current database to the given migration';
11 | 
12 |   exports.builder = yargs =>
13 |     yargs
14 |       .positional('ver', {
15 |         describe: 'the migration to migrate down to',
16 |         type: 'string',
17 |       });
18 | 
19 |   exports.handler = delegate('down', { setConfig: true });
20 | }
21 | 
--------------------------------------------------------------------------------
/src/cmd/migrate/knex/util/get-applied-migrations.js:
--------------------------------------------------------------------------------
1 | const debug = require('debug')('pgsh:validate');
2 | const config = require('../../../../config');
3 | 
4 | const getAppliedMigrations = async (knex) => {
5 |   const SCHEMA = config.migrations.schema || 'public';
6 |   const TABLE = config.migrations.table || 'knex_migrations';
7 |   try {
8 |     const { rows } = await knex.raw(`
9 |       select id, name from ${SCHEMA}.${TABLE}
10 |       order by
11 |         split_part(name, '_', 1) desc;
12 |     `);
13 |     return rows;
14 |   } catch (err) {
15 |     debug('could not list applied migrations', err);
16 |     return [];
17 |   }
18 | };
19 | 
20 | module.exports = getAppliedMigrations;
21 | 
--------------------------------------------------------------------------------
/test/integration/_teardown.js:
--------------------------------------------------------------------------------
1 | const findConfig = require('find-config');
2 | const fs = require('fs');
3 | 
4 | module.exports = () => {
5 |   // restore the .env and .pgshrc files that _setup.js
6 |   // moved aside so the tests wouldn't pick them up
7 |   const envPath = findConfig('.env.pgshIntegrationBackup');
8 |   if (envPath) {
9 |     const originalPath = envPath.replace('.pgshIntegrationBackup', '');
10 |     fs.unlinkSync(originalPath);
11 |     fs.renameSync(envPath, originalPath);
12 |   }
13 |   const configPath = findConfig('.pgshrc.pgshIntegrationBackup');
14 |   if (configPath) {
15 |     fs.renameSync(configPath, configPath.replace('.pgshIntegrationBackup', ''));
16 |   }
17 | 
18 |   return Promise.resolve();
19 | };
20 | 
--------------------------------------------------------------------------------
/src/cmd/migrate/force-down.js:
--------------------------------------------------------------------------------
1 | /* eslint-disable import/no-dynamic-require */
2 | const config = require('../../config');
3 | const delegate = require('./util/delegate');
4 | 
5 | if (config.migrations.backend) {
6 |   const { backend } = config.migrations;
7 |   module.exports = require(`./${backend}/force-down`);
8 | } else {
9 |   exports.command = 'force-down <ver>';
10 |   exports.desc = 'removes the record of any migration past the given version';
11 | 
12 |   exports.builder = yargs =>
13 |     yargs
14 |       .positional('ver', {
15 |         describe: 'the migration number to migrate down to',
16 |         type: 'string',
17 |       });
18 | 
19 |   exports.handler = delegate('force-down', { setConfig: true });
20 | }
21 | 
--------------------------------------------------------------------------------
/test/integration/util/stream-utils.js:
--------------------------------------------------------------------------------
1 | const consume = async (output, lineCb, shouldExit) => {
2 |   let iterations = 0;
3 |   // eslint-disable-next-line no-await-in-loop
4 |   for (let line = await output.next(); !line.done; line = await output.next()) {
5 |     if (lineCb) lineCb(line.value);
6 |     iterations += 1;
7 |     if (!!shouldExit && shouldExit()) break;
8 |   }
9 |   return iterations;
10 | };
11 | 
12 | /**
13 |  * Returns true from the nth call onward (and false before that),
14 |  * so consume() stops after processing exactly n lines.
15 |  */
16 | const numLines = (n) => {
17 |   let count = 0;
18 |   return () => {
19 |     if (count >= n) return true;
20 |     count += 1;
21 |     return (count === n);
22 |   };
23 | };
24 | 
25 | module.exports = {
26 |   consume,
27 |   numLines,
28 | };
29 | 
--------------------------------------------------------------------------------
/src/env/create.js:
--------------------------------------------------------------------------------
1 | const fs = require('fs');
2 | const path = require('path');
3 | const findConfig = require('find-config');
4 | 
5 | const stringifyEnv = require('../util/stringify-env');
6 | const findProjectRoot = require('../util/find-project-root');
7 | 
8 | /**
9 |  * Create a new dotenv file.
10 |  */
11 | module.exports = (keyValuePairs, encoding = 'utf8') => {
12 |   if (findConfig('.env')) {
13 |     throw new Error('.env file already exists!');
14 |   }
15 | 
16 |   const createdPath = path.join(
17 |     findProjectRoot(),
18 |     '.env',
19 |   );
20 | 
21 |   fs.writeFileSync(
22 |     createdPath,
23 |     `# generated by pgsh\n\n${stringifyEnv(keyValuePairs)}\n`,
24 |     { encoding },
25 |   );
26 | 
27 |   return createdPath;
28 | };
29 | 
--------------------------------------------------------------------------------
/test/integration/config.test.js:
--------------------------------------------------------------------------------
1 | const { consume, numLines } = require('./util/stream-utils');
2 | const makeContext = require('./util/context');
3 | 
4 | const APP = 'knexapp';
5 | const cwd = require('./app/cwd')(APP);
6 | const { env } = require('./app/dotfiles')(APP);
7 | 
8 | it('does fine if there is no .pgshrc', async () => {
9 |   const ctx = makeContext(cwd, null, env);
10 |   const { pgsh } = ctx;
11 | 
12 |   { // any execution will exit 1
13 |     const { exitCode, errors } = pgsh('ls');
14 |     await consume(errors, line => expect(line).toEqual(
15 |       'pgsh is configured to use the value of DATABASE_URL in your .env file, but it is unset. Exiting.',
16 |     ), numLines(1));
17 |     expect(await exitCode).toBe(54);
18 |   }
19 | });
20 | 
--------------------------------------------------------------------------------
/src/start.js:
--------------------------------------------------------------------------------
1 | const global = require('./global');
2 | const { start: startTimer } = require('./metrics/timer');
3 | const { METRICS_IN_PROGRESS } = require('./global/keys');
4 | const tryToSendMetrics = require('./metrics/send');
5 | 
6 | const start = () => {
7 |   // don't tread on a running upload process
8 |   if (global.get(METRICS_IN_PROGRESS)) {
9 |     if (process.argv.indexOf('-x') === -1) {
10 |       console.error('pgsh still has lockfiles open.
Please kill these processes or call pgsh again with -x'); 11 | process.exit(92); 12 | } 13 | global.set(METRICS_IN_PROGRESS, false); 14 | } 15 | 16 | // send metrics, if it's time 17 | tryToSendMetrics(); 18 | 19 | // start the timer 20 | startTimer(); 21 | }; 22 | 23 | module.exports = start; 24 | -------------------------------------------------------------------------------- /src/metrics/command-line.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | const path = require('path'); 3 | const quickHash = require('../util/quick-hash'); 4 | 5 | let commandLine = null; 6 | 7 | const hidePath = (str) => { 8 | if (fs.existsSync(str)) { 9 | return path.basename(str); 10 | } 11 | return str; 12 | }; 13 | 14 | module.exports = { 15 | set: (...sensitiveStrings) => { 16 | const args = process.argv; 17 | 18 | if (args.length >= 2) { 19 | args[0] = hidePath(args[0]); 20 | } 21 | 22 | if (args.length >= 3) { 23 | args[1] = hidePath(args[1]); 24 | } 25 | 26 | commandLine = args.map(x => 27 | (sensitiveStrings.indexOf(x) !== -1 28 | ? quickHash(x) 29 | : x)); 30 | }, 31 | get: () => commandLine, 32 | }; 33 | -------------------------------------------------------------------------------- /src/util/print-table.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable quote-props */ 2 | const Table = require('cli-table'); 3 | 4 | const TABLE_OPTIONS = { 5 | chars: { 6 | 'top': '', 7 | 'top-mid': '', 8 | 'top-left': '', 9 | 'top-right': '', 10 | 'bottom': '', 11 | 'bottom-mid': '', 12 | 'bottom-left': '', 13 | 'bottom-right': '', 14 | 'left': '', 15 | 'left-mid': '', 16 | 'mid': '', 17 | 'mid-mid': '', 18 | 'right': '', 19 | 'right-mid': '', 20 | 'middle': ' ', 21 | }, 22 | style: { 23 | 'padding-left': 0, 24 | 'padding-right': 0, 25 | }, 26 | }; 27 | 28 | module.exports = (rows) => { 29 | const table = new Table(TABLE_OPTIONS); 30 | table.push(...rows); 31 | console.log(table.toString()); 32 | }; 33 | 34 | /* eslint-enable quote-props */ 35 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes. 4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "type": "node", 9 | "request": "launch", 10 | "name": "list -a", 11 | "program": "${workspaceFolder}/src/index.js", 12 | "args": ["list", "-a"], 13 | "env": { 14 | "DEBUG": "pgsh:*" 15 | } 16 | }, 17 | { 18 | "type": "node", 19 | "request": "launch", 20 | "name": "create hyphen-ated", 21 | "program": "${workspaceFolder}/src/index.js", 22 | "args": ["create", "hyphen-ated"], 23 | "env": { 24 | "DEBUG": "pgsh:*" 25 | } 26 | } 27 | ] 28 | } -------------------------------------------------------------------------------- /src/metrics/cpu.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable key-spacing */ 2 | const os = require('os'); 3 | 4 | const averageOf = (cpus) => { 5 | const avg = (key, key2) => 6 | cpus.map(x => (!key2 ? 
x[key] : x[key][key2])) 7 | .reduce((a, b) => (a + b), 0) / cpus.length; 8 | 9 | if (cpus.length === 0) return undefined; 10 | return { 11 | model: cpus[0].model, 12 | cores: cpus.length, 13 | speed: avg('speed'), 14 | times: { 15 | user: avg('times', 'user'), 16 | nice: avg('times', 'nice'), 17 | sys: avg('times', 'sys'), 18 | idle: avg('times', 'idle'), 19 | irq: avg('times', 'irq'), 20 | }, 21 | }; 22 | }; 23 | 24 | const getCpuMetrics = () => ({ 25 | cpus: averageOf(os.cpus()), 26 | loadavg: os.loadavg(), 27 | type: os.type(), 28 | platform: os.platform(), 29 | release: os.release(), 30 | }); 31 | 32 | module.exports = getCpuMetrics; 33 | -------------------------------------------------------------------------------- /src/util/dotenv.js: -------------------------------------------------------------------------------- 1 | const c = require('ansi-colors'); 2 | const parseEnv = require('../env/parse'); 3 | 4 | module.exports = { 5 | config: () => { 6 | const envConfig = parseEnv() || {}; 7 | 8 | // check if any of our .env vars are set in our execution context 9 | let conflicts = 0; 10 | Object.keys(envConfig).forEach((k) => { 11 | if (k in process.env) { 12 | console.error(`${c.red('FATAL:')} ${c.whiteBright(k)}=${process.env[k]}`); 13 | conflicts += 1; 14 | } 15 | process.env[k] = envConfig[k]; 16 | }); 17 | 18 | // there's too much opportunity for subtle, hard-to-trace bugs 19 | if (conflicts > 0) { 20 | const noun = (conflicts === 1 ? 'this variable' : 'these variables'); 21 | console.error(`\nUNSET ${noun} before running ${c.yellowBright('pgsh')} here.`); 22 | process.exit(14); 23 | } 24 | }, 25 | }; 26 | -------------------------------------------------------------------------------- /src/metrics/opt-in.js: -------------------------------------------------------------------------------- 1 | const { prompt } = require('enquirer'); 2 | 3 | const global = require('../global'); 4 | const { METRICS_ENABLED } = require('../global/keys'); 5 | 6 | /** 7 | * Determine (potentially interactively) whether or not the user 8 | * has opted out of anonymous usage metrics (telemetry). 9 | */ 10 | const askForOptIn = async () => { 11 | const metricsEnabled = global.get(METRICS_ENABLED); 12 | if (metricsEnabled === undefined) { 13 | // ask the user to opt-in; write true or false 14 | const { shouldEnable } = await prompt({ 15 | type: 'toggle', 16 | name: 'shouldEnable', 17 | message: 'Would you like to send anonymous usage data to support further pgsh development?', 18 | }); 19 | global.set(METRICS_ENABLED, shouldEnable); 20 | return shouldEnable; 21 | } 22 | return metricsEnabled; 23 | }; 24 | 25 | module.exports = askForOptIn; 26 | -------------------------------------------------------------------------------- /src/task/connection-count.js: -------------------------------------------------------------------------------- 1 | const debug = require('debug')('pgsh:util:connection-count'); 2 | 3 | /** 4 | * Returns the number of connections to the given database, 5 | * without counting this one (if we're connecting to the same db). 6 | */ 7 | module.exports = db => async (databaseName) => { 8 | const knex = db.connect(); 9 | 10 | const numConnections = await knex.raw(` 11 | select count(*) as connections 12 | from pg_stat_activity 13 | where datname = ? 14 | `, [databaseName]) 15 | .then(({ rows }) => +rows[0].connections); 16 | 17 | const otherConnections = db.thisDb() === databaseName 18 | ? 
numConnections - 1 19 | : numConnections; 20 | 21 | debug('other connections:', otherConnections); 22 | debug('using current db', db.thisDb() === databaseName); 23 | 24 | await new Promise(resolve => knex.destroy(resolve)); 25 | return otherConnections; 26 | }; 27 | -------------------------------------------------------------------------------- /test/integration/util/write-dotfiles.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | const path = require('path'); 3 | 4 | const stringifyEnv = require('../../../src/util/stringify-env'); 5 | 6 | const writeDotfiles = (cwd, { config = null, env = null }) => { 7 | const ENV_PATH = path.join(cwd, '.env'); 8 | if (env) { 9 | fs.writeFileSync( 10 | ENV_PATH, 11 | `# generated by pgsh\n\n${stringifyEnv(env)}\n`, 12 | { encoding: 'utf8' }, 13 | ); 14 | } else if (fs.existsSync(ENV_PATH)) { 15 | fs.unlinkSync(ENV_PATH); 16 | } 17 | 18 | const PGSHRC_PATH = path.join(cwd, '.pgshrc'); 19 | if (config) { 20 | fs.writeFileSync( 21 | PGSHRC_PATH, 22 | `${JSON.stringify(config, null, 2)}\n`, 23 | { encoding: 'utf8' }, 24 | ); 25 | } else if (fs.existsSync(PGSHRC_PATH)) { 26 | fs.unlinkSync(PGSHRC_PATH); 27 | } 28 | }; 29 | 30 | module.exports = writeDotfiles; 31 | -------------------------------------------------------------------------------- /test/integration/util/find-pgsh.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | const path = require('path'); 3 | const debug = require('debug')('integration:util:find-pgsh'); 4 | const findConfig = require('find-config'); 5 | 6 | /** 7 | * Detect the entrypoint for pgsh in this repository. 8 | */ 9 | module.exports = () => { 10 | const packageJson = findConfig('package.json'); 11 | if (!packageJson) { 12 | debug('could not find package.json'); 13 | return false; 14 | } 15 | 16 | const { name, bin } = JSON.parse(fs.readFileSync(packageJson, 'utf8')); 17 | if (name !== 'pgsh') { 18 | debug(`Found a different package.json than expected: ${name}`); 19 | return false; 20 | } 21 | 22 | const relativePathToEntrypoint = bin.pgsh; 23 | const resolvedPath = path.join( 24 | path.dirname(packageJson), 25 | relativePathToEntrypoint, 26 | ); 27 | 28 | debug(resolvedPath); 29 | return resolvedPath; 30 | }; 31 | -------------------------------------------------------------------------------- /src/pgshrc/read.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | const c = require('ansi-colors'); 3 | const findConfig = require('find-config'); 4 | const mergeOptions = require('merge-options'); 5 | 6 | const path = findConfig('.pgshrc'); 7 | 8 | const defaultConfig = require('./default'); 9 | 10 | let userConfig; 11 | try { 12 | userConfig = path 13 | ? 
JSON.parse(fs.readFileSync(path, 'utf8'))
14 |     : null;
15 | } catch (err) {
16 |   console.error(`${c.red('FATAL:')} error parsing ${c.underline('.pgshrc')}.`);
17 |   console.error(err);
18 |   require('../end-program')(15);
19 | }
20 | 
21 | const config = mergeOptions(defaultConfig, userConfig || {});
22 | 
23 | // eslint-disable-next-line no-unused-vars
24 | const printConfig = () => {
25 |   console.log('*********** CONFIG ***************');
26 |   console.log(JSON.stringify(config, null, 2));
27 |   console.log('*********** CONFIG ***************');
28 | };
29 | 
30 | module.exports = config;
31 | 
--------------------------------------------------------------------------------
/src/cmd/migrate/knex/detect.js:
--------------------------------------------------------------------------------
1 | const fs = require('fs');
2 | const debug = require('debug')('pgsh:detect');
3 | const findConfig = require('find-config');
4 | 
5 | /**
6 |  * Quickly detect whether or not this project uses knex migrations.
7 |  * FIXME: right now, this simply detects if the project uses knex.
8 |  */
9 | module.exports = async () => {
10 |   if (findConfig('knexfile.js')) {
11 |     debug('found knexfile.js');
12 |     return true;
13 |   }
14 |   debug('could not find knexfile.js');
15 | 
16 |   const packageJson = findConfig('package.json');
17 |   if (!packageJson) {
18 |     debug('could not find package.json');
19 |     return false;
20 |   }
21 | 
22 |   const { dependencies, devDependencies } = JSON.parse(fs.readFileSync(packageJson, 'utf8'));
23 |   const foundVersion = (dependencies || {}).knex || (devDependencies || {}).knex;
24 |   debug(foundVersion ? `found knex:${foundVersion}` : 'could not find knex');
25 |   return !!foundVersion;
26 | };
27 | 
--------------------------------------------------------------------------------
/src/pgshrc/update-existing.js:
--------------------------------------------------------------------------------
1 | const fs = require('fs');
2 | const debug = require('debug')('pgsh:update-existing');
3 | const deepEqual = require('deep-equal');
4 | const findConfig = require('find-config');
5 | const mergeOptions = require('merge-options');
6 | 
7 | const configPath = findConfig('.pgshrc');
8 | 
9 | /**
10 |  * Merge a patch into the current .pgshrc; if nothing
11 |  * changes, only a debug message is emitted.
12 | */ 13 | module.exports = (patch) => { 14 | if (!configPath) { 15 | throw new Error('Could not find .pgshrc file!'); 16 | } 17 | 18 | const oldConfig = JSON.parse(fs.readFileSync(configPath, 'utf8')); 19 | const newConfig = mergeOptions(oldConfig, patch); 20 | 21 | if (deepEqual(oldConfig, newConfig, { strict: true })) { 22 | debug('Nothing was changed in .pgshrc!'); 23 | } 24 | 25 | fs.writeFileSync( 26 | configPath, 27 | `${JSON.stringify(newConfig, null, 2)}\n`, 28 | { encoding: 'utf8' }, 29 | ); 30 | }; 31 | -------------------------------------------------------------------------------- /test/unit/build-url.spec.js: -------------------------------------------------------------------------------- 1 | const buildUrl = require('../../src/util/build-url'); 2 | 3 | it('handles a basic case', () => { 4 | expect( 5 | buildUrl({ 6 | user: 'postgres', 7 | password: 'secret', 8 | host: 'local.docker', 9 | port: 15432, 10 | database: 'northwind', 11 | }), 12 | ).toEqual( 13 | 'postgres://postgres:secret@local.docker:15432/northwind', 14 | ); 15 | }); 16 | 17 | it('can deal with missing things', () => { 18 | expect( 19 | buildUrl({ 20 | host: 'localhost', 21 | database: 'northwind', 22 | user: 'postgres', 23 | }), 24 | ).toEqual( 25 | 'postgres://postgres@localhost/northwind', 26 | ); 27 | }); 28 | 29 | it('turns params into queries', () => { 30 | expect( 31 | buildUrl({ 32 | host: 'localhost', 33 | database: 'abc', 34 | ssl: true, 35 | client_encoding: 'utf8', 36 | }), 37 | ).toEqual( 38 | 'postgres://localhost/abc?encoding=utf8&ssl=true', 39 | ); 40 | }); 41 | -------------------------------------------------------------------------------- /src/cmd/migrate/validate.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable import/no-dynamic-require */ 2 | const config = require('../../config'); 3 | const delegate = require('./util/delegate'); 4 | 5 | const { set: setCommandLine } = require('../../metrics/command-line'); 6 | const endProgram = require('../../end-program'); 7 | 8 | if (config.migrations.backend) { 9 | const { backend } = config.migrations; 10 | module.exports = require(`./${backend}/validate`); 11 | } else { 12 | exports.command = ['validate', 'status']; 13 | exports.desc = 'validates the current database against the migration directory'; 14 | exports.builder = yargs => yargs; 15 | exports.handler = delegate('validate', { 16 | setConfig: true, 17 | 18 | // we can't detect any migrations backends; still display something. 
19 | backupHandler: async (yargs) => { 20 | setCommandLine(); 21 | const db = require('../../db')(); 22 | const printLatest = require('./knex/util/print-latest-migration')(db, yargs); 23 | await printLatest(); 24 | endProgram(0); 25 | }, 26 | }); 27 | } 28 | -------------------------------------------------------------------------------- /test/integration/app/dotfiles.js: -------------------------------------------------------------------------------- 1 | const APP_TO_CONFIG = {}; 2 | const APP_TO_ENV = {}; 3 | 4 | // ------------------------------------------------------------------------------- 5 | // knexapp 6 | APP_TO_CONFIG.knexapp = { 7 | mode: 'split', 8 | vars: { 9 | host: 'KNEXAPP_DB_HOST', 10 | port: 'KNEXAPP_DB_PORT', 11 | user: 'KNEXAPP_DB_USER', 12 | password: 'KNEXAPP_DB_PASSWORD', 13 | database: 'KNEXAPP_DB_DATABASE', 14 | }, 15 | migrations: { 16 | backend: 'knex', 17 | }, 18 | force_disable_metrics: true, 19 | }; 20 | APP_TO_ENV.knexapp = { 21 | KNEXAPP_DB_DATABASE: process.env.DANGER_INTEGRATION_DATABASE, 22 | KNEXAPP_DB_HOST: process.env.DANGER_INTEGRATION_HOST, 23 | KNEXAPP_DB_PORT: process.env.DANGER_INTEGRATION_PORT, 24 | KNEXAPP_DB_USER: process.env.DANGER_INTEGRATION_USER, 25 | KNEXAPP_DB_PASSWORD: process.env.DANGER_INTEGRATION_PASSWORD, 26 | }; 27 | 28 | // ------------------------------------------------------------------------------- 29 | module.exports = app => ({ 30 | config: APP_TO_CONFIG[app], 31 | env: APP_TO_ENV[app], 32 | }); 33 | -------------------------------------------------------------------------------- /src/index.js: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env node 2 | const config = require('./config'); 3 | 4 | require('./util/dotenv').config({ 5 | encoding: config.dotenv_encoding || 'utf8', 6 | }); 7 | 8 | require('./start')(); 9 | 10 | // eslint-disable-next-line no-unused-expressions 11 | require('yargs') 12 | .parserConfiguration({ 13 | 'unknown-options-as-args': true, 14 | 'halt-at-non-option': true, 15 | }) 16 | .scriptName('pgsh') 17 | .usage('pgsh: developer tools for interacting with postgres databases') 18 | .option('i', { 19 | alias: 'iso', 20 | type: 'boolean', 21 | describe: 'show timestamps in ISO-8601 format', 22 | default: false, 23 | }) 24 | .option('verbose', { 25 | alias: 'a', 26 | type: 'boolean', 27 | default: undefined, 28 | describe: 'introspect databases and show their latest migrations', 29 | }) 30 | .strict() 31 | .commandDir('cmd', { recurse: false }) 32 | .commandDir('cmd/migrate', { recurse: false }) 33 | .demandCommand(1, 'No command specified!') 34 | .help() 35 | .epilogue('See https://github.com/sastraxi/pgsh for more information') 36 | .argv; 37 | -------------------------------------------------------------------------------- /src/util/find-dir.js: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | const fs = require('fs'); 3 | 4 | /** 5 | * Starting at a given directory, attempt to find a sub-directory 6 | * with exactly the given name, then return its absolute path. 7 | * 8 | * Returns null if the directory couldn't be found. 9 | * @param {startDir} the directory to start the search in 10 | * (defaults to process.cwd()) 11 | */ 12 | module.exports = (dirName, startDir = process.cwd()) => { 13 | if (path.isAbsolute(dirName)) return dirName; 14 | 15 | let currentDirectory = startDir; 16 | while (true) { // eslint-disable-line no-constant-condition 17 | // is the directory here? 
18 | const candidate = path.join(currentDirectory, dirName); 19 | if (fs.existsSync(candidate) && fs.lstatSync(candidate).isDirectory()) { 20 | return path.resolve(candidate); 21 | } 22 | 23 | // keep going up until we can't anymore 24 | const nextDirectory = path.join(currentDirectory, '..'); 25 | if (currentDirectory === nextDirectory) break; 26 | currentDirectory = nextDirectory; 27 | } 28 | return null; 29 | }; 30 | -------------------------------------------------------------------------------- /src/global/index.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | const debug = require('debug')('pgsh:metrics'); 3 | const path = require('path'); 4 | const xdg = require('@folder/xdg'); 5 | 6 | const dirs = xdg(); 7 | fs.mkdirSync(dirs.config, { recursive: true }); 8 | 9 | const GLOBAL_CONFIG_PATH = path.join(dirs.config, 'pgsh_global.json'); 10 | debug('global config path', GLOBAL_CONFIG_PATH); 11 | 12 | const ensureExists = () => { 13 | if (!fs.existsSync(GLOBAL_CONFIG_PATH)) { 14 | fs.writeFileSync(GLOBAL_CONFIG_PATH, '{}'); 15 | } 16 | }; 17 | 18 | const readAsObject = () => 19 | JSON.parse(fs.readFileSync(GLOBAL_CONFIG_PATH)); 20 | 21 | const writeObject = (obj) => 22 | fs.writeFileSync(GLOBAL_CONFIG_PATH, JSON.stringify(obj, null, 2)); 23 | 24 | module.exports = { 25 | get: (key, defaultValue) => { 26 | ensureExists(); 27 | const obj = readAsObject(); 28 | if (!key) return obj; 29 | return key in obj ? obj[key] : defaultValue; 30 | }, 31 | 32 | set: (key, value) => { 33 | ensureExists(); 34 | const obj = readAsObject(); 35 | obj[key] = value; 36 | writeObject(obj); 37 | }, 38 | }; 39 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Cameron Gorrie 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | 
--------------------------------------------------------------------------------
/src/cmd/migrate/util/delegate.js:
--------------------------------------------------------------------------------
1 | /* eslint-disable import/no-dynamic-require */
2 | const debug = require('debug')('pgsh:delegate');
3 | 
4 | const detect = require('./detect');
5 | const updateConfig = require('../../../pgshrc/update-existing');
6 | 
7 | const DEFAULT_OPTIONS = {
8 |   setConfig: false,
9 |   backupHandler: undefined,
10 | };
11 | 
12 | /**
13 |  * Returns a yargs handler that tries to figure out which backend to run,
14 |  * sets it in .pgshrc, then delegates to an existing command's handler.
15 |  */
16 | const delegate = (command, { setConfig, backupHandler } = DEFAULT_OPTIONS) => async (yargs) => {
17 |   const backend = await detect();
18 |   if (!backend) {
19 |     console.log('Could not detect a migrations backend.');
20 |     if (backupHandler) {
21 |       debug('running backup cmd for', command);
22 |       return backupHandler(yargs);
23 |     }
24 |     throw new Error('no backend detected');
25 |   }
26 | 
27 |   if (setConfig) {
28 |     updateConfig({
29 |       migrations: { backend },
30 |     });
31 |   }
32 | 
33 |   const { handler } = require(`../${backend}/${command}`);
34 |   return handler(yargs);
35 | };
36 | 
37 | module.exports = delegate;
38 | 
--------------------------------------------------------------------------------
/src/cmd/migrate/knex/up.js:
--------------------------------------------------------------------------------
1 | const c = require('ansi-colors');
2 | const debug = require('debug')('pgsh:up');
3 | 
4 | const { set: setCommandLine } = require('../../../metrics/command-line');
5 | const endProgram = require('../../../end-program');
6 | 
7 | exports.command = 'up';
8 | exports.desc = '(knex) migrates the current database to the latest version found in your migration directory';
9 | 
10 | exports.builder = yargs => yargs;
11 | 
12 | exports.handler = async (yargs) => {
13 |   const db = require('../../../db')();
14 |   const printLatest = require('./util/print-latest-migration')(db, yargs);
15 |   setCommandLine();
16 | 
17 |   try {
18 |     const knex = db.connect();
19 |     const [batch, filenames] = await knex.migrate.latest();
20 |     if (filenames.length > 0) {
21 |       debug(`migration batch #${batch} => ${filenames}`);
22 |       filenames.forEach(filename =>
23 |         console.log(`↑ ${c.yellowBright(filename)}`));
24 |     }
25 | 
26 |     await printLatest();
27 |     endProgram(0);
28 |   } catch (err) {
29 |     console.error('migrate failed.');
30 |     debug(err.message); // knex already prints out the error, so don't repeat unless we ask
31 |     endProgram(1);
32 |   }
33 | };
34 | 
--------------------------------------------------------------------------------
/src/cmd/switch.js:
--------------------------------------------------------------------------------
1 | const { set: setCommandLine } = require('../metrics/command-line');
2 | const endProgram = require('../end-program');
3 | 
4 | exports.command = 'switch <target>';
5 | exports.desc = 'makes target your current database, changing the connection string';
6 | 
7 | exports.builder = yargs => yargs
8 |   .positional('target', {
9 |     describe: 'the database to switch to',
10 |     type: 'string',
11 |   })
12 |   .option('f', {
13 |     alias: 'force',
14 |     type: 'boolean',
15 |     describe: 'switch even if the target does not exist',
16 |     default: false,
17 |   });
18 | 
19 | exports.handler = async ({ target, force }) => {
20 |   const db = require('../db')();
21 |   setCommandLine(target);
22 | 
23 |   if (!force && !(await db.isValidDatabase(target))) {
24 | console.error(`${target} is not a valid database.`); 25 | return endProgram(2); 26 | } 27 | 28 | const current = db.thisDb(); 29 | if (target === current) { 30 | console.log(`Cannot switch to ${target}; that's the current database!`); 31 | return endProgram(2); 32 | } 33 | 34 | console.log(`Switching to ${target}...`); 35 | db.switchTo(target); 36 | console.log('Done!'); 37 | return endProgram(0); 38 | }; 39 | -------------------------------------------------------------------------------- /src/pgshrc/default.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | mode: 'url', /* or 'split', */ 3 | vars: { 4 | /* for url mode */ 5 | url: 'DATABASE_URL', 6 | 7 | /* for split mode */ 8 | host: 'POSTGRES_HOST', 9 | port: 'POSTGRES_PORT', 10 | user: 'POSTGRES_USER', 11 | password: 'POSTGRES_PASSWORD', 12 | database: 'POSTGRES_DATABASE', 13 | 14 | /* if you need a different login for super-user tasks, use this */ 15 | /* super_user: 'PG_SUPER_USER', */ 16 | /* super_password: 'PG_SUPER_PASSWORD', */ 17 | }, 18 | migrations: { 19 | backend: undefined, /* needs to be detected; no default */ 20 | path: 'migrations', /* knex */ 21 | schema: 'public', /* knex */ 22 | table: 'knex_migrations', /* knex */ 23 | }, 24 | protected: ['master'], /* don't destroy these branches */ 25 | /* filter: undefined by default; used as a name prefix filter by "pgsh list" */ 26 | template: 'template1', /* when creating databases */ 27 | fallback_database: 'postgres', /* if connecting to the named database fails, for e.g. listing */ 28 | dotenv_encoding: 'utf8', /* parse/encode .env in this encoding */ 29 | force_disable_metrics: false, /* override the global setting for this repo */ 30 | }; 31 | -------------------------------------------------------------------------------- /src/cmd/dump.js: -------------------------------------------------------------------------------- 1 | const { spawn } = require('child_process'); 2 | 3 | const { set: setCommandLine } = require('../metrics/command-line'); 4 | const endProgram = require('../end-program'); 5 | 6 | exports.command = 'dump [target]'; 7 | exports.desc = 'dumps either the current database, or the named one (if given) to stdout'; 8 | 9 | exports.builder = yargs => 10 | yargs 11 | .positional('target', { 12 | describe: 'the database to dump', 13 | type: 'string', 14 | default: null, 15 | }); 16 | 17 | exports.handler = async ({ target }) => { 18 | const db = require('../db')(); 19 | const name = target || db.thisDb(); 20 | setCommandLine(target); 21 | 22 | if (!(await db.isValidDatabase(name))) { 23 | console.error(`${name} is not a valid database.`); // use name: target is null when dumping the current db 24 | return endProgram(2); 25 | } 26 | 27 | const p = spawn('pg_dump', [name], { 28 | stdio: 'inherit', 29 | env: db.createSuperPostgresEnv(), 30 | }); 31 | return p.on('exit', (code, signal) => { 32 | if (code !== 0) { 33 | console.error('child process exited with ' 34 | + `code ${code} and signal ${signal}`); 35 | } else { 36 | console.error('Done!'); 37 | } 38 | endProgram(code); 39 | }); 40 | }; 41 | -------------------------------------------------------------------------------- /src/cmd/migrate/knex/util/print-latest-migration.js: -------------------------------------------------------------------------------- 1 | const c = require('ansi-colors'); 2 | const moment = require('moment'); 3 | 4 | module.exports = (db, { name, iso }) => { 5 | const timestamp = raw => (iso 6 | ?
moment(raw).format() 7 | : moment(raw).fromNow() 8 | ); 9 | 10 | const schema = db.config.migrations.schema || 'public'; 11 | const table = db.config.migrations.table || 'knex_migrations'; 12 | 13 | return async () => { 14 | const knex = db.connect(name ? db.thisUrl(name) : db.thisUrl()); 15 | try { 16 | const latest = await knex(`${schema}.${table}`) 17 | .orderBy('id', 'desc') 18 | .first('name', 'migration_time'); 19 | 20 | if (latest) { 21 | console.log( 22 | `* ${c.yellowBright(name || db.thisDb())}` 23 | + ` ${c.underline(c.greenBright(latest.name))}` 24 | + ` ${c.blueBright(timestamp(latest.migration_time))}`, 25 | ); 26 | } else { 27 | console.log(`* ${c.yellowBright(name || db.thisDb())}`); 28 | } 29 | } catch (err) { 30 | console.log(`* ${c.yellowBright(name || db.thisDb())}`); 31 | } 32 | return new Promise(resolve => 33 | knex.destroy(() => { 34 | resolve(); 35 | })); 36 | }; 37 | }; 38 | -------------------------------------------------------------------------------- /src/env/update-existing.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | const findConfig = require('find-config'); 3 | const config = require('../config'); 4 | 5 | const dotenvPath = findConfig('.env'); 6 | 7 | const DEFAULT_OPTIONS = { 8 | throwIfUnchanged: true, 9 | }; 10 | 11 | /** 12 | * Replace a value in the given .env file, optionally 13 | * throwing an error if nothing was changed. 14 | */ 15 | module.exports = (patch, { throwIfUnchanged } = DEFAULT_OPTIONS) => { 16 | if (!dotenvPath) { 17 | throw new Error('Could not find .env file!'); 18 | } 19 | 20 | const encoding = config.dotenv_encoding || 'utf8'; 21 | const envContents = fs.readFileSync( 22 | dotenvPath, 23 | encoding, 24 | ); 25 | 26 | let replacedContents = envContents; 27 | Object.keys(patch).forEach((key) => { 28 | const value = patch[key]; 29 | replacedContents = replacedContents.replace( 30 | new RegExp( 31 | `^${key}=.*$`, 32 | 'im', 33 | ), 34 | `${key}=${value}`, 35 | ); 36 | }); 37 | 38 | if (replacedContents === envContents && throwIfUnchanged) { 39 | throw new Error('Old key/value not found in .env!'); 40 | } 41 | 42 | fs.writeFileSync( 43 | dotenvPath, 44 | replacedContents, 45 | { encoding }, 46 | ); 47 | }; 48 | -------------------------------------------------------------------------------- /test/integration/db/list.js: -------------------------------------------------------------------------------- 1 | const connect = require('./connect'); 2 | 3 | const DEFAULT_DB_NAMES_OPTIONS = { 4 | showBuiltIn: false, 5 | showTemplates: false, 6 | sortByCreation: false, 7 | }; 8 | 9 | const SORT_CREATION = (a, b) => -a.created_at.localeCompare(b.created_at); 10 | const SORT_NAME = (a, b) => a.name.localeCompare(b.name); 11 | 12 | const BUILT_IN_DATABASES = [ 13 | 'postgres', 14 | ]; 15 | 16 | const excludingBuiltins = name => 17 | BUILT_IN_DATABASES.indexOf(name) === -1; 18 | 19 | const databaseNames = async (url, options) => { 20 | const { 21 | showBuiltIn, 22 | showTemplates, 23 | sortByCreation, 24 | } = { ...DEFAULT_DB_NAMES_OPTIONS, ...(options || {}) }; 25 | 26 | const db = connect(url); 27 | const names = await db.raw(` 28 | SELECT 29 | datname as name, 30 | (pg_stat_file('base/'||oid ||'/PG_VERSION')).modification::text as created_at 31 | FROM pg_database 32 | WHERE datistemplate = ? 33 | `, [showTemplates]) 34 | .then(({ rows }) => rows 35 | .sort(sortByCreation ? 
SORT_CREATION : SORT_NAME) 36 | .map(row => row.name)); 37 | 38 | await new Promise(resolve => db.destroy(resolve)); 39 | return showBuiltIn ? names : names.filter(excludingBuiltins); 40 | }; 41 | 42 | module.exports = databaseNames; 43 | -------------------------------------------------------------------------------- /src/cmd/restore.js: -------------------------------------------------------------------------------- 1 | const { spawn } = require('child_process'); 2 | 3 | const { set: setCommandLine } = require('../metrics/command-line'); 4 | const endProgram = require('../end-program'); 5 | 6 | exports.command = 'restore <target>'; 7 | exports.desc = 'restores a previously-dumped database as target from sql on stdin'; 8 | 9 | exports.builder = yargs => 10 | yargs 11 | .positional('target', { 12 | describe: 'the name of the restored database', 13 | type: 'string', 14 | }); 15 | 16 | exports.handler = async ({ target }) => { 17 | const db = require('../db')(); 18 | setCommandLine(target); 19 | 20 | if (await db.isValidDatabase(target)) { 21 | console.error(`Cannot restore to ${target}; that database already exists!`); 22 | return endProgram(1); 23 | } 24 | 25 | const knex = db.connectAsSuper(); // createdb 26 | await knex.raw(` 27 | create database "${target}" 28 | template ${db.config.template} 29 | `); 30 | 31 | const p = spawn(`psql -d ${target}`, { 32 | shell: true, 33 | stdio: 'inherit', 34 | env: db.createSuperPostgresEnv(), 35 | }); 36 | return p.on('exit', (code, signal) => { 37 | if (code !== 0) { 38 | console.error('child process exited with ' 39 | + `code ${code} and signal ${signal}`); 40 | } else { 41 | console.error('Done!'); 42 | } 43 | endProgram(code); 44 | }); 45 | }; 46 | -------------------------------------------------------------------------------- /test/integration/credentials.test.js: -------------------------------------------------------------------------------- 1 | const Knex = require('knex'); 2 | 3 | const { consume, numLines } = require('./util/stream-utils'); 4 | const explodeUrl = require('./util/explode-url'); 5 | const makeContext = require('./util/context'); 6 | const randomString = require('../../src/util/random-string'); 7 | 8 | const APP = 'knexapp'; 9 | const cwd = require('./app/cwd')(APP); 10 | const { env, config } = require('./app/dotfiles')(APP); 11 | 12 | const integrationUrl = require('./db/integration-url'); 13 | 14 | it('warns about pgsh ls if user has no pg_stat_file grant', async () => { 15 | const knex = Knex({ 16 | client: 'pg', 17 | connection: explodeUrl(integrationUrl), 18 | }); 19 | 20 | const password = randomString(); 21 | const user = `user_${randomString(3)}`; 22 | await knex.raw(`CREATE ROLE ${user} LOGIN CREATEDB PASSWORD '${password}'`); 23 | env[config.vars.user] = user; 24 | env[config.vars.password] = password; 25 | 26 | const ctx = makeContext(cwd, config, env); 27 | const { pgsh } = ctx; 28 | { // set up!
29 | const { exitCode, errors } = pgsh('ls', '-c'); 30 | await consume(errors, line => expect(line).toEqual( 31 | 'WARNING: pg_stat_file not avaiable; not sorting by creation.', // sic: toEqual requires pgsh's warning verbatim 32 | ), numLines(1)); 33 | expect(await exitCode).toBe(0); 34 | } 35 | 36 | await knex.raw(`DROP ROLE ${user}`); 37 | return new Promise(resolve => knex.destroy(resolve)); 38 | }); 39 | -------------------------------------------------------------------------------- /src/task/is-privileged.js: -------------------------------------------------------------------------------- 1 | const debug = require('debug')('pgsh:is-privileged'); 2 | 3 | const CAP_CHECK = { 4 | // used in db.getNames(...) 5 | pg_stat_file: (knex) => knex('information_schema.role_routine_grants') 6 | .whereRaw(` 7 | grantee = CURRENT_USER and routine_name = 'pg_stat_file' 8 | `) 9 | .first('privilege_type') 10 | .then(x => (x ? x.privilege_type.trim().toLowerCase() === 'execute' : false)), 11 | 12 | // used in lots of places 13 | createdb: knex => knex('pg_user') 14 | .whereRaw('usename = CURRENT_USER') 15 | .first('usecreatedb') 16 | .then(x => x.usecreatedb), 17 | }; 18 | 19 | module.exports = db => async (caps = ['createdb']) => { 20 | const knex = db.connect(db.fallbackUrl()); 21 | 22 | const unknownCapability = caps.find(cap => !(cap in CAP_CHECK)); 23 | if (unknownCapability) { 24 | throw new Error(`Unknown capability: ${unknownCapability}`); 25 | } 26 | 27 | const privileges = (await Promise.all( 28 | caps.map(cap => 29 | CAP_CHECK[cap](knex)), 30 | )); 31 | 32 | // every capability must be granted 33 | const isPrivileged = privileges.reduce((a, b) => a && b, true); 34 | 35 | return new Promise((resolve, reject) => { 36 | knex.destroy((err) => { 37 | if (err) { 38 | debug('could not destroy', err); 39 | reject(); 40 | } else { 41 | resolve(isPrivileged); 42 | } 43 | }); 44 | }); 45 | }; 46 | -------------------------------------------------------------------------------- /test/integration/db/reset-entire-database.js: -------------------------------------------------------------------------------- 1 | const debug = require('debug')('integration:db:reset'); 2 | 3 | const list = require('./list'); 4 | const connect = require('./connect'); 5 | 6 | // do not drop these databases ever. 7 | const DATABASE_BLACKLIST = [ 8 | process.env.DANGER_INTEGRATION_DATABASE, 9 | 'postgres', 10 | 'template0', 11 | 'template1', 12 | ]; 13 | 14 | /** 15 | * WARNING! 16 | * This code drops every database on the server (except the blacklist above), and is only active 17 | * if the DANGER_INTEGRATION_RESET=nuke flag is set.
18 | * 19 | * Please be careful :-) 20 | */ 21 | const resetEntireDatabase = async (url) => { 22 | if (process.env.DANGER_INTEGRATION_RESET !== 'nuke') { 23 | throw new Error( 24 | 'Please set DANGER_INTEGRATION_RESET=nuke to nuke the database, ' 25 | + 'after ensuring that you do not need any data from that server.', 26 | ); 27 | } 28 | 29 | // find all databases, filter down to those we should delete 30 | const targets = (await list(url)) 31 | .filter(name => !DATABASE_BLACKLIST.find(db => db === name)); 32 | 33 | const knex = connect(url); 34 | await Promise.all( 35 | targets.map( 36 | target => knex.raw(`drop database "${target}"`), 37 | ), 38 | ); 39 | 40 | return new Promise(resolve => 41 | knex.destroy(() => { 42 | debug(`Dropped ${targets.length} databases.`); 43 | resolve(); 44 | })); 45 | }; 46 | 47 | module.exports = resetEntireDatabase; 48 | -------------------------------------------------------------------------------- /src/util/prompt-for-input.js: -------------------------------------------------------------------------------- 1 | const c = require('ansi-colors'); 2 | const Bluebird = require('bluebird'); 3 | const { prompt } = require('enquirer'); 4 | 5 | const SKIP_HINT = c.dim(' (^C to skip)'); 6 | 7 | const capitalize = message => 8 | message && message.charAt(0).toUpperCase() + message.slice(1); 9 | 10 | /** 11 | * Iteratively ask for a number of inputs. 12 | * Allow skipping when marked skippable. 13 | * 14 | * @param {*} prompts the things we want to assign, 15 | * [{ name, description, skippable }, ...] 16 | * @returns a mapping from prompt names to the input they've given 17 | */ 18 | const promptForInput = async (prompts) => { 19 | const mapping = {}; 20 | await Bluebird.mapSeries( 21 | prompts, 22 | async ({ 23 | name, 24 | type, 25 | description, 26 | skippable, 27 | ...promptOptions 28 | }) => { 29 | try { 30 | const { selected } = await prompt({ 31 | type: type || 'input', 32 | name: 'selected', 33 | message: 34 | `${c.bold(capitalize(description))}?` 35 | + `${(skippable ? 
SKIP_HINT : '')}`, 36 | ...promptOptions, 37 | }); 38 | mapping[name] = selected; 39 | } catch (err) { 40 | console.log(err); 41 | if (!skippable) { 42 | throw new Error(`skipped non-skippable prompt "${name}"`); 43 | } 44 | } 45 | }, 46 | ); 47 | return mapping; 48 | }; 49 | 50 | module.exports = promptForInput; 51 | -------------------------------------------------------------------------------- /src/metrics/store.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | const { exec } = require('child_process'); 3 | const debug = require('debug')('pgsh:metrics'); 4 | 5 | const { STORE_PATH } = require('./constants'); 6 | 7 | if (!fs.existsSync(STORE_PATH)) { 8 | fs.writeFileSync(STORE_PATH, ''); 9 | } 10 | debug('global data path', STORE_PATH); 11 | 12 | const TEMP = `${STORE_PATH}_bak`; 13 | 14 | const cleanupTemp = () => 15 | new Promise((resolve, reject) => { 16 | exec(`rm ${TEMP}`, (err, stdout, stderr) => { 17 | if (err) { 18 | console.error(err, stderr); 19 | return reject(err); 20 | } 21 | return resolve(stdout); 22 | }); 23 | }); 24 | 25 | module.exports = { 26 | get: (max) => 27 | new Promise((resolve, reject) => { 28 | exec(`head -n ${max} ${STORE_PATH}`, (err, stdout, stderr) => { 29 | if (err) { 30 | console.error(err, stderr); 31 | return reject(err); 32 | } 33 | return resolve(stdout); 34 | }); 35 | }), 36 | 37 | put: (sample) => 38 | fs.appendFileSync(STORE_PATH, `${JSON.stringify(sample)}\n`), 39 | 40 | discard: (num) => 41 | new Promise((resolve, reject) => { 42 | exec(` 43 | mv ${STORE_PATH} ${TEMP} && 44 | tail -n +${num + 1} ${TEMP} > ${STORE_PATH} 45 | `, (err, stdout, stderr) => { 46 | if (err) { 47 | console.error(err, stderr); 48 | return reject(err); 49 | } 50 | return cleanupTemp().then(() => 51 | resolve(stdout)); 52 | }); 53 | }), 54 | }; 55 | -------------------------------------------------------------------------------- /src/cmd/metrics.js: -------------------------------------------------------------------------------- 1 | const c = require('ansi-colors'); 2 | 3 | const { set: setCommandLine } = require('../metrics/command-line'); 4 | const endProgram = require('../end-program'); 5 | 6 | const global = require('../global'); 7 | const { METRICS_ENABLED } = require('../global/keys'); 8 | 9 | const config = require('../config'); 10 | const updateConfig = require('../pgshrc/update-existing'); 11 | 12 | exports.command = 'metrics [state]'; 13 | exports.desc = 'enables or disables metrics'; 14 | 15 | exports.builder = yargs => yargs 16 | .positional('state', { 17 | describe: 'should we collect and send anonymous usage metrics?', 18 | choices: ['on', 'off'], 19 | default: undefined, 20 | }); 21 | 22 | exports.handler = ({ state }) => { 23 | setCommandLine(); 24 | 25 | if (state === undefined) { 26 | const metricsEnabled = global.get(METRICS_ENABLED); 27 | console.log(`Telemetry is currently ${metricsEnabled ? 'enabled' : 'disabled'} globally.`); 28 | return endProgram(0); 29 | } 30 | 31 | const metricsEnabled = state === 'on'; 32 | 33 | global.set(METRICS_ENABLED, metricsEnabled); 34 | console.log(`Telemetry is now ${metricsEnabled ? 
'enabled' : 'disabled'} globally.`); 35 | 36 | // metrics are specifically disabled for this repository; 37 | // remove and let the user know what we did 38 | if (metricsEnabled && config.force_disable_metrics) { 39 | updateConfig({ 40 | force_disable_metrics: false, 41 | }); 42 | console.log(`Removed ${c.yellowBright('force_disable_metrics')} from ${c.underline('.pgshrc')}.`); 43 | } 44 | 45 | return endProgram(0); 46 | }; 47 | -------------------------------------------------------------------------------- /src/task/clone.js: -------------------------------------------------------------------------------- 1 | const c = require('ansi-colors'); 2 | const { spawn } = require('child_process'); 3 | 4 | module.exports = db => async (current, target) => { 5 | const { config } = db; 6 | 7 | // FIXME: correctly destroy this instance 8 | const knex = db.connectAsSuper(db.fallbackUrl()); // createdb 9 | await knex.raw(` 10 | create database "${target}" 11 | template ${config.template || 'template1'} 12 | `); 13 | 14 | const p = spawn( 15 | `pg_dump -Fc ${current} | pg_restore -d ${target}`, 16 | { 17 | shell: true, 18 | env: db.createSuperPostgresEnv(), 19 | }, 20 | ); 21 | 22 | // FIXME: capture stderr and pass to reject 23 | p.stderr.on('data', e => 24 | process.stderr.write(c.redBright(e.toString()))); 25 | 26 | return new Promise((resolve, reject) => { 27 | p.on('exit', (code, signal) => { 28 | if (code === 0) { 29 | resolve(); 30 | } else { 31 | const superUser = process.env[config.vars.super_user]; 32 | if (!superUser) { 33 | const { user } = db.explodeUrl(db.thisUrl()); 34 | reject(new Error( 35 | 'clone failed; this can happen if you do not have' 36 | + ` the proper permissions on your user (${user}).` 37 | + ' Try configuring vars.super_[user|password] in your .pgshrc' 38 | + ' to provide login information for commands that need it.', 39 | )); 40 | } else { 41 | reject(new Error( 42 | `psql failed (code ${code}, signal ${signal})`, 43 | )); 44 | } 45 | } 46 | }); 47 | }); 48 | }; 49 | -------------------------------------------------------------------------------- /src/util/prompt-for-vars.js: -------------------------------------------------------------------------------- 1 | const c = require('ansi-colors'); 2 | const Bluebird = require('bluebird'); 3 | const { prompt } = require('enquirer'); 4 | 5 | const SKIP_HINT = c.dim(' (^C to skip)'); 6 | 7 | const varChoices = vars => Object.keys(vars) 8 | .map(key => ({ 9 | hint: c.dim(`(${vars[key]})`), 10 | value: key, 11 | })); 12 | 13 | /** 14 | * Iteratively ask for a number of keys (in order), 15 | * removing from the potential set every time one is selected. 16 | * 17 | * @param {*} vars key/value pairs, e.g. extracted from a .env file 18 | * @param {*} prompts the things we want to assign, e.g 19 | * [{ name: 'url', description: 'connection URL' }, ...] 20 | * @returns a mapping from prompt names to the variable key they've chosen 21 | */ 22 | const promptForVars = async (vars, prompts) => { 23 | const mapping = {}; 24 | let choices = varChoices(vars); 25 | await Bluebird.mapSeries( 26 | prompts, 27 | async ({ name, description, skippable }) => { 28 | try { 29 | const { selected } = await prompt({ 30 | type: 'select', 31 | name: 'selected', 32 | message: 33 | `${c.bold(`Which variable contains the ${description}?`)}` 34 | + `${(skippable ? 
SKIP_HINT : '')}`, 35 | choices, 36 | }); 37 | mapping[name] = selected; 38 | choices = choices.filter(x => x.value !== selected); 39 | } catch (err) { 40 | if (!skippable) { 41 | throw new Error(`skipped non-skippable prompt "${name}"`); 42 | } 43 | } 44 | }, 45 | ); 46 | return mapping; 47 | }; 48 | 49 | module.exports = promptForVars; 50 | -------------------------------------------------------------------------------- /src/end-program.js: -------------------------------------------------------------------------------- 1 | const c = require('ansi-colors'); 2 | const { Spinner } = require('cli-spinner'); 3 | 4 | const CHECK_MS = 300; 5 | const INITIAL_WAIT_MS = 500; 6 | 7 | const global = require('./global'); 8 | const { METRICS_IN_PROGRESS } = require('./global/keys'); 9 | 10 | const { recordMetric, recordMetricSync } = require('./metrics/record'); 11 | 12 | const endProgram = async (exitCode, sync = false) => { 13 | if (sync) { 14 | recordMetricSync(exitCode); 15 | return process.exit(exitCode); 16 | } 17 | 18 | if (global.get(METRICS_IN_PROGRESS)) { 19 | // don't show the spinner if the wait is short enough. 20 | await new Promise(resolve => 21 | setTimeout(resolve, INITIAL_WAIT_MS)); 22 | 23 | // assume it's the current process, and start showing a spinner 24 | const spinner = new Spinner(c.blueBright( 25 | 'pgsh is sending anonymous usage statistics.', 26 | )); 27 | spinner.setSpinnerString(19); 28 | spinner.start(); 29 | 30 | // wait for the process to end 31 | return new Promise((resolve) => { 32 | setInterval(async () => { 33 | if (!global.get(METRICS_IN_PROGRESS)) { 34 | // now we can record and exit 35 | spinner.stop(); 36 | process.stdout.write('\r\x1b[K'); // clear the line about metrics! 37 | await recordMetric(exitCode); 38 | process.exit(exitCode); 39 | resolve(); // probably unnecessary 40 | } 41 | }, CHECK_MS); 42 | }); 43 | } 44 | 45 | // we're in the clear; record our metrics. 
46 | await recordMetric(exitCode); 47 | return new Promise(() => process.exit(exitCode)); 48 | }; 49 | 50 | module.exports = endProgram; 51 | -------------------------------------------------------------------------------- /test/integration/list.test.js: -------------------------------------------------------------------------------- 1 | const { consume, numLines } = require('./util/stream-utils'); 2 | const makeContext = require('./util/context'); 3 | const randomString = require('../../src/util/random-string'); 4 | const listDatabases = require('./db/list'); 5 | 6 | const APP = 'knexapp'; 7 | const cwd = require('./app/cwd')(APP); 8 | const { env, config } = require('./app/dotfiles')(APP); 9 | 10 | it('lists out all the databases that currently exist', async () => { 11 | const ctx = makeContext(cwd, config, env); 12 | const { pgsh } = ctx; 13 | 14 | const databaseWithMigrations = randomString(); 15 | { // create and run migrations 16 | const { 17 | exitCode, output, send, 18 | } = pgsh('create', databaseWithMigrations, '--no-switch'); 19 | await consume(output, null, numLines(2)); 20 | send.down(); // run migrations 21 | send.enter(); 22 | // consume(output, console.log); 23 | expect(await exitCode).toBe(0); 24 | } 25 | { // create only 26 | const { exitCode, output, send } = pgsh('create', randomString(), '--no-switch'); 27 | await consume(output, null, numLines(2)); 28 | send.enter(); 29 | await exitCode; 30 | } 31 | 32 | // sanity test: compare our list of databases to pgsh's 33 | // (please note that this implementation is ~99% similar to pgsh's) 34 | const databases = await listDatabases(ctx.integrationUrl, { showBuiltIn: false }); 35 | const { exitCode, output } = pgsh('list', '--no-verbose'); 36 | 37 | const foundNames = []; 38 | await consume(output, line => foundNames.push(line.replace('*', '').trim())); 39 | expect(foundNames.sort()).toEqual(databases.sort()); 40 | expect(await exitCode).toBe(0); 41 | }); 42 | -------------------------------------------------------------------------------- /test/integration/clone.test.js: -------------------------------------------------------------------------------- 1 | const matcher = require('./util/matcher'); 2 | const { consume, numLines } = require('./util/stream-utils'); 3 | const makeContext = require('./util/context'); 4 | const randomString = require('../../src/util/random-string'); 5 | 6 | const APP = 'knexapp'; 7 | const cwd = require('./app/cwd')(APP); 8 | const { env, config } = require('./app/dotfiles')(APP); 9 | 10 | it('can forcefully overwrite the current branch', async () => { 11 | const ctx = makeContext(cwd, config, env); 12 | const { pgsh } = ctx; 13 | 14 | const withMigrations = randomString(); 15 | const database = randomString(); 16 | 17 | { // create, run migrations, but don't switch 18 | const { exitCode } = pgsh('create', withMigrations, '--migrate', '--no-switch'); 19 | expect(await exitCode).toBe(0); 20 | } 21 | { // create and switch 22 | const { exitCode } = pgsh('create', database, '--no-migrate', '--switch'); 23 | expect(await exitCode).toBe(0); 24 | } 25 | { // ensure we have no migrations 26 | const { exitCode, output } = pgsh('status'); 27 | await consume(output, line => expect(line).toEqual(`* ${database}`), numLines(1)); 28 | expect(await exitCode).toBe(0); 29 | } 30 | { // clone over this database with the migrated one 31 | const { exitCode } = pgsh('clone', '-f', withMigrations, database); 32 | expect(await exitCode).toBe(0); 33 | } 34 | { // make sure we're at the latest migration now 35 | const { 
exitCode, output } = pgsh('status'); 36 | await consume(output, line => expect(line).toMatch( 37 | matcher.startsWith(`* ${database} 20191124331980_data.js`), 38 | ), numLines(1)); 39 | expect(await exitCode).toBe(0); 40 | } 41 | }); 42 | -------------------------------------------------------------------------------- /test/integration/_setup.js: -------------------------------------------------------------------------------- 1 | const findConfig = require('find-config'); 2 | const { exec } = require('child_process'); 3 | const fs = require('fs'); 4 | 5 | const run = (cmd) => 6 | new Promise((resolve, reject) => { 7 | exec(cmd, (err, stdout, stderr) => { 8 | if (err) { 9 | console.error(`could not run ${cmd}`, err); 10 | return reject(stderr); 11 | } 12 | return resolve(stdout); 13 | }); 14 | }); 15 | 16 | // FIXME: do we need to do this? https://github.com/kulshekhar/ts-jest/issues/411#issuecomment-355738435 17 | 18 | module.exports = async () => { 19 | // if .env and .pgshrc files exist in our path, 20 | // move them so the tests don't pick them up 21 | if (!findConfig('.env.pgshIntegrationBackup')) { // e.g. jest was CTRL+C'd 22 | const envPath = findConfig('.env'); 23 | if (envPath) { 24 | const backupEnvPath = `${envPath}.pgshIntegrationBackup`; 25 | fs.renameSync(envPath, backupEnvPath); 26 | // get a version of .env that only carries integration testing stuff 27 | await run(`cat ${backupEnvPath} | grep DANGER_INTEGRATION > ${envPath}`); 28 | } 29 | } 30 | if (!findConfig('.pgshrc.pgshIntegrationBackup')) { // e.g. jest was CTRL+C'd 31 | const configPath = findConfig('.pgshrc'); 32 | if (configPath) { 33 | fs.renameSync(configPath, `${configPath}.pgshIntegrationBackup`); 34 | } 35 | } 36 | 37 | // set up our process.env 38 | require('dotenv').config({ encoding: 'utf8' }); 39 | 40 | // purge all databases 41 | const resetEntireDatabase = require('./db/reset-entire-database'); 42 | const integrationUrl = require('./db/integration-url'); 43 | return resetEntireDatabase(integrationUrl); 44 | }; 45 | -------------------------------------------------------------------------------- /src/metrics/record.js: -------------------------------------------------------------------------------- 1 | const findConfig = require('find-config'); 2 | const moment = require('moment'); 3 | const debug = require('debug')('pgsh:metrics'); 4 | const fs = require('fs'); 5 | 6 | const packageJsonPath = findConfig('package.json', { cwd: __dirname }); 7 | const packageJson = packageJsonPath ?
JSON.parse(fs.readFileSync(packageJsonPath, 'utf8')) : null; // parse JSON so .version is readable below 8 | 9 | const askForOptIn = require('./opt-in'); 10 | const getCpuMetrics = require('./cpu'); 11 | const config = require('../config'); 12 | const global = require('../global'); 13 | const { METRICS_ENABLED } = require('../global/keys'); 14 | 15 | const { get: getCommandLine } = require('./command-line'); 16 | const { get: getStartedAt } = require('./timer'); 17 | const store = require('./store'); 18 | 19 | const createSample = (exitCode) => ({ 20 | ...getCpuMetrics(), 21 | exitCode, 22 | command: getCommandLine(), 23 | version: packageJson && packageJson.version, 24 | startedAt: getStartedAt(), 25 | finishedAt: +moment(), 26 | }); 27 | 28 | const recordMetric = async (exitCode) => { 29 | if (config.force_disable_metrics) return Promise.resolve(); 30 | if (!await askForOptIn()) return Promise.resolve(); // user just opted out or already had 31 | 32 | const sample = createSample(exitCode); 33 | debug('record sample', sample); 34 | store.put(sample); 35 | return Promise.resolve(sample); 36 | }; 37 | 38 | const recordMetricSync = (exitCode) => { 39 | if (config.force_disable_metrics) return null; 40 | if (!global.get(METRICS_ENABLED)) return null; 41 | 42 | const sample = createSample(exitCode); 43 | debug('record sample', sample); 44 | store.put(sample); 45 | return sample; 46 | }; 47 | 48 | module.exports = { 49 | recordMetric, 50 | recordMetricSync, 51 | }; 52 | -------------------------------------------------------------------------------- /src/util/build-url.js: -------------------------------------------------------------------------------- 1 | const pick = require('lodash.pick'); 2 | 3 | // modified from https://github.com/vitaly-t/connection-string 4 | // to be simpler and only support what pg-connection-string gives us 5 | 6 | const DEFAULT_PROTOCOL = 'postgres'; 7 | 8 | const encode = (text, { encodeDollar }) => { 9 | const encoded = encodeURIComponent(`${text}`); 10 | return encodeDollar ?
encoded : encoded.replace(/%24/g, '$'); 11 | }; 12 | 13 | module.exports = function buildUrl(options = {}) { 14 | const { 15 | protocol, 16 | user, password, 17 | host, port, 18 | database, 19 | ...restOptions 20 | } = options; 21 | 22 | 23 | let s = `${encode(protocol || DEFAULT_PROTOCOL, options)}://`; 24 | 25 | if (user) { 26 | s += encode(user, options); 27 | if (password) { 28 | s += `:${encode(password, options)}`; 29 | } 30 | s += '@'; 31 | } else if (password) { 32 | s += `:${encode(password, options)}@`; 33 | } 34 | 35 | s += host; 36 | if (port) { 37 | s += `:${port}`; 38 | } 39 | 40 | s += `/${encode(database, options)}`; 41 | 42 | // all the query params that pg-connection-string throws in our options 43 | const query = pick(restOptions, [ 44 | 'application_name', 45 | 'fallback_application_name', 46 | 'ssl', 47 | ]); 48 | query.encoding = options.client_encoding; 49 | 50 | if (Object.values(query).filter(x => x).length > 0) { 51 | const encodedQuery = Object.entries(query) 52 | .map(([key, value]) => { 53 | if (!value) return null; 54 | return `${encode(key, options)}=${encode(value, options)}`; 55 | }) 56 | .filter(str => str) 57 | .sort(); // predictable output ordering 58 | 59 | if (encodedQuery.length) { 60 | s += `?${encodedQuery.join('&')}`; 61 | } 62 | } 63 | return s; 64 | }; 65 | -------------------------------------------------------------------------------- /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | jobs: 3 | build: 4 | docker: 5 | # specify the version you desire here 6 | - image: circleci/node:12 7 | 8 | - image: postgres:11 9 | environment: 10 | POSTGRES_USER: testuser 11 | POSTGRES_PASSWORD: password 12 | POSTGRES_DB: integration_test 13 | 14 | working_directory: ~/repo 15 | 16 | steps: 17 | - run: wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add - 18 | - run: sudo sh -c 'echo "deb http://apt.postgresql.org/pub/repos/apt/ stretch-pgdg main" >> /etc/apt/sources.list.d/pgdg.list' 19 | - run: sudo apt-get update 20 | - run: sudo apt-get install postgresql-client-11 21 | 22 | - checkout 23 | 24 | # Download and cache dependencies 25 | - restore_cache: 26 | keys: 27 | - v1-dependencies-{{ checksum "package.json" }} 28 | # fallback to using the latest cache if no exact match is found 29 | - v1-dependencies- 30 | 31 | - run: yarn install 32 | - save_cache: 33 | paths: 34 | - node_modules 35 | key: v1-dependencies-{{ checksum "package.json" }} 36 | 37 | # 1. linting 38 | - run: yarn lint 39 | 40 | # 2. unit testing 41 | - run: yarn test:unit 42 | 43 | # 3. 
integration testing 44 | - run: 45 | command: yarn test:integration 46 | environment: 47 | DANGER_INTEGRATION_HOST: localhost 48 | DANGER_INTEGRATION_PORT: 5432 49 | DANGER_INTEGRATION_USER: testuser 50 | DANGER_INTEGRATION_PASSWORD: password 51 | DANGER_INTEGRATION_DATABASE: integration_test 52 | DANGER_INTEGRATION_RESET: nuke 53 | DANGER_INTEGRATION_PROXY_PORT: 14567 54 | -------------------------------------------------------------------------------- /src/cmd/psql.js: -------------------------------------------------------------------------------- 1 | const { spawn } = require('child_process'); 2 | const flattenDeep = require('lodash.flattendeep'); 3 | 4 | const { set: setCommandLine } = require('../metrics/command-line'); 5 | const endProgram = require('../end-program'); 6 | 7 | exports.command = ['psql [name]', 'repl']; 8 | exports.desc = 'connects to the current (or named) database with psql'; 9 | 10 | exports.builder = yargs => 11 | yargs 12 | .positional('name', { 13 | describe: 'the name of the database to connect to', 14 | type: 'string', 15 | default: null, 16 | }) 17 | .option('c', { 18 | alias: 'command', 19 | type: 'array', 20 | describe: 'Specifies that psql is to execute the given command string', 21 | default: [], 22 | }) 23 | .option('f', { 24 | alias: 'file', 25 | type: 'array', 26 | describe: 'Read commands from the file filename, rather than standard input', 27 | default: [], 28 | }) 29 | .option('s', { 30 | alias: 'super-user', 31 | type: 'boolean', 32 | describe: 'Connect to the database as superuser, if configured', 33 | default: false, 34 | }); 35 | 36 | exports.handler = (yargs) => { 37 | const { 38 | name, 39 | command, 40 | file, 41 | superUser, 42 | _, 43 | } = yargs; 44 | setCommandLine([...file, command]); 45 | 46 | const db = require('../db')(); 47 | const psqlArguments = flattenDeep([ 48 | '-d', name || db.thisDb(), 49 | command.map(c => ['-c', c]), 50 | file.map(f => ['-f', f]), 51 | _.slice(1), 52 | ]); 53 | 54 | const p = spawn('psql', psqlArguments, { 55 | stdio: 'inherit', 56 | env: { 57 | ...(superUser 58 | ? db.createSuperPostgresEnv() 59 | : db.createPostgresEnv()), 60 | }, 61 | }); 62 | 63 | p.on('exit', (code) => { 64 | endProgram(code); 65 | }); 66 | }; 67 | -------------------------------------------------------------------------------- /test/integration/util/exec-pgsh.js: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | const readline = require('readline'); 3 | const { spawn } = require('child_process'); 4 | 5 | const stripAnsiStream = require('strip-ansi-stream'); 6 | 7 | const debug = require('debug')('pgsh:integration'); 8 | 9 | const PGSH_PATH = require('./find-pgsh')(); 10 | 11 | module.exports = (workingDirectory, args, env = undefined) => { 12 | const cwd = workingDirectory.startsWith('/') 13 | ? 
workingDirectory 14 | : path.resolve(workingDirectory); 15 | 16 | const pgsh = spawn(PGSH_PATH, args, { 17 | cwd, 18 | shell: true, 19 | env, 20 | }); 21 | 22 | const exitCode = new Promise((resolve) => { 23 | pgsh.on('close', (code) => { 24 | debug(`child process exited with code ${code}`); 25 | resolve(code); 26 | }); 27 | }); 28 | 29 | pgsh.stderr.setEncoding('utf8'); 30 | pgsh.stdout.setEncoding('utf8'); 31 | 32 | const readStdout = readline.createInterface( 33 | pgsh.stdout.pipe(stripAnsiStream()), 34 | ); 35 | const output = readStdout[Symbol.asyncIterator](); 36 | 37 | const readStderr = readline.createInterface( 38 | pgsh.stderr.pipe(stripAnsiStream()), 39 | ); 40 | const errors = readStderr[Symbol.asyncIterator](); 41 | 42 | const sendText = text => 43 | new Promise(onDrain => 44 | pgsh.stdin.write(text, onDrain)); 45 | 46 | pgsh.stdout.on('data', s => debug('stdout', s)); 47 | pgsh.stderr.on('data', s => debug('stderr', s)); 48 | 49 | const sendKey = keyCode => pgsh.stdin.write(keyCode); 50 | return { 51 | exitCode, 52 | output, 53 | errors, 54 | sendText, 55 | send: { 56 | up: () => sendKey('\x1B\x5B\x41'), 57 | down: () => sendKey('\x1B\x5B\x42'), 58 | enter: () => sendKey('\x0D'), 59 | space: () => sendKey('\x20'), 60 | ctrlC: () => sendKey('\x03'), 61 | }, 62 | }; 63 | }; 64 | -------------------------------------------------------------------------------- /src/util/wait-for.js: -------------------------------------------------------------------------------- 1 | const backoff = require('backoff'); 2 | const connectionCountTask = require('../task/connection-count'); 3 | 4 | const fibonacciBackoff = backoff.fibonacci({ 5 | randomisationFactor: 0, 6 | initialDelay: 300, 7 | maxDelay: 12000, 8 | }); 9 | 10 | /** 11 | * Waits until no other sessions are accessing the given database. 12 | * 13 | * @param {*} db a Database connection 14 | * @param {*} target the name of the database to monitor 15 | * @param {*} interruptHandler if we need to abandon waiting for any reason, call this 16 | * @param {*} failFast give up immediately if the database is not available 17 | */ 18 | const waitFor = (db, target, interruptHandler, failFast = false) => 19 | async (resolve) => { 20 | const connectionCount = connectionCountTask(db); 21 | const otherConnections = await connectionCount(target); 22 | const isPlural = otherConnections !== 1; 23 | 24 | if (otherConnections === 0) { 25 | return resolve(); 26 | } 27 | 28 | console.log( 29 | `There ${isPlural ? 'are' : 'is'} ${otherConnections} other session${isPlural ? 's' : ''}`, 30 | `using the database.${failFast ? 
'' : ' (waiting)'}`, 31 | ); 32 | 33 | if (failFast) { 34 | return interruptHandler(); 35 | } 36 | 37 | const readyHandler = async () => { 38 | const count = await connectionCount(target); 39 | if (count > 0) { 40 | console.log(`${count}...`); 41 | fibonacciBackoff.backoff(); 42 | } else { 43 | process.removeListener('SIGINT', interruptHandler); 44 | fibonacciBackoff.removeListener('ready', readyHandler); 45 | fibonacciBackoff.reset(); 46 | resolve(); 47 | } 48 | }; 49 | process.on('SIGINT', interruptHandler); 50 | fibonacciBackoff.on('ready', readyHandler); 51 | return fibonacciBackoff.backoff(); 52 | }; 53 | 54 | module.exports = waitFor; 55 | -------------------------------------------------------------------------------- /src/cmd/migrate/knex/util/choose-migration-index.js: -------------------------------------------------------------------------------- 1 | const c = require('ansi-colors'); 2 | const debug = require('debug')('pgsh:knex:choose-migration'); 3 | const { prompt } = require('enquirer'); 4 | 5 | const endProgram = require('../../../../end-program'); 6 | 7 | const parseMigrationName = require('./parse-migration-name'); 8 | 9 | const pick = async (message, choices) => { 10 | const { choice } = await prompt({ 11 | type: 'select', 12 | name: 'choice', 13 | message, 14 | choices: choices.sort(), 15 | }); 16 | return choice; 17 | }; 18 | 19 | module.exports = db => async (migrationNames, userInput) => { 20 | const migrations = migrationNames.map(name => parseMigrationName('', name)); 21 | 22 | const exactIndex = migrations.findIndex(m => m.id === userInput || m.prefix === userInput); 23 | if (exactIndex !== -1) { 24 | return exactIndex; 25 | } 26 | 27 | const autocompleted = migrations 28 | .filter(m => m.id.startsWith(userInput) || m.suffix.startsWith(userInput)); 29 | if (autocompleted.length === 0) { 30 | console.error( 31 | `Couldn't find migration <${userInput}>`, 32 | 'in your migrations folder', 33 | `(${c.underline(`${db.getMigrationsPath()}/`)})`, 34 | ); 35 | return endProgram(25); 36 | } 37 | 38 | if (autocompleted.length === 1) { 39 | return migrations.indexOf(autocompleted[0]); 40 | } 41 | 42 | try { 43 | const choices = autocompleted.map(m => ({ 44 | value: m.name, 45 | message: m.name, 46 | })); 47 | const chosenName = await pick('Which migration did you mean?', choices); 48 | const index = migrationNames.indexOf(chosenName); 49 | debug(`pgsh down based on prefix match ${userInput} => ${migrationNames[index]}`); 50 | return index; 51 | } catch (err) { 52 | console.error(err); 53 | console.log('Aborted due to user input!'); 54 | return endProgram(26); 55 | } 56 | }; 57 | -------------------------------------------------------------------------------- /test/integration/env.test.js: -------------------------------------------------------------------------------- 1 | const { consume, numLines } = require('./util/stream-utils'); 2 | const makeContext = require('./util/context'); 3 | 4 | const APP = 'knexapp'; 5 | const cwd = require('./app/cwd')(APP); 6 | const { env, config } = require('./app/dotfiles')(APP); 7 | 8 | it('fails if env is already injected', async () => { 9 | const ctx = makeContext(cwd, config, env); 10 | const { pgshWithEnv } = ctx; 11 | const pgsh = pgshWithEnv({ ...process.env, ...env }); 12 | 13 | { // any execution will fail with error 14! 
14 | const { exitCode, errors } = pgsh('ls'); 15 | 16 | await consume(errors, line => expect(line.split(' ')[0]).toEqual('FATAL:'), numLines(5)); 17 | await consume(errors, null, numLines(1)); 18 | await consume(errors, line => expect(line).toEqual( 19 | 'UNSET these variables before running pgsh here.', 20 | ), numLines(1)); 21 | 22 | expect(await exitCode).toBe(14); 23 | } 24 | }); 25 | 26 | it('fails if .env is empty', async () => { 27 | const ctx = makeContext(cwd, config, {}); 28 | const { pgsh } = ctx; 29 | 30 | { // any execution will exit 54 31 | const { exitCode, errors } = pgsh('ls'); 32 | await consume(errors, line => expect(line).toEqual( 33 | 'pgsh is configured to use the value of KNEXAPP_DB_DATABASE in your .env file, but it is unset. Exiting.', 34 | ), numLines(1)); 35 | expect(await exitCode).toBe(54); 36 | } 37 | }); 38 | 39 | it('fails if there is no .env', async () => { 40 | const ctx = makeContext(cwd, config, null); 41 | const { pgsh } = ctx; 42 | 43 | { // any execution will exit 54 44 | const { exitCode, output, errors } = pgsh('ls'); 45 | await consume(output, console.log); 46 | await consume(errors, line => expect(line).toEqual( 47 | 'pgsh is configured to use the value of KNEXAPP_DB_DATABASE in your .env file, but it is unset. Exiting.', 48 | ), numLines(1)); 49 | expect(await exitCode).toBe(54); 50 | } 51 | }); 52 | -------------------------------------------------------------------------------- /src/metrics/README.md: -------------------------------------------------------------------------------- 1 | # What information does pgsh gather about me? 2 | 3 | An example request that might be sent to the metrics server: 4 | 5 | ```json 6 | [ 7 | { 8 | "command": ["clone", "-f", "", ""], 9 | "version": "0.10.5", 10 | "uname": "Darwin 18.7.0", 11 | "startedAt": 1574787094, 12 | "finishedAt": 1574787912, 13 | "exitCode": 0, 14 | "interactive": false, 15 | "cpus": { 16 | "model": "Intel(R) Core(TM) i7 CPU 860 @ 2.80GHz", 17 | "cores": 4, 18 | "speed": 2926, 19 | "times": { 20 | "user": 252020, 21 | "nice": 0, 22 | "sys": 30340, 23 | "idle": 1070356870, 24 | "irq": 0 25 | } 26 | } 27 | } 28 | ] 29 | ``` 30 | 31 | 1. **Database names and filenames are obscured.** These are hashed alongside a "key" that is randomly re-generated. 32 | 33 | 2. **Your IP address is only used for rate limiting purposes.** It is never recorded anywhere, other than server logs on Heroku. 34 | 35 | As a result, your data is quite anonymous. It is reasonable to assume that — given enough data about your actions — de-anonymization companies could use this information to help identify you. I believe the data being sent is the minimum required to gain useful insights from the way `pgsh` is used in practice. 36 | 37 | > You can expect to send a few kilobytes of data each day. At most 2 megabytes of telemetry will be uploaded each hour. 38 | 39 | ## What will you use this information for? 40 | 41 | * proactive bugfixing 42 | * determine investments based on usage (i.e. product decisions) 43 | * optimization 44 | * blog posts and infographics (in aggregate) 45 | 46 | ## How do I opt in / out? 47 | 48 | The first time you run `pgsh` after upgrading, you will be asked if you'd like to opt in. Afterwards, you can use the `pgsh metrics` command to toggle telemetry, as shown in the example at the end of this file. 49 | 50 | ## How do I submit a claim under GDPR? 51 | 52 | Please send any data requests or inquiries to sastraxi@gmail.com.
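## Example: toggling telemetry

As a quick illustration of the opt-in/opt-out flow described above, here is a sketch of a terminal session. The output lines follow the messages that `pgsh metrics` prints (see `src/cmd/metrics.js`); exact wording may differ between versions:

```
$ pgsh metrics
Telemetry is currently enabled globally.

$ pgsh metrics off
Telemetry is now disabled globally.

$ pgsh metrics on
Telemetry is now enabled globally.
```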
53 | -------------------------------------------------------------------------------- /src/cmd/create.js: -------------------------------------------------------------------------------- 1 | const c = require('ansi-colors'); 2 | 3 | const { set: setCommandLine } = require('../metrics/command-line'); 4 | const endProgram = require('../end-program'); 5 | 6 | exports.command = 'create <name>'; 7 | exports.desc = 'creates a new database as name, then switches to it'; 8 | 9 | exports.builder = yargs => yargs 10 | .positional('name', { 11 | describe: 'the name to give the new database', 12 | type: 'string', 13 | }) 14 | .option('m', { 15 | alias: 'migrate', 16 | type: 'boolean', 17 | describe: 'also migrate the new database to the current version', 18 | default: undefined, 19 | }) 20 | .option('switch', { // needed for strict mode 21 | type: 'boolean', 22 | hidden: true, 23 | default: undefined, 24 | }) 25 | .option('S', { 26 | alias: 'no-switch', 27 | type: 'boolean', 28 | describe: 'do not switch to the newly-created database', 29 | default: undefined, 30 | }); 31 | 32 | exports.handler = async ({ 33 | name, 34 | migrate, 35 | switch: shouldSwitch, // --no-switch 36 | S: dontSwitch, // -S 37 | ...yargs 38 | }) => { 39 | const db = require('../db')(); 40 | const create = require('../task/create')(db); 41 | setCommandLine(name); 42 | 43 | const current = db.thisDb(); 44 | if (name === current) { 45 | console.log(`Cannot create ${name}; that's the current database!`); 46 | return endProgram(1); 47 | } 48 | 49 | if (await db.isValidDatabase(name)) { 50 | console.error(`Cannot create ${name}; that database already exists!`); 51 | return endProgram(2); 52 | } 53 | 54 | console.log(`Going to create ${name}...`); 55 | try { 56 | await create(name, { 57 | migrate, 58 | yargs, 59 | switch: shouldSwitch !== undefined ? shouldSwitch : !dontSwitch, // TODO: coalesce 60 | }); 61 | 62 | return endProgram(0); 63 | } catch (err) { 64 | console.error(`could not create: ${c.redBright(err.message)}`); 65 | return endProgram(3); 66 | } 67 | }; 68 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "pgsh", 3 | "version": "0.12.1", 4 | "description": "Developer Tools for PostgreSQL", 5 | "main": "src/index.js", 6 | "repository": { 7 | "type": "git", 8 | "url": "git://github.com/sastraxi/pgsh.git" 9 | }, 10 | "author": "Cameron Gorrie ", 11 | "license": "MIT", 12 | "bin": { 13 | "pgsh": "src/index.js" 14 | }, 15 | "scripts": { 16 | "test": "jest --runInBand --globalSetup ./test/integration/_setup.js --globalTeardown ./test/integration/_teardown.js", 17 | "test:unit": "jest test/unit", 18 | "test:integration": "jest test/integration --runInBand --globalSetup ./test/integration/_setup.js --globalTeardown ./test/integration/_teardown.js", 19 | "test!": "killall -9 node; reset; DEBUG=pgsh:integration yarn test:integration", 20 | "lint": "eslint ."
21 | }, 22 | "jest": { 23 | "bail": true, 24 | "setupFilesAfterEnv": [ 25 | "<rootDir>/test/integration/_each.js" 26 | ], 27 | "maxConcurrency": 1 28 | }, 29 | "dependencies": { 30 | "@folder/xdg": "^2.1.1", 31 | "ansi-colors": "^4.1.1", 32 | "backoff": "^2.5.0", 33 | "bluebird": "^3.7.1", 34 | "cli-spinner": "^0.2.10", 35 | "cli-table": "^0.3.1", 36 | "debug": "^4.1.1", 37 | "deep-equal": "^1.0.1", 38 | "dotenv": "^8.2.0", 39 | "enquirer": "^2.3.2", 40 | "find-config": "^1.0.0", 41 | "knex": "^0.20.2", 42 | "lodash.flattendeep": "^4.4.0", 43 | "lodash.pick": "^4.4.0", 44 | "merge-options": "^2.0.0", 45 | "moment": "^2.24.0", 46 | "pg": "^8.5.1", 47 | "pg-connection-string": "^2.1.0", 48 | "request": "^2.88.0", 49 | "request-promise-native": "^1.0.8", 50 | "tmp": "^0.1.0", 51 | "yargs": "^15.0.2" 52 | }, 53 | "devDependencies": { 54 | "eslint": "^6.7.1", 55 | "eslint-config-airbnb-base": "^14.0.0", 56 | "eslint-plugin-import": "^2.16.0", 57 | "jest": "^24.1.0", 58 | "strip-ansi": "^6.0.0", 59 | "strip-ansi-stream": "^1.0.0" 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /src/cmd/migrate/knex/validate.js: -------------------------------------------------------------------------------- 1 | const c = require('ansi-colors'); 2 | 3 | const debug = require('debug')('pgsh:validate'); 4 | const getAppliedMigrations = require('./util/get-applied-migrations'); 5 | const readMigrations = require('./util/read-migrations'); 6 | 7 | const { set: setCommandLine } = require('../../../metrics/command-line'); 8 | const endProgram = require('../../../end-program'); 9 | 10 | exports.command = ['validate', 'status']; 11 | exports.desc = '(knex) validates the current database against the migration directory'; 12 | 13 | exports.builder = yargs => yargs; 14 | 15 | exports.handler = async (yargs) => { 16 | const db = require('../../../db')(); 17 | const printLatest = require('./util/print-latest-migration')(db, yargs); 18 | setCommandLine(); 19 | 20 | try { 21 | const knex = db.connect(); 22 | const migrationsPath = db.getMigrationsPath(); 23 | const applied = await getAppliedMigrations(knex); 24 | const available = readMigrations(migrationsPath) 25 | .map(m => m.name); 26 | 27 | const missing = applied 28 | .map(m => m.name) 29 | .filter(name => available.indexOf(name) === -1) 30 | .map(c.redBright); 31 | 32 | const unapplied = available 33 | .filter(name => applied.findIndex(f => f.name === name) === -1) 34 | .map(c.yellowBright); 35 | 36 | await printLatest(); 37 | 38 | if (!missing.length && !unapplied.length) { 39 | return endProgram(0); // return so we don't fall through and call endProgram twice 40 | } 41 | 42 | if (missing.length) { 43 | console.log('\nMissing from filesystem:'); 44 | missing.forEach(name => console.log(` ❌ ${c.redBright(c.underline(name))}`)); 45 | } 46 | 47 | if (unapplied.length) { 48 | console.log('\nNot yet applied:'); 49 | unapplied.forEach(u => console.log(` ?
${c.underline(u)}`)); 50 | } 51 | 52 | if (missing.length) return endProgram(1); 53 | return endProgram(0); 54 | } catch (err) { 55 | debug(err.message); // knex already prints out the error, so don't repeat unless we ask 56 | return endProgram(2); 57 | } 58 | }; 59 | -------------------------------------------------------------------------------- /src/cmd/migrate/knex/force-up.js: -------------------------------------------------------------------------------- 1 | const c = require('ansi-colors'); 2 | 3 | const { set: setCommandLine } = require('../../../metrics/command-line'); 4 | const endProgram = require('../../../end-program'); 5 | 6 | const confirm = require('../../../util/confirm-prompt'); 7 | const readMigrations = require('./util/read-migrations'); 8 | 9 | exports.command = 'force-up'; 10 | exports.desc = 're-writes the knex migrations table entirely based on your migration directory'; 11 | 12 | exports.builder = {}; 13 | 14 | exports.handler = async (yargs) => { 15 | const db = require('../../../db')(); 16 | const printLatest = require('./util/print-latest-migration')(db, yargs); 17 | setCommandLine(); 18 | 19 | const schema = db.config.migrations.schema || 'public'; 20 | const table = db.config.migrations.table || 'knex_migrations'; 21 | 22 | const migrationsPath = db.getMigrationsPath(); 23 | const migrations = readMigrations(migrationsPath); 24 | if (!migrations.length) { 25 | console.error( 26 | 'your migrations folder is empty', 27 | `(${c.underline(`${db.getMigrationsPath()}/`)})!`, 28 | ); 29 | return endProgram(1); // return: the reduce below would throw on an empty array 30 | } 31 | 32 | const highestPrefix = migrations 33 | .map(migration => migration.prefix) 34 | .reduce((a, b) => { 35 | if (!a) return b; 36 | if (!b) return a; 37 | if (a.localeCompare(b) >= 0) return a; 38 | return b; 39 | }); 40 | 41 | console.log(`This will re-write the knex_migrations table based on ${migrationsPath}`); 42 | console.log( 43 | c.redBright('Use of this tool implies that the database has been migrated fully!\n'), 44 | ); 45 | 46 | try { 47 | await confirm('Type the prefix of the highest migration to continue: ', `${highestPrefix}`); 48 | } catch (err) { 49 | console.log('Not re-writing the migrations table.'); 50 | return endProgram(2); 51 | } 52 | 53 | const knex = db.connectAsSuper(); // FIXME: do we need super privileges here? 54 | 55 | // sort migrations by ascending prefix 56 | migrations.sort((a, b) => a.prefix.localeCompare(b.prefix)); 57 | 58 | await knex(`${schema}.${table}`).del(); 59 | await knex(`${schema}.${table}`) 60 | .insert(migrations.map(migration => ({ 61 | name: migration.name, 62 | batch: 1, 63 | migration_time: knex.fn.now(), 64 | }))); 65 | 66 | console.log('Migrations table re-written!\n'); 67 | await printLatest(); 68 | 69 | return endProgram(0); 70 | }; 71 | -------------------------------------------------------------------------------- /src/cmd/destroy.js: -------------------------------------------------------------------------------- 1 | const c = require('ansi-colors'); 2 | 3 | const confirm = require('../util/confirm-prompt'); 4 | const waitFor = require('../util/wait-for'); 5 | 6 | const { set: setCommandLine } = require('../metrics/command-line'); 7 | const endProgram = require('../end-program'); 8 | 9 | exports.command = ['destroy <target>', 'drop', 'rm']; 10 | exports.desc = 'Destroys the given database. This cannot be undone!'; 11 | 12 | exports.builder = yargs => 13 | yargs 14 | .positional('target', { 15 | describe: 'The database to drop.
You can maintain a blacklist ' 16 | + 'of databases to protect from this command in your .pgshrc', 17 | type: 'string', 18 | }) 19 | .option('f', { 20 | alias: 'fail-fast', 21 | type: 'boolean', 22 | describe: 'Do not wait for the database to be unused; exit immediately', 23 | default: false, 24 | }); 25 | 26 | exports.handler = async ({ target, failFast }) => { 27 | const db = require('../db')(); 28 | setCommandLine(target); 29 | 30 | const current = db.thisDb(); 31 | if (target === current) { 32 | console.log(`Cannot destroy ${target}; that's the current database!`); 33 | return endProgram(1); 34 | } 35 | 36 | if (db.config.protected 37 | && db.config.protected 38 | .map(x => x.toLowerCase()) 39 | .includes(target.toLowerCase())) { 40 | console.error(`Cannot drop ${target} (protected by your .pgshrc)`); 41 | return endProgram(2); 42 | } 43 | 44 | if (!(await db.isValidDatabase(target))) { 45 | console.error(`${target} is not a valid database.`); 46 | return endProgram(3); 47 | } 48 | 49 | const interruptHandler = () => { 50 | console.log(`\nDid not drop ${target}!`); 51 | return endProgram(0); 52 | }; 53 | 54 | try { 55 | await waitFor(db, target, interruptHandler, failFast); 56 | await confirm(c.redBright('Type the database name to drop it: '), target); 57 | await waitFor(db, target, interruptHandler, failFast); 58 | } catch (err) { 59 | console.log('Not dropping.'); 60 | return endProgram(0); 61 | } 62 | 63 | try { 64 | console.log(`Dropping ${target}...`); 65 | const knex = db.connectAsSuper(db.fallbackUrl()); // createdb 66 | await knex.raw(`drop database "${target}"`); 67 | return endProgram(0); 68 | } catch (err) { 69 | console.error(`Could not drop ${target}!`); 70 | console.error(err); 71 | return endProgram(4); 72 | } 73 | }; 74 | -------------------------------------------------------------------------------- /test/integration/basic.test.js: -------------------------------------------------------------------------------- 1 | const { consume } = require('./util/stream-utils'); 2 | const makeContext = require('./util/context'); 3 | const randomString = require('../../src/util/random-string'); 4 | 5 | const pgshGlobal = require('../../src/global'); 6 | const { METRICS_ENABLED } = require('../../src/global/keys'); 7 | 8 | const APP = 'knexapp'; 9 | const cwd = require('./app/cwd')(APP); 10 | const { env, config } = require('./app/dotfiles')(APP); 11 | 12 | const integrationDb = require('./db/integration-db'); 13 | 14 | it('identifies the current db as the integration database', async () => { 15 | pgshGlobal.set(METRICS_ENABLED, false); 16 | const { pgsh } = makeContext(cwd, config, env); 17 | const { exitCode, output } = pgsh('list'); 18 | 19 | consume(output, (line) => { 20 | if (line.startsWith('*')) { 21 | expect(line).toEqual(`* ${integrationDb}`); 22 | } 23 | }); 24 | 25 | expect(await exitCode).toBe(0); 26 | }); 27 | 28 | it('balks on unknown commands', async () => { 29 | pgshGlobal.set(METRICS_ENABLED, false); 30 | const { pgsh } = makeContext(cwd, config, env); 31 | const { exitCode } = pgsh('badcmd'); 32 | expect(await exitCode).toBe(1); 33 | }); 34 | 35 | it('can switch back and forth', async () => { 36 | pgshGlobal.set(METRICS_ENABLED, false); 37 | const ctx = makeContext(cwd, config, env); 38 | const { pgsh } = ctx; 39 | 40 | const database = randomString(); 41 | 42 | { // create, don't run migrations, but don't switch 43 | const { exitCode } = pgsh('create', database, '--no-migrate', '--no-switch'); 44 | expect(await exitCode).toBe(0); 45 | } 46 | { // ensure we're on 
the integration database 47 | const { exitCode, output } = pgsh('current'); 48 | await consume(output, l => expect(l).toEqual(integrationDb)); 49 | expect(await exitCode).toBe(0); 50 | } 51 | { // switch to the new database 52 | const { exitCode } = pgsh('switch', database); 53 | expect(await exitCode).toBe(0); 54 | } 55 | { // ensure we're on the new database 56 | const { exitCode, output } = pgsh('current'); 57 | await consume(output, l => expect(l).toEqual(database)); 58 | expect(await exitCode).toBe(0); 59 | } 60 | { // switch to the integration database 61 | const { exitCode } = pgsh('switch', integrationDb); 62 | expect(await exitCode).toBe(0); 63 | } 64 | { // ensure we're on the integration database 65 | const { exitCode, output } = pgsh('current'); 66 | await consume(output, l => expect(l).toEqual(integrationDb)); 67 | expect(await exitCode).toBe(0); 68 | } 69 | }); 70 | -------------------------------------------------------------------------------- /docs/pull-requests.md: -------------------------------------------------------------------------------- 1 | ## Pull Request Review and Merging 2 | 3 | In the below scenario, we will keep a consistent naming scheme: branches will have any type (e.g. `feat/`) removed, then the whole thing will be converted to `snake_case` and prefixed with `app_`. 4 | 5 | 1. Imagine you're working on a new feature branch `feat/shiny-new`, and that feature includes some new database migrations. 6 | 7 | 2. While you're working, your co-worker has an urgent request: she needs her `fix/urgent` branch reviewed ASAP! 8 | 9 | 3. Because you're already using `pgsh`, your feature database `app_shiny_new` is separate from your `develop` branch database `app_develop`. Switch back to it using `pgsh switch app_develop`, as it is a common ancestor for your and your co-worker's development history. 10 | 11 | 4. Let's `git pull` then `git checkout fix/urgent`. 12 | 13 | 5. Turns out that as part of the fix for the bug, your co-worker needed to retroactively fix some entries in the database, resulting in a new migration, `050_fix_timestamps.js`. Let's clone our database so we can try out the migration without clobbering our data: `pgsh clone app_urgent`. 14 | 15 | 6. `pgsh up` will migrate the newly-created database up to version 50. 16 | 17 | 7. You can now explore the applied database and run the branch's code against it, verifying both the code fix and that the migration also works with your local data. 18 | 19 | 8. Let's say you approve the pull request and merge into `develop`. While the ops team is busy deploying the fix, we can move our migrations around so that things aren't so difficult come PR time. 20 | 21 | 9. Pull the `develop` branch changes by `git checkout develop && git pull`. 22 | 23 | 10. Switch back to our branch by `git checkout feat/shiny-new && pgsh switch app_shiny_new`. 24 | 25 | 11. Let's bring our co-worker's changes and migrations into our branch: `git merge develop` or `git rebase develop`, depending on your workflow. 26 | 27 | 12. Because migrations are ordered, we now have two `050_` migrations. Our co-worker's code has been committed to `develop`, so we need to re-order our migrations after theirs by increasing their sequence number(s). 28 | 29 | 13. Now we have a valid migrations directory, but our knex migration log doesn't know about our co-worker's migration (and it hasn't been applied to our database).
Let's fix the latter first by *manually running the migration code* by pasting it into `pgsh psql`, preferably inside a transaction so we can roll back if we get into trouble. 30 | 31 | * in the future, pgsh will facilitate this more directly. 32 | 33 | 14. If there are problems, we should fix them in *our* migration(s). 34 | 35 | 15. Once you've manually migrated the database to the latest version, run `pgsh force-up` to re-write the `knex_migrations` table and complete your process. 36 | -------------------------------------------------------------------------------- /src/task/create.js: -------------------------------------------------------------------------------- 1 | const c = require('ansi-colors'); 2 | const { prompt } = require('enquirer'); 3 | const mergeOptions = require('merge-options'); 4 | 5 | const readMigrations = require('../cmd/migrate/knex/util/read-migrations'); 6 | 7 | module.exports = (db) => { 8 | const { config } = db; 9 | const defaultOptions = { 10 | template: db.config.template, 11 | migrate: undefined, 12 | yargs: undefined, 13 | switch: true, 14 | }; 15 | 16 | return async (name, options) => { 17 | const opts = mergeOptions(defaultOptions, options || {}); 18 | 19 | const current = db.thisDb(); // remember the current db name so we can switch back on failure 20 | 21 | // FIXME: correctly destroy this instance 22 | const knexFallback = db.connectAsSuper(db.fallbackUrl()); // createdb 23 | await knexFallback.raw(` 24 | create database "${name}" 25 | template ${opts.template} 26 | `); 27 | if (opts.switch) { 28 | db.switchTo(name); 29 | console.log(`Done! Switched to ${name}.`); 30 | } else { 31 | console.log(`Done! Created ${name}.`); 32 | } 33 | 34 | let shouldMigrate = opts.migrate || false; 35 | if (config.migrations && opts.migrate === undefined) { 36 | // only show the prompt if we have some migrations in the folder.
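// (opts.migrate is tri-state: true/false -- e.g. via create's
// --migrate/--no-migrate flags -- skips this prompt entirely, while
// undefined, the default from defaultOptions above, means "ask the user".)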
37 | const migrationFiles = readMigrations(db.getMigrationsPath()); 38 | if (migrationFiles.length > 0) { 39 | const response = await prompt({ 40 | type: 'toggle', 41 | name: 'migrate', 42 | message: 'Migrate this database to the latest version?', 43 | }); 44 | shouldMigrate = response.migrate; 45 | } 46 | } 47 | 48 | if (config.migrations && shouldMigrate) { 49 | const printLatest = require('../cmd/migrate/knex/util/print-latest-migration')(db, { 50 | ...opts.yargs, 51 | name, 52 | }); 53 | try { 54 | // TODO: DRY with "up" command 55 | const knex = db.connect(db.thisUrl(name)); 56 | const [batch, filenames] = await knex.migrate.latest(); 57 | 58 | if (filenames.length > 0) { 59 | console.log(`Migration batch #${batch} applied!`); 60 | filenames.forEach(filename => 61 | console.log(`↑ ${c.yellowBright(filename)}`)); 62 | console.log(); 63 | } 64 | } catch (err) { 65 | console.error(c.redBright('Knex migration failed:'), err); 66 | if (opts.switch) { 67 | console.log( 68 | `Switching back to ${c.yellowBright(current)}` 69 | + ' and dropping the new database...', 70 | ); 71 | db.switchTo(current); 72 | } 73 | await knexFallback.raw(`drop database "${name}"`); 74 | console.log('Done.'); 75 | throw new Error('Migration failed; database dropped.'); 76 | } 77 | await printLatest(); 78 | } 79 | 80 | return name; 81 | }; 82 | }; 83 | -------------------------------------------------------------------------------- /src/cmd/list.js: -------------------------------------------------------------------------------- 1 | const c = require('ansi-colors'); 2 | const fs = require('fs'); 3 | const path = require('path'); 4 | const moment = require('moment'); 5 | const Bluebird = require('bluebird'); 6 | 7 | const { set: setCommandLine } = require('../metrics/command-line'); 8 | const endProgram = require('../end-program'); 9 | 10 | const config = require('../config'); 11 | 12 | const printTable = require('../util/print-table'); 13 | 14 | exports.command = ['list [prefix]', 'ls', 'l']; 15 | exports.desc = 'prints all databases, filtered by an optional prefix'; 16 | 17 | exports.builder = yargs => yargs 18 | .positional('prefix', { 19 | describe: 'only list databases that start with the given string', 20 | type: 'string', 21 | default: config.prefix ? `${config.prefix}_` : '', 22 | }) 23 | .option('c', { 24 | alias: 'created', 25 | type: 'boolean', 26 | describe: 'order database lists by creation time descending', 27 | default: false, 28 | }); 29 | 30 | const IGNORE_DATABASES = ['postgres']; 31 | 32 | const migrationOutput = async (db, knex, isPrimary) => { 33 | const SCHEMA = config.migrations.schema || 'public'; 34 | const TABLE = config.migrations.table || 'knex_migrations'; 35 | try { 36 | const latest = await knex(`${SCHEMA}.${TABLE}`) 37 | .orderBy('id', 'desc') 38 | .first('name', 'migration_time'); 39 | 40 | let filename; 41 | const fileExists = fs.existsSync(path.join(db.getMigrationsPath(), latest.name)); 42 | if (fileExists) { 43 | filename = isPrimary 44 | ? c.greenBright(c.underline(latest.name)) 45 | : c.underline(latest.name); 46 | } else { 47 | filename = c.gray(latest.name); 48 | } 49 | 50 | const reltime = fileExists 51 | ?
c.blueBright(moment(latest.migration_time).fromNow()) 52 | : c.gray(moment(latest.migration_time).fromNow()); 53 | 54 | return [filename, reltime]; 55 | } catch (err) { 56 | return []; 57 | } 58 | }; 59 | 60 | exports.handler = async (yargs) => { 61 | const db = require('../db')(); 62 | const { prefix, verbose: explicitlyVerbose, created } = yargs; 63 | const showMigrations = explicitlyVerbose !== undefined ? explicitlyVerbose : !!db.config.migrations; 64 | 65 | setCommandLine(prefix); 66 | 67 | try { 68 | const current = db.thisDb(); 69 | const databaseNames = await db.databaseNames({ 70 | showTemplates: false, 71 | sortByCreation: created || false, 72 | }); 73 | 74 | const rows = await Bluebird.map( 75 | databaseNames 76 | .filter(x => !IGNORE_DATABASES.includes(x)) 77 | .filter(x => !prefix || x.startsWith(prefix)), 78 | 79 | async (name) => { 80 | let migration = []; 81 | if (showMigrations) { 82 | // FIXME: do we need super privileges here? 83 | const knex = db.connectAsSuper(db.thisUrl(name)); 84 | migration = await migrationOutput(db, knex, name === current); 85 | } 86 | 87 | if (name === current) { 88 | return ['*', `${c.yellowBright(name)}`, ...migration]; 89 | } 90 | return ['', name, ...migration]; 91 | }, 92 | ); 93 | printTable(rows); 94 | 95 | endProgram(0); 96 | } catch (err) { 97 | const { message } = err; 98 | console.error(`postgres: ${c.redBright(message)}`); 99 | endProgram(1); 100 | } 101 | }; 102 | -------------------------------------------------------------------------------- /src/cmd/migrate/knex/down.js: -------------------------------------------------------------------------------- 1 | const c = require('ansi-colors'); 2 | const debug = require('debug')('pgsh:knex:down'); 3 | 4 | const { set: setCommandLine } = require('../../../metrics/command-line'); 5 | const endProgram = require('../../../end-program'); 6 | 7 | const readMigrations = require('./util/read-migrations'); 8 | const getAppliedMigrations = require('./util/get-applied-migrations'); 9 | const deleteMigration = require('./util/delete-migration'); 10 | const chooseMigrationIndex = require('./util/choose-migration-index'); 11 | 12 | exports.command = 'down <ver>'; 13 | exports.desc = '(knex) down-migrates the current database to the given migration'; 14 | 15 | exports.builder = yargs => 16 | yargs 17 | .positional('ver', { 18 | describe: 'the migration to migrate down to', 19 | type: 'string', 20 | }); 21 | 22 | exports.handler = async (yargs) => { 23 | const db = require('../../../db')(); 24 | const printLatest = require('./util/print-latest-migration')(db, yargs); 25 | const { ver: userInput } = yargs; 26 | setCommandLine(); 27 | 28 | // grab the migrations from the file system (under some VCS) 29 | const migrationsPath = db.getMigrationsPath(); 30 | const vcsMigrations = readMigrations(migrationsPath); 31 | if (!vcsMigrations.length) { 32 | console.error( 33 | 'your migrations folder is empty', 34 | `(${c.underline(`${db.getMigrationsPath()}/`)})!`, 35 | ); 36 | endProgram(1); 37 | } 38 | 39 | // determine which migration the user's talking about 40 | const destVcsIndex = await chooseMigrationIndex(db)( 41 | vcsMigrations.map(m => m.name), 42 | userInput, 43 | ); 44 | 45 | // grab the set of migrations already recorded in the database 46 | const knex = db.connect(); 47 | const appliedMigrations = await getAppliedMigrations(knex); 48 | 49 | /* eslint-disable import/no-dynamic-require */ 50 | /* eslint-disable no-await-in-loop */ 51 | for ( 52 | // start from the lexicographically highest-prefixed migration
on disk 53 | // and go down to the ID of the migration we want to be at. 54 | let i = vcsMigrations.findIndex(m => m.name === appliedMigrations[0].name); 55 | i > destVcsIndex; 56 | i -= 1 57 | ) { 58 | // match it with a migration on the filesystem 59 | const thisDbMigration = appliedMigrations.find(m => m.name === vcsMigrations[i].name); 60 | if (!thisDbMigration) { 61 | console.error( 62 | `Trying to cascade deletion but migration ${c.redBright(vcsMigrations[i].name)} ` 63 | + 'could not be found in the database! Exiting.', 64 | ); 65 | endProgram(1); 66 | } 67 | 68 | // execute the migration in our database and record it in the knex migrations table 69 | const { name, fullPath } = vcsMigrations[i]; 70 | const { down: runDownMigration } = require(fullPath); 71 | try { 72 | await runDownMigration(knex, Promise); 73 | await deleteMigration(knex, thisDbMigration.id); 74 | console.log(`↓ ${c.redBright(name)}`); 75 | } catch (err) { 76 | console.error(`something went wrong running down from: ${fullPath}`); 77 | console.error(err); 78 | } 79 | } 80 | /* eslint-enable no-await-in-loop */ 81 | /* eslint-enable import/no-dynamic-require */ 82 | 83 | // close our database connection 84 | await printLatest(); 85 | return new Promise(resolve => 86 | knex.destroy(() => { 87 | debug(`Down-migration to <${userInput}> finished!`); 88 | resolve(); 89 | })); 90 | }; 91 | -------------------------------------------------------------------------------- /src/cmd/migrate/knex/force-down.js: -------------------------------------------------------------------------------- 1 | const c = require('ansi-colors'); 2 | const moment = require('moment'); 3 | 4 | const { set: setCommandLine } = require('../../../metrics/command-line'); 5 | const endProgram = require('../../../end-program'); 6 | 7 | const getAppliedMigrations = require('./util/get-applied-migrations'); 8 | const chooseMigrationIndex = require('./util/choose-migration-index'); 9 | 10 | const confirm = require('../../../util/confirm-prompt'); 11 | const printTable = require('../../../util/print-table'); 12 | 13 | exports.command = 'force-down <ver>'; 14 | exports.desc = 're-writes the `knex_migrations` table to not include the record of any migration past the given version.'; 15 | 16 | exports.builder = yargs => 17 | yargs 18 | .positional('ver', { 19 | describe: 'the migration number to migrate down to', 20 | type: 'string', 21 | }); 22 | 23 | exports.handler = async (yargs) => { 24 | const db = require('../../../db')(); 25 | const { ver: userInput, iso } = yargs; 26 | const printLatest = require('./util/print-latest-migration')(db, yargs); 27 | const timestamp = raw => (iso 28 | ? moment(raw).format() 29 | : moment(raw).fromNow() 30 | ); 31 | setCommandLine(); 32 | 33 | const knex = db.connectAsSuper(); // FIXME: do we need super privileges here? 34 | const SCHEMA = db.config.migrations.schema || 'public'; 35 | const TABLE = db.config.migrations.table || 'knex_migrations'; 36 | 37 | // determine which migration the user's talking about 38 | const appliedMigrations = await getAppliedMigrations(knex); // from db 39 | const idx = await chooseMigrationIndex(db)( 40 | appliedMigrations.map(m => m.name), userInput, 41 | ); 42 | const prefix = appliedMigrations[idx].name.split('_')[0]; 43 | 44 | let migrationsToDelete; 45 | try { 46 | migrationsToDelete = await knex.raw(` 47 | select 48 | name, 49 | migration_time::text as "migratedAt" 50 | from ${SCHEMA}.${TABLE} 51 | where split_part(name, '_', 1) > ?
52 | `, [prefix]).then(({ rows }) => rows); 53 | } catch (err) { 54 | const { message } = err; 55 | console.error(`postgres: ${c.redBright(message)}`); 56 | endProgram(1); 57 | } 58 | 59 | if (!migrationsToDelete.length) { 60 | console.error( 61 | 'No migrations to forget! This usually means', 62 | 'your database is <= the given version.', 63 | ); 64 | return endProgram(2); 65 | } 66 | 67 | console.log(`This will forcibly downgrade your database to ${prefix}`); 68 | console.log('After doing this, you should manually downgrade the actual database data / schema.\n'); 69 | console.log('The following migrations will be forgotten:'); 70 | const rows = migrationsToDelete.map(({ name, migratedAt }) => ([ 71 | '*', 72 | `${c.greenBright(timestamp(migratedAt))}`, 73 | `${c.redBright(name)}`, 74 | ])); 75 | printTable(rows); 76 | 77 | console.log( 78 | '\nIf the above migrations exist in the directory,', 79 | 'you can use down instead of force-down.', 80 | ); 81 | 82 | try { 83 | await confirm('Otherwise, type the target prefix again to execute: ', `${prefix}`); 84 | } catch (err) { 85 | console.error('Not downgrading.'); 86 | return endProgram(2); 87 | } 88 | 89 | console.log(`\nSetting database to ${prefix}...`); 90 | await knex.raw(` 91 | delete from ${SCHEMA}.${TABLE} 92 | where split_part(name, '_', 1) > ? 93 | `, [prefix]); 94 | 95 | await printLatest(); 96 | return endProgram(0); 97 | }; 98 | -------------------------------------------------------------------------------- /docs/branching.md: -------------------------------------------------------------------------------- 1 | ## A database branching model 2 | 3 | As your database schema evolves, you quickly realise the challenge of keeping the structure (and triggers, stored procedures, seed data...) of the database in sync with your codebase. You may have even witnessed the horror of inconsistent db builds due to "repeatable migrations". Instead, treat the database as a code repository itself: clone and switch between branches just like you do in git. 4 | 5 | This makes it easy to dynamically switch between tasks: juggle maintenance, feature development, and code reviews easily by keeping separate postgres databases. pgsh does not enforce a 1:1 relationship between git and database branches, but (for your own sanity!) it's a good place to start. 6 | 7 | ### Branching 8 | 9 | When you start a topic branch with `git checkout -b`, you should also `pgsh clone`. 10 | 11 | ### Merging 12 | 13 | When you want to `git merge` a topic branch back into e.g. **develop**, you'll need to manually re-order migrations, as well as resolve any conflicts. Do this by performing the lion's share of the work on the topic branch: 14 | 15 | 1. Make sure your **develop** database is up-to-date, then `pgsh clone` into a temporary database. 16 | 2. Switch to the topic branch and `git merge|rebase develop`. 17 | 3. Re-order your migrations after existing ones, then use `pgsh up` to attempt the migration. 18 | 4. If the migration fails, modify the migrations and try again. 19 | 5. Once the migrations have all been applied and the database looks correct, merge your topic branch into **develop**. 20 | 21 | There are many other ways to accomplish this using `pgsh`. You should find the workflow that feels most natural to your team. The `force-down` and `force-up` commands can help get knex out of your way by modifying the `knex_migrations` log stored in your database. 22 | 23 | ## Recommendations 24 | 25 | * Use integer prefixes for your migrations (e.g.
`005_add_user_tokens.js`) rather than timestamps, as this makes them easier to spot and re-order. 26 | 27 | * It's not recommended to `CREATE ROLE` in a migration, as roles are cluster-wide (shared by every database on the server). If you do, you'll never be able to create and migrate a new database using `pgsh create -m`! On the other hand, `CREATE EXTENSION` is fine in migrations as long as your user has sufficient permissions. In practice, some extensions can only be created by the superuser. 28 | 29 | Instead of this, I'd suggest creating a template database and configuring `.pgshrc` to use that. If you're only developing one project on your postgres server, feel free to modify `template1` (which is the default `config.template`). 30 | 31 | * Write your down migrations properly. When your migration replaces a function or transforms data, make sure its *down* edge faithfully re-creates the previous database to the best of its ability. Some data loss is OK (it's only development data), but structural differences restrict your ability to use `pgsh` to its fullest. 32 | 33 | If your migration `v` can't be down-migrated, try the following: 34 | 35 | 1. If you're using knex to perform migrations, you can use `exports.down = (knex, Promise) => Promise.reject()`. This will force down-migrations through this edge to fail. 36 | 37 | 2. Manually bring your database to some earlier version `v' <= v`. 38 | 39 | 3. Run `pgsh force-down <v'>` to re-write the migrations table to reflect your manual work. 40 | 41 | * Run your migrations against a CI database and, where possible, use (fuzzed) subsets of production data to catch migration issues early. 42 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as 6 | contributors and maintainers pledge to making participation in our project and 7 | our community a harassment-free experience for everyone, regardless of age, body 8 | size, disability, ethnicity, sex characteristics, gender identity and expression, 9 | level of experience, education, socio-economic status, nationality, personal 10 | appearance, race, religion, or sexual identity and orientation. 11 | 12 | ## Our Standards 13 | 14 | Examples of behavior that contributes to creating a positive environment 15 | include: 16 | 17 | * Using welcoming and inclusive language 18 | * Being respectful of differing viewpoints and experiences 19 | * Gracefully accepting constructive criticism 20 | * Focusing on what is best for the community 21 | * Showing empathy towards other community members 22 | 23 | Examples of unacceptable behavior by participants include: 24 | 25 | * The use of sexualized language or imagery and unwelcome sexual attention or 26 | advances 27 | * Trolling, insulting/derogatory comments, and personal or political attacks 28 | * Public or private harassment 29 | * Publishing others' private information, such as a physical or electronic 30 | address, without explicit permission 31 | * Other conduct which could reasonably be considered inappropriate in a 32 | professional setting 33 | 34 | ## Our Responsibilities 35 | 36 | Project maintainers are responsible for clarifying the standards of acceptable 37 | behavior and are expected to take appropriate and fair corrective action in 38 | response to any instances of unacceptable behavior.
39 | 40 | Project maintainers have the right and responsibility to remove, edit, or 41 | reject comments, commits, code, wiki edits, issues, and other contributions 42 | that are not aligned to this Code of Conduct, or to ban temporarily or 43 | permanently any contributor for other behaviors that they deem inappropriate, 44 | threatening, offensive, or harmful. 45 | 46 | ## Scope 47 | 48 | This Code of Conduct applies both within project spaces and in public spaces 49 | when an individual is representing the project or its community. Examples of 50 | representing a project or community include using an official project e-mail 51 | address, posting via an official social media account, or acting as an appointed 52 | representative at an online or offline event. Representation of a project may be 53 | further defined and clarified by project maintainers. 54 | 55 | ## Enforcement 56 | 57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 58 | reported by contacting the project team at sastraxi@gmail.com. All 59 | complaints will be reviewed and investigated and will result in a response that 60 | is deemed necessary and appropriate to the circumstances. The project team is 61 | obligated to maintain confidentiality with regard to the reporter of an incident. 62 | Further details of specific enforcement policies may be posted separately. 63 | 64 | Project maintainers who do not follow or enforce the Code of Conduct in good 65 | faith may face temporary or permanent repercussions as determined by other 66 | members of the project's leadership. 67 | 68 | ## Attribution 69 | 70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, 71 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html 72 | 73 | [homepage]: https://www.contributor-covenant.org 74 | 75 | For answers to common questions about this code of conduct, see 76 | https://www.contributor-covenant.org/faq 77 | -------------------------------------------------------------------------------- /src/task/choose-db.js: -------------------------------------------------------------------------------- 1 | const c = require('ansi-colors'); 2 | const { prompt } = require('enquirer'); 3 | 4 | const clone = require('./clone'); 5 | const create = require('./create'); 6 | const config = require('../config'); 7 | 8 | const CHOICES = [ 9 | { 10 | value: 'switch', 11 | message: 'Connect to an existing database', 12 | }, 13 | { 14 | value: 'clone', 15 | message: 'Clone an existing database', 16 | }, 17 | { 18 | value: 'create', 19 | message: 'Create a new database', 20 | }, 21 | ]; 22 | 23 | const KEEP_CHOICE = { 24 | value: 'keep', 25 | message: 'Keep as-is', 26 | }; 27 | 28 | const pick = async (db, message = 'Which database?', showTemplates = false) => { 29 | const names = await db.databaseNames({ showTemplates }); 30 | const { name } = await prompt({ 31 | type: 'select', 32 | name: 'name', 33 | message, 34 | choices: names.sort(), 35 | }); 36 | return name; 37 | }; 38 | 39 | const dispatch = { 40 | keep: 41 | db => ({ database: db.thisDb() }), 42 | 43 | switch: 44 | async (db) => { 45 | const database = await pick(db, 'Connect to which database?'); 46 | return { database }; 47 | }, 48 | 49 | clone: 50 | async (db) => { 51 | const source = await pick(db, 'Which database do you want to clone?'); 52 | const { target } = await prompt({ 53 | type: 'input', 54 | name: 'target', 55 | message: 'What should the new database be called?', 56 | }); 57 | 58 | console.log(); 
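// recap the plan, then delegate to the shared clone task (./clone.js),
// which is the same code path `pgsh clone` uses under the hood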
59 | console.log( 60 | `Going to clone ${c.yellowBright(source)} to ${c.yellowBright(target)}...`, 61 | ); 62 | await clone(db)(source, target); 63 | console.log('Done!'); 64 | 65 | return { database: target }; 66 | }, 67 | 68 | create: 69 | async (db) => { 70 | const { pickTemplate } = await prompt({ 71 | type: 'toggle', 72 | name: 'pickTemplate', 73 | message: 74 | `${c.bold('Do you need to change the template database?')}` 75 | + ` (default ${c.yellowBright(config.template)})`, 76 | }); 77 | 78 | const template = pickTemplate 79 | ? await pick(db, 'Which template do you want to use?', true) 80 | : config.template; 81 | 82 | const { name } = await prompt({ 83 | type: 'input', 84 | name: 'name', 85 | message: 'What should the new database be called?', 86 | }); 87 | 88 | console.log(); 89 | console.log(`Going to create ${c.yellowBright(name)}...`); 90 | await create(db)(name, { template, switch: false }); 91 | console.log('Done!'); 92 | 93 | return { 94 | database: name, 95 | config: pickTemplate ? { template } : {}, 96 | }; 97 | }, 98 | }; 99 | 100 | /** 101 | * Pass in the current database value rather than get it from the config 102 | * because we're probably in "knex init" and we don't have the env loaded. 103 | * 104 | * @returns { database, config } the database we've switched to and any config changes 105 | */ 106 | module.exports = db => async (currentDatabase) => { 107 | if (currentDatabase) { 108 | console.log( 109 | `Your environment currently points to ${c.yellowBright(currentDatabase)}.`, 110 | ); 111 | console.log(); 112 | } 113 | 114 | const { mode } = await prompt( 115 | { 116 | name: 'mode', 117 | type: 'select', 118 | message: 'What would you like to do?', 119 | choices: [ 120 | ...(currentDatabase ? [KEEP_CHOICE] : []), 121 | ...CHOICES, 122 | ], 123 | }, 124 | ); 125 | 126 | return dispatch[mode](db); 127 | }; 128 | -------------------------------------------------------------------------------- /test/integration/migrate.test.js: -------------------------------------------------------------------------------- 1 | const matcher = require('./util/matcher'); 2 | const { consume, numLines } = require('./util/stream-utils'); 3 | const makeContext = require('./util/context'); 4 | const randomString = require('../../src/util/random-string'); 5 | 6 | const APP = 'knexapp'; 7 | const cwd = require('./app/cwd')(APP); 8 | const { env, config } = require('./app/dotfiles')(APP); 9 | 10 | const pgshGlobal = require('../../src/global'); 11 | const { METRICS_ENABLED } = require('../../src/global/keys'); 12 | 13 | it('migrates properly upon creation', async () => { 14 | pgshGlobal.set(METRICS_ENABLED, false); 15 | const ctx = makeContext(cwd, config, env); 16 | const { pgsh } = ctx; 17 | 18 | const database = randomString(); 19 | 20 | { // create, run migrations, and switch 21 | const { exitCode, output, send } = pgsh('create', database); 22 | await consume(output, null, numLines(2)); 23 | await send.down(); // run migrations 24 | await send.enter(); 25 | expect(await exitCode).toBe(0); 26 | } 27 | { // ensure we're at the latest migration 28 | const { exitCode, output } = pgsh('status', '-a'); 29 | await consume(output, line => expect(line).toMatch( 30 | matcher.startsWith(`* ${database} 20191124331980_data.js`), 31 | ), numLines(1)); 32 | expect(await exitCode).toBe(0); 33 | } 34 | }); 35 | 36 | it('can migrate up and down successfully', async () => { 37 | pgshGlobal.set(METRICS_ENABLED, false); 38 | const ctx = makeContext(cwd, config, env); 39 | const { pgsh } = ctx; 40 | 41 | const 
database = randomString(); 42 | 43 | { // create and switch but don't run migrations 44 | const { exitCode, output } = pgsh('create', database, '--no-migrate'); 45 | await consume(output); 46 | expect(await exitCode).toBe(0); 47 | } 48 | { // ensure we're *not* migrated 49 | const { exitCode, output } = pgsh('status', '--no-verbose'); 50 | await consume(output, line => expect(line).toEqual(`* ${database}`), numLines(1)); 51 | 52 | const EXPECTED_UNMIGRATED = [ 53 | '20191124214437_init.js', 54 | '20191124305523_add_image_url.js', 55 | '20191124331980_data.js', 56 | ]; 57 | const unmigrated = []; 58 | await consume(output, (line) => { 59 | const [mark, name] = line.trim().split(' '); 60 | if (mark === '?') { 61 | unmigrated.push(name); 62 | } 63 | }); 64 | expect(unmigrated.sort()).toEqual(EXPECTED_UNMIGRATED.sort()); 65 | expect(await exitCode).toBe(0); 66 | } 67 | { // migrate up 68 | const { exitCode, output } = pgsh('up'); 69 | await consume(output); 70 | expect(await exitCode).toBe(0); 71 | } 72 | { // ensure we're at the latest migration 73 | const { exitCode, output } = pgsh('status'); 74 | await consume(output, line => expect(line).toMatch( 75 | matcher.startsWith(`* ${database} 20191124331980_data.js`), 76 | ), numLines(1)); 77 | expect(await exitCode).toBe(0); 78 | } 79 | { // migrate down to the first (and use its suffix to do so) 80 | const { exitCode, output } = pgsh('down', 'init'); 81 | await consume(output, line => expect(line).toContain('↓'), numLines(2)); 82 | await consume(output, line => expect(line).toMatch( 83 | matcher.startsWith(`* ${database} 20191124214437_init.js`), 84 | ), numLines(1)); 85 | expect(await exitCode).toBe(0); 86 | } 87 | { // migrate up and ensure we're at the latest migration 88 | const { exitCode, output } = pgsh('up'); 89 | await consume(output, line => expect(line).toContain('↑'), numLines(2)); 90 | await consume(output, line => expect(line).toMatch( 91 | matcher.startsWith(`* ${database} 20191124331980_data.js`), 92 | ), numLines(1)); 93 | expect(await exitCode).toBe(0); 94 | } 95 | }); 96 | -------------------------------------------------------------------------------- /src/cmd/clone.js: -------------------------------------------------------------------------------- 1 | const c = require('ansi-colors'); 2 | 3 | const confirm = require('../util/confirm-prompt'); 4 | const waitFor = require('../util/wait-for'); 5 | 6 | const { set: setCommandLine } = require('../metrics/command-line'); 7 | const endProgram = require('../end-program'); 8 | 9 | // N.B. yargs didn't work so well when this was [source] <target>, 10 | // so we have to account for the 1-argument version in our handler.
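// e.g. `pgsh clone my_copy` clones the current database as my_copy,
// while `pgsh clone somedb my_copy` clones somedb as my_copy.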
11 | exports.command = 'clone [source] [target]'; 12 | exports.desc = 'clones a database, then (potentially) switches to it'; 13 | 14 | exports.builder = yargs => yargs 15 | .positional('source', { 16 | describe: 'the database to clone (defaults to the current database)', 17 | type: 'string', 18 | }) 19 | .positional('target', { 20 | describe: 'the name to give the cloned database (required!)', 21 | type: 'string', 22 | required: true, 23 | default: undefined, 24 | }) 25 | .option('f', { 26 | alias: 'force', 27 | type: 'boolean', 28 | describe: 'do not warn before overwriting an existing database (use with caution!)', 29 | default: false, 30 | }) 31 | .option('s', { 32 | alias: 'switch', 33 | type: 'boolean', 34 | describe: 'switch to the newly-created database (by default true when cloning the current db)', 35 | default: undefined, 36 | }) 37 | .option('S', { 38 | type: 'boolean', 39 | describe: 'do not switch to the newly-created database', 40 | default: undefined, 41 | }) 42 | .conflicts('s', 'S') 43 | .conflicts('S', 's'); 44 | 45 | exports.handler = async ({ 46 | source: argSource, 47 | target: argTarget, 48 | force, 49 | s, // -s, --switch 50 | S, // -S, --no-switch 51 | }) => { 52 | const db = require('../db')(); 53 | const clone = require('../task/clone')(db); 54 | 55 | // account for the 1-argument version (target only). 56 | const currentDb = db.thisDb(); 57 | const oneArg = !argTarget; 58 | const source = oneArg ? currentDb : argSource; 59 | const target = oneArg ? argSource : argTarget; 60 | 61 | setCommandLine(source, target); 62 | 63 | if (source === target) { 64 | console.log('Cannot clone a database over itself.'); 65 | return endProgram(2); 66 | } 67 | 68 | const targetExists = await db.isValidDatabase(target); 69 | if (targetExists) { 70 | const interruptHandler = () => { 71 | console.log(`\nDid not drop ${target}!`); 72 | return endProgram(0); 73 | }; 74 | 75 | console.log(`The ${target} database already exists.`); 76 | try { 77 | if (!force) { 78 | await confirm(c.redBright('Type the database name to drop it: '), target); 79 | } 80 | 81 | // wait for the database to be unused, then drop it 82 | await waitFor(db, target, interruptHandler); 83 | const knex = db.connectAsSuper(db.fallbackUrl()); // createdb 84 | await knex.raw(`drop database "${target}"`); 85 | await new Promise(resolve => knex.destroy(resolve)); 86 | 87 | console.log(c.redBright(`Dropped ${target}!`)); 88 | } catch (err) { 89 | console.error('reason:', err); 90 | console.log('Not dropping.'); 91 | return endProgram(0); 92 | } 93 | } 94 | 95 | /* eslint-disable max-len */ 96 | let shouldSwitch = (source === currentDb); // by default, only switch if we're cloning the current db 97 | if (s !== undefined) shouldSwitch = s; // user has explicitly decided via -s, --switch 98 | if (S !== undefined) shouldSwitch = !S; // user has explicitly decided via -S, --no-switch 99 | /* eslint-enable max-len */ 100 | 101 | console.log(`Going to clone ${source} to ${target}...`); 102 | try { 103 | await clone(source, target); 104 | if (shouldSwitch) { 105 | db.switchTo(target); 106 | console.log(`Done! Switched to ${target}.`); 107 | } else if (currentDb === target) { 108 | console.log(`Done! Cloned over your current database ${target}.`); 109 | } else { 110 | console.log(`Done! 
Created ${target}.`); 111 | } 112 | return endProgram(0); 113 | } catch (err) { 114 | console.error( 115 | `Clone failed: ${c.redBright(err.message)}`, 116 | ); 117 | return endProgram(1); 118 | } 119 | }; 120 | -------------------------------------------------------------------------------- /test/integration/util/context.js: -------------------------------------------------------------------------------- 1 | const knex = require('knex'); 2 | const debug = require('debug')('integration:util:db:pg-factory'); // eslint-disable-line 3 | 4 | const execPgsh = require('./exec-pgsh'); 5 | const writeDotfiles = require('./write-dotfiles'); 6 | const explodeUrl = require('./explode-url'); 7 | const findDir = require('../../../src/util/find-dir'); 8 | const combineUrl = require('../../../src/util/build-url'); 9 | const defaultConfig = require('../../../src/pgshrc/default'); 10 | const integrationDb = require('../db/integration-db'); 11 | 12 | const ALLOWED_HOSTS = [ 13 | 'localhost', 14 | '127.0.0.1', 15 | 'dockerhost', 16 | ]; 17 | 18 | const modifyUrl = (modifications, databaseUrl) => 19 | combineUrl({ 20 | ...explodeUrl(databaseUrl), 21 | ...modifications, 22 | }); 23 | 24 | const makeContext = (cwd, pgshrc, dotenv) => { 25 | const env = dotenv || {}; 26 | const config = pgshrc || defaultConfig; 27 | const URL_MODE = config.mode === 'url'; 28 | 29 | const PGSH_URL = URL_MODE 30 | ? env[config.vars.url] 31 | : combineUrl({ 32 | user: env[config.vars.user], 33 | password: env[config.vars.password], 34 | host: env[config.vars.host], 35 | port: env[config.vars.port], 36 | database: env[config.vars.database], 37 | }); 38 | 39 | const testVar = URL_MODE ? config.vars.url : config.vars.database; 40 | const isValid = PGSH_URL !== undefined && (testVar in env); 41 | 42 | // our "context" object requires you to override connecting 43 | // to any database other than the integration test db explicitly 44 | const DATABASE_URL = isValid 45 | ? modifyUrl( 46 | { database: integrationDb }, 47 | PGSH_URL, 48 | ) 49 | : undefined; 50 | 51 | const thisDb = () => 52 | explodeUrl(DATABASE_URL).database; 53 | 54 | if (isValid) { 55 | const { host } = explodeUrl(DATABASE_URL); 56 | if (ALLOWED_HOSTS.indexOf(host) === -1) { 57 | throw new Error( 58 | 'The integration test drops all other databases when it starts. ' 59 | + 'As such, we only allow running it on localhost / dockerhost.', 60 | ); 61 | } 62 | } 63 | 64 | /** 65 | * Returns the connection string, optionally 66 | * with a different database name at the end of it. 67 | */ 68 | const thisUrl = database => 69 | (database 70 | ? 
modifyUrl({ database }, DATABASE_URL) 71 | : DATABASE_URL); 72 | 73 | const fallbackUrl = () => 74 | thisUrl(config.fallback_database); 75 | 76 | const getMigrationsPath = () => 77 | findDir( 78 | config.migrations.path || 'migrations', 79 | cwd, 80 | ) || 'migrations'; 81 | 82 | const knexMigrationsOptions = () => { 83 | const schema = config.migrations.schema || 'public'; 84 | const table = config.migrations.table || 'knex_migrations'; 85 | const migrations = { schemaName: schema, tableName: table }; 86 | 87 | const migrationsPath = getMigrationsPath(); 88 | if (migrationsPath) { 89 | migrations.directory = migrationsPath; 90 | } 91 | return { migrations }; 92 | }; 93 | 94 | const connect = (databaseUrl = DATABASE_URL) => 95 | knex({ 96 | client: 'pg', 97 | connection: explodeUrl(databaseUrl), 98 | ...knexMigrationsOptions(), 99 | }); 100 | 101 | const connectAsSuper = (databaseUrl = DATABASE_URL) => { 102 | const connection = explodeUrl(databaseUrl); 103 | if (config.vars.super_user) { 104 | connection.user = env[config.vars.super_user]; 105 | connection.password = env[config.vars.super_password]; 106 | } 107 | 108 | return knex({ 109 | client: 'pg', 110 | connection, 111 | ...knexMigrationsOptions(), 112 | }); 113 | }; 114 | 115 | // it begins: write the initial .pgshrc and .env 116 | writeDotfiles(cwd, { config: pgshrc, env: dotenv }); 117 | return { 118 | isValid, 119 | connect, 120 | connectAsSuper, 121 | database: isValid ? thisDb() : undefined, 122 | integrationUrl: DATABASE_URL, 123 | pgshUrl: PGSH_URL, 124 | fallbackUrl: isValid ? fallbackUrl() : undefined, 125 | pgsh: (...args) => execPgsh(cwd, args), 126 | pgshWithEnv: envToInject => (...args) => execPgsh(cwd, args, envToInject), 127 | }; 128 | }; 129 | 130 | module.exports = makeContext; 131 | -------------------------------------------------------------------------------- /src/metrics/send.js: -------------------------------------------------------------------------------- 1 | const debug = require('debug')('pgsh:metrics'); 2 | const moment = require('moment'); 3 | const crypto = require('crypto'); 4 | const request = require('request-promise-native'); 5 | 6 | const { 7 | SERVER_URL_HTTP, 8 | SERVER_URL_HTTPS, 9 | MAX_SAMPLES_PER_SEND, 10 | } = require('./constants'); 11 | 12 | // Yep, this is in version control. Sue me! 13 | const HMAC_KEY = '125091675yhiofa70rt2_pgsh_metrics_server'; 14 | 15 | // Actually, please don't sue me. 16 | // ... why is it here? Well, pgsh is a CLI tool. It needs the key 17 | // to be able to sign packets before sending them to the 18 | // metrics server. Since the tool will be running on untrusted 19 | // computers, we can't really discern between our "deployment" 20 | // and someone else's. As far as I know, CLI CORS doesn't exist... 21 | const hmac = (str) => 22 | crypto 23 | .createHmac('sha1', HMAC_KEY) 24 | .update(str) 25 | .digest('hex'); 26 | 27 | // Anyway, the point is this: anyone who can download pgsh will 28 | // have the key anyway, so why go to the trouble of hiding it? 
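// (Presumably, then, the server just re-computes the HMAC over the raw
// request body and compares it with the X-Pgsh-Signature header set below;
// since the key is public, this guards against malformed or accidental
// submissions rather than deliberate tampering.)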
29 | 30 | const global = require('../global'); 31 | const { 32 | METRICS_ENABLED, 33 | METRICS_LAST_SENT, 34 | METRICS_IN_PROGRESS, 35 | METRICS_UPLOAD_PERIOD_SEC, 36 | METRICS_UPLOAD_USE_HTTPS, 37 | } = require('../global/keys'); 38 | 39 | const store = require('./store'); 40 | 41 | class RateLimited extends Error { 42 | constructor(remaining) { 43 | super(`Rate limit reached: ${remaining} available`); 44 | this.remaining = remaining; 45 | } 46 | } 47 | 48 | const actualSend = async (samples) => { 49 | const body = await store.get(samples); 50 | if (body.trim().length === 0) return; // nothing to send 51 | 52 | try { 53 | const useHttps = global.get(METRICS_UPLOAD_USE_HTTPS, true); 54 | const response = await request.post( 55 | useHttps ? SERVER_URL_HTTPS : SERVER_URL_HTTP, 56 | { 57 | headers: { 58 | 'X-Pgsh-Signature': hmac(body), 59 | 'Content-Type': 'text/plain', 60 | }, 61 | body, 62 | json: false, 63 | gzip: true, 64 | }, 65 | ); 66 | debug('response from metrics server', response); 67 | 68 | // remove however many the server handled 69 | const { insert } = JSON.parse(response); 70 | debug('insert', insert); 71 | await store.discard(insert); 72 | } catch (err) { 73 | debug('metrics error', err); 74 | const { error, response, ...extra } = err; 75 | if (response && response.statusCode === 429) { 76 | const remaining = +response.headers['x-rate-limit-remaining']; 77 | debug('recv from server remaining', remaining); 78 | throw new RateLimited(remaining); 79 | } else { 80 | console.error(error, extra); 81 | } 82 | } 83 | }; 84 | 85 | const sendMetrics = async () => { 86 | if (global.get(METRICS_IN_PROGRESS)) { 87 | // let's assume they're trying to run a couple pgsh processes at once; it's OK 88 | // to miss uploading this time and just do it next time we have a clean mutex 89 | return; 90 | } 91 | 92 | global.set(METRICS_IN_PROGRESS, true); 93 | try { 94 | await actualSend(MAX_SAMPLES_PER_SEND); 95 | } catch (err) { 96 | if (err instanceof RateLimited) { 97 | console.log(`* retry with remaining: ${err.remaining}`); 98 | await actualSend(err.remaining); 99 | } 100 | } 101 | global.set(METRICS_IN_PROGRESS, false); 102 | }; 103 | 104 | // eslint-disable-next-line 105 | const sendMetricsIfTime = async () => { 106 | const timestamp = moment(); 107 | 108 | if (global.get(METRICS_LAST_SENT)) { 109 | // upload exactly once each period 110 | const lastSent = moment(+global.get(METRICS_LAST_SENT)); 111 | const uploadPeriodSec = +global.get(METRICS_UPLOAD_PERIOD_SEC, 3600); 112 | const canUploadNext = moment(lastSent).add(uploadPeriodSec, 'seconds'); 113 | debug('upload period (sec) is', uploadPeriodSec); 114 | debug( 115 | `timestamp: ${+timestamp}`, 116 | `goal: ${+canUploadNext}`, 117 | `lastSent: ${+lastSent}`, 118 | ); 119 | if (canUploadNext.isAfter(timestamp)) { 120 | debug('not sending as last upload was too recent'); 121 | return; 122 | } 123 | } 124 | await sendMetrics(); 125 | global.set(METRICS_LAST_SENT, +timestamp); 126 | }; 127 | 128 | module.exports = async () => { 129 | if (global.get(METRICS_ENABLED)) { 130 | return sendMetricsIfTime(); 131 | } 132 | debug('metrics disabled!'); 133 | return Promise.resolve(); 134 | }; 135 | -------------------------------------------------------------------------------- /test/integration/init.test.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | const Knex = require('knex'); 3 | 4 | const { consume, numLines } = require('./util/stream-utils'); 5 | const explodeUrl = 
require('./util/explode-url'); 6 | const makeContext = require('./util/context'); 7 | const randomString = require('../../src/util/random-string'); 8 | 9 | const pgshGlobal = require('../../src/global'); 10 | const { METRICS_ENABLED } = require('../../src/global/keys'); 11 | 12 | const APP = 'knexapp'; 13 | const cwd = require('./app/cwd')(APP); 14 | const { env, config } = require('./app/dotfiles')(APP); 15 | 16 | const integrationDb = require('./db/integration-db'); 17 | const integrationUrl = require('./db/integration-url'); 18 | 19 | it('warns about cloning if regular user does not have CREATEDB', async () => { 20 | const knex = Knex({ 21 | client: 'pg', 22 | connection: explodeUrl(integrationUrl), 23 | }); 24 | 25 | pgshGlobal.set(METRICS_ENABLED, false); 26 | 27 | const password = randomString(); 28 | const user = `user_${randomString(3)}`; 29 | await knex.raw(`CREATE ROLE ${user} LOGIN NOCREATEDB PASSWORD '${password}'`); 30 | env[config.vars.user] = user; 31 | env[config.vars.password] = password; 32 | 33 | const ctx = makeContext(cwd, null, env); 34 | const { pgsh } = ctx; 35 | { // set up! 36 | const { 37 | exitCode, output, errors, send, 38 | } = pgsh('init'); 39 | await consume(output, null, numLines(6)); 40 | await send.down(); 41 | await send.enter(); 42 | await consume(output, null, numLines(6)); 43 | await send.enter(); 44 | await consume(output, null, numLines(5)); 45 | await send.enter(); 46 | await consume(output, null, numLines(4)); 47 | await send.enter(); 48 | await consume(output, null, numLines(3)); 49 | await send.enter(); 50 | await consume(output, null, numLines(2)); 51 | await send.enter(); 52 | await consume(output, null, numLines(1)); 53 | await consume(output, line => expect(line).toEqual(''), numLines(1)); 54 | await consume(output, line => expect(line).toEqual( 55 | `You are connecting as an underprivileged user ${user}.`, 56 | ), numLines(1)); 57 | await send.ctrlC(); 58 | await consume(errors, line => expect(line).toEqual( 59 | 'pgsh init failed: Either add variables for a superuser ' 60 | + 'name and password to .env, or modify your existing ' 61 | + `user with ALTER ROLE ${user} CREATEDB.`, 62 | ), numLines(1)); 63 | expect(await exitCode).toBe(2); 64 | } 65 | 66 | await knex.raw(`DROP ROLE ${user}`); 67 | return new Promise(resolve => knex.destroy(resolve)); 68 | }); 69 | 70 | it('creates the correct .pgshrc via init without superuser credentials', async () => { 71 | const knex = Knex({ 72 | client: 'pg', 73 | connection: explodeUrl(integrationUrl), 74 | }); 75 | 76 | pgshGlobal.set(METRICS_ENABLED, false); 77 | 78 | const password = randomString(); 79 | const user = `user_${randomString(3)}`; 80 | await knex.raw(`CREATE ROLE ${user} LOGIN CREATEDB PASSWORD '${password}'`); 81 | env[config.vars.user] = user; 82 | env[config.vars.password] = password; 83 | 84 | const ctx = makeContext(cwd, null, env); 85 | const { pgsh } = ctx; 86 | { // set up! 
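// walk through init's interactive prompts: the first down()/enter() picks
// the second mode option ("split", as asserted against the written .pgshrc
// below), then each enter() accepts the suggested default for that prompt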
87 | const { exitCode, output, send } = pgsh('init'); 88 | await consume(output, null, numLines(6)); 89 | await send.down(); 90 | await send.enter(); 91 | await consume(output, null, numLines(6)); 92 | await send.enter(); 93 | await consume(output, null, numLines(5)); 94 | await send.enter(); 95 | await consume(output, null, numLines(4)); 96 | await send.enter(); 97 | await consume(output, null, numLines(3)); 98 | await send.enter(); 99 | await consume(output, null, numLines(2)); 100 | await send.enter(); 101 | await consume(output, null, numLines(1)); 102 | await consume(output, line => expect(line).toEqual(''), numLines(1)); 103 | await consume(output, line => expect(line).toEqual( 104 | `Your environment currently points to ${integrationDb}.`, 105 | ), numLines(1)); 106 | await send.enter(); 107 | expect(await exitCode).toBe(0); 108 | } 109 | 110 | const writtenConfig = fs.readFileSync(`${cwd}/.pgshrc`, { encoding: 'utf8' }); 111 | const { mode, vars } = JSON.parse(writtenConfig); 112 | expect(mode).toEqual('split'); 113 | Object.keys(vars).forEach(key => 114 | expect(vars[key]).toEqual(config.vars[key])); 115 | 116 | await knex.raw(`DROP ROLE ${user}`); 117 | return new Promise(resolve => knex.destroy(resolve)); 118 | }); 119 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## **pgsh**: PostgreSQL tools for local development 2 | 3 | [![npm](https://img.shields.io/npm/v/pgsh.svg)](https://npmjs.com/package/pgsh) 4 | ![license](https://img.shields.io/github/license/sastraxi/pgsh.svg) 5 | ![circleci](https://img.shields.io/circleci/project/github/sastraxi/pgsh/master.svg) 6 | ![downloads](https://img.shields.io/npm/dm/pgsh.svg) 7 | 8 |
![pgsh intro](docs/pgsh-intro.gif) 9 | 10 | ![pgsh intro (620px)](docs/pgsh-intro-620.gif)
11 | 12 | Finding database migrations painful to work with? Switching contexts a chore? [Pull requests](docs/pull-requests.md) piling up? `pgsh` helps by managing a connection string in your `.env` file and allows you to [branch your database](docs/branching.md) just like you branch with git. 13 | 14 | --- 15 | 16 | ## Prerequisites 17 | There are only a couple requirements: 18 | 19 | * your project reads its database configuration from the environment 20 | * it uses a `.env` file to do so in development. 21 | 22 | > See [dotenv](https://www.npmjs.com/package/dotenv) for more details, and [The Twelve-Factor App](https://12factor.net) for why this is a best practice. 23 | 24 | | Language / Framework | `.env` solution | Maturity | 25 | | -------------------- | --------------- | -------- | 26 | | javascript | [dotenv](https://www.npmjs.com/package/dotenv) | high | 27 | 28 | pgsh can help even more if you use [knex](https://knexjs.org) for migrations. 29 | 30 | ## Installation 31 | 32 | 1. `yarn global add pgsh` to make the `pgsh` command available everywhere 33 | 2. `pgsh init` to create a `.pgshrc` config file in your project folder, beside your `.env` file (see `src/pgshrc/default.js` for further configuration) 34 | 3. You can now run `pgsh` anywhere in your project directory (try `pgsh -a`!) 35 | 4. It is recommended to check your `.pgshrc` into version control. [Why?](docs/pgshrc.md) 36 | 37 | ## URL vs split mode 38 | There are two different ways pgsh can help you manage your current connection (`mode` in `.pgshrc`): 39 | * `url` (default) when one variable in your `.env` has your full database connection string (e.g. `DATABASE_URL=postgres://...`) 40 | * `split` when your `.env` has different keys (e.g. `PG_HOST=localhost`, `PG_DATABASE=myapp`, ...) 41 | 42 | ## Running tests 43 | 44 | 1. Make sure the postgres client and its associated tools (`psql`, `pg_dump`, etc.) are installed locally 45 | 2. `cp .env.example .env` 46 | 3. `docker-compose up -d` 47 | 4. Run the test suite using `yarn test`. Note that this test suite will destroy all 48 | databases on the connected postgres server, so it will force you to set a certain 49 | environment variable to confirm this is ok. 50 | 51 | --- 52 | 53 | ## Command reference 54 | 55 | * `pgsh init` generates a `.pgshrc` file for your project. 56 | * `pgsh url` prints your connection string. 57 | * `pgsh psql -- <name>` connects to the current (or *name*d) database with psql 58 | * `pgsh current` prints the name of the database that your connection string refers to right now. 59 | * `pgsh` or `pgsh list <filter>` prints all databases, filtered by an optional filter. Output is similar to `git branch`. By adding the `-a` option you can see migration status too! 60 | 61 | ## Database branching 62 | 63 | Read up on the recommended [branching model](docs/branching.md) for more details. 64 | 65 | * `pgsh clone [from] <name>` clones your current (or the `from`) database as *name*, then (optionally) runs `switch <name>`. 66 | * `pgsh create <name>` creates an empty database, then runs `switch <name>` and optionally migrates it to the latest version. 67 | * `pgsh switch <name>` makes *name* your current database, changing the connection string. 68 | * `pgsh destroy <name>` destroys the given database.
*This cannot be undone.* You can maintain a blacklist of databases to protect from this command in `.pgshrc` 69 | 70 | ## Dump and restore 71 | 72 | * `pgsh dump [name]` dumps the current database (or the *name*d one if given) to stdout 73 | * `pgsh restore <name>` restores a previously-dumped database as *name* from stdin 74 | 75 | ## Migration management (via knex) 76 | 77 | pgsh provides a slightly-more-user-friendly interface to knex's [migration system](https://knexjs.org/#Migrations). 78 | 79 | * `pgsh up` migrates the current database to the latest version found in your migration directory. 80 | 81 | * `pgsh down <version>` down-migrates the current database to *version*. Requires your migrations to have `down` edges! 82 | 83 | * `pgsh force-up` re-writes the `knex_migrations` table *entirely* based on your migration directory. In effect, running this command is saying to knex "trust me, the database has the structure you expect". 84 | 85 | * `pgsh force-down <version>` re-writes the `knex_migrations` table to not include the record of any migration past the given *version*. Use this command when you manually un-migrated some migrations (e.g. a bad migration or when you are trying to undo a migration with missing "down sql"). 86 | 87 | * `pgsh validate` compares the `knex_migrations` table to the configured migrations directory and reports any inconsistencies between the two. 88 | -------------------------------------------------------------------------------- /src/db.js: -------------------------------------------------------------------------------- 1 | const knex = require('knex'); 2 | const c = require('ansi-colors'); 3 | const debug = require('debug')('pgsh:db'); 4 | const { parse: parseUrl } = require('pg-connection-string'); 5 | 6 | const endProgram = require('./end-program'); 7 | 8 | const existingConfig = require('./config'); 9 | const updateExistingEnv = require('./env/update-existing'); 10 | 11 | const findDir = require('./util/find-dir'); 12 | const buildUrl = require('./util/build-url'); 13 | 14 | module.exports = (config = existingConfig) => { 15 | const combineUrl = buildUrl; 16 | 17 | const URL_MODE = config.mode === 'url'; 18 | const testVar = URL_MODE ? config.vars.url : config.vars.database; 19 | 20 | if (!(testVar in process.env)) { 21 | console.error( 22 | `pgsh is configured to use the value of ${c.greenBright(testVar)}` 23 | + ` in your ${c.underline('.env')} file, but it is unset. Exiting.`, 24 | ); 25 | return endProgram(54, true); 26 | } 27 | 28 | const DATABASE_URL = URL_MODE 29 | ? 
process.env[config.vars.url] 30 | : combineUrl({ 31 | user: process.env[config.vars.user], 32 | password: process.env[config.vars.password], 33 | host: process.env[config.vars.host], 34 | port: process.env[config.vars.port], 35 | database: process.env[config.vars.database], 36 | }); 37 | 38 | const explodeUrl = (databaseUrl) => { 39 | const parsed = parseUrl(databaseUrl); 40 | Object.keys(parsed).forEach((key) => { 41 | if (parsed[key] === null) { 42 | parsed[key] = undefined; // nulls get coerced to "null" by psql 43 | } 44 | }); 45 | return parsed; 46 | }; 47 | 48 | const thisDb = () => 49 | explodeUrl(DATABASE_URL).database; 50 | 51 | const createPostgresEnv = (databaseUrl = DATABASE_URL) => { 52 | const { 53 | user, 54 | password, 55 | host, 56 | port, 57 | } = explodeUrl(databaseUrl); 58 | return { 59 | ...process.env, 60 | PGUSER: user, 61 | PGPASSWORD: password, 62 | PGHOST: host, 63 | PGPORT: port, 64 | }; 65 | }; 66 | 67 | const createSuperPostgresEnv = (databaseUrl = DATABASE_URL) => { 68 | const { 69 | user, 70 | password, 71 | host, 72 | port, 73 | } = explodeUrl(databaseUrl); 74 | return { 75 | ...process.env, 76 | PGUSER: process.env[config.vars.super_user] || user, 77 | PGPASSWORD: process.env[config.vars.super_password] || password, 78 | PGHOST: host, 79 | PGPORT: port, 80 | }; 81 | }; 82 | 83 | const getMigrationsPath = () => 84 | findDir( 85 | config.migrations.path || 'migrations', 86 | ) || 'migrations'; 87 | 88 | const knexMigrationsOptions = () => { 89 | const schema = config.migrations.schema || 'public'; 90 | const table = config.migrations.table || 'knex_migrations'; 91 | const migrations = { schemaName: schema, tableName: table }; 92 | 93 | const migrationsPath = getMigrationsPath(); 94 | if (migrationsPath) { 95 | migrations.directory = migrationsPath; 96 | } 97 | return { migrations }; 98 | }; 99 | 100 | const connect = (databaseUrl = DATABASE_URL) => 101 | knex({ 102 | client: 'pg', 103 | connection: explodeUrl(databaseUrl), 104 | ...knexMigrationsOptions(), 105 | }); 106 | 107 | const connectAsSuper = (databaseUrl = DATABASE_URL) => { 108 | const connection = explodeUrl(databaseUrl); 109 | if (config.vars.super_user) { 110 | connection.user = process.env[config.vars.super_user]; 111 | connection.password = process.env[config.vars.super_password]; 112 | } 113 | return knex({ 114 | client: 'pg', 115 | connection, 116 | ...knexMigrationsOptions(), 117 | }); 118 | }; 119 | 120 | const modifyUrl = (modifications, databaseUrl = DATABASE_URL) => 121 | combineUrl({ 122 | ...explodeUrl(databaseUrl), 123 | ...modifications, 124 | }); 125 | 126 | /** 127 | * Returns the connection string, optionally 128 | * with a different database name at the end of it. 129 | */ 130 | const thisUrl = database => 131 | (database 132 | ? 
modifyUrl({ database }, DATABASE_URL) 133 | : DATABASE_URL); 134 | 135 | const fallbackUrl = () => 136 | thisUrl(config.fallback_database); 137 | 138 | const DEFAULT_DB_NAMES_OPTIONS = { 139 | showTemplates: false, 140 | sortByCreation: false, 141 | }; 142 | 143 | const SORT_CREATION = (a, b) => -a.created_at.localeCompare(b.created_at); 144 | const SORT_NAME = (a, b) => a.name.localeCompare(b.name); 145 | 146 | const databaseNames = async (options) => { 147 | const { 148 | showTemplates, 149 | sortByCreation, 150 | } = { ...DEFAULT_DB_NAMES_OPTIONS, ...(options || {}) }; 151 | 152 | const getNames = async (...connectionArgs) => { 153 | const db = connectAsSuper(...connectionArgs); // superuser needed for pg_stat_file 154 | 155 | let names; 156 | try { 157 | names = await db.raw(` 158 | SELECT 159 | datname as name, 160 | (pg_stat_file('base/'||oid ||'/PG_VERSION')).modification::text as created_at 161 | FROM pg_database 162 | WHERE datistemplate = ? 163 | `, [showTemplates]) 164 | .then(({ rows }) => rows 165 | .sort(sortByCreation ? SORT_CREATION : SORT_NAME) 166 | .map(row => row.name)); 167 | } catch (err) { 168 | debug(err.code, err); 169 | if (+err.code === 42501) { 170 | // insufficient privileges; retry without created_at 171 | names = await db.raw(` 172 | SELECT datname as name 173 | FROM pg_database 174 | WHERE datistemplate = ? 175 | `, [showTemplates]) 176 | .then(({ rows }) => rows 177 | .sort(SORT_NAME) 178 | .map(row => row.name)); 179 | 180 | if (sortByCreation) { 181 | console.error( 182 | c.red('WARNING: pg_stat_file not available; not sorting by creation.'), 183 | ); 184 | } 185 | } else { 186 | throw err; 187 | } 188 | } 189 | 190 | await new Promise(resolve => db.destroy(resolve)); 191 | return names; 192 | }; 193 | 194 | // first attempt to connect to the given database; 195 | // if that does not work, fall back to the built-in "postgres" 196 | try { 197 | const names = await getNames(); 198 | return names; 199 | } catch (err) { 200 | const { message } = err; 201 | debug(`databaseNames: ${c.redBright(message)}`); 202 | debug(`databaseNames: using ${c.yellowBright(config.fallback_database)} instead`); 203 | try { 204 | const names = await getNames(thisUrl(config.fallback_database)); 205 | return names; 206 | } catch (fatalErr) { 207 | console.error(`${c.redBright('FATAL ERROR:')} could not read system catalogue.`); 208 | console.error(`Make sure that your ${c.underline('.pgshrc')} has vars for superuser credentials!`); 209 | return endProgram(55, true); 210 | } 211 | } 212 | }; 213 | 214 | const isValidDatabase = async (name) => { 215 | const names = await databaseNames(); 216 | return names.includes(name); 217 | }; 218 | 219 | const switchTo = (database) => { 220 | if (URL_MODE) { 221 | updateExistingEnv({ 222 | [config.vars.url]: thisUrl(database), 223 | }, { throwIfUnchanged: false }); 224 | } else { 225 | updateExistingEnv({ 226 | [config.vars.database]: database, 227 | }, { throwIfUnchanged: false }); 228 | } 229 | }; 230 | 231 | return { 232 | config, 233 | 234 | thisDb, 235 | thisUrl, 236 | fallbackUrl, 237 | combineUrl, 238 | explodeUrl, 239 | 240 | getMigrationsPath, 241 | 242 | connect, 243 | createPostgresEnv, 244 | connectAsSuper, 245 | createSuperPostgresEnv, 246 | 247 | databaseNames, 248 | isValidDatabase, 249 | switchTo, 250 | }; 251 | }; 252 | -------------------------------------------------------------------------------- /test/integration/metrics.test.js: -------------------------------------------------------------------------------- 1 | const mergeOptions = 
require('merge-options'); 2 | const http = require('http'); 3 | const moment = require('moment'); 4 | const fs = require('fs'); 5 | 6 | const pgshGlobal = require('../../src/global'); 7 | const { 8 | METRICS_ENABLED, 9 | METRICS_LAST_SENT, 10 | METRICS_UPLOAD_PERIOD_SEC, 11 | METRICS_UPLOAD_USE_HTTPS, 12 | } = require('../../src/global/keys'); 13 | const randomString = require('../../src/util/random-string'); 14 | const { SERVER_URL_HTTP } = require('../../src/metrics/constants'); 15 | 16 | const makeContext = require('./util/context'); 17 | const readMetrics = require('./util/read-metrics'); 18 | const resetMetrics = require('./util/reset-metrics'); 19 | const integrationDb = require('./db/integration-db'); 20 | const { consume, numLines } = require('./util/stream-utils'); 21 | 22 | const APP = 'knexapp'; 23 | const cwd = require('./app/cwd')(APP); 24 | const { env, config: telemetryDisabledConfig } = require('./app/dotfiles')(APP); 25 | 26 | const config = mergeOptions(telemetryDisabledConfig, { 27 | force_disable_metrics: false, 28 | }); 29 | 30 | env.HTTP_PROXY = `http://localhost:${process.env.DANGER_INTEGRATION_PROXY_PORT}`; 31 | 32 | it.skip('enables telemetry from clean global config', async () => { 33 | pgshGlobal.set(METRICS_ENABLED, undefined); 34 | pgshGlobal.set(METRICS_LAST_SENT, undefined); 35 | const ctx = makeContext(cwd, config, env); 36 | const { pgsh } = ctx; 37 | 38 | const { send, exitCode, output } = pgsh('current'); 39 | await consume(output, line => expect(line).toEqual(integrationDb), numLines(1)); 40 | await send.down(); // opt in 41 | await send.enter(); 42 | 43 | expect(await exitCode).toBe(0); 44 | expect(pgshGlobal.get(METRICS_ENABLED)).toEqual(true); 45 | }); 46 | 47 | it.skip('disables telemetry from clean global config', async () => { 48 | pgshGlobal.set(METRICS_ENABLED, undefined); 49 | pgshGlobal.set(METRICS_LAST_SENT, undefined); 50 | const ctx = makeContext(cwd, config, env); 51 | const { pgsh } = ctx; 52 | 53 | const { send, exitCode, output } = pgsh('current'); 54 | await consume(output, line => expect(line).toEqual(integrationDb), numLines(1)); 55 | await send.enter(); // opt out 56 | 57 | expect(await exitCode).toBe(0); 58 | expect(pgshGlobal.get(METRICS_ENABLED)).toEqual(false); 59 | }); 60 | 61 | it('does not ask for opt-in when running with clean config and force_disable_metrics', async () => { 62 | pgshGlobal.set(METRICS_ENABLED, undefined); 63 | pgshGlobal.set(METRICS_LAST_SENT, undefined); 64 | const ctx = makeContext(cwd, telemetryDisabledConfig, env); 65 | const { pgsh } = ctx; 66 | 67 | const { exitCode, output } = pgsh('current'); 68 | await consume(output, line => expect(line).toEqual(integrationDb), numLines(1)); 69 | expect(await exitCode).toBe(0); 70 | 71 | expect(pgshGlobal.get(METRICS_ENABLED)).toEqual(undefined); 72 | }); 73 | 74 | it.skip('pgsh metrics on turns on metrics and disables force_disable_metrics', async () => { 75 | pgshGlobal.set(METRICS_ENABLED, false); 76 | pgshGlobal.set(METRICS_LAST_SENT, undefined); 77 | const ctx = makeContext(cwd, telemetryDisabledConfig, env); 78 | const { pgsh } = ctx; 79 | 80 | const { exitCode, output } = pgsh('metrics', 'on'); 81 | await consume(output, line => expect(line).toEqual( 82 | 'Telemetry is now enabled globally.', 83 | ), numLines(1)); 84 | await consume(output, line => expect(line).toEqual( 85 | 'Removed force_disable_metrics from .pgshrc.', 86 | ), numLines(1)); 87 | expect(await exitCode).toBe(0); 88 | 89 | // ensure metrics were globally enabled 90 | 
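/* "globally" means pgsh's global store, as opposed to the per-project .pgshrc checked below */ 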
expect(pgshGlobal.get(METRICS_ENABLED)).toEqual(true); 91 | 92 | // ensure metrics were locally enabled 93 | const writtenConfig = fs.readFileSync(`${cwd}/.pgshrc`, { encoding: 'utf8' }); 94 | const { force_disable_metrics: forceDisableMetrics } = JSON.parse(writtenConfig); 95 | expect(forceDisableMetrics).toEqual(false); 96 | }); 97 | 98 | it.skip('pgsh metrics off turns off metrics', async () => { 99 | pgshGlobal.set(METRICS_ENABLED, true); 100 | pgshGlobal.set(METRICS_LAST_SENT, undefined); 101 | const ctx = makeContext(cwd, config, env); 102 | const { pgsh } = ctx; 103 | 104 | const { exitCode, output } = pgsh('metrics', 'off'); 105 | await consume(output, line => expect(line).toEqual( 106 | 'Telemetry is now disabled globally.', 107 | ), numLines(1)); 108 | expect(await exitCode).toBe(0); 109 | 110 | // ensure metrics were globally disabled 111 | expect(pgshGlobal.get(METRICS_ENABLED)).toEqual(false); 112 | 113 | // ensure metrics not locally disabled 114 | const writtenConfig = fs.readFileSync(`${cwd}/.pgshrc`, { encoding: 'utf8' }); 115 | const { force_disable_metrics: forceDisableMetrics } = JSON.parse(writtenConfig); 116 | expect(forceDisableMetrics).toEqual(false); 117 | }); 118 | 119 | // --------------------------------------------------------------- // 120 | 121 | it.skip('pgsh clone writes to log, obscuring database names and outputting correct error code', async () => { 122 | pgshGlobal.set(METRICS_ENABLED, true); 123 | pgshGlobal.set(METRICS_LAST_SENT, +moment().add(1, 'day')); 124 | pgshGlobal.set(METRICS_UPLOAD_PERIOD_SEC, +moment().add(1, 'month')); // ensure we don't upload 125 | pgshGlobal.set(METRICS_UPLOAD_USE_HTTPS, false); 126 | const ctx = makeContext(cwd, config, env); 127 | const { pgsh } = ctx; 128 | 129 | // capture calls to the server 130 | let requestCount = 0; 131 | const proxyServer = http.createServer(() => { 132 | requestCount += 1; 133 | }).listen(+process.env.DANGER_INTEGRATION_PROXY_PORT); 134 | 135 | // remove history of all metrics 136 | resetMetrics(); 137 | 138 | const databaseName = randomString(); 139 | let hashedName; 140 | { // create, but don't switch 141 | const { exitCode } = pgsh('clone', databaseName, '--no-switch'); 142 | expect(await exitCode).toEqual(0); 143 | 144 | const metrics = readMetrics(); 145 | expect(metrics.length).toEqual(1); 146 | 147 | // FIXME: version is knexapp/package.json's version right now :/ 148 | const { exitCode: recordedExitCode, command } = metrics[0]; 149 | expect(recordedExitCode).toEqual(0); 150 | expect(command[0]).toEqual('node'); 151 | expect(command[1]).toEqual('index.js'); 152 | expect(command[2]).toEqual('clone'); 153 | expect(command[3]).not.toEqual(databaseName); // must be hashed somehow! 154 | expect(command[4]).toEqual('--no-switch'); 155 | hashedName = command[3]; // eslint-disable-line 156 | } 157 | { 158 | const { exitCode } = pgsh('switch', databaseName); 159 | expect(await exitCode).toEqual(0); 160 | 161 | const metrics = readMetrics(); 162 | expect(metrics.length).toEqual(2); 163 | 164 | const { exitCode: recordedExitCode, command } = metrics[1]; 165 | expect(recordedExitCode).toEqual(0); 166 | expect(command[0]).toEqual('node'); 167 | expect(command[1]).toEqual('index.js'); 168 | expect(command[2]).toEqual('switch'); 169 | expect(command[3]).toEqual(hashedName); // hash must be consistent! 
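The same database name must map to the same obscured value in every recorded command.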
170 | } 171 | { 172 | const { exitCode } = pgsh('switch', databaseName); 173 | expect(await exitCode).toEqual(2); 174 | 175 | const metrics = readMetrics(); 176 | expect(metrics.length).toEqual(3); 177 | 178 | const { exitCode: recordedExitCode, command } = metrics[2]; 179 | expect(recordedExitCode).toEqual(2); 180 | expect(command[0]).toEqual('node'); 181 | expect(command[1]).toEqual('index.js'); 182 | expect(command[2]).toEqual('switch'); 183 | expect(command[3]).toEqual(hashedName); // hash must be consistent! 184 | } 185 | 186 | // make sure we didn't actually send anything! 187 | expect(requestCount).toEqual(0); 188 | proxyServer.close(); 189 | }); 190 | 191 | it.skip('setting upload period to 0 => upload at start of next command', async () => { 192 | pgshGlobal.set(METRICS_ENABLED, true); 193 | pgshGlobal.set(METRICS_LAST_SENT, undefined); 194 | pgshGlobal.set(METRICS_UPLOAD_PERIOD_SEC, 0); 195 | pgshGlobal.set(METRICS_UPLOAD_USE_HTTPS, false); 196 | const ctx = makeContext(cwd, config, env); 197 | const { pgsh } = ctx; 198 | 199 | // capture calls to the server 200 | let requestCount = 0; 201 | let lastServerMetrics; 202 | let lastWrittenMetric; 203 | const proxyServer = http.createServer(async (req, res) => { 204 | expect(req.method).toEqual('POST'); 205 | expect(req.url).toEqual(`${SERVER_URL_HTTP}/`); 206 | 207 | // re-assemble the request body 208 | // TODO: make a fn; https://stackoverflow.com/a/49428486/220642 209 | const body = await new Promise((resolve) => { 210 | const chunks = []; 211 | req.on('data', chunk => chunks.push(chunk)); 212 | req.on('end', () => resolve(Buffer.concat(chunks).toString('utf8'))); 213 | }); 214 | lastServerMetrics = body.split('\n').filter(x => x.trim() !== '').map(JSON.parse); 215 | requestCount += 1; 216 | 217 | res.writeHead(200, { 'Content-Type': 'application/json' }); 218 | res.write(JSON.stringify({ 219 | insert: lastServerMetrics.length, 220 | })); 221 | res.end(); 222 | }).listen(+process.env.DANGER_INTEGRATION_PROXY_PORT); 223 | 224 | // remove history of all metrics 225 | resetMetrics(); 226 | 227 | { 228 | const { exitCode } = pgsh('ls'); 229 | expect(await exitCode).toEqual(0); 230 | expect(requestCount).toEqual(0); 231 | expect(lastServerMetrics).toEqual(undefined); 232 | 233 | const metrics = readMetrics(); 234 | expect(metrics.length).toEqual(1); 235 | lastWrittenMetric = metrics[metrics.length - 1]; 236 | } 237 | { 238 | const { exitCode } = pgsh('current'); 239 | expect(await exitCode).toEqual(0); 240 | expect(requestCount).toEqual(1); 241 | expect(lastServerMetrics).toEqual([lastWrittenMetric]); 242 | 243 | const metrics = readMetrics(); 244 | expect(metrics.length).toEqual(1); 245 | lastWrittenMetric = metrics[metrics.length - 1]; 246 | } 247 | { 248 | const { exitCode } = pgsh('url'); 249 | expect(await exitCode).toEqual(0); 250 | expect(requestCount).toEqual(2); 251 | expect(lastServerMetrics).toEqual([lastWrittenMetric]); 252 | } 253 | 254 | proxyServer.close(); 255 | }); 256 | -------------------------------------------------------------------------------- /src/cmd/init.js: -------------------------------------------------------------------------------- 1 | const c = require('ansi-colors'); 2 | const os = require('os'); 3 | const path = require('path'); 4 | const { prompt } = require('enquirer'); 5 | const mergeOptions = require('merge-options'); 6 | 7 | const { set: setCommandLine } = require('../metrics/command-line'); 8 | const endProgram = require('../end-program'); 9 | 10 | const dbFactory = require('../db'); 11 | const 
addAll = require('../util/add-all'); 12 | const buildMap = require('../util/build-map'); 13 | const buildUrl = require('../util/build-url'); 14 | const chooseDb = require('../task/choose-db'); 15 | const filterKeys = require('../util/filter-keys'); 16 | const isPrivileged = require('../task/is-privileged'); 17 | const randomString = require('../util/random-string'); 18 | const promptForVars = require('../util/prompt-for-vars'); 19 | const promptForInput = require('../util/prompt-for-input'); 20 | const findProjectRoot = require('../util/find-project-root'); 21 | 22 | const configExists = require('../pgshrc/exists'); 23 | const defaultConfig = require('../pgshrc/default'); 24 | const createConfig = require('../pgshrc/create'); 25 | const stringifyEnv = require('../util/stringify-env'); 26 | const createEnv = require('../env/create'); 27 | const parseEnv = require('../env/parse'); 28 | 29 | const TEMP_DB_NAME_LENGTH = 30; 30 | const DEFAULT_USER = os.userInfo().username; 31 | const DEFAULT_DATABASE = path.basename(findProjectRoot()); 32 | 33 | const URL_DEFAULT_VARS = { 34 | url: defaultConfig.vars.url, 35 | }; 36 | 37 | const SPLIT_DEFAULT_VARS = { 38 | host: defaultConfig.vars.host, 39 | port: defaultConfig.vars.port, 40 | user: defaultConfig.vars.user, 41 | password: defaultConfig.vars.password, 42 | database: defaultConfig.vars.database, 43 | }; 44 | 45 | const SUPERUSER_DEFAULT_VARS = { 46 | super_user: defaultConfig.vars.super_user || 'PG_SUPER_USER', 47 | super_password: defaultConfig.vars.super_password || 'PG_SUPER_PASSWORD', 48 | }; 49 | 50 | const URL_PROMPTS = [ 51 | { 52 | name: 'url', 53 | description: 'connection URL', 54 | initial: `postgres://${DEFAULT_USER}@localhost/${DEFAULT_DATABASE}`, 55 | }, 56 | ]; 57 | 58 | const isNumeric = x => x === `${+x}`; 59 | 60 | const injectEnv = (vars, env) => 61 | Object.entries(buildMap(vars, env)) 62 | .forEach(([k, v]) => { 63 | process.env[k] = v; 64 | }); 65 | 66 | const SPLIT_PROMPTS = [ 67 | { 68 | name: 'database', 69 | description: 'database name', 70 | initial: DEFAULT_DATABASE, 71 | }, 72 | { 73 | name: 'host', 74 | description: 'hostname (e.g. localhost)', 75 | initial: 'localhost', 76 | }, 77 | { 78 | name: 'port', 79 | description: 'port (e.g. 
5432)', 80 | initial: 5432, 81 | validate: isNumeric, 82 | skippable: true, 83 | }, 84 | { 85 | name: 'user', 86 | description: 'username', 87 | skippable: true, 88 | }, 89 | { 90 | name: 'password', 91 | description: 'password', 92 | type: 'password', 93 | skippable: true, 94 | }, 95 | ]; 96 | 97 | const SUPERUSER_PROMPTS = [ 98 | { 99 | name: 'super_user', 100 | description: 'superuser name', 101 | }, 102 | { 103 | name: 'super_password', 104 | description: 'superuser password', 105 | type: 'password', 106 | skippable: true, 107 | }, 108 | ]; 109 | 110 | exports.command = 'init'; 111 | exports.desc = 'generates .pgshrc / .env files conversationally'; 112 | exports.builder = {}; 113 | 114 | const makeConfig = (mode, vars) => 115 | mergeOptions( 116 | defaultConfig, 117 | { mode, vars }, 118 | ); 119 | 120 | const makeDb = (mode, vars) => 121 | dbFactory(makeConfig(mode, vars)); 122 | 123 | /** 124 | * @returns { vars, env } always 125 | */ 126 | const ensureSuperUser = async (initDb, envChoices) => { 127 | if (await isPrivileged(initDb)()) { 128 | // the given user has enough permissions 129 | return { 130 | env: {}, 131 | vars: {}, 132 | }; 133 | } 134 | 135 | const { user } = initDb.explodeUrl(initDb.thisUrl()); 136 | console.log(); 137 | console.log( 138 | `You are connecting as an underprivileged user ${c.greenBright(user)}.`, 139 | ); 140 | console.log( 141 | 'This will prevent pgsh from successfully cloning databases.', 142 | ); 143 | 144 | try { 145 | const { config } = initDb; 146 | 147 | console.log(); 148 | const { fix } = await prompt({ 149 | type: 'toggle', 150 | name: 'fix', 151 | message: 'Do you have access to superuser credentials?', 152 | }); 153 | 154 | if (!fix) { 155 | // user wants to go ahead without figuring things out 156 | console.log(); 157 | console.log('For full pgsh functionality, modify your existing user via psql:'); 158 | console.log(`# ALTER ROLE ${user} CREATEDB`); 159 | return { 160 | env: {}, 161 | vars: {}, 162 | }; 163 | } 164 | 165 | if (envChoices) { 166 | // if we have an existing .env, ask which variables correspond 167 | console.log( 168 | `You will need to choose which variables in ${c.underline('.env')}` 169 | + ' contain your superuser name and password.', 170 | ); 171 | console.log(); 172 | const extraVars = await promptForVars( 173 | filterKeys(envChoices, k => !Object.values(config.vars).includes(k)), 174 | SUPERUSER_PROMPTS, 175 | ); 176 | return { 177 | env: {}, 178 | vars: extraVars, 179 | }; 180 | } 181 | 182 | // if not, ask for the values directly 183 | console.log( 184 | 'Please enter superuser credentials for', 185 | `${process.env[config.vars.host]}:${process.env[config.vars.port] || '5432'}`, 186 | ); 187 | console.log(); 188 | const extraEnv = await promptForInput(SUPERUSER_PROMPTS); 189 | return { 190 | env: filterKeys(extraEnv, key => !!extraEnv[key]), 191 | vars: filterKeys(SUPERUSER_DEFAULT_VARS, key => (key in extraEnv)), 192 | }; 193 | } catch (err) { 194 | throw new Error( 195 | 'Either add variables for a superuser name and password ' 196 | + `to ${c.underline('.env')}, or modify your existing user ` 197 | + `with ALTER ROLE ${user} CREATEDB.`, 198 | ); 199 | } 200 | }; 201 | 202 | exports.handler = async () => { 203 | setCommandLine(); 204 | 205 | if (configExists) { 206 | const existingEnv = parseEnv(); 207 | 208 | // if both .pgshrc and .env exist, show the choose prompt 209 | if (existingEnv) { 210 | const db = dbFactory(); 211 | const { database } = await chooseDb(db)(db.thisDb()); 212 | db.switchTo(database); 213 | 
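/* switchTo rewrote .env in place, so there is nothing left to do */ 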
return endProgram(0); 214 | } 215 | 216 | // otherwise, inform the user they'll need to create a .env file 217 | const config = require('../pgshrc/read'); 218 | const envMap = {}; 219 | Object.values(config.vars).forEach((envKey) => { 220 | envMap[envKey] = ''; 221 | }); 222 | 223 | console.log(c.yellowBright( 224 | `${c.underline('.pgshrc')} exists, but ${c.underline('.env')} does not!`, 225 | )); 226 | console.log('Try creating one, e.g.\n'); 227 | console.log(stringifyEnv(envMap)); 228 | return endProgram(1); 229 | } 230 | 231 | console.log( 232 | `${c.yellowBright('pgsh')} manages your database`, 233 | `connection via variables in ${c.underline('.env')}.`, 234 | ); 235 | 236 | try { 237 | console.log( 238 | `In ${c.cyan('url')} mode, one variable holds the entire`, 239 | 'connection string.', 240 | ); 241 | console.log( 242 | `In ${c.cyan('split')} mode, you have separate variables for`, 243 | 'user, host, password, etc.\n', 244 | ); 245 | 246 | const { mode } = await prompt( 247 | { 248 | name: 'mode', 249 | type: 'select', 250 | message: 'Which mode would you like to use?', 251 | choices: ['url', 'split'], 252 | }, 253 | ); 254 | 255 | // skip the rest of the wizard if there's no .env file; create 256 | // with our defaults and let the user change it if they want 257 | const existingEnv = parseEnv(); 258 | if (!existingEnv) { 259 | // interactively fill in env variables; a real db will be chosen later 260 | const userValues = { 261 | ...(await promptForInput(SPLIT_PROMPTS.filter(p => p.name !== 'database'))), 262 | database: randomString(TEMP_DB_NAME_LENGTH), 263 | }; 264 | 265 | // make env from their choice 266 | const env = mode === 'url' 267 | ? { url: buildUrl(userValues) } 268 | : userValues; // split mode can use values directly 269 | 270 | // only create variables for env the user is interested in 271 | const vars = filterKeys( 272 | mode === 'url' 273 | ? URL_DEFAULT_VARS 274 | : SPLIT_DEFAULT_VARS, 275 | key => (key in env), 276 | ); 277 | 278 | // inject user env into our process so we can 279 | // bootstrap the db with our .pgshrc variables 280 | injectEnv(vars, env); 281 | 282 | // check for super-user access, merge in new env / vars 283 | const suProps = await ensureSuperUser(makeDb(mode, vars)); 284 | if (suProps) { 285 | console.log(); 286 | addAll(vars, suProps.vars); 287 | addAll(env, suProps.env); 288 | injectEnv(suProps.vars, suProps.env); 289 | } 290 | 291 | // ready for our fully-bootstrapped db 292 | const initDb = makeDb(mode, vars); 293 | 294 | // ask the user which db to connect to / create / clone 295 | const { database, config: extraConfig } = await chooseDb(initDb)(); 296 | 297 | // add in our database choice to the env vars we're writing 298 | env.database = database; 299 | env.url = initDb.thisUrl(database); 300 | 301 | createEnv(buildMap(vars, env)); 302 | createConfig({ 303 | ...extraConfig, 304 | mode, 305 | vars, 306 | }); 307 | console.log( 308 | `${c.underline('.pgshrc')} and ${c.underline('.env')} created!`, 309 | ); 310 | 311 | console.log( 312 | 'Now, configure your application to use the values', 313 | `in your ${c.underline('.env')} file.`, 314 | ); 315 | return endProgram(0); 316 | } 317 | 318 | // ask the user which variables in their .env file 319 | // correspond to which database connection parameters 320 | const userVars = await promptForVars( 321 | existingEnv, 322 | mode === 'url' ? 
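/* url mode asks a single question; split mode prompts for each connection component */ 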
URL_PROMPTS : SPLIT_PROMPTS, 323 | ); 324 | 325 | // merge in vars needed for superuser or fail out 326 | const vars = { 327 | ...userVars, 328 | ...(await ensureSuperUser(makeDb(mode, userVars), existingEnv)).vars, 329 | }; 330 | console.log(); 331 | 332 | // bootstrap "db" with our mode / vars and the existing 333 | // environment (already injected into our process by dotenv.config()) 334 | const initDb = makeDb(mode, vars); 335 | 336 | // figure out where the user wants to switch to and do it 337 | const { database, config: extraConfig } = await chooseDb(initDb)(initDb.thisDb()); 338 | initDb.switchTo(database); 339 | 340 | createConfig({ 341 | ...extraConfig, 342 | mode, 343 | vars, 344 | }); 345 | console.log(`${c.underline('.pgshrc')} created!`); 346 | 347 | return endProgram(0); 348 | } catch (err) { 349 | console.error( 350 | `pgsh init failed: ${c.redBright(err.message)}`, 351 | ); 352 | return endProgram(2); 353 | } 354 | }; 355 | --------------------------------------------------------------------------------