├── .github ├── FUNDING.yml └── workflows │ ├── lint.yml │ └── ci.yml ├── .eslintignore ├── src ├── pg.ts ├── version.ts ├── interfaces.ts ├── index.ts ├── header.ts ├── sluggify.ts ├── logger.ts ├── hash.ts ├── commands │ ├── index.ts │ ├── compile.ts │ ├── reset.ts │ ├── uncommit.ts │ ├── migrate.ts │ ├── status.ts │ ├── run.ts │ ├── _common.ts │ ├── commit.ts │ ├── init.ts │ └── watch.ts ├── indent.ts ├── memoize.ts ├── __mocks__ │ ├── pg.ts │ └── migration.ts ├── lib.ts ├── cli.ts ├── instrumentation.ts ├── pgReal.ts ├── actions.ts ├── current.ts └── settings.ts ├── __tests__ ├── warmup.cjs ├── warmup.js ├── __snapshots__ │ ├── include.test.ts.snap │ ├── migrate.test.ts.snap │ └── settings.test.ts.snap ├── watch-actions.test.ts ├── mergeWithoutClobbering.test.ts ├── compile.test.ts ├── actions-live.test.ts ├── watch.test.ts ├── migration.test.ts ├── readCurrentMigration.test.ts ├── include.test.ts ├── uncommit.test.ts ├── manageGraphileMigrateSchema.test.ts ├── commit.test.ts ├── migrate.test.ts ├── writeCurrentMigration.test.ts ├── actions.test.ts └── helpers.ts ├── .npmignore ├── .well-known └── funding-manifest-urls ├── .prettierrc.js ├── .gitignore ├── tsconfig.json ├── jest.config.js ├── scripts ├── version.mjs ├── update-docs.js └── usage ├── tsconfig.build.json ├── docs ├── docker │ ├── Dockerfile │ ├── graphile-migrate │ └── README.md └── idempotent-examples.md ├── LICENSE.md ├── SPONSORS.md ├── CONTRIBUTING.md ├── package.json ├── FORMATS.md └── .eslintrc.js /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: Benjie 2 | -------------------------------------------------------------------------------- /.eslintignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | dist 3 | -------------------------------------------------------------------------------- /src/pg.ts: -------------------------------------------------------------------------------- 1 | export * from "./pgReal"; 2 | -------------------------------------------------------------------------------- /__tests__/warmup.cjs: -------------------------------------------------------------------------------- 1 | module.exports = {}; 2 | -------------------------------------------------------------------------------- /__tests__/warmup.js: -------------------------------------------------------------------------------- 1 | module.exports = {}; 2 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | **/__mocks__/** 2 | **/__tests__/** 3 | -------------------------------------------------------------------------------- /.well-known/funding-manifest-urls: -------------------------------------------------------------------------------- 1 | https://www.graphile.org/funding.json 2 | -------------------------------------------------------------------------------- /.prettierrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | trailingComma: "all", 3 | proseWrap: "always", 4 | }; 5 | -------------------------------------------------------------------------------- /src/version.ts: -------------------------------------------------------------------------------- 1 | // This file is autogenerated by /scripts/version.mjs 2 | export const version = "2.0.0-rc.2"; 3 | -------------------------------------------------------------------------------- 
/.gitignore: -------------------------------------------------------------------------------- 1 | /dist/ 2 | node_modules 3 | 4 | # VSCode editor settings 5 | .vscode 6 | 7 | .env 8 | .gmrc 9 | migrations/ 10 | -------------------------------------------------------------------------------- /src/interfaces.ts: -------------------------------------------------------------------------------- 1 | /** Represents the graphile_migrate.current type in the DB */ 2 | export interface DbCurrent { 3 | filename: string; 4 | content: string; 5 | date: Date; 6 | } 7 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | export * from "./commands"; 2 | export { defaultLogger } from "./logger"; 3 | export { Settings } from "./settings"; 4 | export { LogFunctionFactory, Logger, LogLevel } from "@graphile/logger"; 5 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./tsconfig.build.json", 3 | "include": ["src/**/*", "__tests__/**/*", "*.js", "./.*.js"], 4 | "exclude": [], 5 | "compilerOptions": { 6 | "noEmit": true 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /src/header.ts: -------------------------------------------------------------------------------- 1 | export function isNoTransactionDefined(sql: string): boolean { 2 | const i = sql.indexOf("\n"); 3 | const firstLine = i > 0 ? sql.substring(0, i) : sql; 4 | return /^--!\s*no-transaction\b/.test(firstLine); 5 | } 6 | -------------------------------------------------------------------------------- /src/sluggify.ts: -------------------------------------------------------------------------------- 1 | export function sluggify(text: string): string { 2 | return text 3 | .toLowerCase() 4 | .replace(/[^a-z0-9_]+/g, "-") 5 | .replace(/--+/g, "-") 6 | .replace(/(^-+|-+$)/g, "") 7 | .substring(0, 60); 8 | } 9 | -------------------------------------------------------------------------------- /src/logger.ts: -------------------------------------------------------------------------------- 1 | import { Logger, makeConsoleLogFactory } from "@graphile/logger"; 2 | 3 | export const defaultLogger = new Logger( 4 | makeConsoleLogFactory({ 5 | format: `%s`, 6 | formatParameters(_level, message) { 7 | return [message]; 8 | }, 9 | }), 10 | ); 11 | -------------------------------------------------------------------------------- /src/hash.ts: -------------------------------------------------------------------------------- 1 | import * as crypto from "crypto"; 2 | export const calculateHash = ( 3 | str: string, 4 | previousHash: string | null, 5 | algorithm = "sha1", 6 | ): string => 7 | algorithm + 8 | ":" + 9 | crypto 10 | .createHash(algorithm) 11 | .update(((previousHash || "") + "\n" + str.trim()).trim() + "\n") 12 | .digest("hex"); 13 | -------------------------------------------------------------------------------- /src/commands/index.ts: -------------------------------------------------------------------------------- 1 | export { commit } from "./commit"; 2 | export { compile } from "./compile"; 3 | export { init } from "./init"; 4 | export { migrate } from "./migrate"; 5 | export { reset } from "./reset"; 6 | export { run } from "./run"; 7 | export { status } from "./status"; 8 | export { uncommit } from "./uncommit"; 9 | export { watch } from 
"./watch"; 10 | -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | transform: { 3 | "^.+\\.tsx?$": "ts-jest", 4 | }, 5 | testRegex: "__tests__/.*\\.test\\.[tj]s$", 6 | moduleFileExtensions: ["ts", "tsx", "js", "jsx", "json", "node"], 7 | 8 | // Jest doesn't currently support prettier v3; see https://github.com/jestjs/jest/issues/14305 9 | prettierPath: require.resolve("@localrepo/prettier2-for-jest"), 10 | }; 11 | -------------------------------------------------------------------------------- /src/indent.ts: -------------------------------------------------------------------------------- 1 | function indent(text: string, spaces: number): string; 2 | function indent(text: string, indentString: string): string; 3 | function indent(text: string, indentStringOrSpaces: number | string): string { 4 | const indentString = 5 | typeof indentStringOrSpaces === "string" 6 | ? indentStringOrSpaces 7 | : " ".repeat(indentStringOrSpaces); 8 | return indentString + text.replace(/\n(?!$)/g, "\n" + indentString); 9 | } 10 | export default indent; 11 | -------------------------------------------------------------------------------- /.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | name: Linting 2 | 3 | on: [push, pull_request] 4 | 5 | env: 6 | CI: true 7 | 8 | jobs: 9 | test: 10 | runs-on: ubuntu-latest 11 | 12 | steps: 13 | - uses: actions/checkout@v1 14 | - name: Use Node.js 18.x 15 | uses: actions/setup-node@v1 16 | with: 17 | node-version: 18.x 18 | - run: yarn --frozen-lockfile 19 | - run: yarn clean 20 | - run: yarn prepack 21 | - run: yarn lint 22 | - run: yarn lint:deps 23 | -------------------------------------------------------------------------------- /scripts/version.mjs: -------------------------------------------------------------------------------- 1 | import { writeFile, readFile } from "node:fs/promises"; 2 | import { fileURLToPath } from "node:url"; 3 | 4 | const __dirname = fileURLToPath(new URL(".", import.meta.url)).replace( 5 | /\/+$/, 6 | "", 7 | ); 8 | 9 | const { version } = JSON.parse( 10 | await readFile(`${__dirname}/../package.json`, "utf8"), 11 | ); 12 | 13 | await writeFile( 14 | `${__dirname}/../src/version.ts`, 15 | `\ 16 | // This file is autogenerated by /scripts/version.mjs 17 | export const version = ${JSON.stringify(version)}; 18 | `, 19 | ); 20 | -------------------------------------------------------------------------------- /__tests__/__snapshots__/include.test.ts.snap: -------------------------------------------------------------------------------- 1 | // Jest Snapshot v1, https://goo.gl/fbAQLP 2 | 3 | exports[`compiles an included file, and won't get stuck in an infinite include loop 1`] = ` 4 | "Circular include detected - '~/migrations/fixtures/foo.sql' is included again! 
Import statement: \`--!include foo.sql\`; trace: 5 | ~/migrations/fixtures/foo.sql 6 | ~/migrations/current.sql" 7 | `; 8 | 9 | exports[`disallows calling files outside of the migrations/fixtures folder 1`] = `"Forbidden: cannot include path '~/outsideFolder/foo.sql' because it's not inside '~/migrations/fixtures'"`; 10 | -------------------------------------------------------------------------------- /tsconfig.build.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "@tsconfig/node18/tsconfig.json", 3 | "compilerOptions": { 4 | "rootDir": "src", 5 | "declarationDir": "./dist", 6 | "outDir": "./dist", 7 | "declaration": true, 8 | "allowJs": false, 9 | "sourceMap": true, 10 | "pretty": true, 11 | "importHelpers": true, 12 | "experimentalDecorators": true, 13 | "noImplicitAny": true, 14 | "strictNullChecks": true, 15 | "noFallthroughCasesInSwitch": true, 16 | "noUnusedParameters": false, 17 | "noUnusedLocals": false, 18 | "preserveWatchOutput": true 19 | }, 20 | "include": ["src/**/*"], 21 | "exclude": ["**/__mocks__/*"] 22 | } 23 | -------------------------------------------------------------------------------- /docs/docker/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:20.04 2 | 3 | ARG NODEJS_VERSION=14 4 | ARG POSTGRES_VERSION=12 5 | 6 | RUN apt-get update && \ 7 | apt-get install -y \ 8 | curl 9 | 10 | # Install postgres client tools 11 | RUN apt-get update && \ 12 | apt-get install -y \ 13 | postgresql-client-${POSTGRES_VERSION} 14 | 15 | # Install nodejs via nodesource. 16 | RUN curl -fsSL https://deb.nodesource.com/setup_${NODEJS_VERSION}.x | bash - 17 | RUN apt-get install -y nodejs 18 | 19 | # Latest version of graphile-migrate 20 | RUN npm install -g graphile-migrate 21 | 22 | # Default working directory. 
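(the accompanying wrapper script mounts $PWD here) 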
Map your migrations folder in here with `docker run -v` 23 | WORKDIR /migrate 24 | 25 | ENTRYPOINT ["/usr/bin/graphile-migrate"] 26 | -------------------------------------------------------------------------------- /src/memoize.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable @typescript-eslint/no-explicit-any, @typescript-eslint/no-unsafe-return, @typescript-eslint/no-unsafe-assignment */ 2 | export default function memoize<T extends (...args: Array<any>) => any>( 3 | fn: T, 4 | ): (...funcArgs: Parameters<T>) => ReturnType<T> { 5 | let lastArgs: Parameters<T>; 6 | let lastResult: ReturnType<T>; 7 | return (...args: Parameters<T>): ReturnType<T> => { 8 | if ( 9 | lastArgs && 10 | args.length === lastArgs.length && 11 | args.every((arg, i) => arg === lastArgs[i]) 12 | ) { 13 | return lastResult; 14 | } else { 15 | lastArgs = args; 16 | lastResult = fn(...args); 17 | return lastResult; 18 | } 19 | }; 20 | } 21 | -------------------------------------------------------------------------------- /scripts/update-docs.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | const fsp = require("fs/promises"); 3 | const { spawnSync } = require("child_process"); 4 | 5 | async function main() { 6 | const readmePath = `${__dirname}/../README.md`; 7 | const readme = await fsp.readFile(readmePath, "utf8"); 8 | const { stdout: usage } = await spawnSync("bash", [`${__dirname}/usage`], { 9 | encoding: "utf8", 10 | }); 11 | await fsp.writeFile( 12 | readmePath, 13 | readme.replace( 14 | /(<!-- usage -->)[\s\S]*(<!-- \/usage -->)/, // NOTE: marker names assumed; they must match the delimiters in README.md 15 | (_, start, fin) => `${start}\n${usage.trim()}\n${fin}`, 16 | ), 17 | ); 18 | } 19 | 20 | main().catch((e) => { 21 | // eslint-disable-next-line no-console 22 | console.error(e); 23 | process.exit(1); 24 | }); 25 | -------------------------------------------------------------------------------- /src/__mocks__/pg.ts: -------------------------------------------------------------------------------- 1 | import "../../__tests__/helpers"; // Has side-effects, must come first 2 | 3 | import { parse } from "pg-connection-string"; 4 | 5 | import { mockPgClient } from "../../__tests__/helpers"; 6 | 7 | export const withClient = jest.fn( 8 | (connectionString, _parsedSettings, callback) => { 9 | const { database } = parse(connectionString); 10 | const mockContext = { 11 | database, 12 | }; 13 | return callback(mockPgClient, mockContext); 14 | }, 15 | ); 16 | 17 | const { withTransaction: originalWithTransaction } = 18 | jest.requireActual("../migration"); 19 | 20 | export const withTransaction = jest.fn(originalWithTransaction); 21 | export const withAdvisoryLock = jest.fn((pgClient, callback) => 22 | callback(pgClient), 23 | ); 24 | -------------------------------------------------------------------------------- /docs/docker/graphile-migrate: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # A wrapper for running graphile-migrate within a container 4 | 5 | # Use 6 | # * -ti --init to allow for CTRL-C to work with `graphile-migrate watch` 7 | # * --user to allow the current user to own any committed migrations 8 | # * Mount $PWD as /migrate for access to graphile-migrate working files 9 | 10 | # The following may be added if you run a Postgres container on an internal 11 | # Docker network which isn't accessible from the host where this script runs. 
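Set DATABASE_DOCKER_NETWORK to that network's name to enable this. 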
12 | if [[ ${DATABASE_DOCKER_NETWORK} != "" ]] ; then 13 | DOCKER_EXTRA_OPTS="${DOCKER_EXTRA_OPTS} --network ${DATABASE_DOCKER_NETWORK}" 14 | fi 15 | 16 | docker run \ 17 | -ti --init \ 18 | --user "$(id -u):$(id -g)" \ 19 | --rm \ 20 | --volume "$PWD:/migrate" \ 21 | --env DATABASE_URL="${DATABASE_URL}" \ 22 | --env SHADOW_DATABASE_URL="${SHADOW_DATABASE_URL}" \ 23 | --env ROOT_DATABASE_URL="${ROOT_DATABASE_URL}" \ 24 | ${DOCKER_EXTRA_OPTS} \ 25 | graphile-migrate "$@" 26 | -------------------------------------------------------------------------------- /__tests__/watch-actions.test.ts: -------------------------------------------------------------------------------- 1 | jest.mock("child_process"); 2 | jest.mock("../src/migration"); 3 | 4 | import "./helpers"; // Has side-effects; must come first 5 | 6 | import { _makeCurrentMigrationRunner, _watch } from "../src/commands/watch"; 7 | import { _migrateMigrationSchema } from "../src/migration"; 8 | import { parseSettings } from "../src/settings"; 9 | import { 10 | makeActionSpies, 11 | mockCurrentSqlContentOnce, 12 | resetDb, 13 | setup, 14 | TEST_DATABASE_URL, 15 | } from "./helpers"; 16 | 17 | beforeEach(resetDb); 18 | 19 | it("calls beforeCurrent and afterCurrent when ran once", async () => { 20 | const { settings, getActionCalls } = makeActionSpies(); 21 | const parsedSettings = await parseSettings({ 22 | connectionString: TEST_DATABASE_URL, 23 | ...settings, 24 | }); 25 | await setup(parsedSettings); 26 | 27 | mockCurrentSqlContentOnce(parsedSettings, "SQL"); 28 | 29 | await _watch(parsedSettings, true, false); 30 | expect(getActionCalls()).toEqual(["beforeCurrent", "afterCurrent"]); 31 | }); 32 | -------------------------------------------------------------------------------- /src/lib.ts: -------------------------------------------------------------------------------- 1 | export function mergeWithoutClobbering( 2 | original: { [key: string]: string | undefined }, 3 | newStuff: { [key: string]: string | undefined }, 4 | message: string, 5 | ): { [key: string]: string | undefined } { 6 | const result = { ...original }; 7 | for (const key in newStuff) { 8 | if (typeof result[key] === "undefined" || result[key] === newStuff[key]) { 9 | result[key] = newStuff[key]; 10 | } else { 11 | throw new Error( 12 | `Refusing to clobber '${key}' (from '${original[key]}' to '${newStuff[key]}'): ${message}`, 13 | ); 14 | } 15 | } 16 | 17 | return result; 18 | } 19 | 20 | export function isLoggedError(error: unknown): boolean { 21 | return ( 22 | typeof error === "object" && 23 | error !== null && 24 | "_gmlogged" in error && 25 | error._gmlogged === true 26 | ); 27 | } 28 | 29 | export function errorCode(e: unknown): string | null { 30 | return typeof e === "object" && 31 | e !== null && 32 | "code" in e && 33 | typeof e.code === "string" 34 | ? e.code 35 | : null; 36 | } 37 | -------------------------------------------------------------------------------- /docs/docker/README.md: -------------------------------------------------------------------------------- 1 | # Running Graphile Migrate from a container 2 | 3 | When working in a team it can be useful to package `graphile-migrate` and the 4 | Postgres tooling into a Docker file so that everyone has easy access to the same 5 | versions. This is also helpful if you're using Migrate as part of a larger 6 | non-Node based project. 7 | 8 | For these purposes we provide an [example Dockerfile](./Dockerfile) in the 9 | `docs/docker` directory in the source tree. 
This uses the latest released 10 | version of `graphile-migrate` from `npm` and packages it together with the 11 | necessary Node and Postgres tools. You can build the Dockerfile from the root of 12 | the repository using a command such as: 13 | 14 | ```bash 15 | docker build -t graphile-migrate docs/docker \ 16 | --build-arg NODEJS_VERSION=14 --build-arg POSTGRES_VERSION=12 17 | ``` 18 | 19 | To conveniently run Graphile Migrate within the container you can then use the 20 | [`graphile-migrate` wrapper script](./graphile-migrate) which passes the 21 | standard Migrate environment variables through to the container. 22 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | # The MIT License (MIT) 2 | 3 | Copyright © `2019` Benjie Gillam 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of 6 | this software and associated documentation files (the “Software”), to deal in 7 | the Software without restriction, including without limitation the rights to 8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software is furnished to do so, 10 | subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 17 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 18 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 19 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 20 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 21 | -------------------------------------------------------------------------------- /SPONSORS.md: -------------------------------------------------------------------------------- 1 | # Sponsors 2 | 3 | These individuals and companies sponsor ongoing development of projects in the 4 | Graphile ecosystem. Find out 5 | [how you can become a sponsor](https://graphile.org/sponsor/). 6 | 7 | ## Featured 8 | 9 | - The Guild 10 | - Steelhead 11 | 12 | ## Leaders 13 | 14 | - Robert Claypool 15 | - nigelrmtaylor 16 | - Principia Mentis 17 | - Trigger.dev 18 | - Axinom 19 | - Taiste 20 | - BairesDev 21 | - Two Bit Solutions 22 | - Apollo 23 | - Dimply 24 | - Ndustrial 25 | - Cintra 26 | - Beacon 27 | - deliver.media 28 | - Ravio 29 | - prodready 30 | - Locomote 31 | 32 | ## Supporters 33 | 34 | - HR-ON 35 | - stlbucket 36 | - Simon Elliott 37 | - Matt Bretl 38 | - Alvin Ali Khaled 39 | - Paul Melnikow 40 | - Keith Layne 41 | - nullachtvierzehn 42 | - Zymego 43 | - garpulon 44 | - Vizcom 45 | - Ether 46 | - The Outbound Collective 47 | - Charlie Hadden 48 | - Kiron Open Higher Education 49 | - Nate Smith 50 | - Andrew Joseph 51 | - SIED 70 - TE 70 52 | - Peter C. Romano 53 | - mateo 54 | - Dialo 55 | - kontakto-fi 56 | - Tailos, Inc. 
57 | - sheilamosaik 58 | - Jody Hoon-Starr 59 | - Justin Carrus 60 | - WorkOS 61 | - Malachi Bergman 62 | -------------------------------------------------------------------------------- /scripts/usage: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | cd "$(dirname "$0")" 4 | export GRAPHILE_SPONSOR=1 5 | 6 | GRAPHILE_MIGRATE="node ../dist/cli.js" 7 | 8 | echo -e '## graphile-migrate\n\n```' 9 | $GRAPHILE_MIGRATE --help 10 | echo -e '```\n\n' 11 | 12 | echo -e '## graphile-migrate init\n\n```' 13 | $GRAPHILE_MIGRATE init --help 14 | echo -e '```\n\n' 15 | 16 | echo -e '## graphile-migrate migrate\n\n```' 17 | $GRAPHILE_MIGRATE migrate --help 18 | echo -e '```\n\n' 19 | 20 | echo -e '## graphile-migrate watch\n\n```' 21 | $GRAPHILE_MIGRATE watch --help 22 | echo -e '```\n\n' 23 | 24 | echo -e '## graphile-migrate commit\n\n```' 25 | $GRAPHILE_MIGRATE commit --help 26 | echo -e '```\n\n' 27 | 28 | echo -e '## graphile-migrate uncommit\n\n```' 29 | $GRAPHILE_MIGRATE uncommit --help 30 | echo -e '```\n\n' 31 | 32 | echo -e '## graphile-migrate reset\n\n```' 33 | $GRAPHILE_MIGRATE reset --help 34 | echo -e '```\n\n' 35 | 36 | echo -e '## graphile-migrate status\n\n```' 37 | $GRAPHILE_MIGRATE status --help 38 | echo -e '```\n\n' 39 | 40 | echo -e '## graphile-migrate compile\n\n```' 41 | $GRAPHILE_MIGRATE compile --help 42 | echo -e '```\n\n' 43 | 44 | echo -e '## graphile-migrate run\n\n```' 45 | $GRAPHILE_MIGRATE run --help 46 | echo -e '```\n\n' 47 | -------------------------------------------------------------------------------- /src/__mocks__/migration.ts: -------------------------------------------------------------------------------- 1 | const { 2 | slowGeneratePlaceholderReplacement: originalGeneratePlaceholderReplacement, 3 | _migrateMigrationSchema: realMigrateMigrationSchema, 4 | runStringMigration: realRunStringMigration, 5 | } = jest.requireActual("../migration"); 6 | 7 | export const generatePlaceholderReplacement = jest.fn( 8 | originalGeneratePlaceholderReplacement, 9 | ); 10 | 11 | export const _migrateMigrationSchema = jest.fn(realMigrateMigrationSchema); 12 | 13 | export const getLastMigration = jest.fn((_client, _settings) => 14 | Promise.resolve(null), 15 | ); 16 | 17 | export const getAllMigrations = jest.fn((_settings) => Promise.resolve([])); 18 | 19 | export const getMigrationsAfter = jest.fn((_settings, _previousMigration) => 20 | Promise.resolve([]), 21 | ); 22 | 23 | export const runStringMigration = jest.fn( 24 | (_client, _settings, _context, _body, _filename, _committedMigration) => 25 | realRunStringMigration( 26 | _client, 27 | _settings, 28 | _context, 29 | _body, 30 | _filename, 31 | _committedMigration, 32 | true, 33 | ), 34 | ); 35 | 36 | export const runCommittedMigration = jest.fn( 37 | (_client, _settings, _context, _committedMigration, _logSuffix) => {}, 38 | ); 39 | 40 | export const compileIncludes = jest.fn((parsedSettings, content) => { 41 | return content; 42 | }); 43 | -------------------------------------------------------------------------------- /__tests__/mergeWithoutClobbering.test.ts: -------------------------------------------------------------------------------- 1 | import { mergeWithoutClobbering } from "../src/lib"; 2 | process.env.NODE_ENV = "test"; 3 | 4 | test("merges", () => { 5 | const result = mergeWithoutClobbering( 6 | process.env, 7 | { FOO: "bar" }, 8 | "don't set this envvar.", 9 | ); 10 | expect(result).toMatchObject({ 11 | ...process.env, 12 | FOO: "bar", 13 | }); 14 | 
}); 15 | 16 | test("doesn't mutate source", () => { 17 | expect(process.env.FOO).toBe(undefined); 18 | const result = mergeWithoutClobbering( 19 | process.env, 20 | { FOO: "bar" }, 21 | "don't set this envvar.", 22 | ); 23 | expect(result).toMatchObject({ 24 | ...process.env, 25 | FOO: "bar", 26 | }); 27 | expect(process.env.FOO).toBe(undefined); 28 | }); 29 | 30 | test("throws if property already set", () => { 31 | expect(process.env.FOO).toBe(undefined); 32 | expect(() => { 33 | mergeWithoutClobbering( 34 | process.env, 35 | { NODE_ENV: "bar" }, 36 | "don't set this envvar.", 37 | ); 38 | }).toThrowErrorMatchingInlineSnapshot( 39 | `"Refusing to clobber 'NODE_ENV' (from 'test' to 'bar'): don't set this envvar."`, 40 | ); 41 | }); 42 | 43 | test("doesn't throw if property already set to same value", () => { 44 | expect(process.env.FOO).toBe(undefined); 45 | const result = mergeWithoutClobbering( 46 | process.env, 47 | { NODE_ENV: "test" }, 48 | "don't set this envvar.", 49 | ); 50 | expect(result).toMatchObject(process.env); 51 | }); 52 | -------------------------------------------------------------------------------- /src/commands/compile.ts: -------------------------------------------------------------------------------- 1 | import * as fsp from "fs/promises"; 2 | import { CommandModule } from "yargs"; 3 | 4 | import { compilePlaceholders } from "../migration"; 5 | import { parseSettings, Settings } from "../settings"; 6 | import { CommonArgv, getSettings, readStdin } from "./_common"; 7 | 8 | interface CompileArgv extends CommonArgv { 9 | shadow?: boolean; 10 | } 11 | 12 | export async function compile( 13 | settings: Settings, 14 | content: string, 15 | shadow = false, 16 | ): Promise<string> { 17 | const parsedSettings = await parseSettings(settings, shadow); 18 | return compilePlaceholders(parsedSettings, content, shadow); 19 | } 20 | 21 | export const compileCommand: CommandModule< 22 | Record<string, never>, 23 | CompileArgv 24 | > = { 25 | command: "compile [file]", 26 | aliases: [], 27 | describe: `\ 28 | Compiles a SQL file, inserting all the placeholders and returning the result to STDOUT`, 29 | builder: { 30 | shadow: { 31 | type: "boolean", 32 | default: false, 33 | description: "Apply shadow DB placeholders (for development).", 34 | }, 35 | }, 36 | handler: async (argv) => { 37 | const settings = await getSettings({ configFile: argv.config }); 38 | const content = 39 | typeof argv.file === "string" 40 | ? 
await fsp.readFile(argv.file, "utf8") 41 | : await readStdin(); 42 | 43 | const compiled = await compile(settings, content, argv.shadow); 44 | 45 | // eslint-disable-next-line no-console 46 | console.log(compiled); 47 | }, 48 | }; 49 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: [push, pull_request] 4 | 5 | env: 6 | CI: true 7 | PGUSER: postgres 8 | PGPASSWORD: postgres 9 | PGHOST: "127.0.0.1" 10 | PGPORT: 5432 11 | TEST_ROOT_DATABASE_URL: postgres://postgres:postgres@127.0.0.1:5432/postgres 12 | TEST_DATABASE_URL: postgres://someone:something@127.0.0.1:5432/graphile_migrate_test 13 | PGVERSION: 10 14 | NODE_OPTIONS: "--experimental-vm-modules" 15 | 16 | jobs: 17 | test: 18 | runs-on: ubuntu-latest 19 | 20 | strategy: 21 | matrix: 22 | node-version: [18.x, 20.x] 23 | postgres-version: [12, 15] 24 | 25 | services: 26 | postgres: 27 | image: postgres:${{ matrix.postgres-version }} 28 | env: 29 | POSTGRES_USER: postgres 30 | POSTGRES_PASSWORD: postgres 31 | POSTGRES_DB: postgres 32 | ports: 33 | - "0.0.0.0:5432:5432" 34 | # needed because the postgres container does not provide a healthcheck 35 | options: 36 | --health-cmd pg_isready --health-interval 10s --health-timeout 5s 37 | --health-retries 5 38 | 39 | steps: 40 | - uses: actions/checkout@v1 41 | - name: Use Node.js ${{ matrix.node-version }} 42 | uses: actions/setup-node@v1 43 | with: 44 | node-version: ${{ matrix.node-version }} 45 | - run: yarn --frozen-lockfile 46 | - run: psql "${TEST_ROOT_DATABASE_URL}" -c "CREATE USER someone WITH LOGIN PASSWORD 'something';" 47 | - run: yarn clean 48 | - run: yarn prepack 49 | - run: FORCE_COLOR=1 yarn jest -i --ci 50 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | Thanks for your interest in contributing to Graphile Migrate! 4 | 5 | First, and most importantly, contributions to Graphile are governed by the 6 | Graphile Code of Conduct (which uses the Contributor Covenant); you can read it 7 | here: https://www.graphile.org/postgraphile/code-of-conduct/ 8 | 9 | Following are some guidelines for contributions. 10 | 11 | ## Running tests 12 | 13 | The tests require a database connection. One way to get to a point where tests 14 | are passing is to use a minimal `docker-compose.yml` file. This one works nicely 15 | (note that the connections are made by your local user account, which may not 16 | work in a Windows environment): 17 | 18 | ``` 19 | version: "3" 20 | 21 | services: 22 | graphile_migrate_postgres: 23 | container_name: graphile_migrate_postgres 24 | environment: 25 | POSTGRES_USER: ${USER} 26 | POSTGRES_HOST_AUTH_METHOD: trust 27 | image: "postgres:12.4" 28 | ports: 29 | - 5432:5432 30 | restart: always 31 | volumes: 32 | - graphile_migrate_data:/var/lib/postgresql/data 33 | 34 | volumes: 35 | graphile_migrate_data: 36 | driver: local 37 | ``` 38 | 39 | ### Creating the database 40 | 41 | You'll then need to connect to the database and issue the following commands: 42 | 43 | ```sql 44 | create role gmtestuser with login password 'gmtestpass'; 45 | create database graphile_migrate_test owner gmtestuser; 46 | ``` 47 | 48 | ## ASK FIRST! 
49 | 50 | There's nothing worse than having your PR with 3 days of work in it rejected 51 | because it's just too complex to be sensibly reviewed! If you're interested in 52 | opening a PR please open an issue to discuss it, or come chat with us: 53 | http://discord.gg/graphile 54 | 55 | Small, focussed PRs are generally welcome without previous approval. 56 | -------------------------------------------------------------------------------- /__tests__/compile.test.ts: -------------------------------------------------------------------------------- 1 | import "./helpers"; 2 | 3 | import * as mockFs from "mock-fs"; 4 | 5 | import { compile } from "../src"; 6 | let old: string | undefined; 7 | beforeAll(() => { 8 | old = process.env.DATABASE_AUTHENTICATOR; 9 | process.env.DATABASE_AUTHENTICATOR = "dbauth"; 10 | }); 11 | afterAll(() => { 12 | process.env.DATABASE_AUTHENTICATOR = old; 13 | }); 14 | 15 | afterEach(() => { 16 | mockFs.restore(); 17 | }); 18 | 19 | it("compiles SQL with settings", async () => { 20 | expect( 21 | await compile( 22 | { 23 | connectionString: "postgres://dbowner:dbpassword@dbhost:1221/dbname", 24 | placeholders: { 25 | ":DATABASE_AUTHENTICATOR": "!ENV", 26 | }, 27 | }, 28 | `\ 29 | BEGIN; 30 | GRANT CONNECT ON DATABASE :DATABASE_NAME TO :DATABASE_OWNER; 31 | GRANT CONNECT ON DATABASE :DATABASE_NAME TO :DATABASE_AUTHENTICATOR; 32 | GRANT ALL ON DATABASE :DATABASE_NAME TO :DATABASE_OWNER; 33 | 34 | -- Some extensions require superuser privileges, so we create them before migration time. 35 | CREATE EXTENSION IF NOT EXISTS plpgsql WITH SCHEMA pg_catalog; 36 | CREATE EXTENSION IF NOT EXISTS "uuid-ossp" WITH SCHEMA public; 37 | CREATE EXTENSION IF NOT EXISTS citext WITH SCHEMA public; 38 | CREATE EXTENSION IF NOT EXISTS pgcrypto WITH SCHEMA public; 39 | COMMIT; 40 | `, 41 | ), 42 | ).toEqual(`\ 43 | BEGIN; 44 | GRANT CONNECT ON DATABASE dbname TO dbowner; 45 | GRANT CONNECT ON DATABASE dbname TO dbauth; 46 | GRANT ALL ON DATABASE dbname TO dbowner; 47 | 48 | -- Some extensions require superuser privileges, so we create them before migration time. 
49 | CREATE EXTENSION IF NOT EXISTS plpgsql WITH SCHEMA pg_catalog; 50 | CREATE EXTENSION IF NOT EXISTS "uuid-ossp" WITH SCHEMA public; 51 | CREATE EXTENSION IF NOT EXISTS citext WITH SCHEMA public; 52 | CREATE EXTENSION IF NOT EXISTS pgcrypto WITH SCHEMA public; 53 | COMMIT; 54 | `); 55 | }); 56 | -------------------------------------------------------------------------------- /__tests__/actions-live.test.ts: -------------------------------------------------------------------------------- 1 | import "./helpers"; // Has side-effects; must come first 2 | 3 | import { Logger, LogLevel, LogMeta } from "@graphile/logger"; 4 | import mockFs from "mock-fs"; 5 | 6 | import { executeActions } from "../src/actions"; 7 | import { _migrate } from "../src/commands/migrate"; 8 | import { parseSettings } from "../src/settings"; 9 | import { mockPgClient, TEST_DATABASE_URL } from "./helpers"; 10 | 11 | beforeAll(() => { 12 | // eslint-disable-next-line no-console 13 | console.log("[mock-fs callsites hack]"); // Without this, jest fails due to 'callsites' 14 | mockFs({}); 15 | }); 16 | 17 | afterAll(() => { 18 | mockFs.restore(); 19 | }); 20 | 21 | it("logs output from command actions on success", async () => { 22 | const logs: Array<{ 23 | scope: any; 24 | level: LogLevel; 25 | message: string; 26 | meta?: LogMeta; 27 | }> = []; 28 | const logger = new Logger((scope) => (level, message, meta) => { 29 | logs.push({ scope, level, message, meta }); 30 | }); 31 | const parsedSettings = await parseSettings({ 32 | connectionString: TEST_DATABASE_URL, 33 | afterAllMigrations: [ 34 | { _: "command", command: "echo 'success' && echo 'err' >&2" }, 35 | ], 36 | logger, 37 | }); 38 | mockPgClient.query.mockClear(); 39 | await executeActions( 40 | parsedSettings, 41 | false, 42 | parsedSettings.afterAllMigrations, 43 | ); 44 | expect(mockPgClient.query).toHaveBeenCalledTimes(0); 45 | expect(logs).toHaveLength(2); 46 | expect(logs[0]).toMatchObject({ 47 | level: "info", 48 | message: "success\n", 49 | }); 50 | expect(logs[1]).toMatchObject({ 51 | level: "error", 52 | message: "err\n", 53 | }); 54 | }); 55 | 56 | it("logs output from command actions on failure", async () => { 57 | const logs: Array<{ 58 | scope: any; 59 | level: LogLevel; 60 | message: string; 61 | meta?: LogMeta; 62 | }> = []; 63 | const logger = new Logger((scope) => (level, message, meta) => { 64 | logs.push({ scope, level, message, meta }); 65 | }); 66 | const parsedSettings = await parseSettings({ 67 | connectionString: TEST_DATABASE_URL, 68 | afterAllMigrations: [ 69 | { _: "command", command: "echo 'success' && echo 'err' >&2 && false" }, 70 | ], 71 | logger, 72 | }); 73 | mockPgClient.query.mockClear(); 74 | let err; 75 | try { 76 | await executeActions( 77 | parsedSettings, 78 | false, 79 | parsedSettings.afterAllMigrations, 80 | ); 81 | } catch (e) { 82 | err = e; 83 | } 84 | expect(err).toBeTruthy(); 85 | expect(mockPgClient.query).toHaveBeenCalledTimes(0); 86 | expect(logs).toHaveLength(2); 87 | expect(logs[0]).toMatchObject({ 88 | level: "info", 89 | message: "success\n", 90 | }); 91 | expect(logs[1]).toMatchObject({ 92 | level: "error", 93 | message: "err\n", 94 | }); 95 | }); 96 | -------------------------------------------------------------------------------- /docs/idempotent-examples.md: -------------------------------------------------------------------------------- 1 | # Idempotent Examples 2 | 3 | Idempotency is an important concept in Graphile Migrate: if a migration is 4 | idempotent, you can run the migration 
multiple times and the end 5 | state of the database structure will always be the same. (NOTE: though the 6 | structure may be the same, some idempotent commands may result in 7 | deleting/dropping data, so extreme care must be exercised.) 8 | 9 | Many of PostgreSQL's commands have built-in support for idempotency; you will 10 | see this commonly with `IF EXISTS` or `IF NOT EXISTS` clauses, 11 | `CREATE OR REPLACE`, and similar constructs: 12 | 13 | ```sql 14 | -- Create a schema 15 | DROP SCHEMA IF EXISTS app CASCADE; 16 | CREATE SCHEMA app; 17 | 18 | -- Create a table 19 | DROP TABLE IF EXISTS foo CASCADE; 20 | CREATE TABLE foo ...; 21 | 22 | -- Add a column to the end of the table 23 | ALTER TABLE foo DROP COLUMN IF EXISTS bar CASCADE; 24 | ALTER TABLE foo ADD COLUMN bar ...; 25 | 26 | -- Make a column NOT NULL 27 | ALTER TABLE foo ALTER COLUMN bar SET NOT NULL; 28 | 29 | -- Alter a column type 30 | ALTER TABLE foo ALTER COLUMN bar TYPE int USING bar::int; 31 | 32 | -- Change the body or flags of a function 33 | CREATE OR REPLACE FUNCTION ...; 34 | 35 | -- Change a function signature (arguments, return type, etc) 36 | DROP FUNCTION IF EXISTS ... CASCADE; 37 | CREATE OR REPLACE FUNCTION ... 38 | ``` 39 | 40 | Sometimes idempotency is a little more difficult to achieve. For instance, some 41 | commands do not have the `if exists` parameter. One such example is `rename`. In 42 | this case, we can implement the `if exists` logic ourselves using an anonymous 43 | code block: 44 | 45 | ```sql 46 | do $$ 47 | begin 48 | /* if column `username` exists on users table */ 49 | if exists( 50 | select 1 51 | from information_schema.columns 52 | where table_schema = 'public' 53 | and table_name = 'users' 54 | and column_name = 'username' 55 | ) then 56 | /* rename the column to `name` */ 57 | alter table users 58 | rename column username to name; 59 | end if; 60 | end$$; 61 | ``` 62 | 63 | The structure changes a little if we want to rename an enum value, but the idea 64 | is the same: 65 | 66 | ```sql 67 | do $$ 68 | begin 69 | /* if `PENDING` exists in purchase_status enum */ 70 | if exists( 71 | select 1 72 | from pg_catalog.pg_enum as enum_value 73 | inner join pg_catalog.pg_type as custom_type 74 | on custom_type.oid = enum_value.enumtypid 75 | where typname = 'purchase_status' 76 | and enumlabel = 'PENDING' 77 | ) then 78 | /* rename the enum value to `PURCHASE_PENDING` */ 79 | alter type app_public.purchase_status rename value 'PENDING' to 'PURCHASE_PENDING'; 80 | end if; 81 | end$$; 82 | 83 | ``` 84 | 85 | Because of its compliance with the SQL standard, the `information_schema` does 86 | not contain Postgres-only objects, like enums. 87 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "graphile-migrate", 3 | "version": "2.0.0-rc.2", 4 | "description": "Opinionated SQL-powered migration tool for PostgreSQL", 5 | "main": "dist/index.js", 6 | "scripts": { 7 | "lint": "yarn prettier:check && eslint --ext .js,.jsx,.ts,.tsx,.graphql .", 8 | "lint:deps": "depcheck --ignores @types/jest,@types/json5,@types/node,@types/mock-fs,@types/pg,@types/yargs,eslint_d,mock-fs,tslib", 9 | "lint:fix": "eslint --ext .js,.jsx,.ts,.tsx,.graphql . 
--fix; prettier --ignore-path .eslintignore --write '**/*.{js,jsx,ts,tsx,graphql,md,json}'", 10 | "prettier:check": "prettier --ignore-path .eslintignore --check '**/*.{js,jsx,ts,tsx,graphql,md,json}'", 11 | "tsc": "tsc -p tsconfig.build.json", 12 | "prepack": "npm run tsc && chmod +x dist/cli.js", 13 | "clean": "rm -Rf dist", 14 | "test": "yarn lint && yarn run lint:deps && FORCE_COLOR=1 yarn run test:only --ci", 15 | "test:only": "NODE_OPTIONS=\"--experimental-vm-modules\" jest -i", 16 | "version": "yarn prepack && ./scripts/update-docs.js && node ./scripts/version.mjs && git add README.md src/version.ts", 17 | "watch": "mkdir -p dist && touch dist/cli.js && chmod +x dist/cli.js && npm run tsc --watch" 18 | }, 19 | "bin": { 20 | "graphile-migrate": "./dist/cli.js" 21 | }, 22 | "repository": { 23 | "type": "git", 24 | "url": "git+ssh://git@github.com/graphile/migrate.git" 25 | }, 26 | "keywords": [ 27 | "postgresql", 28 | "postgres", 29 | "pg", 30 | "migrate", 31 | "sql", 32 | "easy", 33 | "fast", 34 | "watch", 35 | "functions" 36 | ], 37 | "author": "Benjie Gillam ", 38 | "license": "MIT", 39 | "bugs": { 40 | "url": "https://github.com/graphile/migrate/issues" 41 | }, 42 | "homepage": "https://github.com/graphile/migrate#readme", 43 | "dependencies": { 44 | "@graphile/logger": "^0.2.0", 45 | "@types/json5": "^2.2.0", 46 | "@types/node": "^18", 47 | "@types/pg": ">=6 <9", 48 | "chalk": "^4", 49 | "chokidar": "^3.5.3", 50 | "json5": "^2.2.3", 51 | "pg": ">=6.5 <9", 52 | "pg-connection-string": "^2.6.2", 53 | "pg-minify": "^1.6.3", 54 | "tslib": "^2.6.2", 55 | "yargs": "^17.7.2" 56 | }, 57 | "devDependencies": { 58 | "@localrepo/prettier2-for-jest": "npm:prettier@^2", 59 | "@tsconfig/node18": "^18.2.2", 60 | "@types/jest": "^29.5.11", 61 | "@types/mock-fs": "^4.13.4", 62 | "@types/yargs": "^17.0.32", 63 | "@typescript-eslint/eslint-plugin": "^6.19.0", 64 | "@typescript-eslint/parser": "^6.19.0", 65 | "depcheck": "^1.4.7", 66 | "eslint": "^8.56.0", 67 | "eslint-config-prettier": "^9.1.0", 68 | "eslint-plugin-import": "^2.29.1", 69 | "eslint-plugin-jest": "^27.6.3", 70 | "eslint-plugin-simple-import-sort": "^10.0.0", 71 | "eslint_d": "^13.1.2", 72 | "jest": "^29.7.0", 73 | "mock-fs": "^5.2.0", 74 | "prettier": "^3.2.4", 75 | "ts-jest": "^29.1.1", 76 | "typescript": "^5.3.3" 77 | }, 78 | "files": [ 79 | "dist" 80 | ] 81 | } 82 | -------------------------------------------------------------------------------- /FORMATS.md: -------------------------------------------------------------------------------- 1 | # Formats 2 | 3 | `graphile-migrate` uses cryptographic hashes to sign the resulting migration 4 | files, so it's critically important that these signatures are stable. No matter 5 | what previously committed migration exists, assuming `current/*` / `current.sql` 6 | is empty you should be able to run 7 | `graphile-migrate uncommit && graphile-migrate commit` and the hash should be 8 | unchanged. This should be true independent of whether you are using commit 9 | messages, multi-file or single-file migrations, etc. 10 | 11 | So, we have the following rules: 12 | 13 | ## Trim and trail 14 | 15 | When the migration is ready to be signed and/or written to disk, we trim it 16 | (using `String.prototype.trim`) and then append a newline. So every file should 17 | always end with exactly one newline (and should never start with a newline or 18 | whitespace). 19 | 20 | ## Header comments 21 | 22 | Header comments such as `--! Hash`, `--! Previous` and `--! 
Message` will always 23 | be at the top of a file, and _should_ have two newlines between them and the 24 | rest of the content. This last part is enforced for committed migrations, but is 25 | more relaxed when dealing with `current`. 26 | 27 | Header comments always start with a capital letter. 28 | 29 | ## Body comments 30 | 31 | Body comments such as `--! split` occur after the header section. They should be 32 | at the top. 33 | 34 | Body comments always start with a lower case letter. 35 | 36 | ## Unexpected comments 37 | 38 | Comments elsewhere in the file are ignored - we do not implement an SQL parser 39 | so we do not know if the comment is within a SQL string or similar. It's easiest 40 | just not to parse that far. 41 | 42 | ## `--! AllowInvalidHash` 43 | 44 | Should you need to go back and edit a _committed_ migration you can opt out of 45 | Graphile Migrate's consistency checks by adding this comment to the very top of 46 | your committed migration. Please note that editing the migration **WILL NOT** 47 | cause the migration to run again. This is primarily useful where there was a 48 | mistake in your migration that prevents it running on production but you don't 49 | want to reset your staging database, or where an update to PostgreSQL has made 50 | the syntax or commands in an older migration invalid and thus you must edit them 51 | to make the migration run against a clean database again. 52 | 53 | ## `--! no-transaction` 54 | 55 | This is treated as a body comment for backwards compatibility reasons. This 56 | comment is only valid in `migrations/current.sql` and is ignored or will error 57 | if found in `migrations/current/*.sql`. It has to be the very first line (after 58 | any headers). 59 | 60 | ## Multifile 61 | 62 | Multi-file dumps use `--! split: name_of_file.sql` comments to split the file 63 | into multiple parts. 64 | 65 | Any lines that come before the first `--! split` are pushed into that split 66 | (this should only be headers). 67 | 68 | Every split is separated from the next split by a newline. 69 | 70 | Due to "trim and trail" (above), an empty file is treated as a single newline, 71 | which means that it would be output as two newlines - one for the file itself, 72 | and one for the regular split. E.g. 73 | 74 | ```sql 75 | --! split: 001.sql 76 | select 1; 77 | 78 | --! split: 002-empty.sql 79 | 80 | 81 | --! 
split: 003.sql 82 | select 3; 83 | 84 | ``` 85 | -------------------------------------------------------------------------------- /__tests__/watch.test.ts: -------------------------------------------------------------------------------- 1 | jest.mock("child_process"); 2 | 3 | import "./helpers"; // Has side-effects; must come first 4 | 5 | import mockFs from "mock-fs"; 6 | 7 | import { _makeCurrentMigrationRunner, _watch } from "../src/commands/watch"; 8 | import { parseSettings } from "../src/settings"; 9 | import { makeMigrations } from "./helpers"; 10 | import { 11 | makeActionSpies, 12 | mockCurrentSqlContentOnce, 13 | resetDb, 14 | setup, 15 | TEST_DATABASE_URL, 16 | } from "./helpers"; 17 | 18 | beforeEach(resetDb); 19 | 20 | const { MIGRATION_MULTIFILE_FILES } = makeMigrations(); 21 | 22 | it("doesn't run current.sql if it's already up to date", async () => { 23 | const { settings, getActionCalls } = makeActionSpies(); 24 | const parsedSettings = await parseSettings({ 25 | connectionString: TEST_DATABASE_URL, 26 | ...settings, 27 | }); 28 | await setup(parsedSettings); 29 | const migrationRunner = _makeCurrentMigrationRunner( 30 | parsedSettings, 31 | false, 32 | false, 33 | ); 34 | 35 | expect(getActionCalls()).toEqual([]); 36 | mockCurrentSqlContentOnce( 37 | parsedSettings, 38 | `\ 39 | -- First migration 40 | SELECT ':DATABASE_NAME'; 41 | `, 42 | ); 43 | await migrationRunner(); 44 | expect(getActionCalls()).toEqual(["beforeCurrent", "afterCurrent"]); 45 | 46 | // This one is identical 47 | mockCurrentSqlContentOnce( 48 | parsedSettings, 49 | `\ 50 | -- Second migration; identical except for this comment 51 | SELECT ':DATABASE_NAME'; 52 | `, 53 | ); 54 | await migrationRunner(); 55 | expect(getActionCalls()).toEqual(["beforeCurrent", "afterCurrent"]); 56 | 57 | mockCurrentSqlContentOnce( 58 | parsedSettings, 59 | `\ 60 | -- Third migration; DIFFERENT! 61 | SELECT ':DATABASE_NAME', 2 * 2; 62 | `, 63 | ); 64 | await migrationRunner(); 65 | expect(getActionCalls()).toEqual([ 66 | "beforeCurrent", 67 | "afterCurrent", 68 | "beforeCurrent", 69 | "afterCurrent", 70 | ]); 71 | }); 72 | 73 | it("watches symlinked files", async () => { 74 | const { settings, getActionCalls } = makeActionSpies(); 75 | const parsedSettings = await parseSettings({ 76 | connectionString: TEST_DATABASE_URL, 77 | ...settings, 78 | }); 79 | await setup(parsedSettings); 80 | const migrationRunner = _makeCurrentMigrationRunner( 81 | parsedSettings, 82 | false, 83 | false, 84 | ); 85 | 86 | expect(getActionCalls()).toEqual([]); 87 | mockFs({ 88 | ...MIGRATION_MULTIFILE_FILES, 89 | "migrations/links/two.sql": `\ 90 | -- First migration 91 | SELECT ':DATABASE_NAME'; 92 | `, 93 | }); 94 | await migrationRunner(); 95 | expect(getActionCalls()).toEqual(["beforeCurrent", "afterCurrent"]); 96 | 97 | // This one is identical 98 | mockFs({ 99 | ...MIGRATION_MULTIFILE_FILES, 100 | "migrations/links/two.sql": `\ 101 | -- Second migration; identical except for this comment 102 | SELECT ':DATABASE_NAME'; 103 | `, 104 | }); 105 | await migrationRunner(); 106 | expect(getActionCalls()).toEqual(["beforeCurrent", "afterCurrent"]); 107 | 108 | mockFs({ 109 | ...MIGRATION_MULTIFILE_FILES, 110 | "migrations/links/two.sql": `\ 111 | -- Third migration; DIFFERENT! 
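(the content hash changes, so the watcher re-runs it) 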
112 | SELECT ':DATABASE_NAME', 2 * 2; 113 | `, 114 | }); 115 | await migrationRunner(); 116 | expect(getActionCalls()).toEqual([ 117 | "beforeCurrent", 118 | "afterCurrent", 119 | "beforeCurrent", 120 | "afterCurrent", 121 | ]); 122 | }); 123 | -------------------------------------------------------------------------------- /__tests__/migration.test.ts: -------------------------------------------------------------------------------- 1 | jest.mock("child_process"); 2 | jest.mock("../src/pg"); 3 | jest.mock("../src/migration"); 4 | 5 | import "./helpers"; // Has side-effects; must come first 6 | 7 | import { _migrate } from "../src/commands/migrate"; 8 | import { 9 | FileMigration, 10 | generatePlaceholderReplacement, 11 | getMigrationsAfter, 12 | } from "../src/migration"; 13 | import { Context } from "../src/pg"; 14 | import { parseSettings } from "../src/settings"; 15 | import { 16 | makeActionSpies, 17 | TEST_DATABASE_NAME, 18 | TEST_DATABASE_URL, 19 | } from "./helpers"; 20 | 21 | it("doesn't mind about placeholder order", async () => { 22 | const context: Context = { 23 | database: TEST_DATABASE_NAME, 24 | }; 25 | const parsedSettings = await parseSettings({ 26 | connectionString: "[connectionString]", 27 | rootConnectionString: "[rootConnectionString]", 28 | 29 | placeholders: { 30 | ":DATABASE_AUTHENTICATOR": "[DATABASE_AUTHENTICATOR]", 31 | ":DATABASE_AUTHENTICATOR_PASSWORD": "[DATABASE_AUTHENTICATOR_PASSWORD]", 32 | }, 33 | beforeReset: [], 34 | beforeAllMigrations: [], 35 | beforeCurrent: [], 36 | afterReset: [], 37 | afterAllMigrations: [], 38 | afterCurrent: [], 39 | }); 40 | const placeholderReplacement = generatePlaceholderReplacement( 41 | parsedSettings, 42 | context, 43 | ); 44 | const body = placeholderReplacement( 45 | `CREATE ROLE :DATABASE_AUTHENTICATOR WITH LOGIN PASSWORD ':DATABASE_AUTHENTICATOR_PASSWORD';`, 46 | ); 47 | 48 | expect(body).toEqual( 49 | `CREATE ROLE [DATABASE_AUTHENTICATOR] WITH LOGIN PASSWORD '[DATABASE_AUTHENTICATOR_PASSWORD]';`, 50 | ); 51 | }); 52 | 53 | it("calls no actions if no migrations", async () => { 54 | const { settings, getActionCalls } = makeActionSpies(); 55 | const parsedSettings = await parseSettings({ 56 | connectionString: TEST_DATABASE_NAME, 57 | ...settings, 58 | }); 59 | await _migrate(parsedSettings, false, false); 60 | expect(getActionCalls()).toEqual([]); 61 | }); 62 | 63 | it("calls afterAllMigrations action (only) if force is true", async () => { 64 | const { settings, getActionCalls } = makeActionSpies(); 65 | const parsedSettings = await parseSettings({ 66 | connectionString: TEST_DATABASE_URL, 67 | ...settings, 68 | }); 69 | await _migrate(parsedSettings, false, true); 70 | expect(getActionCalls()).toEqual([ 71 | "beforeAllMigrations", 72 | "afterAllMigrations", 73 | ]); 74 | }); 75 | 76 | it("calls beforeAllMigrations and afterAllMigrations action (only) if we did some migrations", async () => { 77 | (getMigrationsAfter as any).mockImplementationOnce( 78 | async (): Promise => { 79 | return [ 80 | { 81 | filename: "000001.sql", 82 | realFilename: "000001-test-message.sql", 83 | hash: "TEST_HASH", 84 | previousHash: null, 85 | body: "TEST_BODY", 86 | fullPath: "TEST_PATH", 87 | previous: null, 88 | message: "TEST MESSAGE", 89 | messageSlug: "test-message", 90 | allowInvalidHash: false, 91 | }, 92 | ]; 93 | }, 94 | ); 95 | const { settings, getActionCalls } = makeActionSpies(); 96 | const parsedSettings = await parseSettings({ 97 | connectionString: TEST_DATABASE_URL, 98 | ...settings, 99 | }); 100 | await 
_migrate(parsedSettings, false, false); 101 | expect(getActionCalls()).toEqual([ 102 | "beforeAllMigrations", 103 | "afterAllMigrations", 104 | ]); 105 | }); 106 | -------------------------------------------------------------------------------- /src/cli.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | import * as yargs from "yargs"; 3 | 4 | import { commitCommand } from "./commands/commit"; 5 | import { compileCommand } from "./commands/compile"; 6 | import { initCommand } from "./commands/init"; 7 | import { migrateCommand } from "./commands/migrate"; 8 | import { resetCommand } from "./commands/reset"; 9 | import { runCommand } from "./commands/run"; 10 | import { statusCommand } from "./commands/status"; 11 | import { uncommitCommand } from "./commands/uncommit"; 12 | import { watchCommand } from "./commands/watch"; 13 | import { isLoggedError } from "./lib"; 14 | import { version } from "./version"; 15 | 16 | function wrapHandler( 17 | input: yargs.CommandModule, 18 | ): yargs.CommandModule { 19 | const { handler, ...rest } = input; 20 | 21 | const newHandler: yargs.CommandModule["handler"] = async (argv) => { 22 | try { 23 | return await Promise.resolve(handler(argv)); 24 | } catch (e) { 25 | if (!isLoggedError(e)) { 26 | // eslint-disable-next-line no-console 27 | console.error(e); 28 | } 29 | process.exit(1); 30 | } 31 | }; 32 | 33 | return { 34 | ...rest, 35 | handler: newHandler, 36 | }; 37 | } 38 | 39 | const f = yargs 40 | .parserConfiguration({ 41 | "boolean-negation": true, 42 | "camel-case-expansion": false, 43 | "combine-arrays": false, 44 | "dot-notation": false, 45 | "duplicate-arguments-array": false, 46 | "flatten-duplicate-arrays": false, 47 | "halt-at-non-option": false, 48 | "parse-numbers": false, 49 | "populate--": false, 50 | "set-placeholder-key": false, 51 | "short-option-groups": true, 52 | "sort-commands": false, 53 | "strip-aliased": true, 54 | "strip-dashed": false, 55 | "unknown-options-as-args": false, 56 | }) 57 | .scriptName("graphile-migrate") 58 | 59 | .strict(true) 60 | .version(version) 61 | .hide("version") 62 | .help(true) 63 | .demandCommand(1, 1, "Please select a command to run.") 64 | .recommendCommands() 65 | 66 | // Commands 67 | .command(wrapHandler(initCommand)) 68 | .command(wrapHandler(migrateCommand)) 69 | .command(wrapHandler(watchCommand)) 70 | .command(wrapHandler(commitCommand)) 71 | .command(wrapHandler(uncommitCommand)) 72 | .command(wrapHandler(statusCommand)) 73 | .command(wrapHandler(resetCommand)) 74 | .command(wrapHandler(compileCommand)) 75 | .command(wrapHandler(runCommand)) 76 | 77 | // Make sure options added here are represented in CommonArgv 78 | .option("config", { 79 | alias: "c", 80 | type: "string", 81 | description: "Optional path to gmrc file", 82 | defaultDescription: ".gmrc[.js|.cjs]", 83 | }) 84 | 85 | .completion("completion", "Generate shell completion script.") 86 | .epilogue( 87 | process.env.GRAPHILE_SPONSOR 88 | ? `\ 89 | You are running graphile-migrate v${version}.` 90 | : `\ 91 | You are running graphile-migrate v${version}. 92 | 93 | ╔═══════════════════════════════════╗ 94 | ║ Graphile Migrate is crowd-funded, ║ 95 | ║ please consider sponsorship: ║ 96 | ║ ║ 97 | ║ https://www.graphile.org/sponsor/ ║ 98 | ║ ║ 99 | ║ 🙏 THANK YOU SPONSORS! 
🙏 ║ 100 | ╚═══════════════════════════════════╝ 101 | `, 102 | ).argv; 103 | 104 | if ("then" in f && typeof f.then === "function") { 105 | f.then(null, (e: Error) => { 106 | // eslint-disable-next-line no-console 107 | console.error(e); 108 | }); 109 | } 110 | -------------------------------------------------------------------------------- /__tests__/readCurrentMigration.test.ts: -------------------------------------------------------------------------------- 1 | import "./helpers"; // Has side-effects; must come first 2 | 3 | import mockFs from "mock-fs"; 4 | 5 | import { 6 | getCurrentMigrationLocation, 7 | readCurrentMigration, 8 | } from "../src/current"; 9 | import { ParsedSettings, parseSettings } from "../src/settings"; 10 | import { TEST_DATABASE_URL } from "./helpers"; 11 | 12 | let parsedSettings: ParsedSettings; 13 | beforeEach(async () => { 14 | mockFs({ migrations: mockFs.directory() }); 15 | parsedSettings = await parseSettings({ 16 | connectionString: TEST_DATABASE_URL, 17 | }); 18 | }); 19 | afterEach(() => { 20 | mockFs.restore(); 21 | }); 22 | 23 | it("reads from current.sql", async () => { 24 | mockFs({ 25 | "migrations/current.sql": "-- TEST", 26 | }); 27 | 28 | const currentLocation = await getCurrentMigrationLocation(parsedSettings); 29 | 30 | const content = await readCurrentMigration(parsedSettings, currentLocation); 31 | expect(content).toEqual("-- TEST"); 32 | }); 33 | 34 | it("returns empty if there's no current.sql", async () => { 35 | const currentLocation = await getCurrentMigrationLocation(parsedSettings); 36 | 37 | const content = await readCurrentMigration(parsedSettings, currentLocation); 38 | expect(content).toEqual(""); 39 | }); 40 | 41 | it("returns empty if there's an empty current/", async () => { 42 | mockFs({ 43 | "migrations/current": mockFs.directory(), 44 | }); 45 | const currentLocation = await getCurrentMigrationLocation(parsedSettings); 46 | 47 | const content = await readCurrentMigration(parsedSettings, currentLocation); 48 | expect(content).toEqual(""); 49 | }); 50 | 51 | const contentWithSplits = `\ 52 | --! split: 100-first.sql 53 | First content 54 | 55 | --! split: 200-second.sql 56 | Some more content 57 | With multiple lines 58 | -- and comments 59 | 60 | --! split: 300-third.sql 61 | 62 | 63 | --! 
split: 400-fourth.sql 64 | Note: 300 was empty 65 | `; 66 | 67 | it("reads multiple files", async () => { 68 | mockFs({ 69 | "migrations/current": { 70 | "100-first.sql": "First content\n", 71 | "200-second.sql": `\ 72 | Some more content 73 | With multiple lines 74 | -- and comments 75 | `, 76 | "300-third.sql": "", 77 | "400-fourth.sql": "Note: 300 was empty", 78 | }, 79 | }); 80 | 81 | const currentLocation = await getCurrentMigrationLocation(parsedSettings); 82 | const content = await readCurrentMigration(parsedSettings, currentLocation); 83 | expect(content).toEqual(contentWithSplits); 84 | }); 85 | 86 | it("ignores extraneous files", async () => { 87 | mockFs({ 88 | "migrations/current": { 89 | "README.md": "Blah blah\nEtc etc\nFoo bar baz", 90 | "100-first.sql": "First content\n", 91 | "200-second.sql": `\ 92 | Some more content 93 | With multiple lines 94 | -- and comments 95 | `, 96 | "300-third.sql": "", 97 | "400-fourth.sql": "Note: 300 was empty", 98 | }, 99 | }); 100 | 101 | const currentLocation = await getCurrentMigrationLocation(parsedSettings); 102 | const content = await readCurrentMigration(parsedSettings, currentLocation); 103 | expect(content).toEqual(contentWithSplits); 104 | }); 105 | 106 | it("reads from current.sql, and processes included files", async () => { 107 | mockFs({ 108 | "migrations/current.sql": "--!include foo_current.sql", 109 | "migrations/fixtures/foo_current.sql": "-- TEST from foo", 110 | }); 111 | 112 | const currentLocation = await getCurrentMigrationLocation(parsedSettings); 113 | const content = await readCurrentMigration(parsedSettings, currentLocation); 114 | expect(content).toEqual("-- TEST from foo"); 115 | }); 116 | -------------------------------------------------------------------------------- /src/commands/reset.ts: -------------------------------------------------------------------------------- 1 | import { CommandModule } from "yargs"; 2 | 3 | import { executeActions } from "../actions"; 4 | import { escapeIdentifier, withClient } from "../pg"; 5 | import { ParsedSettings, parseSettings, Settings } from "../settings"; 6 | import { CommonArgv, getSettings } from "./_common"; 7 | import { _migrate } from "./migrate"; 8 | 9 | interface ResetArgv extends CommonArgv { 10 | shadow: boolean; 11 | erase: boolean; 12 | } 13 | 14 | export async function _reset( 15 | parsedSettings: ParsedSettings, 16 | shadow: boolean, 17 | ): Promise<void> { 18 | const connectionString = shadow 19 | ? parsedSettings.shadowConnectionString 20 | : parsedSettings.connectionString; 21 | if (!connectionString) { 22 | throw new Error("Could not determine connection string for reset"); 23 | } 24 | await executeActions(parsedSettings, shadow, parsedSettings.beforeReset); 25 | await withClient( 26 | parsedSettings.rootConnectionString, 27 | parsedSettings, 28 | async (pgClient) => { 29 | const databaseName = shadow 30 | ? parsedSettings.shadowDatabaseName 31 | : parsedSettings.databaseName; 32 | if (!databaseName) { 33 | throw new Error("Database name unknown"); 34 | } 35 | const databaseOwner = parsedSettings.databaseOwner; 36 | const logSuffix = shadow ? "[shadow]" : ""; 37 | await pgClient.query( 38 | `DROP DATABASE IF EXISTS ${escapeIdentifier(databaseName)};`, 39 | ); 40 | parsedSettings.logger.info( 41 | `graphile-migrate${logSuffix}: dropped database '${databaseName}'`, 42 | ); 43 | try { 44 | await pgClient.query( 45 | `CREATE DATABASE ${escapeIdentifier( 46 | databaseName, 47 | )} OWNER ${escapeIdentifier(databaseOwner)};`, 48 | ); 49 | } catch (e) { 50 | throw new Error( 51 | `Failed to create database '${databaseName}' with owner '${databaseOwner}': ${e instanceof Error ? e.message : String(e)}`, 52 | ); 53 | } 54 | await pgClient.query( 55 | `REVOKE ALL ON DATABASE ${escapeIdentifier(databaseName)} FROM PUBLIC;`, 56 | ); 57 | parsedSettings.logger.info( 58 | `graphile-migrate${logSuffix}: recreated database '${databaseName}'`, 59 | ); 60 | }, 61 | ); 62 | await executeActions(parsedSettings, shadow, parsedSettings.afterReset); 63 | await _migrate(parsedSettings, shadow); 64 | } 65 | 66 | export async function reset(settings: Settings, shadow = false): Promise<void> { 67 | const parsedSettings = await parseSettings(settings, shadow); 68 | return _reset(parsedSettings, shadow); 69 | } 70 | 71 | export const resetCommand: CommandModule<Record<string, never>, ResetArgv> = { 72 | command: "reset", 73 | aliases: [], 74 | describe: 75 | "Drops and re-creates the database, re-running all committed migrations from the start. **HIGHLY DESTRUCTIVE**.", 76 | builder: { 77 | shadow: { 78 | type: "boolean", 79 | default: false, 80 | description: "Applies migrations to shadow DB.", 81 | }, 82 | erase: { 83 | type: "boolean", 84 | default: false, 85 | description: 86 | "This is your double opt-in to make it clear this DELETES EVERYTHING.", 87 | }, 88 | }, 89 | handler: async (argv) => { 90 | if (!argv.erase) { 91 | // eslint-disable-next-line no-console 92 | console.error( 93 | "DANGER! Reset is highly destructive. If you're sure you want to do this, please add --erase to your command.", 94 | ); 95 | process.exit(2); 96 | } 97 | await reset(await getSettings({ configFile: argv.config }), argv.shadow); 98 | }, 99 | }; 100 | --------------------------------------------------------------------------------
/src/commands/uncommit.ts: -------------------------------------------------------------------------------- 1 | import pgMinify = require("pg-minify"); 2 | import * as fsp from "fs/promises"; 3 | import { CommandModule } from "yargs"; 4 | 5 | import { 6 | getCurrentMigrationLocation, 7 | readCurrentMigration, 8 | writeCurrentMigration, 9 | } from "../current"; 10 | import { 11 | getAllMigrations, 12 | parseMigrationText, 13 | serializeMigration, 14 | undoMigration, 15 | } from "../migration"; 16 | import { ParsedSettings, parseSettings, Settings } from "../settings"; 17 | import { CommonArgv, getSettings } from "./_common"; 18 | import { _migrate } from "./migrate"; 19 | import { _reset } from "./reset"; 20 | 21 | export async function _uncommit(parsedSettings: ParsedSettings): Promise<void> { 22 | // Determine the last migration 23 | const allMigrations = await getAllMigrations(parsedSettings); 24 | const lastMigration = allMigrations[allMigrations.length - 1]; 25 | if (!lastMigration) { 26 | throw new Error("There's no committed migration to uncommit"); 27 | } 28 | 29 | // Check current.sql is blank 30 | const currentLocation = await getCurrentMigrationLocation(parsedSettings); 31 | const currentBody = await readCurrentMigration( 32 | parsedSettings, 33 | currentLocation, 34 | ); 35 | const minifiedCurrentBody = pgMinify(currentBody); 36 | if (minifiedCurrentBody !== "") { 37 | throw new Error("Cannot uncommit - current migration is not blank."); 38 | } 39 | 40 | // Restore current.sql from migration 41 | const lastMigrationFilepath = lastMigration.fullPath; 42 | const contents = await fsp.readFile(lastMigrationFilepath, "utf8"); 43 | const { headers, body } = parseMigrationText(lastMigrationFilepath, contents); 44 | 45 | // Drop Hash, Previous and AllowInvalidHash from headers; then write out 46 | const { Hash, Previous, AllowInvalidHash, ...otherHeaders } = headers; 47 | const completeBody = serializeMigration(body, otherHeaders); 48 | await writeCurrentMigration(parsedSettings, currentLocation, completeBody); 49 | 50 | // Delete the migration from committed and from the DB 51 | await fsp.unlink(lastMigrationFilepath); 52 | await undoMigration(parsedSettings, lastMigration); 53 | 54 | parsedSettings.logger.info( 55 | `graphile-migrate: migration '${lastMigrationFilepath}' undone`, 56 | ); 57 | 58 | // Reset shadow 59 | await _reset(parsedSettings, true); 60 | await _migrate(parsedSettings, true, true); 61 | } 62 | 63 | export async function uncommit(settings: Settings): Promise<void> { 64 | const parsedSettings = await parseSettings(settings, true); 65 | return _uncommit(parsedSettings); 66 | } 67 | 68 | export const uncommitCommand: CommandModule< 69 | Record<string, never>, 70 | CommonArgv 71 | > = { 72 | command: "uncommit", 73 | aliases: [], 74 | describe: 75 | "This command is useful in development if you need to modify your latest commit before you push/merge it, or if other DB commits have been made by other developers and you need to 'rebase' your migration onto theirs. Moves the latest commit out of the committed migrations folder and back to the current migration (assuming the current migration is empty-ish). Removes the migration tracking entry from ONLY the local database. Do not use after other databases have executed this committed migration otherwise they will fall out of sync. Assuming nothing else has changed, `graphile-migrate uncommit && graphile-migrate commit` should result in the exact same hash. Development only, and liable to cause conflicts with other developers - be careful.", 76 | builder: {}, 77 | handler: async (argv) => { 78 | await uncommit(await getSettings({ configFile: argv.config })); 79 | }, 80 | }; 81 | --------------------------------------------------------------------------------
/src/instrumentation.ts: -------------------------------------------------------------------------------- 1 | import chalk from "chalk"; 2 | import { QueryResultRow } from "pg"; 3 | 4 | import indent from "./indent"; 5 | import { Client } from "./pg"; 6 | import { ParsedSettings } from "./settings"; 7 | 8 | export interface InstrumentationError extends Error { 9 | severity?: string; 10 | code?: string; 11 | detail?: string; 12 | hint?: string; 13 | _gmlogged?: boolean; 14 | _gmMessageOverride?: string; 15 | } 16 | 17 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 18 | export async function runQueryWithErrorInstrumentation< 19 | T extends QueryResultRow = QueryResultRow, 20 | >(pgClient: Client, body: string, filename: string): Promise<T[]> { 21 | try { 22 | const { rows } = await pgClient.query({ 23 | text: body, 24 | }); 25 | return rows; 26 | } catch (e) { 27 | if ( 28 | e instanceof Error && 29 | "position" in e && 30 | (typeof e.position === "string" || typeof e.position === "number") 31 | ) { 32 | const p = parseInt(String(e.position), 10); 33 | let line = 1; 34 | let column = 0; 35 | let idx = 0; 36 | while (idx < p) { 37 | column++; 38 | const char = body[idx]; 39 | if (char === "\n") { 40 | line++; 41 | column = 0; 42 | } else { 43 | // ... 44 | } 45 | idx++; 46 | } 47 | const endOfLine = body.indexOf("\n", p); 48 | const previousNewline = body.lastIndexOf("\n", p); 49 | const previousNewline2 = 50 | body.lastIndexOf("\n", previousNewline - 1) || previousNewline; 51 | const previousNewline3 = 52 | body.lastIndexOf("\n", previousNewline2 - 1) || previousNewline2; 53 | const previousNewline4 = 54 | body.lastIndexOf("\n", previousNewline3 - 1) || previousNewline3; 55 | const startOfLine = previousNewline + 1; 56 | const positionWithinLine = p - startOfLine; 57 | const snippet = body.substring(previousNewline4 + 1, endOfLine); 58 | const indentString = chalk.red("| "); 59 | const codeIndent = 2; 60 | const lines = [ 61 | chalk.bold.red( 62 | `🛑 Error occurred at line ${line}, column ${column} of "${filename}":`, 63 | ), 64 | chalk.reset(indent(indent(snippet, codeIndent), indentString)), 65 | indentString + 66 | chalk.red("-".repeat(positionWithinLine - 1 + codeIndent) + "^"), 67 | indentString + 68 | chalk.red.bold((e as InstrumentationError).code) + 69 | chalk.red(": " + e.message), 70 | ]; 71 | (e as InstrumentationError)["_gmMessageOverride"] = lines.join("\n"); 72 | } 73 | throw e; 74 | } 75 | } 76 | 77 | export const logDbError = ({ logger }: ParsedSettings, error: Error): void => { 78 | const e = error as InstrumentationError; 79 | e._gmlogged = true; 80 | const messages = [""]; 81 | if (e["_gmMessageOverride"]) { 82 | messages.push(e["_gmMessageOverride"]); 83 | } else { 84 | messages.push( 85 | chalk.red.bold(`🛑 Error occurred whilst processing migration`), 86 | ); 87 | } 88 | const { severity, code, detail, hint } = e; 89 | messages.push(indent(e.stack ? e.stack : e.message, 4)); 90 | messages.push(""); 91 | if (severity) { 92 | messages.push(indent(`Severity:\t${severity}`, 4)); 93 | } 94 | if (code) { 95 | messages.push(indent(`Code: \t${code}`, 4)); 96 | } 97 | if (detail) { 98 | messages.push(indent(`Detail: \t${detail}`, 4)); 99 | } 100 | if (hint) { 101 | messages.push(indent(`Hint: \t${hint}`, 4)); 102 | } 103 | messages.push(""); 104 | logger.error(messages.join("\n"), { error: e }); 105 | /* eslint-enable */ 106 | }; 107 | --------------------------------------------------------------------------------
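The catch branch above turns PostgreSQL's 1-based character `position` into a line/column pair by scanning the query text and counting newlines. In isolation the computation is:

```ts
// Standalone version of the scan inside runQueryWithErrorInstrumentation:
function lineAndColumn(body: string, position: number) {
  let line = 1;
  let column = 0;
  for (let idx = 0; idx < position; idx++) {
    column++;
    if (body[idx] === "\n") {
      line++;
      column = 0;
    }
  }
  return { line, column };
}

lineAndColumn("select\n1/0;", 8); // => { line: 2, column: 1 } — the "1" of 1/0
```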
/__tests__/__snapshots__/migrate.test.ts.snap: -------------------------------------------------------------------------------- 1 | // Jest Snapshot v1, https://goo.gl/fbAQLP 2 | 3 | exports[`handles errors during migration gracefully 1`] = `[error: division by zero]`; 4 | 5 | exports[`handles errors during migration gracefully 2`] = ` 6 | " 7 | 🛑 Error occurred whilst processing migration 8 | error: division by zero 9 | at ~/node_modules/pg/lib/client.js:[LINE]:[COL] 10 | at processTicksAndRejections (node:internal/process/task_queues:[LINE]:[COL]) 11 | at runQueryWithErrorInstrumentation (~/src/instrumentation.ts:[LINE]:[COL]) 12 | at ~/src/migration.ts:[LINE]:[COL] 13 | at withAdvisoryLock (~/src/pgReal.ts:[LINE]:[COL]) 14 | at runCommittedMigration (~/src/migration.ts:[LINE]:[COL]) 15 | at ~/src/commands/migrate.ts:[LINE]:[COL] 16 | at withAdvisoryLock (~/src/pgReal.ts:[LINE]:[COL]) 17 | at ~/src/commands/migrate.ts:[LINE]:[COL] 18 | at withClient (~/src/pgReal.ts:[LINE]:[COL]) 19 | at _migrate (~/src/commands/migrate.ts:[LINE]:[COL]) 20 | at Object.<anonymous> (~/__tests__/migrate.test.ts:[LINE]:[COL]) 21 | 22 | Severity: ERROR 23 | Code: 22012 24 | ,[object Object]" 25 | `; 26 | 27 | exports[`refuses to run migration with invalid hash 1`] = `"Hash for 000002.sql does not match - sha1:cbed240dda7dfa510ff785783bbe6af7743b3a11 !== sha1:bddc1ead3310dc1c42cdc7f63537ebdff2e9fd7b; has the file been tampered with?"`; 28 | 29 | exports[`runs migrations 1`] = ` 30 | [ 31 | { 32 | "filename": "000001.sql", 33 | "hash": "sha1:e00ec93314a423ee5cc68d1182ad52f16442d7df", 34 | "previous_hash": null, 35 | }, 36 | { 37 | "filename": "000002.sql", 38 | "hash": "sha1:bddc1ead3310dc1c42cdc7f63537ebdff2e9fd7b", 39 | "previous_hash": "sha1:e00ec93314a423ee5cc68d1182ad52f16442d7df", 40 | }, 41 | ] 42 | `; 43 | 44 | exports[`runs migrations 2`] = ` 45 | [ 46 | "foo", 47 | ] 48 | `; 49 | 50 | exports[`runs migrations 3`] = ` 51 | [ 52 | { 53 | "typname": "user_role", 54 | "value_count": "1", 55 | }, 56 | ] 57 | `; 58 | 59 | exports[`runs migrations 4`] = ` 60 | [ 61 | { 62 | "filename": "000001.sql", 63 | "hash": "sha1:e00ec93314a423ee5cc68d1182ad52f16442d7df", 64 | "previous_hash": null, 65 | }, 66 | { 67 | "filename": "000002.sql", 68 | "hash": "sha1:bddc1ead3310dc1c42cdc7f63537ebdff2e9fd7b", 69 | "previous_hash": "sha1:e00ec93314a423ee5cc68d1182ad52f16442d7df", 70 | }, 71 | { 72 | "filename": "000003.sql", 73 | "hash": "sha1:2d248344ac299ebbad2aeba5bfec2ae3c3cb0a4f", 74 | "previous_hash": "sha1:bddc1ead3310dc1c42cdc7f63537ebdff2e9fd7b", 75 | }, 76 | ] 77 | `; 78 | 79 | exports[`runs migrations 5`] = ` 80 | [ 81 | "foo", 82 | ] 83 | `; 84 | 85 | exports[`runs migrations 6`] = ` 86 | [ 87 | { 88 | "typname": "user_role", 89 | "value_count": "2", 90 | }, 91 | ] 92 | `; 93 | 94 | exports[`will run a migration with invalid hash if told to do so 1`] = ` 95 | [ 96 | { 97 | "filename": "000001.sql", 98 | "hash": "sha1:e00ec93314a423ee5cc68d1182ad52f16442d7df", 99 | "previous_hash": null, 100 | }, 101 | { 102 | "filename": "000002.sql", 103 | "hash": "sha1:bddc1ead3310dc1c42cdc7f63537ebdff2e9fd7b", 104 | "previous_hash": "sha1:e00ec93314a423ee5cc68d1182ad52f16442d7df", 105 | }, 106 | { 107 | "filename": "000003.sql", 108 | "hash": "sha1:2d248344ac299ebbad2aeba5bfec2ae3c3cb0a4f", 109 | "previous_hash": "sha1:bddc1ead3310dc1c42cdc7f63537ebdff2e9fd7b", 110 | }, 111 | ] 112 | `; 113 | 114 | exports[`will run a migration with invalid hash if told to do so 2`] = ` 115 | [ 116 | { 117 | "typname": "user_role", 118 | "value_count": "2", 119 | }, 120 | ] 121 | `; 122 | --------------------------------------------------------------------------------
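The `hash`/`previous_hash` pairs in these snapshots form a git-like chain: each migration's hash commits to its predecessor's, which is why tampering with 000002.sql invalidates every migration after it. A sketch of the idea only; see src/hash.ts for the real `calculateHash`, as the exact preimage layout below is an assumption:

```ts
import { createHash } from "crypto";

// Each hash covers the migration body plus the previous migration's hash,
// so altering any file breaks the chain from that point onward.
function chainHash(body: string, previousHash: string | null): string {
  return (
    "sha1:" +
    createHash("sha1")
      .update((previousHash ?? "") + "\n" + body) // preimage layout assumed
      .digest("hex")
  );
}
```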
/src/commands/migrate.ts: -------------------------------------------------------------------------------- 1 | import { CommandModule } from "yargs"; 2 | 3 | import { executeActions } from "../actions"; 4 | import { 5 | getLastMigration, 6 | getMigrationsAfter, 7 | runCommittedMigration, 8 | } from "../migration"; 9 | import { withClient } from "../pg"; 10 | import { withAdvisoryLock } from "../pgReal"; 11 | import { ParsedSettings, parseSettings, Settings } from "../settings"; 12 | import { CommonArgv, getSettings } from "./_common"; 13 | 14 | interface MigrateArgv extends CommonArgv { 15 | shadow: boolean; 16 | forceActions: boolean; 17 | } 18 | 19 | export async function _migrate( 20 | parsedSettings: ParsedSettings, 21 | shadow = false, 22 | forceActions = false, 23 | ): Promise<void> { 24 | const connectionString = shadow 25 | ? parsedSettings.shadowConnectionString 26 | : parsedSettings.connectionString; 27 | if (!connectionString) { 28 | throw new Error("Could not determine connection string"); 29 | } 30 | const logSuffix = shadow ? "[shadow]" : ""; 31 | await withClient( 32 | connectionString, 33 | parsedSettings, 34 | async (pgClient, context) => { 35 | await withAdvisoryLock(pgClient, async () => { 36 | const lastMigration = await getLastMigration(pgClient, parsedSettings); 37 | const remainingMigrations = await getMigrationsAfter( 38 | parsedSettings, 39 | lastMigration, 40 | ); 41 | const shouldExecuteActions = 42 | remainingMigrations.length > 0 || forceActions; 43 | if (shouldExecuteActions) { 44 | await executeActions( 45 | parsedSettings, 46 | shadow, 47 | parsedSettings.beforeAllMigrations, 48 | ); 49 | } 50 | // Run migrations in series 51 | for (const migration of remainingMigrations) { 52 | await runCommittedMigration( 53 | pgClient, 54 | parsedSettings, 55 | context, 56 | migration, 57 | logSuffix, 58 | ); 59 | } 60 | if (shouldExecuteActions) { 61 | await executeActions( 62 | parsedSettings, 63 | shadow, 64 | parsedSettings.afterAllMigrations, 65 | ); 66 | } 67 | parsedSettings.logger.info( 68 | `graphile-migrate${logSuffix}: ${ 69 | remainingMigrations.length > 0 70 | ? `${remainingMigrations.length} committed migrations executed` 71 | : lastMigration 72 | ? "Already up to date" 73 | : `Up to date — no committed migrations to run` 74 | }`, 75 | ); 76 | }); 77 | }, 78 | ); 79 | } 80 | 81 | export async function migrate( 82 | settings: Settings, 83 | shadow = false, 84 | forceActions = false, 85 | ): Promise<void> { 86 | const parsedSettings = await parseSettings(settings, shadow); 87 | return _migrate(parsedSettings, shadow, forceActions); 88 | } 89 | 90 | export const migrateCommand: CommandModule< 91 | Record<string, never>, 92 | MigrateArgv 93 | > = { 94 | command: "migrate", 95 | aliases: [], 96 | describe: 97 | "Runs any un-executed committed migrations. Does NOT run the current migration. For use in production and development.", 98 | builder: { 99 | shadow: { 100 | type: "boolean", 101 | default: false, 102 | description: "Apply migrations to the shadow DB (for development).", 103 | }, 104 | forceActions: { 105 | type: "boolean", 106 | default: false, 107 | description: 108 | "Run beforeAllMigrations and afterAllMigrations actions even if no migration was necessary.", 109 | }, 110 | }, 111 | handler: async (argv) => { 112 | await migrate( 113 | await getSettings({ configFile: argv.config }), 114 | argv.shadow, 115 | argv.forceActions, 116 | ); 117 | }, 118 | }; 119 | --------------------------------------------------------------------------------
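Because `_migrate` wraps the whole run in `withAdvisoryLock`, concurrent runners serialize instead of racing, so calling it from several processes or containers at boot is safe. Typical programmatic use, with the same import the test suite relies on:

```ts
import { migrate } from "graphile-migrate";

await migrate(
  { connectionString: process.env.DATABASE_URL },
  false, // shadow: target the main DB, not the shadow DB
  false, // forceActions: only run before/afterAllMigrations when something executes
);
```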
/__tests__/include.test.ts: -------------------------------------------------------------------------------- 1 | import "./helpers"; 2 | 3 | import mockFs from "mock-fs"; 4 | 5 | import { compileIncludes } from "../src/migration"; 6 | import { ParsedSettings, parseSettings } from "../src/settings"; 7 | 8 | let old: string | undefined; 9 | let settings: ParsedSettings; 10 | beforeAll(async () => { 11 | old = process.env.DATABASE_AUTHENTICATOR; 12 | process.env.DATABASE_AUTHENTICATOR = "dbauth"; 13 | settings = await parseSettings({ 14 | connectionString: "postgres://dbowner:dbpassword@dbhost:1221/dbname", 15 | placeholders: { 16 | ":DATABASE_AUTHENTICATOR": "!ENV", 17 | }, 18 | migrationsFolder: "migrations", 19 | }); 20 | }); 21 | afterAll(() => { 22 | process.env.DATABASE_AUTHENTICATOR = old; 23 | }); 24 | 25 | afterEach(() => { 26 | mockFs.restore(); 27 | }); 28 | 29 | /** Pretends that our compiled files are 'current.sql' */ 30 | const FAKE_VISITED = new Set([`${process.cwd()}/migrations/current.sql`]); 31 | 32 | it("compiles an included file", async () => { 33 | mockFs({ 34 | "migrations/fixtures/foo.sql": "select * from foo;", 35 | }); 36 | expect( 37 | await compileIncludes( 38 | settings, 39 | `\ 40 | --!include foo.sql 41 | `, 42 | FAKE_VISITED, 43 | ), 44 | ).toEqual(`\ 45 | select * from foo; 46 | `); 47 | }); 48 | 49 | it("compiles multiple included files", async () => { 50 | mockFs({ 51 | "migrations/fixtures/dir1/foo.sql": "select * from foo;", 52 | "migrations/fixtures/dir2/bar.sql": "select * from bar;", 53 | "migrations/fixtures/dir3/baz.sql": "--!include dir4/qux.sql", 54 | "migrations/fixtures/dir4/qux.sql": "select * from qux;", 55 | }); 56 | expect( 57 | await compileIncludes( 58 | settings, 59 | `\ 60 | --!include dir1/foo.sql 61 | --!include dir2/bar.sql 62 | --!include dir3/baz.sql 63 | `, 64 | FAKE_VISITED, 65 | ), 66 | ).toEqual(`\ 67 | select * from foo; 68 | select * from bar; 69 | select * from qux; 70 | `); 71 | }); 72 | 73 | it("compiles an included file, and won't get stuck in an infinite include loop", async () => { 74 | mockFs({ 75 | "migrations/fixtures/foo.sql": "select * from foo;\n--!include foo.sql", 76 | }); 77 | const promise = compileIncludes( 78 | settings, 79 | `\ 80 | --!include foo.sql 81 | `, 82 | FAKE_VISITED, 83 | ); 84 | await expect(promise).rejects.toThrowError(/Circular include/); 85 | const message = await promise.catch((e) => e.message); 86 | expect(message.replaceAll(process.cwd(), "~")).toMatchSnapshot(); 87 | }); 88 | 89 | it("disallows calling files outside of the migrations/fixtures folder", async () => { 90 | mockFs({ 91 | "migrations/fixtures/bar.sql": "", 92 | "outsideFolder/foo.sql": "select * from foo;", 93 | }); 94 | 95 | const promise = compileIncludes( 96 | settings, 97 | `\ 98 | --!include ../../outsideFolder/foo.sql 99 | `, 100 | FAKE_VISITED, 101 | ); 102 | await expect(promise).rejects.toThrowError(/Forbidden: cannot include/); 103 | const message = await promise.catch((e) => e.message); 104 | expect(message.replaceAll(process.cwd(), "~")).toMatchSnapshot(); 105 | }); 106 | 107 | it("compiles an included file that contains escapable things", async () => { 108 | mockFs({ 109 | "migrations/fixtures/foo.sql": `\ 110 | begin; 111 | 112 | create or replace function current_user_id() returns uuid as $$ 113 | select nullif(current_setting('user.id', true)::text, '')::uuid; 114 | $$ language sql stable; 115 | 116 | comment on function current_user_id is E'The ID of the current user.'; 117 | 118 | grant all on function current_user_id to :DATABASE_USER; 119 | 120 | commit; 121 | `, 122 | }); 123 | expect( 124 | await compileIncludes( 125 | settings, 126 | `\ 127 | --!include foo.sql 128 | `, 129 | FAKE_VISITED, 130 | ), 131 | ).toEqual(`\ 132 | begin; 133 | 134 | create or replace function current_user_id() returns uuid as $$ 135 | select nullif(current_setting('user.id', true)::text, '')::uuid; 136 | $$ language sql stable; 137 | 138 | comment on function current_user_id is E'The ID of the current user.'; 139 | 140 | grant all on function current_user_id to :DATABASE_USER; 141 | 142 | commit; 143 | 144 | `); 145 | }); 146 | --------------------------------------------------------------------------------
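A sketch of the behaviour these tests pin down; this is not the real `compileIncludes`, just the shape of it. A visited set turns self-inclusion into a 'Circular include' error rather than infinite recursion. Path handling here is simplified (resolved relative to the including file), whereas the real implementation anchors includes to the migrations/fixtures folder and forbids escaping it:

```ts
import * as fsp from "fs/promises";
import { dirname, resolve } from "path";

// Recursively expand `--!include x.sql` lines, rejecting cycles.
async function expandIncludes(
  file: string,
  visited: Set<string> = new Set(),
): Promise<string> {
  const path = resolve(file);
  if (visited.has(path)) {
    throw new Error(`Circular include of '${path}'`);
  }
  visited.add(path);
  const body = await fsp.readFile(path, "utf8");
  const lines = await Promise.all(
    body.split("\n").map((line) =>
      line.startsWith("--!include ")
        ? expandIncludes(
            resolve(dirname(path), line.slice("--!include ".length).trim()),
            visited,
          )
        : Promise.resolve(line),
    ),
  );
  return lines.join("\n");
}
```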
/src/commands/status.ts: -------------------------------------------------------------------------------- 1 | import pgMinify = require("pg-minify"); 2 | import { CommandModule } from "yargs"; 3 | 4 | import { getCurrentMigrationLocation, readCurrentMigration } from "../current"; 5 | import { getLastMigration, getMigrationsAfter } from "../migration"; 6 | import { withClient } from "../pg"; 7 | import { ParsedSettings, parseSettings, Settings } from "../settings"; 8 | import { CommonArgv, getSettings } from "./_common"; 9 | 10 | interface StatusOptions { 11 | skipDatabase?: boolean; 12 | } 13 | 14 | interface StatusArgv extends StatusOptions, CommonArgv {} 15 | 16 | interface Status { 17 | remainingMigrations?: Array<string>; 18 | hasCurrentMigration: boolean; 19 | } 20 | 21 | async function _status( 22 | parsedSettings: ParsedSettings, 23 | { skipDatabase }: StatusOptions, 24 | ): Promise<Status> { 25 | // Checks that don't need a database connection 26 | const currentLocation = await getCurrentMigrationLocation(parsedSettings); 27 | const body = await readCurrentMigration(parsedSettings, currentLocation); 28 | const minifiedBody = pgMinify(body); 29 | const hasCurrentMigration = minifiedBody !== ""; 30 | 31 | // Checks that need a database connection 32 | let remainingMigrations: undefined | string[]; 33 | if (!skipDatabase) { 34 | const connectionString = parsedSettings.connectionString; 35 | if (!connectionString) { 36 | throw new Error("Could not determine connection string"); 37 | } 38 | await withClient(connectionString, parsedSettings, async (pgClient) => { 39 | const lastMigration = await getLastMigration(pgClient, parsedSettings); 40 | const remainingMigrationDefinitions = await getMigrationsAfter( 41 | parsedSettings, 42 | lastMigration, 43 | ); 44 | remainingMigrations = remainingMigrationDefinitions.map( 45 | (m) => m.filename, 46 | ); 47 | return { 48 | remainingMigrations, 49 | }; 50 | }); 51 | } 52 | 53 | return { 54 | remainingMigrations, 55 | hasCurrentMigration, 56 | }; 57 | } 58 | 59 | export async function status( 60 | settings: Settings, 61 | options: StatusOptions = {}, 62 | ): Promise<Status> { 63 | const parsedSettings = await parseSettings(settings, true); 64 | return _status(parsedSettings, options); 65 | } 66 | 67 | export const statusCommand: CommandModule<Record<string, never>, StatusArgv> = { 68 | command: "status", 69 | aliases: [], 70 | describe: `\ 71 | Exits with a bitmap status code indicating statuses: 72 | 73 | - 1 if there are committed migrations that have not been executed yet (requires DB connection) 74 | - 2 if the current migration is non-empty (ignoring comments) 75 | 76 | If both of the above are true then the output status will be 3 (1+2). If neither 77 | are true, exit status will be 0 (success). Additional messages may also be output.`, 78 | builder: { 79 | skipDatabase: { 80 | type: "boolean", 81 | description: "Skip checks that require a database connection.", 82 | default: false, 83 | }, 84 | }, 85 | handler: async (argv) => { 86 | /* eslint-disable no-console */ 87 | let exitCode = 0; 88 | const { config, ...options } = argv; 89 | const details = await status( 90 | await getSettings({ configFile: config }), 91 | options, 92 | ); 93 | if (details.remainingMigrations) { 94 | const remainingCount = details.remainingMigrations?.length; 95 | if (remainingCount > 0) { 96 | console.log( 97 | `There are ${remainingCount} committed migrations pending:\n\n  ${details.remainingMigrations.join( 98 | "\n  ", 99 | )}`, 100 | ); 101 | exitCode += 1; 102 | } 103 | } 104 | if (details.hasCurrentMigration) { 105 | if (exitCode) { 106 | console.log(); 107 | } 108 | console.log( 109 | "The current migration is not empty and has not been committed.", 110 | ); 111 | exitCode += 2; 112 | } 113 | 114 | // ESLint false positive. 115 | // eslint-disable-next-line require-atomic-updates 116 | process.exitCode = exitCode; 117 | 118 | if (exitCode === 0) { 119 | console.log("Up to date."); 120 | } 121 | /* eslint-enable */ 122 | }, 123 | }; 124 | --------------------------------------------------------------------------------
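The bitmap exit code makes `status` easy to use as a deployment gate in CI. The same information is available programmatically from the returned object; the package-root re-export of `status` is assumed here, by analogy with the other commands:

```ts
import { status } from "graphile-migrate"; // assumed re-export

const s = await status({ connectionString: process.env.DATABASE_URL });
if (s.remainingMigrations?.length) {
  // exit bit 1: committed migrations awaiting `graphile-migrate migrate`
}
if (s.hasCurrentMigration) {
  // exit bit 2: uncommitted work sitting in the current migration
}
```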
/src/commands/run.ts: -------------------------------------------------------------------------------- 1 | import * as fsp from "fs/promises"; 2 | import { QueryResultRow } from "pg"; 3 | import { CommandModule } from "yargs"; 4 | 5 | import { DO_NOT_USE_DATABASE_URL } from "../actions"; 6 | import { runQueryWithErrorInstrumentation } from "../instrumentation"; 7 | import { compilePlaceholders } from "../migration"; 8 | import { withClient } from "../pgReal"; 9 | import { 10 | makeRootDatabaseConnectionString, 11 | parseSettings, 12 | Settings, 13 | } from "../settings"; 14 | import { CommonArgv, getDatabaseName, getSettings, readStdin } from "./_common"; 15 | 16 | interface RunArgv extends CommonArgv { 17 | shadow?: boolean; 18 | root?: boolean; 19 | rootDatabase?: boolean; 20 | } 21 | 22 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 23 | export async function run( 24 | settings: Settings, 25 | content: string, 26 | filename: string, 27 | { 28 | shadow = false, 29 | root = false, 30 | rootDatabase = false, 31 | }: { 32 | shadow?: boolean; 33 | root?: boolean; 34 | rootDatabase?: boolean; 35 | } = {}, 36 | ): Promise<any[] | undefined> { 37 | const parsedSettings = await parseSettings(settings, shadow); 38 | const sql = compilePlaceholders(parsedSettings, content, shadow); 39 | const baseConnectionString = rootDatabase 40 | ? parsedSettings.rootConnectionString 41 | : shadow 42 | ? parsedSettings.shadowConnectionString 43 | : parsedSettings.connectionString; 44 | if (!baseConnectionString) { 45 | throw new Error("Could not determine connection string to use."); 46 | } 47 | 48 | const connectionString = 49 | root && !rootDatabase 50 | ? makeRootDatabaseConnectionString( 51 | parsedSettings, 52 | getDatabaseName(baseConnectionString), 53 | ) 54 | : baseConnectionString; 55 | 56 | return withClient(connectionString, parsedSettings, (pgClient) => 57 | runQueryWithErrorInstrumentation(pgClient, sql, filename), 58 | ); 59 | } 60 | 61 | export const runCommand: CommandModule<Record<string, never>, RunArgv> = { 62 | command: "run [file]", 63 | aliases: [], 64 | describe: `\ 65 | Compiles a SQL file, inserting all the placeholders, and then runs it against the database. Useful for seeding. If called from an action will automatically run against the same database (via GM_DBURL envvar) unless --shadow or --rootDatabase are supplied.`, 66 | builder: { 67 | shadow: { 68 | type: "boolean", 69 | default: false, 70 | description: "Apply to the shadow database (for development).", 71 | }, 72 | root: { 73 | type: "boolean", 74 | default: false, 75 | description: 76 | "Run the file using the root user (but application database).", 77 | }, 78 | rootDatabase: { 79 | type: "boolean", 80 | default: false, 81 | description: 82 | "Like --root, but also runs against the root database rather than application database.", 83 | }, 84 | }, 85 | handler: async (argv) => { 86 | const defaultSettings = await getSettings({ configFile: argv.config }); 87 | 88 | // `run` might be called from an action; in this case `DATABASE_URL` will 89 | // be unavailable (overwritten with DO_NOT_USE_DATABASE_URL) to avoid 90 | // ambiguity (so we don't accidentally run commands against the main 91 | // database when it was the shadow database that triggered the action); in 92 | // this case, unless stated otherwise, the user would want to `run` against 93 | // whatever database was just modified, so we automatically use `GM_DBURL` 94 | // in this case. 95 | const settings = 96 | argv.shadow || 97 | argv.rootDatabase || 98 | process.env.DATABASE_URL !== DO_NOT_USE_DATABASE_URL 99 | ? defaultSettings 100 | : { 101 | ...defaultSettings, 102 | connectionString: process.env.GM_DBURL, 103 | }; 104 | 105 | const { content, filename } = 106 | typeof argv.file === "string" 107 | ?
{ 108 | filename: argv.file, 109 | content: await fsp.readFile(argv.file, "utf8"), 110 | } 111 | : { filename: "stdin", content: await readStdin() }; 112 | 113 | const rows = await run(settings, content, filename, argv); 114 | 115 | if (rows) { 116 | // eslint-disable-next-line no-console 117 | console.table(rows); 118 | } 119 | }, 120 | }; 121 | -------------------------------------------------------------------------------- /.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | parser: "@typescript-eslint/parser", 3 | extends: [ 4 | "eslint:recommended", 5 | "plugin:@typescript-eslint/eslint-recommended", 6 | "plugin:@typescript-eslint/recommended", 7 | "plugin:@typescript-eslint/recommended-requiring-type-checking", 8 | "plugin:import/errors", 9 | "plugin:import/typescript", 10 | "prettier", 11 | ], 12 | plugins: ["jest", "@typescript-eslint", "simple-import-sort", "import"], 13 | parserOptions: { 14 | ecmaVersion: 2018, 15 | sourceType: "module", 16 | project: "tsconfig.json", 17 | }, 18 | env: { 19 | node: true, 20 | jest: true, 21 | es6: true, 22 | }, 23 | rules: { 24 | "object-shorthand": "error", 25 | 26 | // If something might be async in future, using `await` guarantees it will return a promise 27 | "@typescript-eslint/require-await": "off", 28 | "@typescript-eslint/no-empty-function": "off", 29 | "@typescript-eslint/no-unused-vars": [ 30 | "error", 31 | { 32 | argsIgnorePattern: "^_", 33 | varsIgnorePattern: "^_", 34 | args: "after-used", 35 | ignoreRestSiblings: true, 36 | }, 37 | ], 38 | // We know how to JavaScript 39 | "@typescript-eslint/unbound-method": "off", 40 | curly: "error", 41 | "no-console": "error", 42 | "no-else-return": 0, 43 | "no-return-assign": [2, "except-parens"], 44 | "no-underscore-dangle": 0, 45 | "jest/no-focused-tests": 2, 46 | "jest/no-identical-title": 2, 47 | camelcase: 0, 48 | "prefer-arrow-callback": [ 49 | "error", 50 | { 51 | allowNamedFunctions: true, 52 | }, 53 | ], 54 | "class-methods-use-this": 0, 55 | "no-restricted-syntax": 0, 56 | "no-param-reassign": [ 57 | "error", 58 | { 59 | props: false, 60 | }, 61 | ], 62 | 63 | "arrow-body-style": 0, 64 | "no-nested-ternary": 0, 65 | 66 | /* 67 | * simple-import-sort seems to be the most stable import sorting currently, 68 | * disable others 69 | */ 70 | "simple-import-sort/imports": "error", 71 | "simple-import-sort/exports": "error", 72 | "sort-imports": "off", 73 | "import/order": "off", 74 | 75 | "import/no-deprecated": "warn", 76 | "import/no-duplicates": "error", 77 | }, 78 | overrides: [ 79 | { 80 | files: ["__tests__/**/*.ts", "src/__mocks__/**/*.ts"], 81 | rules: { 82 | "no-console": "warn", 83 | "@typescript-eslint/ban-ts-ignore": "off", 84 | "@typescript-eslint/no-explicit-any": "off", 85 | "@typescript-eslint/no-non-null-assertion": "off", 86 | "@typescript-eslint/explicit-function-return-type": "off", 87 | "@typescript-eslint/no-unsafe-argument": "off", 88 | "@typescript-eslint/no-unsafe-assignment": "off", 89 | "@typescript-eslint/no-unsafe-call": "off", 90 | "@typescript-eslint/no-unsafe-enum-comparison": "off", 91 | "@typescript-eslint/no-unsafe-member-access": "off", 92 | "@typescript-eslint/no-unsafe-return": "off", 93 | "@typescript-eslint/no-floating-promises": "off", 94 | "@typescript-eslint/ban-ts-comment": "off", 95 | "import/no-duplicates": "off", 96 | }, 97 | }, 98 | { 99 | files: ["scripts/*.js"], 100 | parserOptions: { 101 | project: null, 102 | }, 103 | rules: { 104 | "@typescript-eslint/await-thenable": 
"off", 105 | "@typescript-eslint/explicit-function-return-type": "off", 106 | "@typescript-eslint/no-misused-promises": "off", 107 | "@typescript-eslint/no-unnecessary-type-assertion": "off", 108 | "@typescript-eslint/prefer-includes": "off", 109 | "@typescript-eslint/no-var-requires": "off", 110 | "@typescript-eslint/prefer-regexp-exec": "off", 111 | "@typescript-eslint/prefer-string-starts-ends-with": "off", 112 | "@typescript-eslint/no-base-to-string": "off", 113 | "@typescript-eslint/no-duplicate-type-constituents": "off", 114 | "@typescript-eslint/no-floating-promises": "off", 115 | "@typescript-eslint/no-implied-eval": "off", 116 | "@typescript-eslint/no-redundant-type-constituents": "off", 117 | "@typescript-eslint/no-unsafe-argument": "off", 118 | "@typescript-eslint/no-unsafe-assignment": "off", 119 | "@typescript-eslint/no-unsafe-call": "off", 120 | "@typescript-eslint/no-unsafe-enum-comparison": "off", 121 | "@typescript-eslint/no-unsafe-member-access": "off", 122 | "@typescript-eslint/no-unsafe-return": "off", 123 | "@typescript-eslint/restrict-plus-operands": "off", 124 | "@typescript-eslint/restrict-template-expressions": "off", 125 | }, 126 | }, 127 | ], 128 | }; 129 | -------------------------------------------------------------------------------- /__tests__/uncommit.test.ts: -------------------------------------------------------------------------------- 1 | import "./helpers"; // Has side-effects; must come first 2 | 3 | import * as fsp from "fs/promises"; 4 | import mockFs from "mock-fs"; 5 | 6 | import { commit, migrate, uncommit } from "../src"; 7 | import { sluggify } from "../src/sluggify"; 8 | import { makeMigrations, resetDb, settings } from "./helpers"; 9 | 10 | beforeEach(resetDb); 11 | beforeEach(async () => { 12 | mockFs({ migrations: mockFs.directory() }); 13 | }); 14 | afterEach(() => { 15 | mockFs.restore(); 16 | }); 17 | 18 | it("aborts if there is no previous migration", async () => { 19 | mockFs({ 20 | "migrations/current.sql": "-- JUST A COMMENT\n", 21 | }); 22 | 23 | const promise = uncommit(settings); 24 | await promise.catch(() => {}); 25 | 26 | mockFs.restore(); 27 | expect(promise).rejects.toMatchInlineSnapshot( 28 | `[Error: There's no committed migration to uncommit]`, 29 | ); 30 | }); 31 | it("aborts if current migration is not empty", async () => { 32 | const { MIGRATION_1_COMMITTED } = makeMigrations(); 33 | mockFs({ 34 | "migrations/committed/000001.sql": MIGRATION_1_COMMITTED, 35 | "migrations/current.sql": "SELECT 1;", 36 | }); 37 | 38 | await migrate(settings); 39 | 40 | const promise = uncommit(settings); 41 | await promise.catch(() => {}); 42 | 43 | mockFs.restore(); 44 | await expect(promise).rejects.toMatchInlineSnapshot( 45 | `[Error: Cannot uncommit - current migration is not blank.]`, 46 | ); 47 | }); 48 | 49 | describe.each([[undefined], ["My Commit Message"]])( 50 | "uncommit message '%s'", 51 | (commitMessage) => { 52 | const commitMessageSlug = commitMessage 53 | ? 
`-${sluggify(commitMessage)}` 54 | : ``; 55 | const { 56 | MIGRATION_1_TEXT, 57 | MIGRATION_1_COMMITTED, 58 | MIGRATION_MULTIFILE_COMMITTED, 59 | MIGRATION_MULTIFILE_FILES, 60 | } = makeMigrations(commitMessage); 61 | 62 | it("rolls back migration", async () => { 63 | mockFs({ 64 | [`migrations/committed/000001${commitMessageSlug}.sql`]: 65 | MIGRATION_1_COMMITTED, 66 | "migrations/current.sql": "-- JUST A COMMENT\n", 67 | }); 68 | await migrate(settings); 69 | await uncommit(settings); 70 | 71 | await expect( 72 | fsp.stat("migrations/committed/000001.sql"), 73 | ).rejects.toMatchObject({ 74 | code: "ENOENT", 75 | }); 76 | expect(await fsp.readFile("migrations/current.sql", "utf8")).toEqual( 77 | (commitMessage ? `--! Message: ${commitMessage}\n\n` : "") + 78 | MIGRATION_1_TEXT.trim() + 79 | "\n", 80 | ); 81 | 82 | await commit(settings); 83 | expect( 84 | await fsp.readFile( 85 | `migrations/committed/000001${commitMessageSlug}.sql`, 86 | "utf8", 87 | ), 88 | ).toEqual(MIGRATION_1_COMMITTED); 89 | }); 90 | 91 | it("rolls back multifile migration", async () => { 92 | mockFs({ 93 | [`migrations/committed/000001${commitMessageSlug}.sql`]: 94 | MIGRATION_1_COMMITTED, 95 | [`migrations/committed/000002${commitMessageSlug}.sql`]: 96 | MIGRATION_MULTIFILE_COMMITTED, 97 | "migrations/current/1.sql": "-- COMMENT", 98 | }); 99 | await migrate(settings); 100 | await uncommit(settings); 101 | 102 | expect( 103 | await fsp.readFile( 104 | `migrations/committed/000001${commitMessageSlug}.sql`, 105 | "utf8", 106 | ), 107 | ).toEqual(MIGRATION_1_COMMITTED); 108 | await expect( 109 | fsp.stat("migrations/committed/000002.sql"), 110 | ).rejects.toMatchObject({ 111 | code: "ENOENT", 112 | }); 113 | expect(await fsp.readFile("migrations/current/001.sql", "utf8")).toEqual( 114 | (commitMessage ? `--! Message: ${commitMessage}\n\n` : "") + 115 | MIGRATION_MULTIFILE_FILES["migrations/current"]["001.sql"].trim() + 116 | "\n", 117 | ); 118 | expect( 119 | await fsp.readFile("migrations/current/002-two.sql", "utf8"), 120 | ).toEqual( 121 | MIGRATION_MULTIFILE_FILES["migrations/links/two.sql"].trim() + "\n", 122 | ); 123 | expect(await fsp.readFile("migrations/current/003.sql", "utf8")).toEqual( 124 | MIGRATION_MULTIFILE_FILES["migrations/current"]["003.sql"].trim() + 125 | "\n", 126 | ); 127 | 128 | await commit(settings); 129 | expect( 130 | await fsp.readFile( 131 | `migrations/committed/000001${commitMessageSlug}.sql`, 132 | "utf8", 133 | ), 134 | ).toEqual(MIGRATION_1_COMMITTED); 135 | expect( 136 | await fsp.readFile( 137 | `migrations/committed/000002${commitMessageSlug}.sql`, 138 | "utf8", 139 | ), 140 | ).toEqual(MIGRATION_MULTIFILE_COMMITTED); 141 | }); 142 | }, 143 | ); 144 | -------------------------------------------------------------------------------- /src/commands/_common.ts: -------------------------------------------------------------------------------- 1 | import { constants } from "fs"; 2 | import * as fsp from "fs/promises"; 3 | import * as JSON5 from "json5"; 4 | import { resolve } from "path"; 5 | import { parse } from "pg-connection-string"; 6 | import { pathToFileURL } from "url"; 7 | 8 | import { Settings } from "../settings"; 9 | 10 | export const DEFAULT_GMRC_PATH = `${process.cwd()}/.gmrc`; 11 | export const DEFAULT_GMRCJS_PATH = `${DEFAULT_GMRC_PATH}.js`; 12 | export const DEFAULT_GMRC_COMMONJS_PATH = `${DEFAULT_GMRC_PATH}.cjs`; 13 | 14 | /** 15 | * Represents the option flags that are valid for all commands (see 16 | * src/cli.ts). 
17 | */ 18 | export interface CommonArgv { 19 | /** 20 | * Optional path to the gmrc file. 21 | */ 22 | config?: string; 23 | } 24 | 25 | export async function exists(path: string): Promise<boolean> { 26 | try { 27 | await fsp.access(path, constants.F_OK /* visible to us */); 28 | return true; 29 | } catch (e) { 30 | return false; 31 | } 32 | } 33 | 34 | export async function getSettingsFromJSON(path: string): Promise<Settings> { 35 | let data; 36 | try { 37 | data = await fsp.readFile(path, "utf8"); 38 | } catch (e) { 39 | throw new Error( 40 | `Failed to read '${path}': ${e instanceof Error ? e.message : String(e)}`, 41 | ); 42 | } 43 | try { 44 | return JSON5.parse(data); 45 | } catch (e) { 46 | throw new Error( 47 | `Failed to parse '${path}': ${e instanceof Error ? e.message : String(e)}`, 48 | ); 49 | } 50 | } 51 | 52 | /** 53 | * Options passed to the getSettings function. 54 | */ 55 | interface Options { 56 | /** 57 | * Optional path to the gmrc config path to use; if not provided we'll fall 58 | * back to `./.gmrc` and `./.gmrc.js`. 59 | * 60 | * This must be the full path, including extension. If the extension is `.js` 61 | * then we'll use `require` to import it, otherwise we'll read it as JSON5. 62 | */ 63 | configFile?: string; 64 | } 65 | 66 | /** 67 | * Gets the raw settings from the relevant .gmrc file. Does *not* validate the 68 | * settings - the result of this call should not be trusted. Pass the result of 69 | * this function to `parseSettings` to get validated settings. 70 | */ 71 | export async function getSettings(options: Options = {}): Promise<Settings> { 72 | const { configFile } = options; 73 | const tryRequire = async (path: string): Promise<Settings> => { 74 | // If the file is e.g. `foo.js` then Node `require('foo.js')` would look in 75 | // `node_modules`; we don't want this - instead force it to be a relative 76 | // path. 77 | const relativePath = pathToFileURL(resolve(process.cwd(), path)).href; 78 | 79 | try { 80 | const module = (await import(relativePath)) as Record<string, unknown>; 81 | return (module.default ?? module) as Settings; 82 | } catch (e) { 83 | throw new Error( 84 | `Failed to import '${relativePath}'; error:\n ${ 85 | e instanceof Error && e.stack 86 | ? e.stack.replace(/\n/g, "\n ") 87 | : String(e) 88 | }`, 89 | ); 90 | } 91 | }; 92 | 93 | if (configFile != null) { 94 | if (!(await exists(configFile))) { 95 | throw new Error(`Failed to import '${configFile}': file not found`); 96 | } 97 | 98 | if (configFile.endsWith(".mjs")) { 99 | throw new Error( 100 | `ES module imports aren't currently supported, change your config extension to .cjs.`, 101 | ); 102 | } 103 | 104 | if (configFile.endsWith(".js") || configFile.endsWith(".cjs")) { 105 | return tryRequire(configFile); 106 | } else { 107 | return await getSettingsFromJSON(configFile); 108 | } 109 | } else if (await exists(DEFAULT_GMRC_PATH)) { 110 | return await getSettingsFromJSON(DEFAULT_GMRC_PATH); 111 | } else if (await exists(DEFAULT_GMRCJS_PATH)) { 112 | return tryRequire(DEFAULT_GMRCJS_PATH); 113 | } else if (await exists(DEFAULT_GMRC_COMMONJS_PATH)) { 114 | return tryRequire(DEFAULT_GMRC_COMMONJS_PATH); 115 | } else { 116 | throw new Error( 117 | "No .gmrc file found; please run `graphile-migrate init` first.", 118 | ); 119 | } 120 | } 121 | 122 | export function readStdin(): Promise<string> { 123 | return new Promise((resolve, reject) => { 124 | let data = ""; 125 | process.stdin.setEncoding("utf8"); 126 | 127 | process.stdin.on("error", reject); 128 | process.stdin.on("readable", () => { 129 | let chunk; 130 | // Use a loop to make sure we read all available data. 131 | while ((chunk = process.stdin.read() as string | null) !== null) { 132 | data += chunk; 133 | } 134 | }); 135 | 136 | process.stdin.on("end", () => { 137 | resolve(data); 138 | }); 139 | }); 140 | } 141 | 142 | export function getDatabaseName(connectionString: string): string { 143 | const databaseName = parse(connectionString).database; 144 | if (!databaseName) { 145 | throw new Error( 146 | "Could not determine database name from connection string.", 147 | ); 148 | } 149 | return databaseName; 150 | } 151 | --------------------------------------------------------------------------------
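Resolution order when no `--config` flag is given: `.gmrc` (parsed as JSON5), then `.gmrc.js`, then `.gmrc.cjs` (loaded as modules); if none exist the error tells you to run `graphile-migrate init`. Usage of the helper above, with a hypothetical config path for illustration:

```ts
import { getSettings } from "./src/commands/_common";

// Default lookup: .gmrc → .gmrc.js → .gmrc.cjs in the working directory.
const settings = await getSettings();

// Explicit path: the extension picks the loader (.js/.cjs via import(),
// anything else via JSON5); .mjs is rejected outright.
const staged = await getSettings({ configFile: "config/staging.gmrc" }); // hypothetical path
```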
/src/commands/commit.ts: -------------------------------------------------------------------------------- 1 | import pgMinify = require("pg-minify"); 2 | import * as fsp from "fs/promises"; 3 | import { CommandModule } from "yargs"; 4 | 5 | import { 6 | getCurrentMigrationLocation, 7 | readCurrentMigration, 8 | writeCurrentMigration, 9 | } from "../current"; 10 | import { calculateHash } from "../hash"; 11 | import { logDbError } from "../instrumentation"; 12 | import { 13 | getAllMigrations, 14 | isMigrationFilename, 15 | parseMigrationText, 16 | serializeMigration, 17 | } from "../migration"; 18 | import { ParsedSettings, parseSettings, Settings } from "../settings"; 19 | import { sluggify } from "../sluggify"; 20 | import { CommonArgv, getSettings } from "./_common"; 21 | import { _migrate } from "./migrate"; 22 | import { _reset } from "./reset"; 23 | 24 | interface CommitArgv extends CommonArgv { 25 | message?: string; 26 | } 27 | 28 | function omit<T extends object, K extends keyof T>( 29 | obj: T, 30 | keys: K[], 31 | ): Omit<T, K> { 32 | const newObject = { ...obj }; 33 | for (const key of keys) { 34 | delete newObject[key]; 35 | } 36 | return newObject; 37 | } 38 | 39 | export async function _commit( 40 | parsedSettings: ParsedSettings, 41 | messageOverride: string | null | undefined = undefined, 42 | ): Promise<void> { 43 | const { migrationsFolder } = parsedSettings; 44 | 45 | const currentLocation = await getCurrentMigrationLocation(parsedSettings); 46 | const contents = await readCurrentMigration(parsedSettings, currentLocation); 47 | 48 | const committedMigrationsFolder = `${migrationsFolder}/committed`; 49 | const allMigrations = await getAllMigrations(parsedSettings); 50 | const lastMigration = allMigrations[allMigrations.length - 1]; 51 | const newMigrationNumber = lastMigration 52 | ? parseInt(lastMigration.filename, 10) + 1 53 | : 1; 54 | if (Number.isNaN(newMigrationNumber)) { 55 | throw new Error("Could not determine next migration number"); 56 | } 57 | 58 | const { headers, body } = parseMigrationText( 59 | currentLocation.path, 60 | contents, 61 | false, 62 | ); 63 | const messageFromComment = headers.Message; 64 | 65 | const message = 66 | messageOverride !== undefined ? messageOverride : messageFromComment; 67 | 68 | if (message && /[\r\n\0\b\v\f\cA-\cZ]/u.test(message)) { 69 | throw new Error("Invalid commit message: contains disallowed characters"); 70 | } 71 | if (message && message.length > 512) { 72 | throw new Error( 73 | "Invalid commit message: message is too long (max: 512 chars)", 74 | ); 75 | } 76 | 77 | const sluggifiedMessage = message ? sluggify(message) : null; 78 | 79 | const newMigrationFilename = 80 | String(newMigrationNumber).padStart(6, "0") + 81 | (sluggifiedMessage ? `-${sluggifiedMessage}` : "") + 82 | ".sql"; 83 | if (!isMigrationFilename(newMigrationFilename)) { 84 | throw Error("Could not construct migration filename"); 85 | } 86 | const minifiedBody = pgMinify(body); 87 | if (minifiedBody === "") { 88 | throw new Error("Current migration is blank."); 89 | } 90 | 91 | const hash = calculateHash(body, lastMigration && lastMigration.hash); 92 | const finalBody = serializeMigration(body, { 93 | Previous: lastMigration ? lastMigration.hash : "-", 94 | Hash: hash, 95 | Message: message ? message : undefined, 96 | ...omit(headers, ["Previous", "Hash", "Message"]), 97 | }); 98 | await _reset(parsedSettings, true); 99 | const newMigrationFilepath = `${committedMigrationsFolder}/${newMigrationFilename}`; 100 | await fsp.writeFile(newMigrationFilepath, finalBody); 101 | await fsp.chmod(newMigrationFilepath, "440"); 102 | 103 | parsedSettings.logger.info( 104 | `graphile-migrate: New migration '${newMigrationFilename}' created`, 105 | ); 106 | try { 107 | await _migrate(parsedSettings, true); 108 | await _migrate(parsedSettings); 109 | await writeCurrentMigration( 110 | parsedSettings, 111 | currentLocation, 112 | parsedSettings.blankMigrationContent.trim() + "\n", 113 | ); 114 | } catch (err) { 115 | const e = err instanceof Error ? err : new Error(String(err)); 116 | 117 | logDbError(parsedSettings, e); 118 | 119 | parsedSettings.logger.error("ABORTING..."); 120 | await writeCurrentMigration(parsedSettings, currentLocation, body); 121 | await fsp.unlink(newMigrationFilepath); 122 | parsedSettings.logger.error("ABORTED AND ROLLED BACK"); 123 | throw e; 124 | } 125 | } 126 | 127 | export async function commit( 128 | settings: Settings, 129 | message?: string | null, 130 | ): Promise<void> { 131 | const parsedSettings = await parseSettings(settings, true); 132 | return _commit(parsedSettings, message); 133 | } 134 | 135 | export const commitCommand: CommandModule<Record<string, never>, CommitArgv> = { 136 | command: "commit", 137 | aliases: [], 138 | describe: 139 | "Commits the current migration into the `committed/` folder, resetting the current migration. Resets the shadow database.", 140 | builder: { 141 | message: { 142 | type: "string", 143 | alias: ["m"], 144 | description: 145 | "Optional commit message to label migration, must not contain newlines.", 146 | nargs: 1, 147 | }, 148 | }, 149 | handler: async (argv) => { 150 | if (argv.message !== undefined && !argv.message) { 151 | throw new Error("Missing or empty commit message after --message flag"); 152 | } 153 | await commit(await getSettings({ configFile: argv.config }), argv.message); 154 | }, 155 | }; 156 | --------------------------------------------------------------------------------
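End to end, `_commit` derives the next migration number, validates and sluggifies the message, chains the hash to the previous migration, resets and replays the shadow database, writes the new file read-only (mode 440), and only blanks the current migration once both databases have accepted it; on failure it restores the current migration and deletes the new file. Typical use, as exercised by the tests:

```ts
import { commit } from "graphile-migrate";

// Produces e.g. committed/000002-add-users-table.sql with Previous/Hash
// headers chained to migration 000001.
await commit({ connectionString: process.env.DATABASE_URL }, "Add users table");
```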
/__tests__/manageGraphileMigrateSchema.test.ts: -------------------------------------------------------------------------------- 1 | jest.mock("child_process"); 2 | jest.mock("../src/migration"); 3 | 4 | import "./helpers"; // Has side-effects; must come first 5 | 6 | import { Pool, PoolClient } from "pg"; 7 | 8 | import { _makeCurrentMigrationRunner, _watch } from "../src/commands/watch"; 9 | import { _migrateMigrationSchema } from "../src/migration"; 10 | import { ParsedSettings, parseSettings } from "../src/settings"; 11 | import { resetDb, TEST_DATABASE_URL } from "./helpers"; 12 | 13 | beforeEach(resetDb); 14 | 15 | async function withClient<T>( 16 | parsedSettings: ParsedSettings, 17 | cb: (client: PoolClient) => Promise<T>, 18 | ): Promise<T> { 19 | const pool = new Pool({ 20 | connectionString: parsedSettings.connectionString, 21 | max: 1, 22 | }); 23 | 24 | try { 25 | const client = await pool.connect(); 26 | try { 27 | return await cb(client); 28 | } finally { 29 | client.release(); 30 | } 31 | } finally { 32 | pool.end(); 33 | } 34 | } 35 | async function getError(initialSchema = ""): Promise<Error | null> { 36 | const parsedSettings = await parseSettings({ 37 | connectionString: TEST_DATABASE_URL, 38 | manageGraphileMigrateSchema: false, 39 | }); 40 | return withClient(parsedSettings, async (client) => { 41 | if (initialSchema) { 42 | await client.query(initialSchema); 43 | } 44 | let error: Error | null = null; 45 | try { 46 | await _migrateMigrationSchema(client, parsedSettings); 47 | } catch (e) { 48 | error = e instanceof Error ?
e : new Error(String(e)); 49 | } 50 | return error; 51 | }); 52 | } 53 | 54 | describe("manageGraphileMigrateSchema = false", () => { 55 | it("throws error if we set the option to something strange", async () => { 56 | let error; 57 | try { 58 | await parseSettings({ 59 | connectionString: TEST_DATABASE_URL, 60 | // @ts-ignore Deliberate error - that's what we're testing 61 | manageGraphileMigrateSchema: "false", 62 | }); 63 | } catch (e) { 64 | error = e; 65 | } 66 | expect(error).toBeTruthy(); 67 | expect(error).toMatchInlineSnapshot(` 68 | [Error: Errors occurred during settings validation: 69 | - Setting 'manageGraphileMigrateSchema': Expected boolean, received 'string'] 70 | `); 71 | }); 72 | 73 | it("throws error if schema doesn't exist", async () => { 74 | const error = await getError(); 75 | expect(error).toBeTruthy(); 76 | expect(error).toMatchInlineSnapshot( 77 | `[Error: You've set manageGraphileMigrateSchema to false, but have not installed our database schema - we cannot continue.]`, 78 | ); 79 | }); 80 | 81 | it("throws error if schema exists but is empty", async () => { 82 | const error = await getError(` 83 | create schema graphile_migrate; 84 | `); 85 | expect(error).toBeTruthy(); 86 | expect(error).toMatchInlineSnapshot( 87 | `[Error: You've set manageGraphileMigrateSchema to false, but the 'graphile_migrate.migrations' table couldn't be found - we cannot continue.]`, 88 | ); 89 | }); 90 | 91 | it("throws error if schema exists but doesn't contain one of the tables", async () => { 92 | const error = await getError(` 93 | create schema graphile_migrate; 94 | 95 | create table if not exists graphile_migrate.migrations ( 96 | hash text primary key, 97 | previous_hash text references graphile_migrate.migrations, 98 | filename text not null, 99 | date timestamptz not null default now() 100 | ); 101 | `); 102 | expect(error).toBeTruthy(); 103 | expect(error).toMatchInlineSnapshot( 104 | `[Error: You've set manageGraphileMigrateSchema to false, but the 'graphile_migrate.current' table couldn't be found - we cannot continue.]`, 105 | ); 106 | }); 107 | 108 | it("throws error if schema exists but one of the tables has the wrong number of columns", async () => { 109 | const error = await getError(` 110 | create schema graphile_migrate; 111 | 112 | create table if not exists graphile_migrate.migrations ( 113 | hash text primary key, 114 | previous_hash text references graphile_migrate.migrations, 115 | filename text not null 116 | -- DELETED LINE 117 | ); 118 | 119 | create table if not exists graphile_migrate.current ( 120 | filename text primary key default 'current.sql', 121 | content text not null, 122 | date timestamptz not null default now() 123 | ); 124 | `); 125 | expect(error).toBeTruthy(); 126 | expect(error).toMatchInlineSnapshot( 127 | `[Error: You've set manageGraphileMigrateSchema to false, but the 'graphile_migrate.migrations' table has the wrong number of columns (3 != 4) - we cannot continue.]`, 128 | ); 129 | }); 130 | 131 | it("succeeds if everything is fine", async () => { 132 | const error = await getError(` 133 | create schema if not exists graphile_migrate; 134 | 135 | create table if not exists graphile_migrate.migrations ( 136 | hash text primary key, 137 | previous_hash text references graphile_migrate.migrations, 138 | filename text not null, 139 | date timestamptz not null default now() 140 | ); 141 | 142 | create table if not exists graphile_migrate.current ( 143 | filename text primary key default 'current.sql', 144 | content text not null, 145 | date 
timestamptz not null default now() 146 | ); 147 | `); 148 | expect(error).toBeFalsy(); 149 | }); 150 | }); 151 | -------------------------------------------------------------------------------- /__tests__/commit.test.ts: -------------------------------------------------------------------------------- 1 | import "./helpers"; // Has side-effects; must come first 2 | 3 | import * as fsp from "fs/promises"; 4 | import mockFs from "mock-fs"; 5 | 6 | import { commit } from "../src"; 7 | import { sluggify } from "../src/sluggify"; 8 | import { makeMigrations, resetDb, settings } from "./helpers"; 9 | 10 | beforeEach(resetDb); 11 | beforeEach(async () => { 12 | mockFs({ migrations: mockFs.directory() }); 13 | }); 14 | afterEach(() => { 15 | mockFs.restore(); 16 | }); 17 | 18 | it("aborts if current.sql is empty", async () => { 19 | mockFs({ 20 | "migrations/current.sql": "-- JUST A COMMENT\n", 21 | }); 22 | 23 | const promise = commit(settings); 24 | await promise.catch(() => {}); 25 | 26 | mockFs.restore(); 27 | expect(promise).rejects.toMatchInlineSnapshot( 28 | `[Error: Current migration is blank.]`, 29 | ); 30 | }); 31 | 32 | describe.each([[undefined], ["My Commit Message"]])( 33 | "commit message '%s'", 34 | (commitMessage) => { 35 | const commitMessageSlug = commitMessage 36 | ? `-${sluggify(commitMessage)}` 37 | : ``; 38 | const { 39 | MIGRATION_1_TEXT, 40 | MIGRATION_1_COMMITTED, 41 | MIGRATION_2_TEXT, 42 | MIGRATION_2_COMMITTED, 43 | MIGRATION_ENUM_COMMITTED, 44 | MIGRATION_NOTRX_TEXT, 45 | MIGRATION_NOTRX_COMMITTED, 46 | MIGRATION_MULTIFILE_COMMITTED, 47 | MIGRATION_MULTIFILE_FILES, 48 | } = makeMigrations(commitMessage); 49 | 50 | it("can commit the first migration", async () => { 51 | mockFs({ 52 | "migrations/current.sql": MIGRATION_1_TEXT, 53 | }); 54 | 55 | await commit(settings, commitMessage); 56 | expect( 57 | await fsp.readFile( 58 | `migrations/committed/000001${commitMessageSlug}.sql`, 59 | "utf8", 60 | ), 61 | ).toEqual(MIGRATION_1_COMMITTED); 62 | }); 63 | 64 | it("can commit the second migration", async () => { 65 | mockFs({ 66 | [`migrations/committed/000001${commitMessageSlug}.sql`]: 67 | MIGRATION_1_COMMITTED, 68 | "migrations/current.sql": MIGRATION_2_TEXT, 69 | }); 70 | 71 | await commit(settings, commitMessage); 72 | expect( 73 | await fsp.readFile( 74 | `migrations/committed/000001${commitMessageSlug}.sql`, 75 | "utf8", 76 | ), 77 | ).toEqual(MIGRATION_1_COMMITTED); 78 | expect( 79 | await fsp.readFile( 80 | `migrations/committed/000002${commitMessageSlug}.sql`, 81 | "utf8", 82 | ), 83 | ).toEqual(MIGRATION_2_COMMITTED); 84 | const stat = await fsp.stat( 85 | `migrations/committed/000002${commitMessageSlug}.sql`, 86 | ); 87 | expect(stat.mode & 0o777).toEqual(0o440); 88 | }); 89 | 90 | it("can execute a --! 
no-transaction migration", async () => { 91 | mockFs({ 92 | [`migrations/committed/000001${commitMessageSlug}.sql`]: 93 | MIGRATION_1_COMMITTED, 94 | [`migrations/committed/000002${commitMessageSlug}.sql`]: 95 | MIGRATION_ENUM_COMMITTED, 96 | "migrations/current.sql": MIGRATION_NOTRX_TEXT, 97 | }); 98 | 99 | await commit(settings, commitMessage); 100 | expect( 101 | await fsp.readFile( 102 | `migrations/committed/000001${commitMessageSlug}.sql`, 103 | "utf8", 104 | ), 105 | ).toEqual(MIGRATION_1_COMMITTED); 106 | expect( 107 | await fsp.readFile( 108 | `migrations/committed/000002${commitMessageSlug}.sql`, 109 | "utf8", 110 | ), 111 | ).toEqual(MIGRATION_ENUM_COMMITTED); 112 | expect( 113 | await fsp.readFile( 114 | `migrations/committed/000003${commitMessageSlug}.sql`, 115 | "utf8", 116 | ), 117 | ).toEqual(MIGRATION_NOTRX_COMMITTED); 118 | }); 119 | 120 | it("can commit multi-file migration", async () => { 121 | mockFs({ 122 | [`migrations/committed/000001${commitMessageSlug}.sql`]: 123 | MIGRATION_1_COMMITTED, 124 | ...MIGRATION_MULTIFILE_FILES, 125 | }); 126 | 127 | await commit(settings, commitMessage); 128 | expect( 129 | await fsp.readFile( 130 | `migrations/committed/000001${commitMessageSlug}.sql`, 131 | "utf8", 132 | ), 133 | ).toEqual(MIGRATION_1_COMMITTED); 134 | expect( 135 | await fsp.readFile( 136 | `migrations/committed/000002${commitMessageSlug}.sql`, 137 | "utf8", 138 | ), 139 | ).toEqual(MIGRATION_MULTIFILE_COMMITTED); 140 | }); 141 | 142 | it("throws on invalid message", async () => { 143 | mockFs({ 144 | [`migrations/committed/000001${commitMessageSlug}.sql`]: 145 | MIGRATION_1_COMMITTED, 146 | ...MIGRATION_MULTIFILE_FILES, 147 | }); 148 | 149 | const promise = commit( 150 | settings, 151 | "This message contains\na newline character", 152 | ); 153 | await expect(promise).rejects.toThrow("Invalid commit message"); 154 | }); 155 | 156 | it("throws on --!no-transaction in multifile", async () => { 157 | mockFs({ 158 | [`migrations/committed/000001${commitMessageSlug}.sql`]: 159 | MIGRATION_1_COMMITTED, 160 | "migrations/current": { 161 | "001.sql": "--! no-transaction\nSELECT 1;", 162 | }, 163 | }); 164 | 165 | const promise = commit(settings); 166 | await expect(promise).rejects.toThrow( 167 | "cannot use '--! 
no-transaction' with 'current/'", 168 | ); 169 | }); 170 | }, 171 | ); 172 | -------------------------------------------------------------------------------- /__tests__/__snapshots__/settings.test.ts.snap: -------------------------------------------------------------------------------- 1 | // Jest Snapshot v1, https://goo.gl/fbAQLP 2 | 3 | exports[`actions is backwards-compatible with untagged command specs 1`] = ` 4 | { 5 | "afterAllMigrations": [ 6 | { 7 | "_": "sql", 8 | "file": "foo.sql", 9 | }, 10 | { 11 | "_": "sql", 12 | "file": "bar.sql", 13 | }, 14 | { 15 | "_": "command", 16 | "command": "pg_dump --schema-only", 17 | }, 18 | { 19 | "_": "command", 20 | "command": "graphile-worker --once", 21 | }, 22 | ], 23 | "afterCurrent": [], 24 | "afterReset": [], 25 | "beforeAllMigrations": [], 26 | "beforeCurrent": [], 27 | "beforeReset": [], 28 | "blankMigrationContent": "-- Enter migration here 29 | ", 30 | "connectionString": "postgres://localhost:5432/dbname?ssl=true", 31 | "databaseName": "dbname", 32 | "databaseOwner": "dbname", 33 | "logger": Logger { 34 | "_logFactory": [Function], 35 | "_scope": {}, 36 | "log": [Function], 37 | }, 38 | "manageGraphileMigrateSchema": true, 39 | "migrationsFolder": "./migrations", 40 | "placeholders": undefined, 41 | "rootConnectionString": "postgres:///template1", 42 | "shadowConnectionString": undefined, 43 | "shadowDatabaseName": undefined, 44 | } 45 | `; 46 | 47 | exports[`actions parses SQL actions 1`] = ` 48 | { 49 | "afterAllMigrations": [ 50 | { 51 | "_": "sql", 52 | "file": "bar.sql", 53 | }, 54 | { 55 | "_": "sql", 56 | "file": "baz.sql", 57 | }, 58 | ], 59 | "afterCurrent": [], 60 | "afterReset": [ 61 | { 62 | "_": "sql", 63 | "file": "foo.sql", 64 | }, 65 | ], 66 | "beforeAllMigrations": [], 67 | "beforeCurrent": [], 68 | "beforeReset": [], 69 | "blankMigrationContent": "-- Enter migration here 70 | ", 71 | "connectionString": "postgres://localhost:5432/dbname?ssl=true", 72 | "databaseName": "dbname", 73 | "databaseOwner": "dbname", 74 | "logger": Logger { 75 | "_logFactory": [Function], 76 | "_scope": {}, 77 | "log": [Function], 78 | }, 79 | "manageGraphileMigrateSchema": true, 80 | "migrationsFolder": "./migrations", 81 | "placeholders": undefined, 82 | "rootConnectionString": "postgres:///template1", 83 | "shadowConnectionString": undefined, 84 | "shadowDatabaseName": undefined, 85 | } 86 | `; 87 | 88 | exports[`actions parses command actions 1`] = ` 89 | { 90 | "afterAllMigrations": [ 91 | { 92 | "_": "command", 93 | "command": "pg_dump --schema-only", 94 | }, 95 | { 96 | "_": "command", 97 | "command": "graphile-worker --once", 98 | }, 99 | ], 100 | "afterCurrent": [], 101 | "afterReset": [], 102 | "beforeAllMigrations": [], 103 | "beforeCurrent": [], 104 | "beforeReset": [], 105 | "blankMigrationContent": "-- Enter migration here 106 | ", 107 | "connectionString": "postgres://localhost:5432/dbname?ssl=true", 108 | "databaseName": "dbname", 109 | "databaseOwner": "dbname", 110 | "logger": Logger { 111 | "_logFactory": [Function], 112 | "_scope": {}, 113 | "log": [Function], 114 | }, 115 | "manageGraphileMigrateSchema": true, 116 | "migrationsFolder": "./migrations", 117 | "placeholders": undefined, 118 | "rootConnectionString": "postgres:///template1", 119 | "shadowConnectionString": undefined, 120 | "shadowDatabaseName": undefined, 121 | } 122 | `; 123 | 124 | exports[`actions parses mixed actions 1`] = ` 125 | { 126 | "afterAllMigrations": [ 127 | { 128 | "_": "sql", 129 | "file": "foo.sql", 130 | }, 131 | { 132 | "_": "sql", 133 | 
"file": "bar.sql", 134 | }, 135 | { 136 | "_": "command", 137 | "command": "pg_dump --schema-only", 138 | }, 139 | { 140 | "_": "command", 141 | "command": "graphile-worker --once", 142 | }, 143 | ], 144 | "afterCurrent": [], 145 | "afterReset": [], 146 | "beforeAllMigrations": [], 147 | "beforeCurrent": [], 148 | "beforeReset": [], 149 | "blankMigrationContent": "-- Enter migration here 150 | ", 151 | "connectionString": "postgres://localhost:5432/dbname?ssl=true", 152 | "databaseName": "dbname", 153 | "databaseOwner": "dbname", 154 | "logger": Logger { 155 | "_logFactory": [Function], 156 | "_scope": {}, 157 | "log": [Function], 158 | }, 159 | "manageGraphileMigrateSchema": true, 160 | "migrationsFolder": "./migrations", 161 | "placeholders": undefined, 162 | "rootConnectionString": "postgres:///template1", 163 | "shadowConnectionString": undefined, 164 | "shadowDatabaseName": undefined, 165 | } 166 | `; 167 | 168 | exports[`actions parses string values into SQL actions 1`] = ` 169 | { 170 | "afterAllMigrations": [ 171 | { 172 | "_": "sql", 173 | "file": "bar.sql", 174 | }, 175 | { 176 | "_": "sql", 177 | "file": "baz.sql", 178 | }, 179 | ], 180 | "afterCurrent": [], 181 | "afterReset": [ 182 | { 183 | "_": "sql", 184 | "file": "foo.sql", 185 | }, 186 | ], 187 | "beforeAllMigrations": [], 188 | "beforeCurrent": [], 189 | "beforeReset": [], 190 | "blankMigrationContent": "-- Enter migration here 191 | ", 192 | "connectionString": "postgres://localhost:5432/dbname?ssl=true", 193 | "databaseName": "dbname", 194 | "databaseOwner": "dbname", 195 | "logger": Logger { 196 | "_logFactory": [Function], 197 | "_scope": {}, 198 | "log": [Function], 199 | }, 200 | "manageGraphileMigrateSchema": true, 201 | "migrationsFolder": "./migrations", 202 | "placeholders": undefined, 203 | "rootConnectionString": "postgres:///template1", 204 | "shadowConnectionString": undefined, 205 | "shadowDatabaseName": undefined, 206 | } 207 | `; 208 | -------------------------------------------------------------------------------- /src/pgReal.ts: -------------------------------------------------------------------------------- 1 | import { Pool, PoolClient } from "pg"; 2 | import { parse } from "pg-connection-string"; 3 | 4 | import { ParsedSettings } from "./settings"; 5 | 6 | type PoolOrMockClient = PoolClient & { __isMockClient?: boolean }; 7 | 8 | export interface Context { 9 | database: string; 10 | } 11 | 12 | /** 13 | * For efficiency, we keep pools around for a period of time after they were 14 | * last "released" so we don't have to keep re-creating them. This value 15 | * chooses this time (in milliseconds). Note: clean exit will be delayed by 16 | * this duration. 
17 |  */
18 | const POOL_KEEPALIVE = 200;
19 | 
20 | interface PoolDetails {
21 |   pool: Pool;
22 |   database: string;
23 |   referenceCount: number;
24 |   release(): void;
25 | }
26 | interface PoolDetailsInternal extends PoolDetails {
27 |   _reference(): void;
28 |   _reallyRelease(): void;
29 |   _timer: NodeJS.Timeout | undefined;
30 | }
31 | const poolDetailsByConnectionString = new Map<string, PoolDetailsInternal>();
32 | 
33 | export function clearAllPools(): void {
34 |   for (const details of poolDetailsByConnectionString.values()) {
35 |     if (details.referenceCount === 0) {
36 |       details._reallyRelease();
37 |     }
38 |   }
39 | }
40 | 
41 | function getPoolDetailsFromConnectionString(
42 |   { logger }: ParsedSettings,
43 |   connectionString: string,
44 | ): PoolDetails {
45 |   let details: PoolDetailsInternal | undefined =
46 |     poolDetailsByConnectionString.get(connectionString);
47 |   if (!details) {
48 |     const { database } = parse(connectionString);
49 |     if (!database) {
50 |       throw new Error("Connection string does not specify a database");
51 |     }
52 |     const pool = new Pool({ connectionString });
53 |     pool.on("error", (error: Error) => {
54 |       logger.error(`An error occurred in the PgPool: ${error.message}`, {
55 |         error,
56 |       });
57 |       process.exit(1);
58 |     });
59 | 
60 |     // We don't want someone else ending our pool; override the end method.
61 |     const end = pool.end;
62 |     pool.end = (): never => {
63 |       throw new Error(
64 |         "You must not call .end() on this pool! Release the pool detail instead",
65 |       );
66 |     };
67 | 
68 |     details = {
69 |       pool,
70 |       database,
71 |       referenceCount: 0,
72 |       release(): void {
73 |         this.referenceCount--;
74 |         if (this.referenceCount === 0) {
75 |           this._timer = setTimeout(this._reallyRelease, POOL_KEEPALIVE);
76 |         }
77 |       },
78 |       _timer: undefined,
79 |       _reference(): void {
80 |         clearTimeout(this._timer);
81 |         this._timer = undefined;
82 |         this.referenceCount++;
83 |       },
84 |       _reallyRelease(): void {
85 |         clearTimeout(this._timer);
86 |         this._timer = undefined;
87 |         pool.end = end;
88 |         pool.end().catch((e) => {
89 |           // eslint-disable-next-line no-console
90 |           console.error("Error occurred whilst releasing pool:");
91 |           // eslint-disable-next-line no-console
92 |           console.dir(e);
93 |         });
94 |         poolDetailsByConnectionString.delete(connectionString);
95 |       },
96 |     };
97 |     poolDetailsByConnectionString.set(connectionString, details);
98 |   }
99 |   details._reference();
100 |   return details;
101 | }
102 | 
103 | export type Client = PoolClient;
104 | export async function withClient<T>(
105 |   connectionString: string,
106 |   parsedSettings: ParsedSettings,
107 |   callback: (pgClient: PoolClient, context: Context) => Promise<T>,
108 | ): Promise<T> {
109 |   const details = getPoolDetailsFromConnectionString(
110 |     parsedSettings,
111 |     connectionString,
112 |   );
113 |   const { pool: pgPool, database } = details;
114 |   try {
115 |     const pgClient = await pgPool.connect();
116 |     try {
117 |       if (parsedSettings.pgSettings) {
118 |         const sqlFragments = [];
119 |         const sqlValues = [];
120 |         for (const [key, value] of Object.entries(parsedSettings.pgSettings)) {
121 |           sqlValues.push(key, value);
122 |           sqlFragments.push(
123 |             `pg_catalog.set_config($${sqlValues.length - 1}::text, $${
124 |               sqlValues.length
125 |             }::text, false)`,
126 |           );
127 |         }
128 |         if (sqlFragments.length) {
129 |           await pgClient.query({
130 |             text: `select ${sqlFragments.join(", ")}`,
131 |             values: sqlValues,
132 |           });
133 |         }
134 |       }
135 |       const context: Context = {
136 |         database,
137 |       };
138 |       return await callback(pgClient, context);
139 |     } finally {
140 |       await Promise.resolve(pgClient.release());
141 |     }
142 |   } finally {
143 |     details.release();
144 |   }
145 | }
146 | 
147 | const ADVISORY_LOCK_MIGRATE =
148 |   "4727445306447283"; /* `GRAPHILE MIGRATE` on phone keypad */
149 | export async function withAdvisoryLock<T>(
150 |   pgClient: PoolOrMockClient,
151 |   callback: (pgClient: PoolClient) => Promise<T>,
152 | ): Promise<T> {
153 |   if (pgClient["__isMockClient"]) {
154 |     return callback(pgClient);
155 |   }
156 |   const {
157 |     rows: [{ locked }],
158 |   } = await pgClient.query<{ locked: boolean }>(
159 |     "select pg_try_advisory_lock($1) as locked",
160 |     [ADVISORY_LOCK_MIGRATE],
161 |   );
162 |   if (!locked) {
163 |     throw new Error("Failed to get exclusive lock");
164 |   }
165 |   try {
166 |     return await callback(pgClient);
167 |   } finally {
168 |     await pgClient.query("select pg_advisory_unlock($1)", [
169 |       ADVISORY_LOCK_MIGRATE,
170 |     ]);
171 |   }
172 | }
173 | 
174 | export async function withTransaction<T>(
175 |   pgClient: PoolClient,
176 |   callback: () => Promise<T>,
177 | ): Promise<T> {
178 |   await pgClient.query("begin");
179 |   try {
180 |     const result = await callback();
181 |     await pgClient.query("commit");
182 |     return result;
183 |   } catch (e) {
184 |     await pgClient.query("rollback");
185 |     throw e;
186 |   }
187 | }
188 | 
189 | export function escapeIdentifier(str: string): string {
190 |   return '"' + str.replace(/"/g, '""') + '"';
191 | }
192 | 
--------------------------------------------------------------------------------
/__tests__/migrate.test.ts:
--------------------------------------------------------------------------------
1 | import "./helpers"; // Has side-effects; must come first
2 | 
3 | import mockFs from "mock-fs";
4 | 
5 | import { migrate } from "../src";
6 | import { logDbError } from "../src/instrumentation";
7 | import { withClient } from "../src/pg";
8 | import { ParsedSettings, parseSettings } from "../src/settings";
9 | import { makeMigrations, resetDb, settings } from "./helpers";
10 | 
11 | beforeEach(resetDb);
12 | beforeEach(async () => {
13 |   mockFs({ migrations: mockFs.directory() });
14 | });
15 | afterEach(() => {
16 |   mockFs.restore();
17 | });
18 | const {
19 |   MIGRATION_1_TEXT,
20 |   MIGRATION_1_COMMITTED,
21 |   MIGRATION_ENUM_COMMITTED,
22 |   MIGRATION_NOTRX_TEXT,
23 |   MIGRATION_NOTRX_COMMITTED,
24 | } = makeMigrations();
25 | 
26 | function getStuff(parsedSettings: ParsedSettings) {
27 |   return withClient(
28 |     parsedSettings.connectionString,
29 |     parsedSettings,
30 |     async (pgClient, _context) => {
31 |       const { rows: migrations } = await pgClient.query(
32 |         "select * from graphile_migrate.migrations",
33 |       );
34 |       const { rows: tables } = await pgClient.query(
35 |         "select * from pg_class where relnamespace = 'public'::regnamespace and relkind = 'r'",
36 |       );
37 |       const { rows: enums } = await pgClient.query(
38 |         "select typname, (select count(*) from pg_enum where enumtypid = pg_type.oid) as value_count from pg_type where typnamespace = 'public'::regnamespace and typtype = 'e'",
39 |       );
40 |       return { migrations, tables, enums };
41 |     },
42 |   );
43 | }
44 | 
45 | it("runs migrations", async () => {
46 |   mockFs({
47 |     "migrations/current.sql": MIGRATION_1_TEXT,
48 |   });
49 | 
50 |   await migrate(settings);
51 | 
52 |   const parsedSettings = await parseSettings(settings);
53 | 
54 |   {
55 |     const { migrations, tables, enums } = await getStuff(parsedSettings);
56 |     expect(migrations).toHaveLength(0);
57 |     expect(tables).toHaveLength(0);
58 |     expect(enums).toHaveLength(0);
59 |   }
60 | 
61 |   mockFs({
62 |     [`migrations/committed/000001.sql`]: 
MIGRATION_1_COMMITTED, 63 | [`migrations/committed/000002.sql`]: MIGRATION_ENUM_COMMITTED, 64 | "migrations/current.sql": MIGRATION_NOTRX_TEXT, 65 | }); 66 | 67 | await migrate(settings); 68 | 69 | { 70 | const { migrations, tables, enums } = await getStuff(parsedSettings); 71 | 72 | expect(migrations).toHaveLength(2); 73 | expect(migrations.map(({ date, ...rest }) => rest)).toMatchSnapshot(); 74 | expect(tables).toHaveLength(1); 75 | expect(tables.map((t) => t.relname)).toMatchSnapshot(); 76 | expect(enums).toHaveLength(1); 77 | expect(enums).toMatchSnapshot(); 78 | } 79 | 80 | mockFs({ 81 | [`migrations/committed/000001.sql`]: MIGRATION_1_COMMITTED, 82 | [`migrations/committed/000002.sql`]: MIGRATION_ENUM_COMMITTED, 83 | [`migrations/committed/000003.sql`]: MIGRATION_NOTRX_COMMITTED, 84 | "migrations/current.sql": "", 85 | }); 86 | 87 | await migrate(settings); 88 | 89 | { 90 | const { migrations, tables, enums } = await getStuff(parsedSettings); 91 | 92 | expect(migrations).toHaveLength(3); 93 | const mappedMigrations = migrations.map(({ date, ...rest }) => rest); 94 | expect(mappedMigrations).toMatchSnapshot(); 95 | expect(tables).toHaveLength(1); 96 | const mappedTables = tables.map((t) => t.relname); 97 | expect(mappedTables).toMatchSnapshot(); 98 | expect(enums).toHaveLength(1); 99 | expect(enums).toMatchSnapshot(); 100 | } 101 | }); 102 | 103 | it("refuses to run migration with invalid hash", async () => { 104 | mockFs({ 105 | [`migrations/committed/000001.sql`]: MIGRATION_1_COMMITTED, 106 | [`migrations/committed/000002.sql`]: 107 | MIGRATION_ENUM_COMMITTED + 108 | "\ncomment on type user_role is 'this invalidates the hash';", 109 | [`migrations/committed/000003.sql`]: MIGRATION_NOTRX_COMMITTED, 110 | "migrations/current.sql": "", 111 | }); 112 | 113 | await expect(migrate(settings)).rejects.toThrowErrorMatchingSnapshot(); 114 | }); 115 | 116 | it("will run a migration with invalid hash if told to do so", async () => { 117 | const parsedSettings = await parseSettings(settings); 118 | 119 | mockFs({ 120 | [`migrations/committed/000001.sql`]: MIGRATION_1_COMMITTED, 121 | [`migrations/committed/000002.sql`]: 122 | "--! AllowInvalidHash\n" + 123 | MIGRATION_ENUM_COMMITTED + 124 | "\ncomment on type user_role is 'this invalidates the hash';", 125 | [`migrations/committed/000003.sql`]: MIGRATION_NOTRX_COMMITTED, 126 | "migrations/current.sql": "", 127 | }); 128 | 129 | await migrate(settings); 130 | 131 | { 132 | const { migrations, enums } = await getStuff(parsedSettings); 133 | 134 | expect(migrations).toHaveLength(3); 135 | const mappedMigrations = migrations.map(({ date, ...rest }) => rest); 136 | expect(mappedMigrations).toMatchSnapshot(); 137 | expect(enums).toHaveLength(1); 138 | expect(enums).toMatchSnapshot(); 139 | } 140 | }); 141 | 142 | it("handles errors during migration gracefully", async () => { 143 | mockFs({ 144 | "migrations/current.sql": ``, 145 | "migrations/committed/000001.sql": `\ 146 | --! Previous: - 147 | --! Hash: sha1:2fd4e1c67a2d28fced849ee1bb76e7391b93eb12 148 | --! 
AllowInvalidHash
149 | 
150 | drop table if exists frogs;
151 | 
152 | create table frogs (
153 |   id serial primary key,
154 |   name text not null,
155 |   speckled bool not null
156 | );
157 | 
158 | select 1/0;
159 | 
160 | comment on table frogs is 'Ribbit';
161 | `,
162 |   });
163 | 
164 |   let err: any;
165 |   try {
166 |     await migrate(settings);
167 |   } catch (e) {
168 |     err = e;
169 |   }
170 |   expect(err).toBeTruthy();
171 |   expect(err.message).toMatch(/division by zero/);
172 |   expect(err).toMatchSnapshot();
173 | 
174 |   const parsedSettings = await parseSettings(settings);
175 |   const mock = jest.fn();
176 |   parsedSettings.logger.error = mock;
177 | 
178 |   logDbError(parsedSettings, err);
179 |   expect(mock).toHaveBeenCalledTimes(1);
180 |   const call = mock.mock.calls[0];
181 |   expect(
182 |     String(call)
183 |       .replaceAll(process.cwd(), "~")
184 |       .replace(/:[0-9]+:[0-9]+($|\))/gm, ":[LINE]:[COL]$1"),
185 |   ).toMatchSnapshot();
186 | });
187 | 
--------------------------------------------------------------------------------
/__tests__/writeCurrentMigration.test.ts:
--------------------------------------------------------------------------------
1 | import "./helpers"; // Has side-effects; must come first
2 | 
3 | import * as fsp from "fs/promises";
4 | import mockFs from "mock-fs";
5 | 
6 | import {
7 |   getCurrentMigrationLocation,
8 |   writeCurrentMigration,
9 | } from "../src/current";
10 | import { ParsedSettings, parseSettings } from "../src/settings";
11 | import { TEST_DATABASE_URL } from "./helpers";
12 | 
13 | let parsedSettings: ParsedSettings;
14 | beforeEach(async () => {
15 |   mockFs({ migrations: mockFs.directory() });
16 |   parsedSettings = await parseSettings({
17 |     connectionString: TEST_DATABASE_URL,
18 |   });
19 | });
20 | afterEach(() => {
21 |   mockFs.restore();
22 | });
23 | 
24 | it("writes to current.sql if current.sql exists", async () => {
25 |   mockFs({
26 |     "migrations/current.sql": "-- TEST",
27 |   });
28 | 
29 |   const currentLocation = await getCurrentMigrationLocation(parsedSettings);
30 | 
31 |   await writeCurrentMigration(parsedSettings, currentLocation, "TEST!\n");
32 |   const content = await fsp.readFile("migrations/current.sql", "utf8");
33 |   expect(content).toEqual("TEST!\n");
34 | });
35 | 
36 | it("writes to current.sql if no current.sql exists", async () => {
37 |   await expect(fsp.stat("migrations/current.sql")).rejects.toMatchObject({
38 |     code: "ENOENT",
39 |   });
40 | 
41 |   const currentLocation = await getCurrentMigrationLocation(parsedSettings);
42 | 
43 |   await writeCurrentMigration(parsedSettings, currentLocation, "TEST!\n");
44 |   const content = await fsp.readFile("migrations/current.sql", "utf8");
45 |   expect(content).toEqual("TEST!\n");
46 | });
47 | 
48 | it("writes to current/1-current.sql if current directory exists", async () => {
49 |   mockFs({ "migrations/current": mockFs.directory() });
50 | 
51 |   const currentLocation = await getCurrentMigrationLocation(parsedSettings);
52 | 
53 |   await writeCurrentMigration(parsedSettings, currentLocation, "TEST!\n");
54 |   const content = await fsp.readFile(
55 |     "migrations/current/1-current.sql",
56 |     "utf8",
57 |   );
58 |   expect(content).toEqual("TEST!\n");
59 | });
60 | 
61 | const contentWithSplits = `\
62 | --! split: 100-first.sql
63 | First content
64 | 
65 | --! split: 200-second.sql
66 | Some more content
67 | With multiple lines
68 | -- and comments
69 | 
70 | --! split: 300-third.sql
71 | 
72 | 
73 | --! 
split: 400-fourth.sql 74 | Note: 300 was empty 75 | `; 76 | 77 | it("writes to current/*.sql with splits", async () => { 78 | mockFs({ "migrations/current": mockFs.directory() }); 79 | 80 | const currentLocation = await getCurrentMigrationLocation(parsedSettings); 81 | 82 | await writeCurrentMigration( 83 | parsedSettings, 84 | currentLocation, 85 | contentWithSplits, 86 | ); 87 | const contents = await fsp.readdir("migrations/current"); 88 | expect(contents.sort()).toEqual([ 89 | "100-first.sql", 90 | "200-second.sql", 91 | "300-third.sql", 92 | "400-fourth.sql", 93 | ]); 94 | expect( 95 | await fsp.readFile("migrations/current/100-first.sql", "utf8"), 96 | ).toEqual("First content\n"); 97 | expect(await fsp.readFile("migrations/current/200-second.sql", "utf8")) 98 | .toEqual(`\ 99 | Some more content 100 | With multiple lines 101 | -- and comments 102 | `); 103 | expect( 104 | await fsp.readFile("migrations/current/300-third.sql", "utf8"), 105 | ).toEqual("\n"); 106 | expect( 107 | await fsp.readFile("migrations/current/400-fourth.sql", "utf8"), 108 | ).toEqual("Note: 300 was empty\n"); 109 | }); 110 | 111 | it("writes to current/*.sql and deletes previous content", async () => { 112 | mockFs({ 113 | "migrations/current": { 114 | "001-placeholder.sql": "-- Comment", 115 | "300-third.sql": "NOT EMPTY", 116 | "500-fifth.sql": "DELETE ME", 117 | }, 118 | }); 119 | 120 | const currentLocation = await getCurrentMigrationLocation(parsedSettings); 121 | 122 | await writeCurrentMigration( 123 | parsedSettings, 124 | currentLocation, 125 | contentWithSplits, 126 | ); 127 | const contents = await fsp.readdir("migrations/current"); 128 | expect(contents.sort()).toEqual([ 129 | "100-first.sql", 130 | "200-second.sql", 131 | "300-third.sql", 132 | "400-fourth.sql", 133 | ]); 134 | expect( 135 | await fsp.readFile("migrations/current/100-first.sql", "utf8"), 136 | ).toEqual("First content\n"); 137 | expect(await fsp.readFile("migrations/current/200-second.sql", "utf8")) 138 | .toEqual(`\ 139 | Some more content 140 | With multiple lines 141 | -- and comments 142 | `); 143 | expect( 144 | await fsp.readFile("migrations/current/300-third.sql", "utf8"), 145 | ).toEqual("\n"); 146 | expect( 147 | await fsp.readFile("migrations/current/400-fourth.sql", "utf8"), 148 | ).toEqual("Note: 300 was empty\n"); 149 | }); 150 | 151 | it("merges to first file if there's no initial split", async () => { 152 | mockFs({ 153 | "migrations/current": { 154 | "001-placeholder.sql": "-- Comment", 155 | "300-third.sql": "NOT EMPTY", 156 | "500-fifth.sql": "DELETE ME", 157 | }, 158 | }); 159 | 160 | const currentLocation = await getCurrentMigrationLocation(parsedSettings); 161 | 162 | await writeCurrentMigration( 163 | parsedSettings, 164 | currentLocation, 165 | "-- HELLO WORLD\n" + contentWithSplits, 166 | ); 167 | const contents = await fsp.readdir("migrations/current"); 168 | expect(contents.sort()).toEqual([ 169 | "100-first.sql", 170 | "200-second.sql", 171 | "300-third.sql", 172 | "400-fourth.sql", 173 | ]); 174 | await expect( 175 | fsp.stat("migrations/current/1-current.sql"), 176 | ).rejects.toMatchObject({ code: "ENOENT" }); 177 | expect( 178 | await fsp.readFile("migrations/current/100-first.sql", "utf8"), 179 | ).toEqual("-- HELLO WORLD\nFirst content\n"); 180 | expect(await fsp.readFile("migrations/current/200-second.sql", "utf8")) 181 | .toEqual(`\ 182 | Some more content 183 | With multiple lines 184 | -- and comments 185 | `); 186 | expect( 187 | await fsp.readFile("migrations/current/300-third.sql", "utf8"), 
188 |   ).toEqual("\n");
189 |   expect(
190 |     await fsp.readFile("migrations/current/400-fourth.sql", "utf8"),
191 |   ).toEqual("Note: 300 was empty\n");
192 | });
193 | 
194 | it("writes to current/1-current.sql only if there's no splits", async () => {
195 |   mockFs({
196 |     "migrations/current": {
197 |       "001-placeholder.sql": "-- Comment",
198 |       "300-third.sql": "NOT EMPTY",
199 |       "500-fifth.sql": "DELETE ME",
200 |     },
201 |   });
202 | 
203 |   const currentLocation = await getCurrentMigrationLocation(parsedSettings);
204 | 
205 |   await writeCurrentMigration(
206 |     parsedSettings,
207 |     currentLocation,
208 |     "-- HELLO WORLD\n",
209 |   );
210 |   const contents = await fsp.readdir("migrations/current");
211 |   expect(contents.sort()).toEqual(["1-current.sql"]);
212 |   expect(
213 |     await fsp.readFile("migrations/current/1-current.sql", "utf8"),
214 |   ).toEqual("-- HELLO WORLD\n");
215 | });
216 | 
--------------------------------------------------------------------------------
/src/actions.ts:
--------------------------------------------------------------------------------
1 | import { Logger } from "@graphile/logger";
2 | import { exec as rawExec } from "child_process";
3 | import * as fsp from "fs/promises";
4 | import { parse } from "pg-connection-string";
5 | import { inspect, promisify } from "util";
6 | 
7 | import { mergeWithoutClobbering } from "./lib";
8 | import { generatePlaceholderReplacement } from "./migration";
9 | import { withClient } from "./pg";
10 | import {
11 |   isActionSpec,
12 |   isCommandActionSpec,
13 |   isSqlActionSpec,
14 |   makeRootDatabaseConnectionString,
15 |   ParsedSettings,
16 | } from "./settings";
17 | 
18 | interface ActionSpecBase {
19 |   _: string;
20 |   shadow?: boolean;
21 | 
22 |   /**
23 |    * USE THIS WITH CARE! Currently only supported by the afterReset hook; all
24 |    * other hooks will throw an error when set. Runs the file using the
25 |    * rootConnectionString role (i.e. a superuser, but with database name from
26 |    * connectionString), useful for creating extensions.
27 |    */
28 |   root?: boolean;
29 | }
30 | 
31 | export const DO_NOT_USE_DATABASE_URL = "postgres://PLEASE:USE@GM_DBURL/INSTEAD";
32 | 
33 | export interface SqlActionSpec extends ActionSpecBase {
34 |   _: "sql";
35 |   file: string;
36 | }
37 | 
38 | export interface CommandActionSpec extends ActionSpecBase {
39 |   _: "command";
40 |   command: string;
41 | }
42 | 
43 | export type ActionSpec = SqlActionSpec | CommandActionSpec;
44 | 
45 | const exec = promisify(rawExec);
46 | 
47 | export async function executeActions(
48 |   parsedSettings: ParsedSettings,
49 |   shadow = false,
50 |   actions: ActionSpec[],
51 | ): Promise<void> {
52 |   if (!actions) {
53 |     return;
54 |   }
55 |   const connectionString = shadow
56 |     ? parsedSettings.shadowConnectionString
57 |     : parsedSettings.connectionString;
58 |   if (!connectionString) {
59 |     throw new Error(
60 |       "Could not determine connection string for running commands",
61 |     );
62 |   }
63 |   const { database: databaseName, user: databaseUser } =
64 |     parse(connectionString);
65 |   if (!databaseName) {
66 |     throw new Error("Could not extract database name from connection string");
67 |   }
68 |   for (const actionSpec of actions) {
69 |     if (actionSpec.shadow !== undefined && actionSpec.shadow !== shadow) {
70 |       continue;
71 |     }
72 |     const hookConnectionString = actionSpec.root
73 |       ? makeRootDatabaseConnectionString(parsedSettings, databaseName)
74 |       : connectionString;
75 |     if (actionSpec._ === "sql") {
76 |       const body = await fsp.readFile(
77 |         `${parsedSettings.migrationsFolder}/${actionSpec.file}`,
78 |         "utf8",
79 |       );
80 |       await withClient(
81 |         hookConnectionString,
82 |         parsedSettings,
83 |         async (pgClient, context) => {
84 |           const query = generatePlaceholderReplacement(
85 |             parsedSettings,
86 |             context,
87 |           )(body);
88 |           await pgClient.query({
89 |             text: query,
90 |           });
91 |         },
92 |       );
93 |     } else if (actionSpec._ === "command") {
94 |       // Run the command
95 |       const promise = exec(actionSpec.command, {
96 |         env: mergeWithoutClobbering(
97 |           {
98 |             ...process.env,
99 |             DATABASE_URL: DO_NOT_USE_DATABASE_URL, // DO NOT USE THIS! It can be misleading.
100 |           },
101 |           {
102 |             GM_DBNAME: databaseName,
103 |             // When `root: true`, GM_DBUSER may be perceived as ambiguous, so we must not set it.
104 |             ...(actionSpec.root
105 |               ? null
106 |               : {
107 |                   GM_DBUSER: databaseUser,
108 |                 }),
109 |             GM_DBURL: hookConnectionString,
110 |             ...(shadow
111 |               ? {
112 |                   GM_SHADOW: "1",
113 |                 }
114 |               : null),
115 |           },
116 |           "please ensure this environmental variable is not set because graphile-migrate sets it dynamically for children.",
117 |         ),
118 |         encoding: "utf8",
119 |         // 50MB of log data should be enough for any reasonable migration... right?
120 |         maxBuffer: 50 * 1024 * 1024,
121 |       });
122 |       try {
123 |         const { stdout, stderr } = await promise;
124 |         if (stdout) {
125 |           parsedSettings.logger.info(stdout);
126 |         }
127 |         if (stderr) {
128 |           parsedSettings.logger.error(stderr);
129 |         }
130 |       } catch (e) {
131 |         if (typeof e === "object" && e !== null) {
132 |           if ("stdout" in e && typeof e.stdout === "string" && e.stdout) {
133 |             parsedSettings.logger.info(e.stdout);
134 |           }
135 |           if ("stderr" in e && typeof e.stderr === "string" && e.stderr) {
136 |             parsedSettings.logger.error(e.stderr);
137 |           }
138 |         }
139 |         throw e;
140 |       }
141 |     }
142 |   }
143 | }
144 | 
145 | export function makeValidateActionCallback(logger: Logger, allowRoot = false) {
146 |   return async (inputValue: unknown): Promise<ActionSpec[]> => {
147 |     const specs: ActionSpec[] = [];
148 |     if (inputValue) {
149 |       const rawSpecArray = Array.isArray(inputValue)
150 |         ? (inputValue as unknown[])
151 |         : [inputValue];
152 |       for (const trueRawSpec of rawSpecArray) {
153 |         // This fudge is for backwards compatibility with v0.0.3
154 |         const isV003OrBelowCommand =
155 |           typeof trueRawSpec === "object" &&
156 |           trueRawSpec !== null &&
157 |           !("_" in trueRawSpec && trueRawSpec._) &&
158 |           "command" in trueRawSpec &&
159 |           typeof trueRawSpec["command"] === "string";
160 |         if (isV003OrBelowCommand) {
161 |           logger.warn(
162 |             "DEPRECATED: graphile-migrate now requires command action specs to have an `_: 'command'` property; we'll back-fill this for now, but please update your configuration",
163 |           );
164 |         }
165 |         const rawSpec = isV003OrBelowCommand
166 |           ? { _: "command", ...trueRawSpec }
167 |           : trueRawSpec;
168 | 
169 |         if (rawSpec && typeof rawSpec === "string") {
170 |           const sqlSpec: SqlActionSpec = rawSpec.startsWith("!")
171 |             ? {
172 |                 _: "sql",
173 |                 file: rawSpec.substring(1),
174 |                 root: true,
175 |               }
176 |             : {
177 |                 _: "sql",
178 |                 file: rawSpec,
179 |               };
180 |           specs.push(sqlSpec);
181 |         } else if (isActionSpec(rawSpec)) {
182 |           if (isSqlActionSpec(rawSpec) || isCommandActionSpec(rawSpec)) {
183 |             specs.push(rawSpec);
184 |           } else {
185 |             throw new Error(
186 |               `Action spec '${inspect(rawSpec)}' not supported; perhaps you need to upgrade?`,
187 |             );
188 |           }
189 |         } else {
190 |           throw new Error(
191 |             `Expected action spec to contain an array of strings or action specs; received '${typeof rawSpec}'`,
192 |           );
193 |         }
194 |       }
195 |     }
196 | 
197 |     // Final validations
198 |     for (const spec of specs) {
199 |       if (!allowRoot && spec._ === "sql" && spec.root) {
200 |         throw new Error(
201 |           "This hook isn't permitted to require root privileges.",
202 |         );
203 |       }
204 |     }
205 | 
206 |     return specs;
207 |   };
208 | }
209 | 
--------------------------------------------------------------------------------
/__tests__/actions.test.ts:
--------------------------------------------------------------------------------
1 | jest.mock("child_process");
2 | jest.mock("../src/pg");
3 | jest.mock("../src/migration");
4 | 
5 | import "./helpers"; // Has side-effects; must come first
6 | 
7 | import { exec } from "child_process";
8 | import mockFs from "mock-fs";
9 | import { parse } from "pg-connection-string";
10 | 
11 | import { executeActions } from "../src/actions";
12 | import { _migrate } from "../src/commands/migrate";
13 | import { withClient } from "../src/pg";
14 | import { parseSettings } from "../src/settings";
15 | import {
16 |   mockPgClient,
17 |   TEST_DATABASE_NAME,
18 |   TEST_DATABASE_URL,
19 |   TEST_ROOT_DATABASE_URL,
20 | } from "./helpers";
21 | 
22 | beforeAll(() => {
23 |   // eslint-disable-next-line no-console
24 |   console.log("[mock-fs callsites hack]"); // Without this, jest fails due to 'callsites'
25 |   mockFs({});
26 | });
27 | 
28 | afterAll(() => {
29 |   mockFs.restore();
30 | });
31 | 
32 | it("runs SQL actions", async () => {
33 |   mockFs({
34 |     "migrations/sqlfile1.sql": `[CONTENT:migrations/sqlfile1.sql]`,
35 |     "migrations/sqlfile2.sql": `[CONTENT:migrations/sqlfile2.sql]`,
36 |   });
37 |   const parsedSettings = await parseSettings({
38 |     connectionString: TEST_DATABASE_URL,
39 |     afterAllMigrations: ["sqlfile1.sql", { _: "sql", file: "sqlfile2.sql" }],
40 |   });
41 |   const mockedExec: jest.Mock = exec as any;
42 |   mockedExec.mockClear();
43 |   mockPgClient.query.mockClear();
44 |   await executeActions(
45 |     parsedSettings,
46 |     false,
47 |     parsedSettings.afterAllMigrations,
48 |   );
49 |   expect(mockedExec).toHaveBeenCalledTimes(0);
50 |   expect(mockPgClient.query).toHaveBeenCalledTimes(2);
51 |   expect(mockPgClient.query).toHaveBeenNthCalledWith(1, {
52 |     text: `[CONTENT:migrations/sqlfile1.sql]`,
53 |   });
54 |   expect(mockPgClient.query).toHaveBeenNthCalledWith(2, {
55 |     text: `[CONTENT:migrations/sqlfile2.sql]`,
56 |   });
57 | });
58 | 
59 | it("runs command actions", async () => {
60 |   const parsedSettings = await parseSettings({
61 |     connectionString: TEST_DATABASE_URL,
62 |     afterAllMigrations: [{ _: "command", command: "touch testCommandAction" }],
63 |   });
64 |   const mockedExec: jest.Mock = exec as any;
65 |   mockedExec.mockClear();
66 |   mockedExec.mockImplementationOnce((_cmd, _options, callback) =>
67 |     callback(null, { stdout: "", stderr: "" }),
68 |   );
69 | 
70 |   mockPgClient.query.mockClear();
71 |   await executeActions(
72 |     parsedSettings,
73 |     false,
74 |     parsedSettings.afterAllMigrations,
75 |   );
76 | 
expect(mockPgClient.query).toHaveBeenCalledTimes(0); 77 | expect(mockedExec).toHaveBeenCalledTimes(1); 78 | expect(mockedExec.mock.calls[0][0]).toBe("touch testCommandAction"); 79 | expect(mockedExec.mock.calls[0][1].env.PATH).toBe(process.env.PATH); 80 | expect(mockedExec.mock.calls[0][1].env.GM_SHADOW).toBe(undefined); 81 | expect(typeof mockedExec.mock.calls[0][1].env.GM_DBURL).toBe("string"); 82 | }); 83 | 84 | it("runs sql afterReset action with correct connection string when root", async () => { 85 | mockFs({ 86 | "migrations/sqlfile1.sql": `[CONTENT:migrations/sqlfile1.sql]`, 87 | }); 88 | const parsedSettings = await parseSettings({ 89 | connectionString: TEST_DATABASE_URL, 90 | afterReset: [{ _: "sql", file: "sqlfile1.sql", root: true }], 91 | }); 92 | const mockedWithClient: jest.Mock = withClient as any; 93 | mockedWithClient.mockClear(); 94 | await executeActions(parsedSettings, false, parsedSettings.afterReset); 95 | expect(mockedWithClient).toHaveBeenCalledTimes(1); 96 | expect(mockedWithClient.mock.calls[0][0]).toBe( 97 | `postgres:///${TEST_DATABASE_NAME}`, 98 | ); 99 | }); 100 | 101 | it("runs command afterReset action with correct env vars when root", async () => { 102 | const parsedSettings = await parseSettings({ 103 | connectionString: TEST_DATABASE_URL, 104 | rootConnectionString: TEST_ROOT_DATABASE_URL, 105 | afterReset: [ 106 | { _: "command", command: "touch testCommandAction", root: true }, 107 | ], 108 | }); 109 | const mockedExec: jest.Mock = exec as any; 110 | mockedExec.mockClear(); 111 | mockedExec.mockImplementationOnce((_cmd, _options, callback) => 112 | callback(null, { stdout: "", stderr: "" }), 113 | ); 114 | 115 | await executeActions(parsedSettings, false, parsedSettings.afterReset); 116 | // When `root: true`, GM_DBUSER may be perceived as ambiguous, so we must not set it. 
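  // (The GM_DBURL for a root action mixes the root credentials with the
  // target database's name, as asserted below; a GM_DBUSER taken from either
  // connection string could therefore mislead the hook.)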
117 |   expect(mockedExec.mock.calls[0][1].env.GM_DBUSER).toBe(undefined);
118 |   const connectionStringParts = parse(TEST_DATABASE_URL);
119 |   const rootConnectionStringParts = parse(TEST_ROOT_DATABASE_URL);
120 |   expect(rootConnectionStringParts.database).not.toBe(
121 |     connectionStringParts.database,
122 |   );
123 |   const execUrlParts = parse(mockedExec.mock.calls[0][1].env.GM_DBURL);
124 |   expect(execUrlParts.host).toBe(rootConnectionStringParts.host);
125 |   expect(execUrlParts.port).toBe(rootConnectionStringParts.port);
126 |   expect(execUrlParts.user).toBe(rootConnectionStringParts.user);
127 |   expect(execUrlParts.password).toBe(rootConnectionStringParts.password);
128 |   expect(execUrlParts.database).toBe(connectionStringParts.database);
129 | });
130 | 
131 | it("runs normal and non-shadow actions in non-shadow mode", async () => {
132 |   mockFs({
133 |     "migrations/non-shadow-only.sql": `[CONTENT:migrations/non-shadow-only.sql]`,
134 |     "migrations/shadow-only.sql": `[CONTENT:migrations/shadow-only.sql]`,
135 |     "migrations/everywhere.sql": `[CONTENT:migrations/everywhere.sql]`,
136 |   });
137 |   const parsedSettings = await parseSettings({
138 |     connectionString: TEST_DATABASE_URL,
139 |     afterAllMigrations: [
140 |       { _: "sql", file: "non-shadow-only.sql", shadow: false },
141 |       { _: "sql", file: "shadow-only.sql", shadow: true },
142 |       { _: "sql", file: "everywhere.sql" },
143 |     ],
144 |   });
145 |   const mockedExec: jest.Mock = exec as any;
146 |   mockedExec.mockClear();
147 |   mockPgClient.query.mockClear();
148 |   await executeActions(
149 |     parsedSettings,
150 |     false,
151 |     parsedSettings.afterAllMigrations,
152 |   );
153 |   expect(mockedExec).toHaveBeenCalledTimes(0);
154 |   expect(mockPgClient.query).toHaveBeenCalledTimes(2);
155 |   expect(mockPgClient.query).toHaveBeenNthCalledWith(1, {
156 |     text: `[CONTENT:migrations/non-shadow-only.sql]`,
157 |   });
158 |   expect(mockPgClient.query).toHaveBeenNthCalledWith(2, {
159 |     text: `[CONTENT:migrations/everywhere.sql]`,
160 |   });
161 | });
162 | 
163 | it("runs normal and shadow actions in shadow mode", async () => {
164 |   const parsedSettings = await parseSettings(
165 |     {
166 |       connectionString: TEST_DATABASE_URL,
167 |       shadowConnectionString: "foo_shadow",
168 |       afterAllMigrations: [
169 |         { _: "sql", file: "non-shadow-only.sql", shadow: false },
170 |         { _: "sql", file: "shadow-only.sql", shadow: true },
171 |         { _: "sql", file: "everywhere.sql" },
172 |       ],
173 |     },
174 |     true,
175 |   );
176 |   const mockedExec: jest.Mock = exec as any;
177 |   mockedExec.mockClear();
178 |   mockPgClient.query.mockClear();
179 |   await executeActions(parsedSettings, true, parsedSettings.afterAllMigrations);
180 |   expect(mockedExec).toHaveBeenCalledTimes(0);
181 |   expect(mockPgClient.query).toHaveBeenCalledTimes(2);
182 |   expect(mockPgClient.query).toHaveBeenNthCalledWith(1, {
183 |     text: `[CONTENT:migrations/shadow-only.sql]`,
184 |   });
185 |   expect(mockPgClient.query).toHaveBeenNthCalledWith(2, {
186 |     text: `[CONTENT:migrations/everywhere.sql]`,
187 |   });
188 | });
189 | 
--------------------------------------------------------------------------------
/src/commands/init.ts:
--------------------------------------------------------------------------------
1 | import * as fsp from "fs/promises";
2 | import { CommandModule } from "yargs";
3 | 
4 | import { getCurrentMigrationLocation, writeCurrentMigration } from "../current";
5 | import { parseSettings } from "../settings";
6 | import { version } from "../version";
7 | import {
8 |   CommonArgv,
9 |   DEFAULT_GMRC_PATH,
10 |   DEFAULT_GMRCJS_PATH,
11 |   exists,
12 |   getSettings,
13 | } from "./_common";
14 | 
15 | interface InitArgv extends CommonArgv {
16 |   folder?: boolean;
17 | }
18 | 
19 | export async function init(options: InitArgv = {}): Promise<void> {
20 |   if (await exists(DEFAULT_GMRC_PATH)) {
21 |     throw new Error(`.gmrc file already exists at ${DEFAULT_GMRC_PATH}`);
22 |   }
23 |   if (await exists(DEFAULT_GMRCJS_PATH)) {
24 |     throw new Error(`.gmrc.js file already exists at ${DEFAULT_GMRCJS_PATH}`);
25 |   }
26 |   if (options.config && (await exists(options.config))) {
27 |     throw new Error(`.gmrc file already exists at ${options.config}`);
28 |   }
29 | 
30 |   const gmrcPath = options.config || DEFAULT_GMRC_PATH;
31 | 
32 |   const dbStrings =
33 |     process.env.DATABASE_URL &&
34 |     process.env.SHADOW_DATABASE_URL &&
35 |     process.env.ROOT_DATABASE_URL
36 |       ? `
37 | /*
38 |  * Database connection strings are sourced from the DATABASE_URL,
39 |  * SHADOW_DATABASE_URL and ROOT_DATABASE_URL environmental variables.
40 |  */
41 | `
42 |       : `
43 | /*
44 |  * connectionString: this tells Graphile Migrate where to find the database
45 |  * to run the migrations against.
46 |  *
47 |  * RECOMMENDATION: use \`DATABASE_URL\` envvar instead.
48 |  */
49 | // "connectionString": "postgres://dbowner:password@host:5432/appdb",
50 | 
51 | /*
52 |  * shadowConnectionString: like connectionString, but this is used for the
53 |  * shadow database (which will be reset frequently).
54 |  *
55 |  * RECOMMENDATION: use \`SHADOW_DATABASE_URL\` envvar instead.
56 |  */
57 | // "shadowConnectionString": "postgres://dbowner:password@host:5432/appdb_shadow",
58 | 
59 | /*
60 |  * rootConnectionString: like connectionString, but this is used for
61 |  * dropping/creating the database in \`graphile-migrate reset\`. This isn't
62 |  * necessary, shouldn't be used in production, but helps during development.
63 |  *
64 |  * RECOMMENDATION: use \`ROOT_DATABASE_URL\` envvar instead.
65 |  */
66 | // "rootConnectionString": "postgres://adminuser:adminpassword@host:5432/postgres",
67 | `;
68 | 
69 |   const initialComment = `\
70 | /*
71 |  * Graphile Migrate configuration.
72 |  *
73 |  * If you decide to commit this file (recommended) please ensure that it does
74 |  * not contain any secrets (passwords, etc) - we recommend you manage these
75 |  * with environmental variables instead.
76 |  *
77 |  * This file is in JSON5 format, in VSCode you can use "JSON with comments" as
78 |  * the file format.
79 |  */
80 | `;
81 | 
82 |   const jsonContent = `\
83 | {${dbStrings}
84 |   /*
85 |    * pgSettings: key-value settings to be automatically loaded into PostgreSQL
86 |    * before running migrations, using an equivalent of \`SET LOCAL <key> TO
87 |    * <value>\`
88 |    */
89 |   "pgSettings": {
90 |     // "search_path": "app_public,app_private,app_hidden,public",
91 |   },
92 | 
93 |   /*
94 |    * placeholders: substituted in SQL files when compiled/executed. Placeholder
95 |    * keys should be prefixed with a colon and in all caps, like
96 |    * \`:COLON_PREFIXED_ALL_CAPS\`. Placeholder values should be strings. They
97 |    * will be replaced verbatim with NO ESCAPING AT ALL (this differs from how
98 |    * psql handles placeholders) so should only be used with "safe" values. This
99 |    * is useful for committing migrations where certain parameters can change
100 |    * between environments (development, staging, production) but you wish to
101 |    * use the same signed migration files for all.
102 |    *
103 |    * The special value "!ENV" can be used to indicate an environmental variable
104 |    * of the same name should be used.
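   *
   * For example (hypothetical placeholder name): with
   *   "placeholders": { ":SUPERUSER": "!ENV" }
   * each occurrence of :SUPERUSER in your SQL would be replaced verbatim
   * with the value of process.env.SUPERUSER when the migration runs.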
105 |    *
106 |    * Graphile Migrate automatically sets the \`:DATABASE_NAME\` and
107 |    * \`:DATABASE_OWNER\` placeholders, and you should not attempt to override
108 |    * these.
109 |    */
110 |   "placeholders": {
111 |     // ":DATABASE_VISITOR": "!ENV", // Uses process.env.DATABASE_VISITOR
112 |   },
113 | 
114 |   /*
115 |    * Actions allow you to run scripts or commands at certain points in the
116 |    * migration lifecycle. SQL files are run against the database directly.
117 |    * "command" actions are run with the following environmental variables set:
118 |    *
119 |    * - GM_DBURL: the PostgreSQL URL of the database being migrated
120 |    * - GM_DBNAME: the name of the database from GM_DBURL
121 |    * - GM_DBUSER: the user from GM_DBURL
122 |    * - GM_SHADOW: set to 1 if the shadow database is being migrated, left unset
123 |    *   otherwise
124 |    *
125 |    * If "shadow" is unspecified, the actions will run on events to both shadow
126 |    * and normal databases. If "shadow" is true the action will only run on
127 |    * actions to the shadow DB, and if false only on actions to the main DB.
128 |    */
129 | 
130 |   /*
131 |    * afterReset: actions executed after a \`graphile-migrate reset\` command.
132 |    */
133 |   "afterReset": [
134 |     // "afterReset.sql",
135 |     // { "_": "command", "command": "graphile-worker --schema-only" },
136 |   ],
137 | 
138 |   /*
139 |    * afterAllMigrations: actions executed once all migrations are complete.
140 |    */
141 |   "afterAllMigrations": [
142 |     // {
143 |     //   "_": "command",
144 |     //   "shadow": true,
145 |     //   "command": "if [ \\"$IN_TESTS\\" != \\"1\\" ]; then ./scripts/dump-db; fi",
146 |     // },
147 |   ],
148 | 
149 |   /*
150 |    * afterCurrent: actions executed once the current migration has been
151 |    * evaluated (i.e. in watch mode).
152 |    */
153 |   "afterCurrent": [
154 |     // {
155 |     //   "_": "command",
156 |     //   "shadow": true,
157 |     //   "command": "if [ \\"$IN_TESTS\\" = \\"1\\" ]; then ./scripts/test-seed; fi",
158 |     // },
159 |   ],
160 | 
161 |   /*
162 |    * blankMigrationContent: content to be written to the current migration
163 |    * after commit. NOTE: this should only contain comments.
164 |    */
165 |   // "blankMigrationContent": "-- Write your migration here\\n",
166 | 
167 |   /****************************************************************************\\
168 |   ***                                                                        ***
169 |   ***          You probably don't want to edit anything below here.          ***
170 |   ***                                                                        ***
171 |   \\****************************************************************************/
172 | 
173 |   /*
174 |    * manageGraphileMigrateSchema: if you set this false, you must be sure to
175 |    * keep the graphile_migrate schema up to date yourself. We recommend you
176 |    * leave it at its default.
177 |    */
178 |   // "manageGraphileMigrateSchema": true,
179 | 
180 |   /*
181 |    * migrationsFolder: path to the folder in which to store your migrations.
182 |    */
183 |   // migrationsFolder: "./migrations",
184 | 
185 |   "//generatedWith": "${version}"
186 | }`;
187 | 
188 |   const fileContent = gmrcPath.endsWith(".js")
189 |     ? `${initialComment}module.exports = ${jsonContent};\n`
190 |     : `${initialComment}${jsonContent}\n`;
191 |   await fsp.writeFile(gmrcPath, fileContent);
192 | 
193 |   // eslint-disable-next-line
194 |   console.log(
195 |     `Template .gmrc file written to '${gmrcPath}'; please read and edit it to suit your needs.`,
196 |   );
197 |   const settings = await getSettings({ configFile: options.config });
198 |   const parsedSettings = await parseSettings({
199 |     connectionString: process.env.DATABASE_URL || "NOT_NEEDED",
200 |     shadowConnectionString: process.env.SHADOW_DATABASE_URL || "NOT_NEEDED",
201 |     ...settings,
202 |   });
203 |   await fsp.mkdir(parsedSettings.migrationsFolder);
204 |   await fsp.mkdir(parsedSettings.migrationsFolder + "/committed");
205 |   if (options.folder) {
206 |     await fsp.mkdir(parsedSettings.migrationsFolder + "/current");
207 |   }
208 |   const currentLocation = await getCurrentMigrationLocation(parsedSettings);
209 |   await writeCurrentMigration(
210 |     parsedSettings,
211 |     currentLocation,
212 |     parsedSettings.blankMigrationContent.trim() + "\n",
213 |   );
214 |   // eslint-disable-next-line
215 |   console.log(
216 |     `The current migration was created at '${currentLocation.path}'.\n${
217 |       process.env.DATABASE_URL
218 |         ? "Try"
219 |         : "After configuring your connectionString/DATABASE_URL try"
220 |     } running \`graphile-migrate watch\` and editing the current migration.`,
221 |   );
222 | }
223 | 
224 | export const initCommand: CommandModule<Record<string, never>, InitArgv> = {
225 |   command: "init",
226 |   aliases: [],
227 |   describe: `\
228 | Initializes a graphile-migrate project by creating a \`.gmrc\` file and \`migrations\` folder.`,
229 |   builder: {
230 |     folder: {
231 |       type: "boolean",
232 |       description: "Use a folder rather than a file for the current migration.",
233 |       default: false,
234 |     },
235 |   },
236 |   handler: init,
237 | };
238 | 
--------------------------------------------------------------------------------
/src/commands/watch.ts:
--------------------------------------------------------------------------------
1 | import * as chokidar from "chokidar";
2 | 
3 | import { executeActions } from "../actions";
4 | import { logDbError } from "../instrumentation";
5 | import { reverseMigration, runStringMigration } from "../migration";
6 | import { withClient, withTransaction } from "../pg";
7 | import { ParsedSettings, parseSettings, Settings } from "../settings";
8 | import { _migrate } from "./migrate";
9 | import pgMinify = require("pg-minify");
10 | import { CommandModule } from "yargs";
11 | 
12 | import {
13 |   getCurrentMigrationLocation,
14 |   readCurrentMigration,
15 |   writeCurrentMigration,
16 | } from "../current";
17 | import { DbCurrent } from "../interfaces";
18 | import { isLoggedError } from "../lib";
19 | import { CommonArgv, getSettings } from "./_common";
20 | 
21 | interface WatchArgv extends CommonArgv {
22 |   once: boolean;
23 |   shadow: boolean;
24 | }
25 | 
26 | export function _makeCurrentMigrationRunner(
27 |   parsedSettings: ParsedSettings,
28 |   _once = false,
29 |   shadow = false,
30 | ): () => Promise<void> {
31 |   async function run(): Promise<void> {
32 |     const currentLocation = await getCurrentMigrationLocation(parsedSettings);
33 |     const body = await readCurrentMigration(parsedSettings, currentLocation);
34 |     let migrationsAreEquivalent = false;
35 | 
36 |     try {
37 |       parsedSettings.logger.info(
38 |         `[${new Date().toISOString()}]: Running current.sql`,
39 |       );
40 |       const start = process.hrtime();
41 |       const connectionString = shadow
42 |         ? parsedSettings.shadowConnectionString
43 |         : parsedSettings.connectionString;
44 |       if (!connectionString) {
45 |         throw new Error(
46 |           "Could not determine connection string for running commands",
47 |         );
48 |       }
49 |       await withClient(
50 |         connectionString,
51 |         parsedSettings,
52 |         (lockingPgClient, context) =>
53 |           withTransaction(lockingPgClient, async () => {
54 |             // 1: lock graphile_migrate.current so no concurrent migrations can occur
55 |             await lockingPgClient.query(
56 |               "lock graphile_migrate.current in EXCLUSIVE mode",
57 |             );
58 | 
59 |             // 2: Get last current.sql from graphile_migrate.current
60 |             const {
61 |               rows: [previousCurrent],
62 |             } = await lockingPgClient.query<DbCurrent>(
63 |               `
64 |                 select *
65 |                 from graphile_migrate.current
66 |                 where filename = 'current.sql'
67 |               `,
68 |             );
69 | 
70 |             // 3: minify and compare the last-run current.sql with this _COMPILED_ current.sql.
71 |             const previousBody: string | void =
72 |               previousCurrent && previousCurrent.content;
73 |             const { sql: currentBodyFromDryRun } = await runStringMigration(
74 |               lockingPgClient,
75 |               parsedSettings,
76 |               context,
77 |               body,
78 |               "current.sql",
79 |               undefined,
80 |               true,
81 |             );
82 |             const previousBodyMinified = previousBody
83 |               ? pgMinify(previousBody)
84 |               : null;
85 |             const currentBodyMinified = pgMinify(currentBodyFromDryRun);
86 |             migrationsAreEquivalent =
87 |               currentBodyMinified === previousBodyMinified;
88 | 
89 |             // 4: if different
90 |             if (!migrationsAreEquivalent) {
91 |               await executeActions(
92 |                 parsedSettings,
93 |                 shadow,
94 |                 parsedSettings.beforeCurrent,
95 |               );
96 | 
97 |               // 4a: invert previous current; on success delete from graphile_migrate.current; on failure rollback and abort
98 |               if (previousBody) {
99 |                 await reverseMigration(lockingPgClient, previousBody);
100 |               }
101 | 
102 |               // COMMIT ─ because we need to commit that the migration was reversed
103 |               await lockingPgClient.query("commit");
104 |               await lockingPgClient.query("begin");
105 |               // Re-establish a lock ASAP to continue with migration
106 |               await lockingPgClient.query(
107 |                 "lock graphile_migrate.current in EXCLUSIVE mode",
108 |               );
109 | 
110 |               // 4b: run this current (in its own independent transaction) if not empty
111 |               if (currentBodyMinified !== "") {
112 |                 await withClient(
113 |                   connectionString,
114 |                   parsedSettings,
115 |                   (independentPgClient, context) =>
116 |                     runStringMigration(
117 |                       independentPgClient,
118 |                       parsedSettings,
119 |                       context,
120 |                       body,
121 |                       "current.sql",
122 |                       undefined,
123 |                     ),
124 |                 );
125 |               }
126 |             } else {
127 |               parsedSettings.logger.info(
128 |                 `[${new Date().toISOString()}]: current.sql unchanged, skipping migration`,
129 |               );
130 |             }
131 | 
132 |             // 5: update graphile_migrate.current with latest content
133 |             // (NOTE: we update even if the minified versions don't differ since
134 |             // the comments may have changed.)
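            // (The upsert below supplies only `content`; `filename` and `date`
            // fall back to the table defaults, 'current.sql' and now(), per the
            // DDL expected in manageGraphileMigrateSchema.test.ts above.)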
135 |             await lockingPgClient.query({
136 |               name: "current-insert",
137 |               text: `
138 |                 insert into graphile_migrate.current(content)
139 |                 values ($1)
140 |                 on conflict (filename)
141 |                 do update
142 |                 set content = excluded.content, date = excluded.date
143 |               `,
144 |               values: [currentBodyFromDryRun],
145 |             });
146 |           }),
147 |       );
148 |       const interval = process.hrtime(start);
149 |       const duration = interval[0] * 1e3 + interval[1] * 1e-6;
150 |       if (!migrationsAreEquivalent) {
151 |         await executeActions(
152 |           parsedSettings,
153 |           shadow,
154 |           parsedSettings.afterCurrent,
155 |         );
156 |       }
157 |       const interval2 = process.hrtime(start);
158 |       const duration2 = interval2[0] * 1e3 + interval2[1] * 1e-6;
159 |       parsedSettings.logger.info(
160 |         `[${new Date().toISOString()}]: Finished (${duration2.toFixed(0)}ms${
161 |           duration2 - duration >= 5
162 |             ? `; excluding actions: ${duration.toFixed(0)}ms`
163 |             : ""
164 |         })`,
165 |       );
166 |     } catch (err) {
167 |       const e = err instanceof Error ? err : new Error(String(err));
168 |       logDbError(parsedSettings, e);
169 |       throw e;
170 |     }
171 |   }
172 |   return run;
173 | }
174 | 
175 | export async function _watch(
176 |   parsedSettings: ParsedSettings,
177 |   once = false,
178 |   shadow = false,
179 | ): Promise<void> {
180 |   await _migrate(parsedSettings, shadow);
181 | 
182 |   const currentLocation = await getCurrentMigrationLocation(parsedSettings);
183 |   if (!currentLocation.exists) {
184 |     await writeCurrentMigration(
185 |       parsedSettings,
186 |       currentLocation,
187 |       parsedSettings.blankMigrationContent.trim() + "\n",
188 |     );
189 |   }
190 | 
191 |   const run = _makeCurrentMigrationRunner(parsedSettings, once, shadow);
192 |   if (once) {
193 |     return run();
194 |   } else {
195 |     let running = false;
196 |     let runAgain = false;
197 |     const queue = (): void => {
198 |       if (running) {
199 |         runAgain = true;
          return; // (fix: without this, a second run() would start concurrently)
200 |       }
201 |       running = true;
202 | 
203 |       run()
204 |         .catch((error: unknown) => {
205 |           if (!isLoggedError(error)) {
206 |             parsedSettings.logger.error(
207 |               `Error occurred whilst processing migration: ${error instanceof Error ? error.message : String(error)}`,
208 |               { error },
209 |             );
210 |           }
211 |         })
212 |         .finally(() => {
213 |           running = false;
214 |           if (runAgain) {
215 |             runAgain = false;
216 |             queue();
217 |           }
218 |         });
219 |     };
220 |     const watcher = chokidar.watch(
221 |       [currentLocation.path, `${parsedSettings.migrationsFolder}/fixtures`],
222 |       {
223 |         /*
224 |          * Without `usePolling`, on Linux, you can prevent the watching from
225 |          * working by issuing `git stash && sleep 2 && git stash pop`. This is
226 |          * annoying.
227 |          */
228 |         usePolling: true,
229 | 
230 |         /*
231 |          * Some editors stream the writes out a little at a time; we want to wait
232 |          * for the write to finish before triggering.
233 |          */
234 |         awaitWriteFinish: {
235 |           stabilityThreshold: 200,
236 |           pollInterval: 100,
237 |         },
238 | 
239 |         /*
240 |          * We don't want to run the queue too many times during startup; so we
241 |          * call it once on the 'ready' event.
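         * (chokidar would otherwise emit an 'add' event for every pre-existing
         * watched file at startup; `ignoreInitial: true` suppresses those, and
         * the `watcher.once("ready", queue)` below performs the single
         * initial run instead.)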
242 | */ 243 | ignoreInitial: true, 244 | }, 245 | ); 246 | watcher.on("add", queue); 247 | watcher.on("change", queue); 248 | watcher.on("unlink", queue); 249 | watcher.once("ready", queue); 250 | return Promise.resolve(); 251 | } 252 | } 253 | 254 | export async function watch( 255 | settings: Settings, 256 | once = false, 257 | shadow = false, 258 | ): Promise<void> { 259 | const parsedSettings = await parseSettings(settings, shadow); 260 | return _watch(parsedSettings, once, shadow); 261 | } 262 | 263 | export const watchCommand: CommandModule<Record<string, never>, WatchArgv> = { 264 | command: "watch", 265 | aliases: [], 266 | describe: 267 | "Runs any un-executed committed migrations and then runs and watches the current migration, re-running it on any change. For development.", 268 | builder: { 269 | once: { 270 | type: "boolean", 271 | default: false, 272 | description: "Runs the current migration and then exits.", 273 | }, 274 | shadow: { 275 | type: "boolean", 276 | default: false, 277 | description: "Applies changes to shadow DB.", 278 | }, 279 | }, 280 | handler: async (argv) => { 281 | await watch( 282 | await getSettings({ configFile: argv.config }), 283 | argv.once, 284 | argv.shadow, 285 | ); 286 | }, 287 | }; 288 | -------------------------------------------------------------------------------- /src/current.ts: -------------------------------------------------------------------------------- 1 | import * as assert from "assert"; 2 | import { promises as fsp, Stats } from "fs"; 3 | 4 | import { isNoTransactionDefined } from "./header"; 5 | import { errorCode } from "./lib"; 6 | import { 7 | compileIncludes, 8 | parseMigrationText, 9 | serializeHeader, 10 | } from "./migration"; 11 | import { ParsedSettings } from "./settings"; 12 | 13 | export const VALID_FILE_REGEX = /^([0-9]+)(-[-_a-zA-Z0-9]*)?\.sql$/; 14 | 15 | async function statOrNull(path: string): Promise<Stats | null> { 16 | try { 17 | return await fsp.stat(path); 18 | } catch (e) { 19 | if (errorCode(e) === "ENOENT") { 20 | return null; 21 | } 22 | throw e; 23 | } 24 | } 25 | 26 | async function readFileOrNull(path: string): Promise<string | null> { 27 | try { 28 | return await fsp.readFile(path, "utf8"); 29 | } catch (e) { 30 | if (errorCode(e) === "ENOENT") { 31 | return null; 32 | } 33 | throw e; 34 | } 35 | } 36 | async function readFileOrError(path: string): Promise<string> { 37 | try { 38 | return await fsp.readFile(path, "utf8"); 39 | } catch (e) { 40 | throw new Error( 41 | `Failed to read file at '${path}': ${e instanceof Error ?
e.message : String(e)}`, 42 | ); 43 | } 44 | } 45 | 46 | export interface CurrentMigrationLocation { 47 | isFile: boolean; 48 | path: string; 49 | exists: boolean; 50 | // stats: Stats, 51 | } 52 | 53 | export async function getCurrentMigrationLocation( 54 | parsedSettings: ParsedSettings, 55 | ): Promise<CurrentMigrationLocation> { 56 | const filePath = `${parsedSettings.migrationsFolder}/current.sql`; 57 | const dirPath = `${parsedSettings.migrationsFolder}/current`; 58 | 59 | const fileStats = await statOrNull(filePath); 60 | const dirStats = await statOrNull(dirPath); 61 | 62 | if (fileStats && !fileStats.isFile()) { 63 | throw new Error(`'${filePath}' exists but is not a file.`); 64 | } 65 | if (dirStats && !dirStats.isDirectory()) { 66 | throw new Error(`'${dirPath}' exists but is not a directory.`); 67 | } 68 | 69 | if (fileStats && dirStats) { 70 | throw new Error( 71 | `Invalid current migration: both the '${filePath}' file and the '${dirPath}' directory exist; only one of these may exist at a time.`, 72 | ); 73 | } 74 | 75 | const isFile = !dirStats; 76 | const stats = isFile ? fileStats : dirStats; 77 | const exists = !!stats; 78 | 79 | return { 80 | isFile, 81 | path: isFile ? filePath : dirPath, 82 | exists, 83 | // stats, 84 | }; 85 | } 86 | 87 | function idFromFilename(file: string): number { 88 | const matches = VALID_FILE_REGEX.exec(file); 89 | if (!matches) { 90 | throw new Error( 91 | `Invalid current migration filename: '${file}'. File must follow the naming 001.sql or 001-message.sql, where 001 is a unique number (with optional zero padding) and message is an optional alphanumeric string.`, 92 | ); 93 | } 94 | const [, rawId, _message] = matches; 95 | const id = parseInt(rawId, 10); 96 | 97 | if (!id || !isFinite(id) || id < 1) { 98 | throw new Error( 99 | `Invalid current migration filename: '${file}'. File must start with a (positive) number, could not coerce '${rawId}' to int.`, 100 | ); 101 | } 102 | return id; 103 | } 104 | 105 | export async function readCurrentMigration( 106 | parsedSettings: ParsedSettings, 107 | location: CurrentMigrationLocation, 108 | ): Promise<string> { 109 | if (location.isFile) { 110 | const content = await readFileOrNull(location.path); 111 | 112 | // If file doesn't exist, treat it as if it were empty.
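// (compileIncludes expands any include directives in the content; the Set is seeded with this file's path so that files already being processed can be recognised and circular includes detected.)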
113 | return compileIncludes( 114 | parsedSettings, 115 | content || "", 116 | new Set([location.path]), 117 | ); 118 | } else { 119 | const files = await fsp.readdir(location.path); 120 | const parts = new Map< 121 | number, 122 | { 123 | filePath: string; 124 | file: string; 125 | bodyPromise: Promise<string>; 126 | } 127 | >(); 128 | 129 | for (const file of files) { 130 | // Do not await during this loop; awaiting here would limit parallelism 131 | 132 | if (file.startsWith(".")) { 133 | // Ignore dotfiles 134 | continue; 135 | } 136 | if (!file.endsWith(".sql")) { 137 | // Skip non-SQL files 138 | continue; 139 | } 140 | const id = idFromFilename(file); 141 | const duplicate = parts.get(id); 142 | if (duplicate) { 143 | throw new Error( 144 | `Current migration filename clash: files must have a unique numeric prefix, but at least 2 files ('${file}' and '${duplicate.file}') have the prefix '${id}'.`, 145 | ); 146 | } 147 | 148 | const filePath = `${location.path}/${file}`; 149 | const bodyPromise = readFileOrError(filePath); 150 | 151 | parts.set(id, { 152 | filePath, 153 | file, 154 | bodyPromise, 155 | }); 156 | } 157 | 158 | const ids = [...parts.keys()].sort((a, b) => a - b); 159 | let wholeBody = ""; 160 | 161 | // Like hobbitses 162 | const headerses: Array<{ [key: string]: string | null }> = []; 163 | 164 | for (const id of ids) { 165 | // eslint-disable-next-line @typescript-eslint/no-non-null-assertion 166 | const { file, filePath, bodyPromise } = parts.get(id)!; 167 | const rawContents = await bodyPromise; 168 | const contents = await compileIncludes( 169 | parsedSettings, 170 | rawContents, 171 | new Set([filePath]), 172 | ); 173 | const { body, headers } = parseMigrationText(filePath, contents, false); 174 | headerses.push(headers); 175 | if (isNoTransactionDefined(body)) { 176 | throw new Error( 177 | `Error in '${location.path}/${file}': cannot use '--! no-transaction' with 'current/' directory migrations; use 'current.sql' instead.`, 178 | ); 179 | } 180 | if (wholeBody.length > 0) { 181 | wholeBody += "\n"; 182 | } 183 | // 'split' is not a "header", so it must NOT start with a capital. 184 | wholeBody += `--! split: ${file}\n`; 185 | wholeBody += body.trim() + "\n"; 186 | } 187 | const headerLines: string[] = []; 188 | for (const headers of headerses) { 189 | for (const key of Object.keys(headers)) { 190 | const value = headers[key]; 191 | headerLines.push(serializeHeader(key, value)); 192 | } 193 | } 194 | if (headerLines.length) { 195 | wholeBody = headerLines.join("\n") + "\n\n" + wholeBody; 196 | } 197 | 198 | return wholeBody; 199 | } 200 | } 201 | 202 | export async function writeCurrentMigration( 203 | parsedSettings: ParsedSettings, 204 | location: CurrentMigrationLocation, 205 | body: string, 206 | ): Promise<void> { 207 | if (body.trim() + "\n" !== body) { 208 | throw new Error( 209 | "graphile-migrate error - 'body' should be sanitized before being passed to 'writeCurrentMigration'", 210 | ); 211 | } 212 | if (location.isFile) { 213 | await fsp.writeFile(location.path, body); 214 | } else { 215 | // Split body and write to files 216 | 217 | const lines = body.split("\n"); 218 | 219 | /** 220 | * List of filenames we've written, so we can determine which files not to 221 | * delete. 222 | */ 223 | const filenamesWritten: string[] = []; 224 | 225 | /** 226 | * List of write operation promises, so we can do all our waiting in 227 | * parallel at the end.
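* (Each write/unlink below is started as soon as it is known and the promises are only awaited together, via Promise.all, at the very end of this function.)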
228 | */ 229 | const writePromises: Array<Promise<void>> = []; 230 | 231 | /** 232 | * The next file that will be written to, once all lines are accumulated. 233 | */ 234 | let nextFileToWrite: string | null = null; 235 | 236 | /** 237 | * The lines being accumulated to write to `nextFileToWrite`. 238 | */ 239 | let linesToWrite: string[] = []; 240 | 241 | /** 242 | * The highest file index we've seen, so that we can ensure that we don't 243 | * have any ambiguities or conflicts. 244 | */ 245 | let highestIndex = 0; 246 | let highestIndexFilename: string | null = null; 247 | 248 | /** 249 | * Writes `linesToWrite` to `nextFileToWrite` (or, when forced and no filename is known, 250 | * to `${highestIndex + 1}-current.sql`), then resets these variables ready for the next batch. 251 | */ 252 | const flushToFile = (force = false): void => { 253 | if (!linesToWrite.length && !nextFileToWrite) { 254 | // Optimisation to avoid writing the initial empty migration file before the first `--! split` 255 | return; 256 | } 257 | const sql = linesToWrite.join("\n").trim() + "\n"; 258 | const fileName = 259 | nextFileToWrite || (force ? `${highestIndex + 1}-current.sql` : null); 260 | if (!fileName) { 261 | // No split header yet and not forced: keep accumulating; these lines merge into the first named file 262 | return; 263 | } 264 | const id = idFromFilename(fileName); 265 | if (id <= highestIndex) { 266 | throw new Error( 267 | `Bad migration, split ids must be monotonically increasing, but '${id}' (from '${fileName}') <= '${highestIndex}' (from '${highestIndexFilename}').`, 268 | ); 269 | } 270 | highestIndex = id; 271 | highestIndexFilename = fileName; 272 | 273 | writePromises.push(fsp.writeFile(`${location.path}/${fileName}`, sql)); 274 | filenamesWritten.push(fileName); 275 | 276 | linesToWrite = []; 277 | nextFileToWrite = null; 278 | }; 279 | 280 | for (const line of lines) { 281 | // Do not await in this loop; awaiting here would decrease parallelism 282 | 283 | const matches = /^--! split: ([0-9]+(?:-[-_a-zA-Z0-9]+)?\.sql)$/.exec( 284 | line, 285 | ); 286 | if (matches) { 287 | // Write out previous linesToWrite, if appropriate 288 | flushToFile(); 289 | 290 | // Prepare to write next linesToWrite 291 | nextFileToWrite = matches[1]; 292 | } else { 293 | // Regular line, just add to next linesToWrite 294 | linesToWrite.push(line); 295 | } 296 | } 297 | 298 | // Handle any trailing lines 299 | flushToFile(true); 300 | 301 | if (writePromises.length === 0) { 302 | // Body must have been empty, so no files were written. 303 | assert.strictEqual(body.length, 0); 304 | 305 | // Let's write out just the one empty file.
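// (blankMigrationContent is used here so the placeholder file matches what a freshly-initialised current.sql would contain.)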
306 | const filename = `001.sql`; 307 | const sql = parsedSettings.blankMigrationContent; 308 | 309 | writePromises.push(fsp.writeFile(`${location.path}/${filename}`, sql)); 310 | filenamesWritten.push(filename); 311 | } 312 | 313 | // Clear out old files that were not overwritten 314 | const files = await fsp.readdir(location.path); 315 | for (const file of files) { 316 | if ( 317 | VALID_FILE_REGEX.test(file) && 318 | !file.startsWith(".") && 319 | file.endsWith(".sql") && 320 | !filenamesWritten.includes(file) 321 | ) { 322 | writePromises.push(fsp.unlink(`${location.path}/${file}`)); 323 | } 324 | } 325 | 326 | // Wait for writing to finish 327 | await Promise.all(writePromises); 328 | } 329 | } 330 | -------------------------------------------------------------------------------- /__tests__/helpers.ts: -------------------------------------------------------------------------------- 1 | jest.unmock("pg"); 2 | 3 | import "mock-fs"; // MUST BE BEFORE EVERYTHING 4 | 5 | import { exec } from "child_process"; 6 | import { createHash } from "crypto"; 7 | import mockFs from "mock-fs"; 8 | import { Pool } from "pg"; 9 | import { parse } from "pg-connection-string"; 10 | 11 | import { _migrateMigrationSchema } from "../src/migration"; 12 | import { clearAllPools, escapeIdentifier, withClient } from "../src/pgReal"; 13 | import { ParsedSettings, parseSettings, Settings } from "../src/settings"; 14 | 15 | export const TEST_DATABASE_URL: string = 16 | process.env.TEST_DATABASE_URL || 17 | "postgres://gmtestuser:gmtestpass@localhost/graphile_migrate_test"; 18 | export const TEST_SHADOW_DATABASE_URL = TEST_DATABASE_URL + "_shadow"; 19 | 20 | const parsedTestDatabaseUrl = parse(TEST_DATABASE_URL); 21 | export const TEST_DATABASE_NAME = 22 | parsedTestDatabaseUrl.database || "graphile_migrate_test"; 23 | export const TEST_SHADOW_DATABASE_NAME = 24 | parse(TEST_SHADOW_DATABASE_URL).database || "graphile_migrate_test_shadow"; 25 | 26 | if (!/^[a-zA-Z0-9_-]+$/.test(TEST_DATABASE_NAME)) { 27 | throw new Error("Invalid database name " + TEST_DATABASE_NAME); 28 | } 29 | 30 | export const TEST_ROOT_DATABASE_URL: string = 31 | process.env.TEST_ROOT_DATABASE_URL || "postgres:///postgres"; 32 | 33 | export const settings: Settings = { 34 | connectionString: TEST_DATABASE_URL, 35 | shadowConnectionString: TEST_SHADOW_DATABASE_URL, 36 | rootConnectionString: TEST_ROOT_DATABASE_URL, 37 | }; 38 | 39 | beforeAll(() => { 40 | // eslint-disable-next-line no-console 41 | console.log("[mock-fs callsites hack]"); // Without this, jest fails due to 'callsites' 42 | mockFs({}); 43 | }); 44 | afterAll(() => { 45 | mockFs.restore(); 46 | }); 47 | 48 | let rootPgPool: Pool | null = null; 49 | afterAll(() => { 50 | if (rootPgPool) { 51 | rootPgPool.end(); 52 | } 53 | rootPgPool = null; 54 | }); 55 | afterAll(() => { 56 | clearAllPools(); 57 | }); 58 | 59 | const parsedSettingsPromise = parseSettings(settings); 60 | const ROOT_DB = TEST_ROOT_DATABASE_URL + "?max=1&idleTimeoutMillis=1"; 61 | 62 | async function createDatabases() { 63 | const { user, password } = parsedTestDatabaseUrl; 64 | if (!user || !password) { 65 | throw new Error( 66 | "TEST_DATABASE_URL does not contain a username and password", 67 | ); 68 | } 69 | const parsedSettings = await parsedSettingsPromise; 70 | await withClient(ROOT_DB, parsedSettings, async (client) => { 71 | const result = await client.query( 72 | `select 73 | exists(select 1 from pg_database where datname = $1) as "hasMain", 74 | exists(select 1 from pg_database where datname = $2) as 
"hasShadow", 75 | exists(select 1 from pg_roles where rolname = $3) as "hasRole" 76 | `, 77 | [TEST_DATABASE_NAME, TEST_SHADOW_DATABASE_NAME, user], 78 | ); 79 | if (!result) { 80 | // eslint-disable-next-line no-console 81 | console.dir(client.query); 82 | // eslint-disable-next-line no-console 83 | console.dir(result); 84 | throw new Error("No result?!"); 85 | } 86 | const { 87 | rows: [{ hasMain, hasShadow, hasRole }], 88 | } = result; 89 | if (!hasRole) { 90 | await client.query( 91 | `CREATE ROLE ${escapeIdentifier( 92 | user, 93 | )} WITH LOGIN PASSWORD '${password.replace(/'/g, "''")}';`, 94 | ); 95 | } 96 | if (!hasMain) { 97 | await client.query( 98 | `CREATE DATABASE ${escapeIdentifier( 99 | TEST_DATABASE_NAME, 100 | )} OWNER ${escapeIdentifier(user)};`, 101 | ); 102 | } 103 | if (!hasShadow) { 104 | await client.query( 105 | `CREATE DATABASE ${escapeIdentifier( 106 | TEST_SHADOW_DATABASE_NAME, 107 | )} OWNER ${escapeIdentifier(user)};`, 108 | ); 109 | } 110 | }); 111 | } 112 | beforeAll(createDatabases); 113 | 114 | export async function resetDb() { 115 | const parsedSettings = await parsedSettingsPromise; 116 | await withClient(TEST_DATABASE_URL, parsedSettings, async (client) => { 117 | await client.query("drop schema if exists graphile_migrate cascade;"); 118 | { 119 | const { rows } = await client.query( 120 | `select relname from pg_class where relkind = 'r' and relnamespace = 'public'::regnamespace`, 121 | ); 122 | for (const row of rows) { 123 | await client.query( 124 | `drop table if exists ${escapeIdentifier(row.relname)} cascade;`, 125 | ); 126 | } 127 | } 128 | { 129 | const { rows } = await client.query( 130 | `select typname from pg_type where typtype = 'e' and typnamespace = 'public'::regnamespace`, 131 | ); 132 | for (const row of rows) { 133 | await client.query( 134 | `drop type if exists ${escapeIdentifier(row.typname)} cascade;`, 135 | ); 136 | } 137 | } 138 | }); 139 | } 140 | 141 | interface ActionSpies { 142 | getActionCalls: () => string[]; 143 | settings: Pick< 144 | Settings, 145 | | "beforeReset" 146 | | "afterReset" 147 | | "beforeAllMigrations" 148 | | "afterAllMigrations" 149 | | "beforeCurrent" 150 | | "afterCurrent" 151 | >; 152 | } 153 | export function makeActionSpies(shadow = false): ActionSpies { 154 | const mockedExec = exec as unknown as jest.Mock; 155 | if (!mockedExec.mock) { 156 | throw new Error("Must mock child_process"); 157 | } 158 | mockedExec.mockReset(); 159 | const calls: string[] = []; 160 | mockedExec.mockImplementation((_cmd, _opts, cb): any => { 161 | expect(_opts.env.PATH).toBe(process.env.PATH); 162 | expect(typeof _opts.env.GM_DBURL).toBe("string"); 163 | if (shadow) { 164 | expect(_opts.env.GM_SHADOW).toBe("1"); 165 | } else { 166 | expect(typeof _opts.env.GM_SHADOW).toBe("undefined"); 167 | } 168 | calls.push(_cmd.replace(/^touch /, "")); 169 | cb(null, { 170 | error: null, 171 | stdout: "", 172 | stderr: "", 173 | }); 174 | }); 175 | function getActionCalls() { 176 | return calls; 177 | } 178 | return { 179 | getActionCalls, 180 | settings: { 181 | beforeAllMigrations: [ 182 | { _: "command", command: "touch beforeAllMigrations" }, 183 | ], 184 | afterAllMigrations: [ 185 | { _: "command", command: "touch afterAllMigrations" }, 186 | ], 187 | beforeReset: [{ _: "command", command: "touch beforeReset" }], 188 | afterReset: [{ _: "command", command: "touch afterReset" }], 189 | beforeCurrent: [{ _: "command", command: "touch beforeCurrent" }], 190 | afterCurrent: [{ _: "command", command: "touch afterCurrent" }], 191 | }, 
192 | }; 193 | } 194 | 195 | function makePgClientMock() { 196 | return { 197 | __isMockClient: true, 198 | query: jest.fn(async () => { 199 | return { rows: [] }; 200 | }), 201 | }; 202 | } 203 | 204 | export const mockPgClient = makePgClientMock(); 205 | 206 | export function mockCurrentSqlContentOnce( 207 | parsedSettings: ParsedSettings, 208 | content: string, 209 | ) { 210 | mockFs({ 211 | [parsedSettings.migrationsFolder + "/current.sql"]: content, 212 | }); 213 | } 214 | 215 | export async function setup(parsedSettings: ParsedSettings) { 216 | const pool = new Pool({ 217 | connectionString: parsedSettings.connectionString, 218 | max: 1, 219 | }); 220 | try { 221 | const client = await pool.connect(); 222 | try { 223 | await _migrateMigrationSchema(client, parsedSettings); 224 | } finally { 225 | client.release(); 226 | } 227 | } finally { 228 | pool.end(); 229 | } 230 | } 231 | 232 | export const makeMigrations = (commitMessage?: string) => { 233 | const MIGRATION_1_TEXT = 234 | "create table if not exists foo (id serial primary key);"; 235 | const MIGRATION_1_HASH = "e00ec93314a423ee5cc68d1182ad52f16442d7df"; 236 | const MIGRATION_1_COMMITTED = `--! Previous: -\n--! Hash: sha1:${MIGRATION_1_HASH}${ 237 | commitMessage ? `\n--! Message: ${commitMessage}` : `` 238 | }\n\n${MIGRATION_1_TEXT.trim()}\n`; 239 | 240 | const MIGRATION_2_TEXT = 241 | "\n\n\ncreate table if not exists bar (id serial primary key);\n\n\n"; 242 | const MIGRATION_2_HASH = createHash("sha1") 243 | .update(`sha1:${MIGRATION_1_HASH}\n${MIGRATION_2_TEXT.trim()}` + "\n") 244 | .digest("hex"); 245 | const MIGRATION_2_COMMITTED = `--! Previous: sha1:${MIGRATION_1_HASH}\n--! Hash: sha1:${MIGRATION_2_HASH}${ 246 | commitMessage ? `\n--! Message: ${commitMessage}` : `` 247 | }\n\n${MIGRATION_2_TEXT.trim()}\n`; 248 | 249 | const MIGRATION_ENUM_TEXT = 250 | "drop type if exists user_role;\ncreate type user_role as enum ('User');"; 251 | const MIGRATION_ENUM_HASH = createHash("sha1") 252 | .update(`sha1:${MIGRATION_1_HASH}\n${MIGRATION_ENUM_TEXT.trim()}` + "\n") 253 | .digest("hex"); 254 | const MIGRATION_ENUM_COMMITTED = `--! Previous: sha1:${MIGRATION_1_HASH}\n--! Hash: sha1:${MIGRATION_ENUM_HASH}${ 255 | commitMessage ? `\n--! Message: ${commitMessage}` : `` 256 | }\n\n${MIGRATION_ENUM_TEXT.trim()}\n`; 257 | 258 | const MIGRATION_NOTRX_TEXT = 259 | "--! no-transaction\nALTER TYPE user_role ADD VALUE IF NOT EXISTS 'Admin';"; 260 | const MIGRATION_NOTRX_HASH = createHash("sha1") 261 | .update( 262 | `sha1:${MIGRATION_ENUM_HASH}\n${MIGRATION_NOTRX_TEXT.trim()}` + "\n", 263 | ) 264 | .digest("hex"); 265 | const MIGRATION_NOTRX_COMMITTED = `--! Previous: sha1:${MIGRATION_ENUM_HASH}\n--! Hash: sha1:${MIGRATION_NOTRX_HASH}${ 266 | commitMessage ? `\n--! Message: ${commitMessage}` : `` 267 | }\n\n${MIGRATION_NOTRX_TEXT.trim()}\n`; 268 | 269 | const MIGRATION_MULTIFILE_FILES = { 270 | "migrations/links/two.sql": "select 2;", 271 | "migrations/current": { 272 | "001.sql": "select 1;", 273 | "002-two.sql": mockFs.symlink({ 274 | path: "../links/two.sql", 275 | }), 276 | "003.sql": "select 3;", 277 | }, 278 | }; 279 | 280 | const MIGRATION_MULTIFILE_TEXT = `\ 281 | --! split: 001.sql 282 | select 1; 283 | 284 | --! split: 002-two.sql 285 | select 2; 286 | 287 | --! split: 003.sql 288 | select 3; 289 | `; 290 | const MIGRATION_MULTIFILE_HASH = createHash("sha1") 291 | .update( 292 | `sha1:${MIGRATION_1_HASH}\n${MIGRATION_MULTIFILE_TEXT.trim()}` + "\n", 293 | ) 294 | .digest("hex"); 295 | const MIGRATION_MULTIFILE_COMMITTED = `--! 
Previous: sha1:${MIGRATION_1_HASH}\n--! Hash: sha1:${MIGRATION_MULTIFILE_HASH}${ 296 | commitMessage ? `\n--! Message: ${commitMessage}` : `` 297 | }\n\n${MIGRATION_MULTIFILE_TEXT.trim()}\n`; 298 | return { 299 | MIGRATION_1_TEXT, 300 | MIGRATION_1_HASH, 301 | MIGRATION_1_COMMITTED, 302 | MIGRATION_2_TEXT, 303 | MIGRATION_2_HASH, 304 | MIGRATION_2_COMMITTED, 305 | MIGRATION_ENUM_TEXT, 306 | MIGRATION_ENUM_HASH, 307 | MIGRATION_ENUM_COMMITTED, 308 | MIGRATION_NOTRX_TEXT, 309 | MIGRATION_NOTRX_HASH, 310 | MIGRATION_NOTRX_COMMITTED, 311 | MIGRATION_MULTIFILE_TEXT, 312 | MIGRATION_MULTIFILE_HASH, 313 | MIGRATION_MULTIFILE_COMMITTED, 314 | MIGRATION_MULTIFILE_FILES, 315 | }; 316 | }; 317 | -------------------------------------------------------------------------------- /src/settings.ts: -------------------------------------------------------------------------------- 1 | import { Logger } from "@graphile/logger"; 2 | import { parse } from "pg-connection-string"; 3 | import * as querystring from "querystring"; 4 | import { format as formatURL, parse as parseURL } from "url"; 5 | 6 | import { 7 | ActionSpec, 8 | CommandActionSpec, 9 | makeValidateActionCallback, 10 | SqlActionSpec, 11 | } from "./actions"; 12 | import { defaultLogger } from "./logger"; 13 | 14 | export type Actions = string | Array<string | ActionSpec>; 15 | 16 | export function isActionSpec(o: unknown): o is ActionSpec { 17 | if (!(typeof o === "object" && o && "_" in o && typeof o["_"] === "string")) { 18 | return false; 19 | } 20 | 21 | // After here it's definitely an action spec; but we should still validate the 22 | // other properties. 23 | 24 | if ("shadow" in o && typeof o["shadow"] !== "boolean") { 25 | throw new Error( 26 | `'${o["_"]}' action has 'shadow' property of type '${typeof o[ 27 | "shadow" 28 | ]}'; expected 'boolean' (or not set)`, 29 | ); 30 | } 31 | 32 | return true; 33 | } 34 | 35 | export function isSqlActionSpec(o: unknown): o is SqlActionSpec { 36 | if (!isActionSpec(o) || o._ !== "sql") { 37 | return false; 38 | } 39 | if (typeof o["file"] !== "string") { 40 | throw new Error("SQL command requires 'file' property to be set"); 41 | } 42 | return true; 43 | } 44 | 45 | export function isCommandActionSpec(o: unknown): o is CommandActionSpec { 46 | if (!isActionSpec(o) || o._ !== "command") { 47 | return false; 48 | } 49 | 50 | // Validations 51 | if (typeof o["command"] !== "string") { 52 | throw new Error( 53 | `Command action has 'command' property of type '${typeof o[ 54 | "command" 55 | ]}'; expected 'string'`, 56 | ); 57 | } 58 | 59 | return true; 60 | } 61 | 62 | /** 63 | * This type is not trusted; to use the values within it, it must be 64 | * parsed/validated into ParsedSettings. 65 | */ 66 | export interface Settings { 67 | connectionString?: string; 68 | shadowConnectionString?: string; 69 | rootConnectionString?: string; 70 | databaseOwner?: string; 71 | migrationsFolder?: string; 72 | manageGraphileMigrateSchema?: boolean; 73 | pgSettings?: { 74 | [key: string]: string; 75 | }; 76 | placeholders?: { 77 | [key: string]: string; 78 | }; 79 | beforeReset?: Actions; 80 | afterReset?: Actions; 81 | beforeAllMigrations?: Actions; 82 | afterAllMigrations?: Actions; 83 | beforeCurrent?: Actions; 84 | afterCurrent?: Actions; 85 | blankMigrationContent?: string; 86 | logger?: Logger; 87 | } 88 | 89 | // NOTE: only override values that differ (e.g. 
changing non-nullability) 90 | export interface ParsedSettings extends Settings { 91 | connectionString: string; 92 | rootConnectionString: string; 93 | databaseOwner: string; 94 | databaseName: string; 95 | shadowDatabaseName?: string; 96 | migrationsFolder: string; 97 | beforeReset: ActionSpec[]; 98 | afterReset: ActionSpec[]; 99 | beforeAllMigrations: ActionSpec[]; 100 | afterAllMigrations: ActionSpec[]; 101 | beforeCurrent: ActionSpec[]; 102 | afterCurrent: ActionSpec[]; 103 | blankMigrationContent: string; 104 | logger: Logger; 105 | } 106 | 107 | export async function parseSettings( 108 | settings: Settings, 109 | requireShadow = false, 110 | ): Promise<ParsedSettings> { 111 | if (!settings) { 112 | throw new Error("Expected settings object"); 113 | } 114 | if (typeof settings !== "object") { 115 | throw new Error("Expected settings object, received " + typeof settings); 116 | } 117 | const errors: Array<string> = []; 118 | const keysToCheck = Object.keys(settings); 119 | const checkedKeys: Array<string> = []; 120 | async function check<TKey extends keyof Settings, T>( 121 | key: TKey, 122 | callback: (value: unknown) => T | Promise<T>, 123 | ): Promise<T> { 124 | checkedKeys.push(key); 125 | const value = settings[key]; 126 | try { 127 | return await callback(value); 128 | } catch (e) { 129 | errors.push( 130 | `Setting '${key}': ${e instanceof Error ? e.message : String(e)}`, 131 | ); 132 | return void 0 as never; 133 | } 134 | } 135 | 136 | const connectionString = await check( 137 | "connectionString", 138 | (rawConnectionString = process.env.DATABASE_URL): string => { 139 | if (typeof rawConnectionString !== "string") { 140 | throw new Error( 141 | "Expected a string, or for DATABASE_URL envvar to be set", 142 | ); 143 | } 144 | return rawConnectionString; 145 | }, 146 | ); 147 | 148 | const logger = await check("logger", (rawLogger = defaultLogger): Logger => { 149 | if (!(rawLogger instanceof Logger)) { 150 | throw new Error( 151 | "Expected 'logger' to be a @graphile/logger Logger instance", 152 | ); 153 | } 154 | return rawLogger; 155 | }); 156 | 157 | const rootConnectionString = await check( 158 | "rootConnectionString", 159 | ( 160 | rawRootConnectionString = process.env.ROOT_DATABASE_URL || 161 | "postgres:///template1", 162 | ): string => { 163 | if (typeof rawRootConnectionString !== "string") { 164 | throw new Error( 165 | "Expected a string, or for ROOT_DATABASE_URL envvar to be set", 166 | ); 167 | } 168 | return rawRootConnectionString; 169 | }, 170 | ); 171 | 172 | const migrationsFolder = await check( 173 | "migrationsFolder", 174 | (rawMigrationsFolder = `${process.cwd()}/migrations`): string => { 175 | if (typeof rawMigrationsFolder !== "string") { 176 | throw new Error("Expected a string"); 177 | } 178 | return rawMigrationsFolder; 179 | }, 180 | ); 181 | 182 | const blankMigrationContent = await check( 183 | "blankMigrationContent", 184 | (rawBlankMigrationContent = "-- Enter migration here\n"): string => { 185 | if (typeof rawBlankMigrationContent !== "string") { 186 | throw new Error("Expected a string"); 187 | } 188 | return rawBlankMigrationContent; 189 | }, 190 | ); 191 | 192 | const { user, database: databaseName } = parse(connectionString || ""); 193 | const databaseOwner = await check( 194 | "databaseOwner", 195 | (rawDatabaseOwner = user || databaseName) => { 196 | if (typeof rawDatabaseOwner !== "string") { 197 | throw new Error( 198 | "Expected a string or for user or database name to be specified in connectionString", 199 | ); 200 | } 201 | return rawDatabaseOwner; 202 | }, 203 | ); 204 | 205 | const 
shadowConnectionString = await check( 206 | "shadowConnectionString", 207 | (rawShadowConnectionString = process.env.SHADOW_DATABASE_URL) => { 208 | if (requireShadow) { 209 | if (typeof rawShadowConnectionString !== "string") { 210 | throw new Error( 211 | "Expected `shadowConnectionString` to be a string, or for SHADOW_DATABASE_URL to be set", 212 | ); 213 | } 214 | return rawShadowConnectionString; 215 | } 216 | return null; 217 | }, 218 | ); 219 | const { database: shadowDatabaseName } = parse(shadowConnectionString || ""); 220 | 221 | await check("pgSettings", (pgSettings) => { 222 | if (pgSettings) { 223 | if (typeof pgSettings !== "object" || pgSettings === null) { 224 | throw new Error("Expected settings.pgSettings to be an object"); 225 | } 226 | const badKeys = Object.keys(pgSettings).filter((key) => { 227 | const value = (pgSettings as Record<string, unknown>)[key]; 228 | return typeof value !== "string" && typeof value !== "number"; 229 | }); 230 | if (badKeys.length) { 231 | throw new Error( 232 | `Invalid pgSettings for keys '${badKeys.join( 233 | ", ", 234 | )}' - expected string` /* Number is acceptable, but prefer string. Boolean not acceptable. */, 235 | ); 236 | } 237 | } 238 | }); 239 | 240 | const placeholders = await check( 241 | "placeholders", 242 | (rawPlaceholders): { [key: string]: string } | undefined => { 243 | if (rawPlaceholders) { 244 | if (typeof rawPlaceholders !== "object" || rawPlaceholders === null) { 245 | throw new Error("Expected settings.placeholders to be an object"); 246 | } 247 | const badKeys = Object.keys(rawPlaceholders).filter( 248 | (key) => !/^:[A-Z][0-9A-Z_]+$/.exec(key), 249 | ); 250 | if (badKeys.length) { 251 | throw new Error( 252 | `Invalid placeholders keys '${badKeys.join( 253 | ", ", 254 | )}' - expected to follow format ':ABCD_EFG_HIJ'`, 255 | ); 256 | } 257 | const badValueKeys = Object.keys(rawPlaceholders).filter((key) => { 258 | const value = (rawPlaceholders as Record<string, unknown>)[key]; 259 | return typeof value !== "string"; 260 | }); 261 | if (badValueKeys.length) { 262 | throw new Error( 263 | `Invalid placeholders values for keys '${badValueKeys.join( 264 | ", ", 265 | )}' - expected string`, 266 | ); 267 | } 268 | return Object.entries(rawPlaceholders).reduce( 269 | ( 270 | memo: { [key: string]: string }, 271 | [key, value], 272 | ): { [key: string]: string } => { 273 | if (value === "!ENV") { 274 | const envvarKey = key.substring(1); 275 | const envvar = process.env[envvarKey]; 276 | if (!envvar) { 277 | throw new Error( 278 | `Could not find environmental variable '${envvarKey}'`, 279 | ); 280 | } 281 | memo[key] = envvar; 282 | } 283 | return memo; 284 | }, 285 | { ...rawPlaceholders }, 286 | ); 287 | } 288 | return undefined; 289 | }, 290 | ); 291 | 292 | const validateAction = makeValidateActionCallback(logger); 293 | const rootValidateAction = makeValidateActionCallback(logger, true); 294 | 295 | const beforeReset = await check("beforeReset", rootValidateAction); 296 | const afterReset = await check("afterReset", rootValidateAction); 297 | const beforeAllMigrations = await check( 298 | "beforeAllMigrations", 299 | validateAction, 300 | ); 301 | const afterAllMigrations = await check("afterAllMigrations", validateAction); 302 | const beforeCurrent = await check("beforeCurrent", validateAction); 303 | const afterCurrent = await check("afterCurrent", validateAction); 304 | 305 | const manageGraphileMigrateSchema = await check( 306 | "manageGraphileMigrateSchema", 307 | (mgms) => { 308 | const type = typeof mgms; 309 | if (type !== "undefined" && 
type !== "boolean") { 310 | throw new Error( 311 | `Expected boolean, received '${ 312 | type === "object" && !mgms ? "null" : type 313 | }'`, 314 | ); 315 | } 316 | return mgms !== false; 317 | }, 318 | ); 319 | 320 | /******/ 321 | 322 | const uncheckedKeys = keysToCheck 323 | .filter((key) => !checkedKeys.includes(key)) 324 | .filter((key) => !key.startsWith("//")); 325 | if (uncheckedKeys.length) { 326 | errors.push( 327 | `The following config settings were not understood: '${uncheckedKeys.join( 328 | "', '", 329 | )}'`, 330 | ); 331 | } 332 | 333 | if (connectionString) { 334 | if (!databaseOwner) { 335 | errors.push( 336 | "Could not determine the database owner, please add the 'databaseOwner' setting.", 337 | ); 338 | } 339 | 340 | if (!databaseName) { 341 | errors.push( 342 | "Could not determine the database name, please ensure connectionString includes the database name.", 343 | ); 344 | } 345 | 346 | if ( 347 | connectionString === rootConnectionString || 348 | (requireShadow && connectionString === shadowConnectionString) 349 | ) { 350 | errors.push( 351 | "connectionString cannot be the same value as rootConnectionString or shadowConnectionString.", 352 | ); 353 | } 354 | } 355 | 356 | if (requireShadow && !shadowDatabaseName) { 357 | errors.push( 358 | "Could not determine the shadow database name, please ensure shadowConnectionString includes the database name.", 359 | ); 360 | } 361 | 362 | if (errors.length) { 363 | throw new Error( 364 | `Errors occurred during settings validation:\n- ${errors.join("\n- ")}`, 365 | ); 366 | } 367 | if (!databaseName) { 368 | // This is just to appease TypeScript, this should be caught above. 369 | throw new Error("Could not determine databaseName"); 370 | } 371 | 372 | return { 373 | ...settings, 374 | beforeReset, 375 | beforeAllMigrations, 376 | beforeCurrent, 377 | afterReset, 378 | afterAllMigrations, 379 | afterCurrent, 380 | rootConnectionString, 381 | connectionString, 382 | manageGraphileMigrateSchema, 383 | databaseOwner, 384 | migrationsFolder, 385 | databaseName, 386 | shadowConnectionString: shadowConnectionString 387 | ? shadowConnectionString 388 | : void 0, 389 | shadowDatabaseName: shadowDatabaseName ? shadowDatabaseName : void 0, 390 | placeholders, 391 | blankMigrationContent, 392 | logger, 393 | }; 394 | } 395 | 396 | /** 397 | * Overrides the databaseName in rootConnectionString and returns the resulting 398 | * connection string. 399 | */ 400 | export function makeRootDatabaseConnectionString( 401 | parsedSettings: ParsedSettings, 402 | databaseName: string, 403 | ): string { 404 | const { rootConnectionString } = parsedSettings; 405 | if (!rootConnectionString) { 406 | throw new Error( 407 | "Cannot execute SQL as root since rootConnectionString / ROOT_DATABASE_URL is not specified", 408 | ); 409 | } 410 | const parsed = parseURL(rootConnectionString, true); 411 | const isJustADatabaseName = !parsed.protocol; 412 | if (parsed.protocol === "socket:") { 413 | parsed.query.db = databaseName; 414 | const query = querystring.stringify(parsed.query); 415 | 416 | if (parsed.auth) { 417 | return `socket://${parsed.auth}@${parsed.pathname}?${query}`; 418 | } else { 419 | return `socket:${parsed.pathname}?${query}`; 420 | } 421 | } else if (isJustADatabaseName) { 422 | return databaseName; 423 | } else { 424 | parsed.pathname = `/${databaseName}`; 425 | return formatURL(parsed); 426 | } 427 | } 428 | --------------------------------------------------------------------------------
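To close the loop on `src/settings.ts`, here is a minimal usage sketch (not part of the repository: the `main` wrapper, the import path, and the connection strings are illustrative placeholders). It shows `parseSettings` deriving `databaseOwner` and `databaseName` from the connection string, and `makeRootDatabaseConnectionString` swapping a different database name into the root connection string:

```ts
import { makeRootDatabaseConnectionString, parseSettings } from "./src/settings";

async function main(): Promise<void> {
  // parseSettings collects every validation problem and throws a single
  // "Errors occurred during settings validation" error if any are found.
  const parsedSettings = await parseSettings({
    connectionString: "postgres://appuser:secret@localhost/app",
    rootConnectionString: "postgres://postgres@localhost/postgres",
  });

  console.log(parsedSettings.databaseOwner); // "appuser" (from the URL's user)
  console.log(parsedSettings.databaseName); // "app" (from the URL's path)

  // Reuse the root credentials against a different database, e.g. when
  // creating/dropping databases as __tests__/helpers.ts does:
  const rootUrl = makeRootDatabaseConnectionString(parsedSettings, "app_shadow");
  console.log(rootUrl); // "postgres://postgres@localhost/app_shadow"
}

main().catch((error) => {
  console.error(error);
  process.exit(1);
});
```

Note that `parseSettings` rejects a `connectionString` equal to `rootConnectionString`, which is why the sketch uses two distinct URLs.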