├── .github ├── FUNDING.yml ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md └── workflows │ ├── feature.yaml │ └── main.yaml ├── .gitignore ├── .releaserc ├── LICENSE ├── README.md ├── cspell.yaml ├── eslint.config.cjs ├── package-lock.json ├── package.json ├── src ├── index.ts ├── parsePgDump.test.ts ├── parsePgDump.ts └── scopeSchemaObject.ts ├── tsconfig.build.json ├── tsconfig.json └── vitest.config.ts /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: gajus 2 | patreon: gajus 3 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a bug report to help us improve Slonik 4 | title: '' 5 | labels: bug 6 | assignees: '' 7 | 8 | --- 9 | 10 | 11 | 12 | ## Expected Behavior 13 | 14 | 15 | ## Current Behavior 16 | 17 | 18 | ## Possible Solution 19 | 20 | 21 | ## Steps to Reproduce 22 | 23 | 24 | 25 | 1. 26 | 2. 27 | 3. 28 | 4. 29 | 30 | ## Logs 31 | 32 | 33 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Create an enhancement request to help us improve Slonik 4 | title: '' 5 | labels: enhancement 6 | assignees: '' 7 | 8 | --- 9 | 10 | 11 | 12 | ## Desired Behavior 13 | 14 | 15 | ## Motivation 16 | 17 | 18 | ## Implementation 19 | 20 | -------------------------------------------------------------------------------- /.github/workflows/feature.yaml: -------------------------------------------------------------------------------- 1 | jobs: 2 | lint: 3 | environment: release 4 | name: Lint 5 | runs-on: ubuntu-latest 6 | steps: 7 | - name: setup repository 8 | uses: actions/checkout@v2 9 | with: 10 | fetch-depth: 0 11 | - name: setup node.js 12 | uses: actions/setup-node@v2 13 | with: 14 | cache: 'npm' 15 | node-version: '22' 16 | - run: npm ci 17 | - run: npm run lint:eslint 18 | - run: npm run lint:tsc 19 | - run: npm run lint:cspell 20 | timeout-minutes: 10 21 | test: 22 | environment: release 23 | name: Test 24 | needs: 25 | - lint 26 | runs-on: ${{ matrix.os }} 27 | steps: 28 | - name: setup repository 29 | uses: actions/checkout@v2 30 | with: 31 | fetch-depth: 0 32 | - name: setup node.js 33 | uses: actions/setup-node@v2 34 | with: 35 | cache: 'npm' 36 | node-version: ${{ matrix.version }} 37 | - run: npm ci 38 | - run: npm run test:vitest 39 | - run: npm run build 40 | strategy: 41 | matrix: 42 | os: 43 | - ubuntu-latest 44 | version: 45 | - 22 46 | timeout-minutes: 10 47 | name: Test and build 48 | on: 49 | pull_request: 50 | branches: 51 | - main 52 | types: 53 | - opened 54 | - synchronize 55 | - reopened 56 | - ready_for_review -------------------------------------------------------------------------------- /.github/workflows/main.yaml: -------------------------------------------------------------------------------- 1 | jobs: 2 | test: 3 | environment: release 4 | name: Test 5 | runs-on: ubuntu-latest 6 | steps: 7 | - name: setup repository 8 | uses: actions/checkout@v2 9 | with: 10 | fetch-depth: 0 11 | - name: setup node.js 12 | uses: actions/setup-node@v2 13 | with: 14 | cache: 'npm' 15 | node-version: '22' 16 | - run: npm ci 17 | - run: npm run lint:eslint 18 | - run: npm run lint:tsc 19 | - run: npm run 
lint:cspell 20 | - run: npm run test:vitest 21 | - run: npm run build 22 | - env: 23 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 24 | NPM_TOKEN: ${{ secrets.NPM_TOKEN }} 25 | run: npx semantic-release 26 | name: Test, build and release 27 | on: 28 | push: 29 | branches: 30 | - main -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | dist 2 | node_modules 3 | *.log 4 | .* 5 | !.github 6 | !.gitignore 7 | !.README 8 | !.releaserc -------------------------------------------------------------------------------- /.releaserc: -------------------------------------------------------------------------------- 1 | { 2 | "branches": [ 3 | "main" 4 | ], 5 | "plugins": [ 6 | "@semantic-release/commit-analyzer", 7 | "@semantic-release/github", 8 | "@semantic-release/npm" 9 | ] 10 | } -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2024, Gajus Kuizinas (https://gajus.com/) 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are met: 6 | * Redistributions of source code must retain the above copyright 7 | notice, this list of conditions and the following disclaimer. 8 | * Redistributions in binary form must reproduce the above copyright 9 | notice, this list of conditions and the following disclaimer in the 10 | documentation and/or other materials provided with the distribution. 11 | * Neither the name of the Gajus Kuizinas (https://gajus.com/) nor the 12 | names of its contributors may be used to endorse or promote products 13 | derived from this software without specific prior written permission. 14 | 15 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 16 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 17 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 18 | DISCLAIMED. IN NO EVENT SHALL GAJUS KUIZINAS BE LIABLE FOR ANY 19 | DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 20 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 21 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND 22 | ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 23 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 24 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # pg-dump-parser 2 | 3 | Parses PostgreSQL dump files into an array of schema objects. 4 | 5 | ## Motivation 6 | 7 | The idea behind `pg-dump-parser` is to split the dump file into a series of files. Each file is a top-level schema object (e.g. a table, view, etc.). The same file will contain all the schema objects associated with the top-level object (e.g. comments, indexes, etc.). This makes having the database schema as a reference easier and allows for better checking into version control. 
The desired end result is something like this (see [recipes](#recipes) for a script that does this):

```
generated-schema
├── extensions
│   ├── citext.sql
│   └── vector.sql
├── functions
│   ├── public.add_two_numbers.sql
│   └── public.notify_foo_insert.sql
├── materialized-views
│   ├── public.project_total_earnings.sql
│   └── public.user_account_total_earnings.sql
├── tables
│   ├── public.accounting_platform_account.sql
│   └── public.workspace_workspace_group_history.sql
└── types
    ├── public.accounting_platform.sql
    └── public.workspace_type.sql
```

where each file contains the SQL for the schema object.

## Usage

```ts
import { readFile } from 'node:fs/promises';
import { parsePgDump } from 'pg-dump-parser';

const dump = await readFile('dump.sql', 'utf8');

const schemaObjects = parsePgDump(dump);

for (const schemaObject of schemaObjects) {
  console.log(schemaObject);
}
```

> [!NOTE]
> The expected input is a PostgreSQL dump file created with `pg_dump --schema-only`.

The output is an array of objects, each representing a schema object in the dump file and the corresponding header, e.g.,

```json
[
  {
    "header": {
      "Name": "bar",
      "Owner": "postgres",
      "Schema": "public",
      "Type": "TABLE"
    },
    "sql": "CREATE TABLE public.bar (\n id integer NOT NULL,\n uid text NOT NULL,\n foo_id integer\n);"
  },
  {
    "header": {
      "Name": "bar",
      "Owner": "postgres",
      "Schema": "public",
      "Type": "TABLE"
    },
    "sql": "ALTER TABLE public.bar OWNER TO postgres;"
  },
  {
    "header": {
      "Name": "bar_id_seq",
      "Owner": "postgres",
      "Schema": "public",
      "Type": "SEQUENCE"
    },
    "sql": "ALTER TABLE public.bar ALTER COLUMN id ADD GENERATED ALWAYS AS IDENTITY (\n SEQUENCE NAME public.bar_id_seq\n START WITH 1\n INCREMENT BY 1\n NO MINVALUE\n NO MAXVALUE\n CACHE 1\n);"
  }
]
```

### Scoping schema objects

`scopeSchemaObject` is an opinionated utility that assigns a schema object to a scope.

```ts
import { readFile } from 'node:fs/promises';
import multiline from 'multiline-ts';
import { parsePgDump, scopeSchemaObject } from 'pg-dump-parser';

const dump = await readFile('dump.sql', 'utf8');

const schemaObjects = parsePgDump(dump);

const schemaObjectScope = scopeSchemaObject(
  schemaObjects,
  {
    header: {
      Name: 'TABLE foo',
      Owner: 'postgres',
      Schema: 'public',
      Type: 'COMMENT',
    },
    sql: multiline`
      COMMENT ON TABLE public.foo IS 'Table comment x';
    `,
  },
);
```

`schemaObjectScope` is now an object that describes the owner of the object, e.g.,

```ts
{
  name: 'foo',
  schema: 'public',
  type: 'TABLE',
}
```

> [!WARNING]
> The implementation behind `scopeSchemaObject` is _super_ scrappy. It relies on a lot of pattern matching. Use at your own risk.

## Recipes

I intentionally did not include a script for producing a diff, because a lot of it (how you dump the schema, how you group the schema objects, etc.) is subjective. However, here is a version that we use in production.
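A minimal sketch of how you might run the script below, assuming you save it as `generate-schema.ts` and execute it with `tsx` (the file name and runner are illustrative assumptions, not part of this package; `pg_dump` must be available on your `PATH`):

```bash
# Dump the schema of the target database and write one .sql file per schema object scope.
npx tsx generate-schema.ts \
  --postgres-dsn 'postgres://postgres@localhost:5432/my_database' \
  --output-path ./generated-schema
```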
```ts
import fs from 'node:fs/promises';
import path from 'node:path';
import {
  parsePgDump,
  SchemaObjectScope,
  scopeSchemaObject,
} from 'pg-dump-parser';
import { default as yargs } from 'yargs';
import { $ } from 'zx';

const formatFileName = (schemaObjectScope: SchemaObjectScope) => {
  const name = schemaObjectScope.name.startsWith('"')
    ? schemaObjectScope.name.slice(1, -1)
    : schemaObjectScope.name;

  if (schemaObjectScope.schema) {
    return `${schemaObjectScope.schema}.${name}.sql`;
  }

  return `${name}.sql`;
};

const argv = await yargs(process.argv.slice(2))
  .options({
    'output-path': {
      demand: true,
      type: 'string',
    },
    'postgres-dsn': {
      demand: true,
      type: 'string',
    },
  })
  .strict()
  .parse();

const dump = await $`pg_dump --schema-only ${argv['postgres-dsn']}`;

const schemaObjects = parsePgDump(dump.stdout);

try {
  await fs.rmdir(argv['output-path'], {
    recursive: true,
  });
} catch {
  // ignore
}

await fs.mkdir(argv['output-path']);

const files: Record<string, string[]> = {};

for (const schemaObject of schemaObjects) {
  const schemaObjectScope = scopeSchemaObject(schemaObjects, schemaObject);

  if (!schemaObjectScope) {
    continue;
  }

  const file = path.join(
    argv['output-path'],
    // MATERIALIZED VIEW => materialized-views
    schemaObjectScope.type.toLowerCase().replace(' ', '-') + 's',
    formatFileName(schemaObjectScope),
  );

  files[file] ??= [];
  files[file].push(schemaObject.sql);
}

for (const [filePath, content] of Object.entries(files)) {
  const directory = path.dirname(filePath);

  await fs.mkdir(directory, { recursive: true });

  await fs.appendFile(filePath, content.join('\n\n') + '\n');
}
```

## Alternatives

* https://github.com/omniti-labs/pg_extractor
  * Prior to writing pg-dump-parser, I used this tool to extract the schema. It works well, but it's slow. It was taking a whole minute to parse our dump file. We needed something that implements equivalent functionality, but is faster. `pg-dump-parser` processes the same dump in a few seconds.
-------------------------------------------------------------------------------- /cspell.yaml: -------------------------------------------------------------------------------- 1 | ignoreRegExpList: 2 | - /.*[0-9].*/ 3 | language: en 4 | version: '0.2' 5 | words: 6 | - citext 7 | - corge 8 | - fkey 9 | - gajus 10 | - kuizinas 11 | - MAXVALUE 12 | - MINVALUE 13 | - nextval 14 | - pgcrypto 15 | - pkey 16 | - plpgsql 17 | - regclass 18 | - SFUNC 19 | - STYPE 20 | - tablespace 21 | - vitest 22 | - xmloption -------------------------------------------------------------------------------- /eslint.config.cjs: -------------------------------------------------------------------------------- 1 | module.exports = [ 2 | ...require('eslint-config-canonical/configurations/auto'), 3 | { 4 | ignores: ['**/dist/', '**/package-lock.json'], 5 | }, 6 | ]; 7 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "author": { 3 | "email": "gajus@gajus.com", 4 | "name": "Gajus Kuizinas", 5 | "url": "http://gajus.com" 6 | }, 7 | "dependencies": { 8 | "multiline-ts": "^4.0.1", 9 | "zod": "^3.23.8" 10 | }, 11 | "description": "Parses PostgreSQL dump files into an array of schema objects.", 12 | "devDependencies": { 13 | "@semantic-release/commit-analyzer": "^9.0.2", 14 | "@semantic-release/github": "^8.0.7", 15 | "@semantic-release/npm": "^9.0.2", 16 | "@types/node": "^18.15.3", 17 | "cspell": "^6.30.2", 18 | "eslint": "^9.14.0", 19 | "eslint-config-canonical": "^44.3.28", 20 | "semantic-release": "^20.1.3", 21 | "typescript": "^5.0.2", 22 | "vitest": "^0.29.7" 23 | }, 24 | "engines": { 25 | "node": ">=22" 26 | }, 27 | "exports": { 28 | ".": { 29 | "import": "./dist/index.js", 30 | "types": "./dist/index.d.ts" 31 | } 32 | }, 33 | "files": [ 34 | "./src", 35 | "./dist" 36 | ], 37 | "keywords": [ 38 | "duration", 39 | "human", 40 | "format" 41 | ], 42 | "license": "BSD-3-Clause", 43 | "main": "./dist/index.js", 44 | "name": "pg-dump-parser", 45 | "repository": { 46 | "type": "git", 47 | "url": "https://github.com/gajus/pg-dump-parser" 48 | }, 49 | "scripts": { 50 | "build": "rm -fr ./dist && tsc --project tsconfig.build.json", 51 | "dev": "tsc --watch", 52 | "lint": "npm run lint:tsc && npm run lint:eslint && npm run lint:cspell", 53 | "lint:cspell": "cspell './**/*.{ts,tsx}' --no-progress --gitignore", 54 | "lint:eslint": "eslint --color .", 55 | "lint:tsc": "tsc", 56 | "test:vitest": "vitest --run --passWithNoTests" 57 | }, 58 | "types": "./dist/index.d.ts", 59 | "version": "1.0.0" 60 | } -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | export { 2 | type AttributedHeader, 3 | type Header, 4 | parsePgDump, 5 | type SchemaObject, 6 | type TitleHeader, 7 | } from './parsePgDump'; 8 | export { type SchemaObjectScope, scopeSchemaObject } from './scopeSchemaObject'; 9 | -------------------------------------------------------------------------------- /src/parsePgDump.test.ts: -------------------------------------------------------------------------------- 1 | import { parsePgDump, type SchemaObject } from './parsePgDump'; 2 | import { type SchemaObjectScope, scopeSchemaObject } from './scopeSchemaObject'; 3 | import multiline from 'multiline-ts'; 4 | import { expect, test } from 'vitest'; 5 | 6 | const dump = multiline` 7 | -- 8 | -- PostgreSQL database dump 9 | 
-- 10 | 11 | -- Dumped from database version 16.2 (Debian 16.2-1.pgdg120+2) 12 | -- Dumped by pg_dump version 16.2 (Debian 16.2-1.pgdg120+2) 13 | 14 | SET statement_timeout = 0; 15 | SET lock_timeout = 0; 16 | SET idle_in_transaction_session_timeout = 0; 17 | SET client_encoding = 'UTF8'; 18 | SET standard_conforming_strings = on; 19 | SELECT pg_catalog.set_config('search_path', '', false); 20 | SET check_function_bodies = false; 21 | SET xmloption = content; 22 | SET client_min_messages = warning; 23 | SET row_security = off; 24 | 25 | -- 26 | -- Name: quux; Type: SCHEMA; Schema: -; Owner: postgres 27 | -- 28 | 29 | CREATE SCHEMA quux; 30 | 31 | 32 | ALTER SCHEMA quux OWNER TO postgres; 33 | 34 | -- 35 | -- Name: pgcrypto; Type: EXTENSION; Schema: -; Owner: - 36 | -- 37 | 38 | CREATE EXTENSION IF NOT EXISTS pgcrypto WITH SCHEMA public; 39 | 40 | 41 | -- 42 | -- Name: EXTENSION pgcrypto; Type: COMMENT; Schema: -; Owner: 43 | -- 44 | 45 | COMMENT ON EXTENSION pgcrypto IS 'cryptographic functions'; 46 | 47 | 48 | -- 49 | -- Name: status; Type: TYPE; Schema: public; Owner: postgres 50 | -- 51 | 52 | CREATE TYPE public.status AS ENUM ( 53 | 'ACTIVE', 54 | 'INACTIVE' 55 | ); 56 | 57 | 58 | ALTER TYPE public.status OWNER TO postgres; 59 | 60 | -- 61 | -- Name: TYPE status; Type: COMMENT; Schema: public; Owner: postgres 62 | -- 63 | 64 | COMMENT ON TYPE public.status IS 'Type comment x'; 65 | 66 | 67 | -- 68 | -- Name: CAST (text AS integer); Type: CAST; Schema: -; Owner: - 69 | -- 70 | 71 | CREATE CAST (text AS integer) WITH INOUT AS IMPLICIT; 72 | 73 | 74 | -- 75 | -- Name: add_two_numbers(integer, integer); Type: FUNCTION; Schema: public; Owner: postgres 76 | -- 77 | 78 | CREATE FUNCTION public.add_two_numbers(a integer, b integer) RETURNS integer 79 | LANGUAGE plpgsql 80 | AS $$ 81 | BEGIN 82 | RETURN a + b; 83 | END; 84 | $$; 85 | 86 | 87 | ALTER FUNCTION public.add_two_numbers(a integer, b integer) OWNER TO postgres; 88 | 89 | -- 90 | -- Name: FUNCTION add_two_numbers(a integer, b integer); Type: COMMENT; Schema: public; Owner: postgres 91 | -- 92 | 93 | COMMENT ON FUNCTION public.add_two_numbers(a integer, b integer) IS 'Function comment x'; 94 | 95 | 96 | -- 97 | -- Name: notify_foo_insert(); Type: FUNCTION; Schema: public; Owner: postgres 98 | -- 99 | 100 | CREATE FUNCTION public.notify_foo_insert() RETURNS trigger 101 | LANGUAGE plpgsql 102 | AS $$ 103 | BEGIN 104 | RAISE NOTICE 'A new row was inserted into the foo table with id: %', NEW.id; 105 | RETURN NEW; 106 | END; 107 | $$; 108 | 109 | 110 | ALTER FUNCTION public.notify_foo_insert() OWNER TO postgres; 111 | 112 | -- 113 | -- Name: say_hello(character varying); Type: PROCEDURE; Schema: public; Owner: postgres 114 | -- 115 | 116 | CREATE PROCEDURE public.say_hello(IN name_param character varying) 117 | LANGUAGE plpgsql 118 | AS $$ 119 | BEGIN 120 | RAISE NOTICE 'Hello, %!', name_param; 121 | END; 122 | $$; 123 | 124 | 125 | ALTER PROCEDURE public.say_hello(IN name_param character varying) OWNER TO postgres; 126 | 127 | -- 128 | -- Name: PROCEDURE say_hello(IN name_param character varying); Type: COMMENT; Schema: public; Owner: postgres 129 | -- 130 | 131 | COMMENT ON PROCEDURE public.say_hello(IN name_param character varying) IS 'Procedure comment x'; 132 | 133 | 134 | -- 135 | -- Name: my_sum(integer); Type: AGGREGATE; Schema: public; Owner: postgres 136 | -- 137 | 138 | CREATE AGGREGATE public.my_sum(integer) ( 139 | SFUNC = public.add_two_numbers, 140 | STYPE = integer 141 | ); 142 | 143 | 144 | ALTER AGGREGATE 
public.my_sum(integer) OWNER TO postgres; 145 | 146 | -- 147 | -- Name: AGGREGATE my_sum(integer); Type: COMMENT; Schema: public; Owner: postgres 148 | -- 149 | 150 | COMMENT ON AGGREGATE public.my_sum(integer) IS 'Aggregate comment x'; 151 | 152 | 153 | SET default_tablespace = ''; 154 | 155 | SET default_table_access_method = heap; 156 | 157 | -- 158 | -- Name: bar; Type: TABLE; Schema: public; Owner: postgres 159 | -- 160 | 161 | CREATE TABLE public.bar ( 162 | id integer NOT NULL, 163 | uid text NOT NULL, 164 | foo_id integer 165 | ); 166 | 167 | 168 | ALTER TABLE public.bar OWNER TO postgres; 169 | 170 | -- 171 | -- Name: bar_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres 172 | -- 173 | 174 | ALTER TABLE public.bar ALTER COLUMN id ADD GENERATED ALWAYS AS IDENTITY ( 175 | SEQUENCE NAME public.bar_id_seq 176 | START WITH 1 177 | INCREMENT BY 1 178 | NO MINVALUE 179 | NO MAXVALUE 180 | CACHE 1 181 | ); 182 | 183 | 184 | -- 185 | -- Name: baz; Type: VIEW; Schema: public; Owner: postgres 186 | -- 187 | 188 | CREATE VIEW public.baz AS 189 | SELECT id, 190 | uid AS name 191 | FROM public.bar; 192 | 193 | 194 | ALTER VIEW public.baz OWNER TO postgres; 195 | 196 | -- 197 | -- Name: VIEW baz; Type: COMMENT; Schema: public; Owner: postgres 198 | -- 199 | 200 | COMMENT ON VIEW public.baz IS 'View comment x'; 201 | 202 | 203 | -- 204 | -- Name: COLUMN baz.id; Type: COMMENT; Schema: public; Owner: postgres 205 | -- 206 | 207 | COMMENT ON COLUMN public.baz.id IS 'Column comment x'; 208 | 209 | 210 | -- 211 | -- Name: corge; Type: TABLE; Schema: public; Owner: postgres 212 | -- 213 | 214 | CREATE TABLE public.corge ( 215 | id integer, 216 | name text 217 | ); 218 | 219 | 220 | ALTER TABLE public.corge OWNER TO postgres; 221 | 222 | -- 223 | -- Name: corge_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres 224 | -- 225 | 226 | CREATE SEQUENCE public.corge_id_seq 227 | START WITH 1000 228 | INCREMENT BY 1 229 | NO MINVALUE 230 | NO MAXVALUE 231 | CACHE 1; 232 | 233 | 234 | ALTER SEQUENCE public.corge_id_seq OWNER TO postgres; 235 | 236 | -- 237 | -- Name: corge_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres 238 | -- 239 | 240 | ALTER SEQUENCE public.corge_id_seq OWNED BY public.corge.id; 241 | 242 | 243 | -- 244 | -- Name: foo; Type: TABLE; Schema: public; Owner: postgres 245 | -- 246 | 247 | CREATE TABLE public.foo ( 248 | id integer NOT NULL, 249 | name text NOT NULL 250 | ); 251 | 252 | 253 | ALTER TABLE public.foo OWNER TO postgres; 254 | 255 | -- 256 | -- Name: TABLE foo; Type: COMMENT; Schema: public; Owner: postgres 257 | -- 258 | 259 | COMMENT ON TABLE public.foo IS 'Table comment x'; 260 | 261 | 262 | -- 263 | -- Name: COLUMN foo.id; Type: COMMENT; Schema: public; Owner: postgres 264 | -- 265 | 266 | COMMENT ON COLUMN public.foo.id IS 'Column comment x'; 267 | 268 | 269 | -- 270 | -- Name: foo_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres 271 | -- 272 | 273 | ALTER TABLE public.foo ALTER COLUMN id ADD GENERATED ALWAYS AS IDENTITY ( 274 | SEQUENCE NAME public.foo_id_seq 275 | START WITH 1 276 | INCREMENT BY 1 277 | NO MINVALUE 278 | NO MAXVALUE 279 | CACHE 1 280 | ); 281 | 282 | 283 | -- 284 | -- Name: SEQUENCE foo_id_seq; Type: COMMENT; Schema: public; Owner: postgres 285 | -- 286 | 287 | COMMENT ON SEQUENCE public.foo_id_seq IS 'Sequence comment x'; 288 | 289 | 290 | -- 291 | -- Name: qux; Type: MATERIALIZED VIEW; Schema: public; Owner: postgres 292 | -- 293 | 294 | CREATE MATERIALIZED VIEW public.qux AS 295 | SELECT id, 296 | uid AS name 297 | FROM 
public.bar 298 | WITH NO DATA; 299 | 300 | 301 | ALTER MATERIALIZED VIEW public.qux OWNER TO postgres; 302 | 303 | -- 304 | -- Name: MATERIALIZED VIEW qux; Type: COMMENT; Schema: public; Owner: postgres 305 | -- 306 | 307 | COMMENT ON MATERIALIZED VIEW public.qux IS 'Materialized view comment x'; 308 | 309 | 310 | -- 311 | -- Name: COLUMN qux.id; Type: COMMENT; Schema: public; Owner: postgres 312 | -- 313 | 314 | COMMENT ON COLUMN public.qux.id IS 'Column comment x'; 315 | 316 | 317 | -- 318 | -- Name: portfolio_project_search_hit_default; Type: TABLE ATTACH; Schema: public; Owner: flyway 319 | -- 320 | 321 | ALTER TABLE ONLY public.portfolio_project_search_hit ATTACH PARTITION public.portfolio_project_search_hit_default DEFAULT; 322 | 323 | -- 324 | -- Name: corge id; Type: DEFAULT; Schema: public; Owner: postgres 325 | -- 326 | 327 | ALTER TABLE ONLY public.corge ALTER COLUMN id SET DEFAULT nextval('public.corge_id_seq'::regclass); 328 | 329 | 330 | -- 331 | -- Name: bar bar_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres 332 | -- 333 | 334 | ALTER TABLE ONLY public.bar 335 | ADD CONSTRAINT bar_pkey PRIMARY KEY (id); 336 | 337 | 338 | -- 339 | -- Name: foo foo_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres 340 | -- 341 | 342 | ALTER TABLE ONLY public.foo 343 | ADD CONSTRAINT foo_pkey PRIMARY KEY (id); 344 | 345 | 346 | -- 347 | -- Name: bar_uid_idx; Type: INDEX; Schema: public; Owner: postgres 348 | -- 349 | 350 | CREATE UNIQUE INDEX bar_uid_idx ON public.bar USING btree (uid); 351 | 352 | 353 | -- 354 | -- Name: INDEX foo_pkey; Type: COMMENT; Schema: public; Owner: postgres 355 | -- 356 | 357 | COMMENT ON INDEX public.foo_pkey IS 'Index comment x'; 358 | 359 | 360 | -- 361 | -- Name: qux_name_idx; Type: INDEX; Schema: public; Owner: postgres 362 | -- 363 | 364 | CREATE INDEX qux_name_idx ON public.qux USING btree (name); 365 | 366 | 367 | -- 368 | -- Name: INDEX qux_name_idx; Type: COMMENT; Schema: public; Owner: postgres 369 | -- 370 | 371 | COMMENT ON INDEX public.qux_name_idx IS 'Index comment x'; 372 | 373 | 374 | -- 375 | -- Name: foo foo_insert_trigger; Type: TRIGGER; Schema: public; Owner: postgres 376 | -- 377 | 378 | CREATE TRIGGER foo_insert_trigger AFTER INSERT ON public.foo FOR EACH ROW EXECUTE FUNCTION public.notify_foo_insert(); 379 | 380 | 381 | -- 382 | -- Name: bar bar_foo_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres 383 | -- 384 | 385 | ALTER TABLE ONLY public.bar 386 | ADD CONSTRAINT bar_foo_id_fkey FOREIGN KEY (foo_id) REFERENCES public.foo(id) ON DELETE CASCADE; 387 | 388 | 389 | -- 390 | -- Name: foo_publication; Type: PUBLICATION; Schema: -; Owner: postgres 391 | -- 392 | 393 | CREATE PUBLICATION foo_publication FOR ALL TABLES WITH (publish = 'insert, update, delete'); 394 | 395 | 396 | ALTER PUBLICATION foo_publication OWNER TO postgres; 397 | 398 | -- 399 | -- Name: COLUMN foo.name; Type: ACL; Schema: public; Owner: postgres 400 | -- 401 | 402 | GRANT SELECT(name) ON TABLE public.foo TO postgres; 403 | 404 | 405 | -- 406 | -- PostgreSQL database dump complete 407 | -- 408 | `; 409 | 410 | const omit = >( 411 | object: T, 412 | keys: string[], 413 | ): T => 414 | // @ts-expect-error - Object.fromEntries is not typed 415 | Object.fromEntries( 416 | Object.entries(object).filter(([key]) => !keys.includes(key)), 417 | ); 418 | 419 | const expectSchemeObject = ( 420 | expectedSchemaObject: { scope: null | SchemaObjectScope } & SchemaObject, 421 | ) => { 422 | const schemaObjects = parsePgDump(dump); 423 | 424 | 
expect(schemaObjects).toContainEqual(omit(expectedSchemaObject, ['scope'])); 425 | 426 | if (typeof expectedSchemaObject.scope === 'undefined') { 427 | return; 428 | } 429 | 430 | expect( 431 | scopeSchemaObject(schemaObjects, omit(expectedSchemaObject, ['scope'])), 432 | ).toEqual(expectedSchemaObject.scope); 433 | }; 434 | 435 | test('extracts SEQUENCE', async () => { 436 | expectSchemeObject({ 437 | header: { 438 | Name: 'bar_id_seq', 439 | Owner: 'postgres', 440 | Schema: 'public', 441 | Type: 'SEQUENCE', 442 | }, 443 | scope: { 444 | name: 'bar', 445 | schema: 'public', 446 | type: 'TABLE', 447 | }, 448 | sql: multiline` 449 | ALTER TABLE public.bar ALTER COLUMN id ADD GENERATED ALWAYS AS IDENTITY ( 450 | SEQUENCE NAME public.bar_id_seq 451 | START WITH 1 452 | INCREMENT BY 1 453 | NO MINVALUE 454 | NO MAXVALUE 455 | CACHE 1 456 | ); 457 | `, 458 | }); 459 | }); 460 | 461 | test('extracts TABLE', async () => { 462 | expectSchemeObject({ 463 | header: { 464 | Name: 'foo', 465 | Owner: 'postgres', 466 | Schema: 'public', 467 | Type: 'TABLE', 468 | }, 469 | scope: { 470 | name: 'foo', 471 | schema: 'public', 472 | type: 'TABLE', 473 | }, 474 | sql: multiline` 475 | CREATE TABLE public.foo ( 476 | id integer NOT NULL, 477 | name text NOT NULL 478 | ); 479 | `, 480 | }); 481 | }); 482 | 483 | test('extracts CONSTRAINT', async () => { 484 | expectSchemeObject({ 485 | header: { 486 | Name: 'foo foo_pkey', 487 | Owner: 'postgres', 488 | Schema: 'public', 489 | Type: 'CONSTRAINT', 490 | }, 491 | scope: { 492 | name: 'foo', 493 | schema: 'public', 494 | type: 'TABLE', 495 | }, 496 | sql: multiline` 497 | ALTER TABLE ONLY public.foo 498 | ADD CONSTRAINT foo_pkey PRIMARY KEY (id); 499 | `, 500 | }); 501 | }); 502 | 503 | test('extracts COMMENT on TABLE', async () => { 504 | expectSchemeObject({ 505 | header: { 506 | Name: 'TABLE foo', 507 | Owner: 'postgres', 508 | Schema: 'public', 509 | Type: 'COMMENT', 510 | }, 511 | scope: { 512 | name: 'foo', 513 | schema: 'public', 514 | type: 'TABLE', 515 | }, 516 | sql: multiline` 517 | COMMENT ON TABLE public.foo IS 'Table comment x'; 518 | `, 519 | }); 520 | }); 521 | 522 | test('extracts COMMENT on VIEW', async () => { 523 | expectSchemeObject({ 524 | header: { 525 | Name: 'VIEW baz', 526 | Owner: 'postgres', 527 | Schema: 'public', 528 | Type: 'COMMENT', 529 | }, 530 | scope: { 531 | name: 'baz', 532 | schema: 'public', 533 | type: 'VIEW', 534 | }, 535 | sql: multiline` 536 | COMMENT ON VIEW public.baz IS 'View comment x'; 537 | `, 538 | }); 539 | }); 540 | 541 | test('extracts COMMENT on MATERIALIZED VIEW', async () => { 542 | expectSchemeObject({ 543 | header: { 544 | Name: 'MATERIALIZED VIEW qux', 545 | Owner: 'postgres', 546 | Schema: 'public', 547 | Type: 'COMMENT', 548 | }, 549 | scope: { 550 | name: 'qux', 551 | schema: 'public', 552 | type: 'MATERIALIZED VIEW', 553 | }, 554 | sql: multiline` 555 | COMMENT ON MATERIALIZED VIEW public.qux IS 'Materialized view comment x'; 556 | `, 557 | }); 558 | }); 559 | 560 | test('extracts COMMENT on COLUMN (TABLE)', async () => { 561 | expectSchemeObject({ 562 | header: { 563 | Name: 'COLUMN foo.id', 564 | Owner: 'postgres', 565 | Schema: 'public', 566 | Type: 'COMMENT', 567 | }, 568 | scope: { 569 | name: 'foo', 570 | schema: 'public', 571 | type: 'TABLE', 572 | }, 573 | sql: multiline` 574 | COMMENT ON COLUMN public.foo.id IS 'Column comment x'; 575 | `, 576 | }); 577 | }); 578 | 579 | test('extracts COMMENT on COLUMN (VIEW)', async () => { 580 | expectSchemeObject({ 581 | header: { 582 | Name: 'COLUMN 
baz.id', 583 | Owner: 'postgres', 584 | Schema: 'public', 585 | Type: 'COMMENT', 586 | }, 587 | scope: { 588 | name: 'baz', 589 | schema: 'public', 590 | type: 'VIEW', 591 | }, 592 | sql: multiline` 593 | COMMENT ON COLUMN public.baz.id IS 'Column comment x'; 594 | `, 595 | }); 596 | }); 597 | 598 | test('extracts COMMENT on COLUMN (MATERIALIZED VIEW)', async () => { 599 | expectSchemeObject({ 600 | header: { 601 | Name: 'COLUMN qux.id', 602 | Owner: 'postgres', 603 | Schema: 'public', 604 | Type: 'COMMENT', 605 | }, 606 | scope: { 607 | name: 'qux', 608 | schema: 'public', 609 | type: 'MATERIALIZED VIEW', 610 | }, 611 | sql: multiline` 612 | COMMENT ON COLUMN public.qux.id IS 'Column comment x'; 613 | `, 614 | }); 615 | }); 616 | 617 | test('extracts COMMENT on INDEX (TABLE)', async () => { 618 | expectSchemeObject({ 619 | header: { 620 | Name: 'INDEX foo_pkey', 621 | Owner: 'postgres', 622 | Schema: 'public', 623 | Type: 'COMMENT', 624 | }, 625 | scope: { 626 | name: 'foo', 627 | schema: 'public', 628 | type: 'TABLE', 629 | }, 630 | sql: multiline` 631 | COMMENT ON INDEX public.foo_pkey IS 'Index comment x'; 632 | `, 633 | }); 634 | }); 635 | 636 | test('extracts COMMENT on INDEX (MATERIALIZED VIEW)', async () => { 637 | expectSchemeObject({ 638 | header: { 639 | Name: 'INDEX qux_name_idx', 640 | Owner: 'postgres', 641 | Schema: 'public', 642 | Type: 'COMMENT', 643 | }, 644 | scope: { 645 | name: 'qux', 646 | schema: 'public', 647 | type: 'MATERIALIZED VIEW', 648 | }, 649 | sql: multiline` 650 | COMMENT ON INDEX public.qux_name_idx IS 'Index comment x'; 651 | `, 652 | }); 653 | }); 654 | 655 | test('extracts COMMENT on SEQUENCE', async () => { 656 | expectSchemeObject({ 657 | header: { 658 | Name: 'SEQUENCE foo_id_seq', 659 | Owner: 'postgres', 660 | Schema: 'public', 661 | Type: 'COMMENT', 662 | }, 663 | scope: { 664 | name: 'foo', 665 | schema: 'public', 666 | type: 'TABLE', 667 | }, 668 | sql: multiline` 669 | COMMENT ON SEQUENCE public.foo_id_seq IS 'Sequence comment x'; 670 | `, 671 | }); 672 | }); 673 | 674 | test('extracts COMMENT on TYPE', async () => { 675 | expectSchemeObject({ 676 | header: { 677 | Name: 'TYPE status', 678 | Owner: 'postgres', 679 | Schema: 'public', 680 | Type: 'COMMENT', 681 | }, 682 | scope: { 683 | name: 'status', 684 | schema: 'public', 685 | type: 'TYPE', 686 | }, 687 | sql: multiline` 688 | COMMENT ON TYPE public.status IS 'Type comment x'; 689 | `, 690 | }); 691 | }); 692 | 693 | test('extracts COMMENT on FUNCTION', async () => { 694 | expectSchemeObject({ 695 | header: { 696 | Name: 'FUNCTION add_two_numbers(a integer, b integer)', 697 | Owner: 'postgres', 698 | Schema: 'public', 699 | Type: 'COMMENT', 700 | }, 701 | scope: { 702 | name: 'add_two_numbers', 703 | schema: 'public', 704 | type: 'FUNCTION', 705 | }, 706 | sql: multiline` 707 | COMMENT ON FUNCTION public.add_two_numbers(a integer, b integer) IS 'Function comment x'; 708 | `, 709 | }); 710 | }); 711 | 712 | test('extracts COMMENT on AGGREGATE', async () => { 713 | expectSchemeObject({ 714 | header: { 715 | Name: 'AGGREGATE my_sum(integer)', 716 | Owner: 'postgres', 717 | Schema: 'public', 718 | Type: 'COMMENT', 719 | }, 720 | scope: { 721 | name: 'my_sum', 722 | schema: 'public', 723 | type: 'AGGREGATE', 724 | }, 725 | sql: multiline` 726 | COMMENT ON AGGREGATE public.my_sum(integer) IS 'Aggregate comment x'; 727 | `, 728 | }); 729 | }); 730 | 731 | test('extracts COMMENT on PROCEDURE', async () => { 732 | expectSchemeObject({ 733 | header: { 734 | Name: 'PROCEDURE say_hello(IN name_param 
character varying)', 735 | Owner: 'postgres', 736 | Schema: 'public', 737 | Type: 'COMMENT', 738 | }, 739 | scope: { 740 | name: 'say_hello', 741 | schema: 'public', 742 | type: 'PROCEDURE', 743 | }, 744 | sql: multiline` 745 | COMMENT ON PROCEDURE public.say_hello(IN name_param character varying) IS 'Procedure comment x'; 746 | `, 747 | }); 748 | }); 749 | 750 | test('extracts PUBLICATION', async () => { 751 | expectSchemeObject({ 752 | header: { 753 | Name: 'foo_publication', 754 | Owner: 'postgres', 755 | Schema: null, 756 | Type: 'PUBLICATION', 757 | }, 758 | scope: null, 759 | sql: multiline` 760 | CREATE PUBLICATION foo_publication FOR ALL TABLES WITH (publish = 'insert, update, delete'); 761 | `, 762 | }); 763 | }); 764 | 765 | test('extracts SCHEMA', async () => { 766 | expectSchemeObject({ 767 | header: { 768 | Name: 'quux', 769 | Owner: 'postgres', 770 | Schema: null, 771 | Type: 'SCHEMA', 772 | }, 773 | scope: null, 774 | sql: multiline` 775 | CREATE SCHEMA quux; 776 | `, 777 | }); 778 | }); 779 | 780 | test('extracts VIEW', async () => { 781 | expectSchemeObject({ 782 | header: { 783 | Name: 'baz', 784 | Owner: 'postgres', 785 | Schema: 'public', 786 | Type: 'VIEW', 787 | }, 788 | scope: { 789 | name: 'baz', 790 | schema: 'public', 791 | type: 'VIEW', 792 | }, 793 | sql: multiline` 794 | CREATE VIEW public.baz AS 795 | SELECT id, 796 | uid AS name 797 | FROM public.bar; 798 | `, 799 | }); 800 | }); 801 | 802 | test('extracts MATERIALIZED VIEW', async () => { 803 | expectSchemeObject({ 804 | header: { 805 | Name: 'qux', 806 | Owner: 'postgres', 807 | Schema: 'public', 808 | Type: 'MATERIALIZED VIEW', 809 | }, 810 | scope: { 811 | name: 'qux', 812 | schema: 'public', 813 | type: 'MATERIALIZED VIEW', 814 | }, 815 | sql: multiline` 816 | CREATE MATERIALIZED VIEW public.qux AS 817 | SELECT id, 818 | uid AS name 819 | FROM public.bar 820 | WITH NO DATA; 821 | `, 822 | }); 823 | }); 824 | 825 | test('extracts FUNCTION', async () => { 826 | expectSchemeObject({ 827 | header: { 828 | Name: 'add_two_numbers(integer, integer)', 829 | Owner: 'postgres', 830 | Schema: 'public', 831 | Type: 'FUNCTION', 832 | }, 833 | scope: { 834 | name: 'add_two_numbers', 835 | schema: 'public', 836 | type: 'FUNCTION', 837 | }, 838 | sql: multiline` 839 | CREATE FUNCTION public.add_two_numbers(a integer, b integer) RETURNS integer 840 | LANGUAGE plpgsql 841 | AS $$ 842 | BEGIN 843 | RETURN a + b; 844 | END; 845 | $$; 846 | `, 847 | }); 848 | }); 849 | 850 | test('extracts PROCEDURE', async () => { 851 | expectSchemeObject({ 852 | header: { 853 | Name: 'say_hello(character varying)', 854 | Owner: 'postgres', 855 | Schema: 'public', 856 | Type: 'PROCEDURE', 857 | }, 858 | scope: { 859 | name: 'say_hello', 860 | schema: 'public', 861 | type: 'PROCEDURE', 862 | }, 863 | sql: multiline` 864 | CREATE PROCEDURE public.say_hello(IN name_param character varying) 865 | LANGUAGE plpgsql 866 | AS $$ 867 | BEGIN 868 | RAISE NOTICE 'Hello, %!', name_param; 869 | END; 870 | $$; 871 | `, 872 | }); 873 | }); 874 | 875 | test('extracts TRIGGER', async () => { 876 | expectSchemeObject({ 877 | header: { 878 | Name: 'foo foo_insert_trigger', 879 | Owner: 'postgres', 880 | Schema: 'public', 881 | Type: 'TRIGGER', 882 | }, 883 | scope: { 884 | name: 'foo', 885 | schema: 'public', 886 | type: 'TABLE', 887 | }, 888 | sql: multiline` 889 | CREATE TRIGGER foo_insert_trigger AFTER INSERT ON public.foo FOR EACH ROW EXECUTE FUNCTION public.notify_foo_insert(); 890 | `, 891 | }); 892 | }); 893 | 894 | test('extracts TYPE', async () => { 
895 | expectSchemeObject({ 896 | header: { 897 | Name: 'status', 898 | Owner: 'postgres', 899 | Schema: 'public', 900 | Type: 'TYPE', 901 | }, 902 | scope: { 903 | name: 'status', 904 | schema: 'public', 905 | type: 'TYPE', 906 | }, 907 | sql: multiline` 908 | CREATE TYPE public.status AS ENUM ( 909 | 'ACTIVE', 910 | 'INACTIVE' 911 | ); 912 | `, 913 | }); 914 | }); 915 | 916 | test('extracts SEQUENCE', async () => { 917 | expectSchemeObject({ 918 | header: { 919 | Name: 'bar_id_seq', 920 | Owner: 'postgres', 921 | Schema: 'public', 922 | Type: 'SEQUENCE', 923 | }, 924 | scope: { 925 | name: 'bar', 926 | schema: 'public', 927 | type: 'TABLE', 928 | }, 929 | sql: multiline` 930 | ALTER TABLE public.bar ALTER COLUMN id ADD GENERATED ALWAYS AS IDENTITY ( 931 | SEQUENCE NAME public.bar_id_seq 932 | START WITH 1 933 | INCREMENT BY 1 934 | NO MINVALUE 935 | NO MAXVALUE 936 | CACHE 1 937 | ); 938 | `, 939 | }); 940 | }); 941 | 942 | test('extracts AGGREGATE', async () => { 943 | expectSchemeObject({ 944 | header: { 945 | Name: 'my_sum(integer)', 946 | Owner: 'postgres', 947 | Schema: 'public', 948 | Type: 'AGGREGATE', 949 | }, 950 | scope: { 951 | name: 'my_sum', 952 | schema: 'public', 953 | type: 'AGGREGATE', 954 | }, 955 | sql: multiline` 956 | CREATE AGGREGATE public.my_sum(integer) ( 957 | SFUNC = public.add_two_numbers, 958 | STYPE = integer 959 | ); 960 | `, 961 | }); 962 | }); 963 | 964 | test('extracts FK CONSTRAINT', async () => { 965 | expectSchemeObject({ 966 | header: { 967 | Name: 'bar bar_foo_id_fkey', 968 | Owner: 'postgres', 969 | Schema: 'public', 970 | Type: 'FK CONSTRAINT', 971 | }, 972 | scope: { 973 | name: 'bar', 974 | schema: 'public', 975 | type: 'TABLE', 976 | }, 977 | sql: multiline` 978 | ALTER TABLE ONLY public.bar 979 | ADD CONSTRAINT bar_foo_id_fkey FOREIGN KEY (foo_id) REFERENCES public.foo(id) ON DELETE CASCADE; 980 | `, 981 | }); 982 | }); 983 | 984 | test('extracts INDEX', async () => { 985 | expectSchemeObject({ 986 | header: { 987 | Name: 'bar_uid_idx', 988 | Owner: 'postgres', 989 | Schema: 'public', 990 | Type: 'INDEX', 991 | }, 992 | scope: { 993 | name: 'bar', 994 | schema: 'public', 995 | type: 'TABLE', 996 | }, 997 | sql: multiline` 998 | CREATE UNIQUE INDEX bar_uid_idx ON public.bar USING btree (uid); 999 | `, 1000 | }); 1001 | }); 1002 | 1003 | test('extracts CAST', async () => { 1004 | expectSchemeObject({ 1005 | header: { 1006 | Name: 'CAST (text AS integer)', 1007 | Owner: null, 1008 | Schema: null, 1009 | Type: 'CAST', 1010 | }, 1011 | scope: null, 1012 | sql: multiline` 1013 | CREATE CAST (text AS integer) WITH INOUT AS IMPLICIT; 1014 | `, 1015 | }); 1016 | }); 1017 | 1018 | test('extracts EXTENSION', async () => { 1019 | expectSchemeObject({ 1020 | header: { 1021 | Name: 'pgcrypto', 1022 | Owner: null, 1023 | Schema: null, 1024 | Type: 'EXTENSION', 1025 | }, 1026 | scope: { 1027 | name: 'pgcrypto', 1028 | schema: null, 1029 | type: 'EXTENSION', 1030 | }, 1031 | sql: multiline` 1032 | CREATE EXTENSION IF NOT EXISTS pgcrypto WITH SCHEMA public; 1033 | `, 1034 | }); 1035 | }); 1036 | 1037 | test('extracts ACL (ON TABLE)', async () => { 1038 | expectSchemeObject({ 1039 | header: { 1040 | Name: 'COLUMN foo.name', 1041 | Owner: 'postgres', 1042 | Schema: 'public', 1043 | Type: 'ACL', 1044 | }, 1045 | scope: { 1046 | name: 'foo', 1047 | schema: 'public', 1048 | type: 'TABLE', 1049 | }, 1050 | sql: multiline` 1051 | GRANT SELECT(name) ON TABLE public.foo TO postgres; 1052 | `, 1053 | }); 1054 | }); 1055 | 1056 | test('extracts OWNER TO (FUNCTION)', async 
() => { 1057 | expectSchemeObject({ 1058 | header: { 1059 | Name: 'notify_foo_insert()', 1060 | Owner: 'postgres', 1061 | Schema: 'public', 1062 | Type: 'FUNCTION', 1063 | }, 1064 | scope: { 1065 | name: 'notify_foo_insert', 1066 | schema: 'public', 1067 | type: 'FUNCTION', 1068 | }, 1069 | sql: multiline` 1070 | ALTER FUNCTION public.notify_foo_insert() OWNER TO postgres; 1071 | `, 1072 | }); 1073 | }); 1074 | 1075 | test('extracts OWNER TO (TABLE)', async () => { 1076 | expectSchemeObject({ 1077 | header: { 1078 | Name: 'foo', 1079 | Owner: 'postgres', 1080 | Schema: 'public', 1081 | Type: 'TABLE', 1082 | }, 1083 | scope: { 1084 | name: 'foo', 1085 | schema: 'public', 1086 | type: 'TABLE', 1087 | }, 1088 | sql: multiline` 1089 | ALTER TABLE public.foo OWNER TO postgres; 1090 | `, 1091 | }); 1092 | }); 1093 | 1094 | test('extracts OWNER TO (TYPE)', async () => { 1095 | expectSchemeObject({ 1096 | header: { 1097 | Name: 'status', 1098 | Owner: 'postgres', 1099 | Schema: 'public', 1100 | Type: 'TYPE', 1101 | }, 1102 | scope: { 1103 | name: 'status', 1104 | schema: 'public', 1105 | type: 'TYPE', 1106 | }, 1107 | sql: multiline` 1108 | ALTER TYPE public.status OWNER TO postgres; 1109 | `, 1110 | }); 1111 | }); 1112 | 1113 | test('extracts OWNER TO (VIEW)', async () => { 1114 | expectSchemeObject({ 1115 | header: { 1116 | Name: 'baz', 1117 | Owner: 'postgres', 1118 | Schema: 'public', 1119 | Type: 'VIEW', 1120 | }, 1121 | scope: { 1122 | name: 'baz', 1123 | schema: 'public', 1124 | type: 'VIEW', 1125 | }, 1126 | sql: multiline` 1127 | ALTER VIEW public.baz OWNER TO postgres; 1128 | `, 1129 | }); 1130 | }); 1131 | 1132 | test('extracts DEFAULT', async () => { 1133 | expectSchemeObject({ 1134 | header: { 1135 | Name: 'corge id', 1136 | Owner: 'postgres', 1137 | Schema: 'public', 1138 | Type: 'DEFAULT', 1139 | }, 1140 | scope: { 1141 | name: 'corge', 1142 | schema: 'public', 1143 | type: 'TABLE', 1144 | }, 1145 | sql: multiline` 1146 | ALTER TABLE ONLY public.corge ALTER COLUMN id SET DEFAULT nextval('public.corge_id_seq'::regclass); 1147 | `, 1148 | }); 1149 | }); 1150 | 1151 | test('extracts SEQUENCE OWNED BY', async () => { 1152 | expectSchemeObject({ 1153 | header: { 1154 | Name: 'corge_id_seq', 1155 | Owner: 'postgres', 1156 | Schema: 'public', 1157 | Type: 'SEQUENCE OWNED BY', 1158 | }, 1159 | scope: { 1160 | name: 'corge', 1161 | schema: 'public', 1162 | type: 'TABLE', 1163 | }, 1164 | sql: multiline` 1165 | ALTER SEQUENCE public.corge_id_seq OWNED BY public.corge.id; 1166 | `, 1167 | }); 1168 | }); 1169 | -------------------------------------------------------------------------------- /src/parsePgDump.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable no-console */ 2 | import { z } from 'zod'; 3 | 4 | // These are the attribute less headers, e.g. 5 | // -- 6 | // -- PostgreSQL database dump 7 | // -- 8 | const TitleHeaderZodSchema = z.object({ 9 | Title: z.string(), 10 | }); 11 | 12 | // These are the objects with attributes, e.g. 
// --
// -- Name: citext; Type: EXTENSION; Schema: -; Owner: -
// --
const AttributedHeaderZodSchema = z.object({
  Name: z.string(),
  Owner: z.string().nullable(),
  Schema: z.string().nullable(),
  Type: z.enum([
    'ACL',
    'AGGREGATE',
    'CAST',
    'COMMENT',
    'CONSTRAINT',
    'DEFAULT ACL',
    'DEFAULT',
    'EXTENSION',
    'FK CONSTRAINT',
    'FUNCTION',
    'INDEX',
    'MATERIALIZED VIEW',
    'PROCEDURE',
    'PUBLICATION',
    'SCHEMA',
    'SEQUENCE OWNED BY',
    'SEQUENCE',
    'TABLE ATTACH',
    'TABLE',
    'TEXT SEARCH CONFIGURATION',
    'TEXT SEARCH DICTIONARY',
    'TRIGGER',
    'TYPE',
    'VIEW',
  ]),
});

const HeaderZodSchema = z.union([
  TitleHeaderZodSchema,
  AttributedHeaderZodSchema,
]);

export type Header = z.infer<typeof HeaderZodSchema>;
export type TitleHeader = z.infer<typeof TitleHeaderZodSchema>;
export type AttributedHeader = z.infer<typeof AttributedHeaderZodSchema>;

const isHeader = (fragment: string): boolean => {
  return fragment.startsWith('--\n--');
};

const parseValue = (value: string) => {
  if (value === '-' || value === '' || value === undefined) {
    return null;
  }

  return value;
};

const parseAttribute = (attribute: string): [string, null | string] => {
  const [name, value] = attribute.split(':');

  return [name, parseValue(value.trim())];
};

// --
// -- Name: TABLE user_survey; Type: ACL; Schema: public; Owner: postgres
// --

const parseHeader = (fragment: string) => {
  try {
    const lines = fragment.split('\n');

    if (lines.length !== 3) {
      throw new Error('Invalid header');
    }

    const contentLine = lines[1].slice(3);

    if (
      contentLine === 'PostgreSQL database dump' ||
      contentLine === 'PostgreSQL database dump complete'
    ) {
      return HeaderZodSchema.parse({
        Title: contentLine,
      });
    }

    const content = Object.fromEntries(
      contentLine.split('; ').map((attribute) => {
        return parseAttribute(attribute);
      }),
    );

    const result = HeaderZodSchema.safeParse(content);

    if (!result.success) {
      throw new Error('Invalid header');
    }

    return result.data;
  } catch (error) {
    console.warn('[pg-dump-parser] failing fragment');
    console.warn(fragment);

    throw error;
  }
};

export type SchemaObject = {
  header: Header;
  sql: string;
};

export const parsePgDump = (dump: string) => {
  const schemaObjects: SchemaObject[] = [];

  const fragments = dump.trim().split(/(--\n-- .*\n--)/u);

  let lastHeader: Header | null = null;

  for (const fragment of fragments.map((chunk) => chunk.trim())) {
    if (fragment === '') {
      continue;
    }

    if (isHeader(fragment)) {
      lastHeader = parseHeader(fragment);
    } else if (lastHeader) {
      const subFragments = fragment.split('\n\n\n');

      for (const subFragment of subFragments) {
        schemaObjects.push({
          header: lastHeader,
          sql: subFragment,
        });
      }
    } else {
      throw new Error('No header');
    }
  }

  return schemaObjects;
};

--------------------------------------------------------------------------------
/src/scopeSchemaObject.ts:
--------------------------------------------------------------------------------
import { type AttributedHeader, type SchemaObject } from './parsePgDump';
import { z } from 'zod';

type TableTarget = {
  name: string;
  schema: string;
};

const extractOwnedByTarget = (fragment: string): TableTarget => {
  const { name, schema } =
    fragment.match(/OWNED BY\s(?<schema>[^.]+)\.(?<name>[^.]+)/u)?.groups ?? {};

  if (!schema) {
    throw new Error('Invalid OWNED BY target (missing schema)');
  }

  if (!name) {
    throw new Error('Invalid OWNED BY target (missing name)');
  }

  return {
    name,
    schema,
  };
};

const extractOnTableTarget = (fragment: string): TableTarget => {
  const { name, schema } =
    fragment.match(/ON TABLE\s(?<schema>[^.]+)\.(?<name>\S+)/u)?.groups ?? {};

  if (!schema) {
    throw new Error('Invalid ON TABLE target');
  }

  if (!name) {
    throw new Error('Invalid ON TABLE target');
  }

  return {
    name,
    schema,
  };
};

const extractCreateIndexTarget = (fragment: string): TableTarget => {
  const { name, schema } =
    fragment.match(/ON\s(?<schema>[^.]+)\.(?<name>\S+)/u)?.groups ?? {};

  if (!schema) {
    throw new Error('Invalid CREATE INDEX target');
  }

  if (!name) {
    throw new Error('Invalid CREATE INDEX target');
  }

  return {
    name,
    schema,
  };
};

const extractAlterTableTarget = (fragment: string): TableTarget => {
  const { name, schema } =
    fragment.match(/ALTER TABLE (?:ONLY\s)?(?<schema>[^.]+)\.(?<name>\S+)/u)
      ?.groups ?? {};

  if (!schema) {
    throw new Error('Invalid ALTER TABLE target');
  }

  if (!name) {
    throw new Error('Invalid ALTER TABLE target');
  }

  return {
    name,
    schema,
  };
};

const extractFunctionLikeName = (fragment: string): string => {
  const { name } =
    fragment.match(
      /(?:AGGREGATE|FUNCTION|PROCEDURE)\s+(?:(?<schema>\S+)\.)?(?<name>\w+)\s*\(/u,
    )?.groups ?? {};

  if (!name) {
    throw new Error('Invalid FUNCTION name');
  }

  return name;
};

type CommentOnTarget = {
  target: string;
  type:
    | 'AGGREGATE'
    | 'COLUMN'
    | 'EXTENSION'
    | 'FUNCTION'
    | 'INDEX'
    | 'MATERIALIZED VIEW'
    | 'PROCEDURE'
    | 'SEQUENCE'
    | 'TABLE'
    | 'TRIGGER'
    | 'TYPE'
    | 'VIEW';
};

const extractCommentOnTarget = (fragment: string): CommentOnTarget => {
  const { target, type } =
    fragment.match(
      /COMMENT ON (?<type>AGGREGATE|COLUMN|EXTENSION|FUNCTION|INDEX|MATERIALIZED VIEW|PROCEDURE|SEQUENCE|TABLE|TYPE|VIEW)\s(?<target>.+?) IS/u,
    )?.groups ??
{}; 117 | 118 | if (!target) { 119 | throw new Error('Invalid COMMENT ON target (missing target)'); 120 | } 121 | 122 | if (!type) { 123 | throw new Error('Invalid COMMENT ON target (missing type)'); 124 | } 125 | 126 | return { 127 | target, 128 | type: type as CommentOnTarget['type'], 129 | }; 130 | }; 131 | 132 | export type SchemaObjectScope = 133 | | { 134 | name: string; 135 | schema: null; 136 | type: 'EXTENSION'; 137 | } 138 | | { 139 | name: string; 140 | schema: string; 141 | type: 142 | | 'AGGREGATE' 143 | | 'FUNCTION' 144 | | 'MATERIALIZED VIEW' 145 | | 'PROCEDURE' 146 | | 'TABLE' 147 | | 'TYPE' 148 | | 'VIEW'; 149 | }; 150 | 151 | type AttributedSchemaObject = { 152 | header: AttributedHeader; 153 | sql: string; 154 | }; 155 | 156 | const findTableLikeOwner = ( 157 | schemaObjects: AttributedSchemaObject[], 158 | name: string, 159 | schema: string, 160 | ): null | SchemaObjectScope => { 161 | const targetSchemaObject = schemaObjects.find((schemaObject) => { 162 | return ( 163 | ['MATERIALIZED VIEW', 'TABLE', 'VIEW'].includes( 164 | schemaObject.header.Type, 165 | ) && 166 | schemaObject.header.Name === name && 167 | schemaObject.header.Schema === schema 168 | ); 169 | }); 170 | 171 | if (!targetSchemaObject) { 172 | return null; 173 | } 174 | 175 | return { 176 | name, 177 | schema, 178 | type: targetSchemaObject.header.Type as 179 | | 'MATERIALIZED VIEW' 180 | | 'TABLE' 181 | | 'VIEW', 182 | }; 183 | }; 184 | 185 | const scopeComment = ( 186 | schemaObjects: AttributedSchemaObject[], 187 | subject: AttributedSchemaObject, 188 | ): null | SchemaObjectScope => { 189 | const target = extractCommentOnTarget(subject.sql); 190 | 191 | if (target.type === 'AGGREGATE') { 192 | return { 193 | name: extractFunctionLikeName(subject.header.Name), 194 | schema: subject.header.Schema ?? 'public', 195 | type: 'AGGREGATE', 196 | }; 197 | } 198 | 199 | if (target.type === 'EXTENSION') { 200 | return { 201 | name: target.target, 202 | schema: null, 203 | type: 'EXTENSION', 204 | }; 205 | } 206 | 207 | if (target.type === 'COLUMN') { 208 | const [schema, name] = z 209 | .tuple([z.string(), z.string(), z.string()]) 210 | .parse(target.target.split('.')); 211 | 212 | return findTableLikeOwner(schemaObjects, name, schema); 213 | } 214 | 215 | if (target.type === 'FUNCTION') { 216 | return { 217 | name: extractFunctionLikeName(subject.header.Name), 218 | schema: subject.header.Schema ?? 
'public', 219 | type: 'FUNCTION', 220 | }; 221 | } 222 | 223 | if (target.type === 'INDEX') { 224 | const [schema, indexName] = z 225 | .tuple([z.string(), z.string()]) 226 | .parse(target.target.split('.')); 227 | 228 | const indexSchemaObject = schemaObjects.find((schemaObject) => { 229 | if (schemaObject.header.Type !== 'INDEX') { 230 | return false; 231 | } 232 | 233 | return schemaObject.header.Name === indexName; 234 | }); 235 | 236 | if (indexSchemaObject) { 237 | const indexTarget = extractCreateIndexTarget(indexSchemaObject.sql); 238 | 239 | return findTableLikeOwner( 240 | schemaObjects, 241 | indexTarget.name, 242 | indexTarget.schema, 243 | ); 244 | } 245 | 246 | const constraintSchemaObject = schemaObjects.find((schemaObject) => { 247 | if (schemaObject.header.Type !== 'CONSTRAINT') { 248 | return false; 249 | } 250 | 251 | return schemaObject.header.Name.split(' ')[1] === indexName; 252 | }); 253 | 254 | if (constraintSchemaObject) { 255 | const [tableName] = constraintSchemaObject.header.Name.split(' '); 256 | 257 | return findTableLikeOwner(schemaObjects, tableName, schema); 258 | } 259 | } 260 | 261 | if (target.type === 'MATERIALIZED VIEW') { 262 | return { 263 | name: subject.header.Name.replace('MATERIALIZED VIEW ', ''), 264 | schema: subject.header.Schema ?? 'public', 265 | type: 'MATERIALIZED VIEW', 266 | }; 267 | } 268 | 269 | if (target.type === 'PROCEDURE') { 270 | return { 271 | name: extractFunctionLikeName(subject.header.Name), 272 | schema: subject.header.Schema ?? 'public', 273 | type: 'PROCEDURE', 274 | }; 275 | } 276 | 277 | if (target.type === 'SEQUENCE') { 278 | const [schemaName, sequenceName] = z 279 | .tuple([z.string(), z.string()]) 280 | .parse(target.target.split('.')); 281 | 282 | const sequenceSchemaObject = schemaObjects.find((schemaObject) => { 283 | if (schemaObject.header.Type !== 'SEQUENCE') { 284 | return false; 285 | } 286 | 287 | return ( 288 | schemaObject.header.Name === sequenceName && 289 | schemaObject.header.Schema === schemaName 290 | ); 291 | }); 292 | 293 | if (!sequenceSchemaObject) { 294 | throw new Error('Sequence not found'); 295 | } 296 | 297 | const alterTableTarget = extractAlterTableTarget(sequenceSchemaObject.sql); 298 | 299 | return findTableLikeOwner( 300 | schemaObjects, 301 | alterTableTarget.name, 302 | alterTableTarget.schema, 303 | ); 304 | } 305 | 306 | if (target.type === 'TABLE') { 307 | const [schema, name] = z 308 | .tuple([z.string(), z.string()]) 309 | .parse(target.target.split('.')); 310 | 311 | return { 312 | name, 313 | schema, 314 | type: 'TABLE', 315 | }; 316 | } 317 | 318 | if (target.type === 'VIEW') { 319 | const [schema, name] = z 320 | .tuple([z.string(), z.string()]) 321 | .parse(target.target.split('.')); 322 | 323 | return { 324 | name, 325 | schema, 326 | type: 'VIEW', 327 | }; 328 | } 329 | 330 | if (target.type === 'TYPE') { 331 | const [, typeName] = z 332 | .tuple([z.string(), z.string()]) 333 | .parse(subject.header.Name.split(' ')); 334 | 335 | return { 336 | name: typeName, 337 | schema: subject.header.Schema ?? 'public', 338 | type: 'TYPE', 339 | }; 340 | } 341 | 342 | return null; 343 | }; 344 | 345 | const scopeAttributedSchemaObject = ( 346 | schemaObjects: AttributedSchemaObject[], 347 | subject: AttributedSchemaObject, 348 | ): null | SchemaObjectScope => { 349 | if (subject.header.Type === 'AGGREGATE') { 350 | return { 351 | name: subject.header.Name.split('(')[0], 352 | schema: subject.header.Schema ?? 
'public', 353 | type: 'AGGREGATE', 354 | }; 355 | } 356 | 357 | if (subject.header.Type === 'TRIGGER') { 358 | return { 359 | name: subject.header.Name.split(' ')[0], 360 | schema: subject.header.Schema ?? 'public', 361 | type: 'TABLE', 362 | }; 363 | } 364 | 365 | if (subject.header.Type === 'FUNCTION') { 366 | return { 367 | name: subject.header.Name.split('(')[0], 368 | schema: subject.header.Schema ?? 'public', 369 | type: 'FUNCTION', 370 | }; 371 | } 372 | 373 | if (subject.header.Type === 'EXTENSION') { 374 | return { 375 | name: subject.header.Name, 376 | schema: null, 377 | type: 'EXTENSION', 378 | }; 379 | } 380 | 381 | if (subject.header.Type === 'INDEX') { 382 | const createIndexTarget = extractCreateIndexTarget(subject.sql); 383 | 384 | return findTableLikeOwner( 385 | schemaObjects, 386 | createIndexTarget.name, 387 | createIndexTarget.schema, 388 | ); 389 | } 390 | 391 | if (subject.header.Type === 'MATERIALIZED VIEW') { 392 | return { 393 | name: subject.header.Name, 394 | schema: subject.header.Schema ?? 'public', 395 | type: 'MATERIALIZED VIEW', 396 | }; 397 | } 398 | 399 | if (subject.header.Type === 'PROCEDURE') { 400 | return { 401 | name: subject.header.Name.split('(')[0], 402 | schema: subject.header.Schema ?? 'public', 403 | type: 'PROCEDURE', 404 | }; 405 | } 406 | 407 | if (subject.header.Type === 'SEQUENCE') { 408 | // Handled by ALTER TABLE 409 | } 410 | 411 | if (subject.header.Type === 'TABLE') { 412 | return { 413 | name: subject.header.Name, 414 | schema: subject.header.Schema ?? 'public', 415 | type: 'TABLE', 416 | }; 417 | } 418 | 419 | if (subject.header.Type === 'TYPE') { 420 | return { 421 | name: subject.header.Name, 422 | schema: subject.header.Schema ?? 'public', 423 | type: 'TYPE', 424 | }; 425 | } 426 | 427 | if (subject.header.Type === 'VIEW') { 428 | return { 429 | name: subject.header.Name, 430 | schema: subject.header.Schema ?? 
'public', 431 | type: 'VIEW', 432 | }; 433 | } 434 | 435 | if (subject.sql.startsWith('ALTER TABLE ')) { 436 | const target = extractAlterTableTarget(subject.sql); 437 | 438 | return findTableLikeOwner(schemaObjects, target.name, target.schema); 439 | } 440 | 441 | if (subject.sql.startsWith('COMMENT ON ')) { 442 | return scopeComment(schemaObjects, subject); 443 | } 444 | 445 | try { 446 | const ownedByTarget = extractOwnedByTarget(subject.sql); 447 | 448 | return findTableLikeOwner( 449 | schemaObjects, 450 | ownedByTarget.name, 451 | ownedByTarget.schema, 452 | ); 453 | } catch { 454 | // ignore 455 | } 456 | 457 | try { 458 | const onTableTarget = extractOnTableTarget(subject.sql); 459 | 460 | return findTableLikeOwner( 461 | schemaObjects, 462 | onTableTarget.name, 463 | onTableTarget.schema, 464 | ); 465 | } catch { 466 | // ignore 467 | } 468 | 469 | return null; 470 | }; 471 | 472 | export const scopeSchemaObject = ( 473 | schemaObjects: SchemaObject[], 474 | subject: SchemaObject, 475 | ): null | SchemaObjectScope => { 476 | if (!('Type' in subject.header)) { 477 | return null; 478 | } 479 | 480 | const attributedSchemaObjects = schemaObjects.filter( 481 | (schemaObject) => 'Type' in schemaObject.header, 482 | ) as AttributedSchemaObject[]; 483 | 484 | return scopeAttributedSchemaObject( 485 | attributedSchemaObjects, 486 | subject as AttributedSchemaObject, 487 | ); 488 | }; 489 | -------------------------------------------------------------------------------- /tsconfig.build.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "noEmit": false, 4 | "outDir": "dist" 5 | }, 6 | "extends": "./tsconfig.json", 7 | "include": [ 8 | "src" 9 | ] 10 | } -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "allowSyntheticDefaultImports": true, 4 | "declaration": true, 5 | "declarationMap": true, 6 | "esModuleInterop": true, 7 | "lib": [ 8 | "es2021" 9 | ], 10 | "module": "commonjs", 11 | "moduleResolution": "node", 12 | "noEmit": true, 13 | "noImplicitAny": false, 14 | "noImplicitReturns": true, 15 | "skipLibCheck": true, 16 | "sourceMap": true, 17 | "strict": true, 18 | "target": "es2018", 19 | "useUnknownInCatchVariables": false 20 | }, 21 | "include": [ 22 | "src", 23 | "vitest.config.ts" 24 | ] 25 | } -------------------------------------------------------------------------------- /vitest.config.ts: -------------------------------------------------------------------------------- 1 | import { defineConfig } from 'vitest/config'; 2 | 3 | export default defineConfig({ 4 | test: { 5 | singleThread: true, 6 | testTimeout: 5_000, 7 | }, 8 | }); 9 | --------------------------------------------------------------------------------