├── .nvmrc
├── .github
├── CODEOWNERS
├── dependabot.yml
└── workflows
│ ├── publish-deps.yml
│ ├── docs.yml
│ ├── mirror.yml
│ ├── automerge.yml
│ ├── validate-python-types.yml
│ ├── ci.yml
│ ├── release.yml
│ └── canary-deploy.yml
├── .dockerignore
├── .prettierignore
├── src
├── lib
│ ├── constants.ts
│ ├── sql
│ │ ├── version.sql.ts
│ │ ├── common.ts
│ │ ├── extensions.sql.ts
│ │ ├── config.sql.ts
│ │ ├── foreign_tables.sql.ts
│ │ ├── materialized_views.sql.ts
│ │ ├── views.sql.ts
│ │ ├── schemas.sql.ts
│ │ ├── publications.sql.ts
│ │ ├── roles.sql.ts
│ │ ├── policies.sql.ts
│ │ ├── types.sql.ts
│ │ ├── triggers.sql.ts
│ │ ├── indexes.sql.ts
│ │ ├── table_relationships.sql.ts
│ │ ├── table_privileges.sql.ts
│ │ ├── table.sql.ts
│ │ └── columns.sql.ts
│ ├── secrets.ts
│ ├── PostgresMetaVersion.ts
│ ├── PostgresMetaConfig.ts
│ ├── index.ts
│ ├── helpers.ts
│ ├── PostgresMetaTypes.ts
│ ├── Parser.ts
│ ├── PostgresMetaIndexes.ts
│ ├── PostgresMetaExtensions.ts
│ ├── PostgresMeta.ts
│ ├── PostgresMetaViews.ts
│ ├── PostgresMetaColumnPrivileges.ts
│ ├── PostgresMetaMaterializedViews.ts
│ ├── PostgresMetaForeignTables.ts
│ └── PostgresMetaSchemas.ts
└── server
│ ├── admin-app.ts
│ ├── routes
│ ├── generators
│ │ ├── go.ts
│ │ ├── python.ts
│ │ ├── swift.ts
│ │ └── typescript.ts
│ ├── config.ts
│ ├── types.ts
│ ├── indexes.ts
│ ├── foreign-tables.ts
│ ├── query.ts
│ ├── materialized-views.ts
│ ├── views.ts
│ ├── publications.ts
│ ├── extensions.ts
│ ├── index.ts
│ ├── functions.ts
│ ├── policies.ts
│ └── triggers.ts
│ ├── sentry.ts
│ ├── utils.ts
│ ├── app.ts
│ └── constants.ts
├── tsconfig.jest.json
├── test
├── db
│ ├── Dockerfile
│ ├── docker-compose.yml
│ ├── server.key
│ └── server.crt
├── lib
│ ├── utils.ts
│ ├── version.ts
│ ├── config.ts
│ ├── secrets.ts
│ ├── schemas.ts
│ ├── types.ts
│ └── extensions.ts
├── admin-app.test.ts
├── server
│ ├── utils.ts
│ ├── query-timeout.ts
│ ├── indexes.ts
│ ├── materialized-views.ts
│ └── ssl.ts
├── app.test.ts
├── index.test.ts
├── types.test.ts
├── views.test.ts
├── utils.test.ts
├── schemas.test.ts
├── config.test.ts
└── extensions.test.ts
├── .prettierrc.json
├── .releaserc.json
├── vitest.config.ts
├── tsconfig.json
├── Dockerfile
├── scripts
└── generate-python-types-test.ts
├── CONTRIBUTING.md
├── .gitignore
├── package.json
└── README.md
/.nvmrc:
--------------------------------------------------------------------------------
1 | v20
--------------------------------------------------------------------------------
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | * @avallete @soedirgo @supabase/sdk
2 |
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | *
2 | !src
3 | !package*.json
4 | !tsconfig*.json
5 |
--------------------------------------------------------------------------------
/.prettierignore:
--------------------------------------------------------------------------------
1 | .expo
2 | .next
3 | node_modules
4 | package-lock.json
5 | docker*
--------------------------------------------------------------------------------
/src/lib/constants.ts:
--------------------------------------------------------------------------------
// Schemas that ship with Postgres itself; metadata queries exclude these by
// default unless the caller opts in to system schemas.
export const DEFAULT_SYSTEM_SCHEMAS = ['information_schema', 'pg_catalog', 'pg_toast']
2 |
--------------------------------------------------------------------------------
/tsconfig.jest.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "./tsconfig",
3 | "compilerOptions": {
4 | "rootDir": "."
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/test/db/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM supabase/postgres:14.1.0
2 |
3 | COPY --chown=postgres:postgres --chmod=600 server.key server.crt /var/lib/postgresql/
4 |
--------------------------------------------------------------------------------
/.prettierrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "trailingComma": "es5",
3 | "tabWidth": 2,
4 | "semi": false,
5 | "singleQuote": true,
6 | "printWidth": 100
7 | }
8 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: "github-actions"
4 | directory: "/"
5 | schedule:
6 | interval: "daily"
7 | - package-ecosystem: "npm"
8 | directory: "/"
9 | schedule:
10 | interval: "daily"
11 |
--------------------------------------------------------------------------------
/.releaserc.json:
--------------------------------------------------------------------------------
1 | {
2 | "plugins": [
3 | "@semantic-release/commit-analyzer",
4 | "@semantic-release/release-notes-generator",
5 | [
6 | "@semantic-release/github", {
7 | "successComment": false,
8 | "failTitle": false
9 | }
10 | ],
11 | "@semantic-release/npm"
12 | ]
13 | }
14 |
--------------------------------------------------------------------------------
/vitest.config.ts:
--------------------------------------------------------------------------------
1 | ///
2 | import { defineConfig } from 'vitest/config'
3 |
4 | export default defineConfig({
5 | test: {
6 | coverage: { reporter: ['lcov'] },
7 | maxConcurrency: 1,
8 | // https://github.com/vitest-dev/vitest/issues/317#issuecomment-1542319622
9 | pool: 'forks',
10 | },
11 | })
12 |
--------------------------------------------------------------------------------
/test/lib/utils.ts:
--------------------------------------------------------------------------------
import { afterAll } from 'vitest'
import { PostgresMeta } from '../../src/lib'

// Connection string for the dockerized test database (see test/db).
export const TEST_CONNECTION_STRING = 'postgresql://postgres:postgres@localhost:5432'

// Single shared client (max: 1) so tests observe each other's DDL in order.
export const pgMeta = new PostgresMeta({
  max: 1,
  connectionString: TEST_CONNECTION_STRING,
})

// Release the connection pool once the whole suite has finished.
afterAll(() => pgMeta.end())
12 |
--------------------------------------------------------------------------------
/src/lib/sql/version.sql.ts:
--------------------------------------------------------------------------------
/**
 * Returns a single row describing the connected server: the version() string,
 * the numeric server_version_num, the current count of active connections
 * (from pg_stat_activity) and the configured max_connections.
 */
export const VERSION_SQL = () => /* SQL */ `
SELECT
  version(),
  current_setting('server_version_num') :: int8 AS version_number,
  (
    SELECT
      COUNT(*) AS active_connections
    FROM
      pg_stat_activity
  ) AS active_connections,
  current_setting('max_connections') :: int8 AS max_connections
`
13 |
--------------------------------------------------------------------------------
/src/lib/sql/common.ts:
--------------------------------------------------------------------------------
1 | export type SQLQueryProps = {
2 | limit?: number
3 | offset?: number
4 | }
5 |
6 | export type SQLQueryPropsWithSchemaFilter = SQLQueryProps & {
7 | schemaFilter?: string
8 | }
9 |
10 | export type SQLQueryPropsWithIdsFilter = SQLQueryProps & {
11 | idsFilter?: string
12 | }
13 |
14 | export type SQLQueryPropsWithSchemaFilterAndIdsFilter = SQLQueryProps & {
15 | schemaFilter?: string
16 | idsFilter?: string
17 | }
18 |
--------------------------------------------------------------------------------
/src/server/admin-app.ts:
--------------------------------------------------------------------------------
import './sentry.js'
import * as Sentry from '@sentry/node'
import { fastify, FastifyInstance, FastifyServerOptions } from 'fastify'
import fastifyMetrics from 'fastify-metrics'

/**
 * Builds the internal admin Fastify app: Sentry error reporting plus a
 * Prometheus-style `/metrics` endpoint.
 */
export function build(opts: FastifyServerOptions = {}): FastifyInstance {
  const app = fastify(opts)
  Sentry.setupFastifyErrorHandler(app)
  app.register(fastifyMetrics.default, {
    endpoint: '/metrics',
    // Expose aggregate process metrics only; skip per-route timings.
    routeMetrics: { enabled: false },
  })
  return app
}
15 |
--------------------------------------------------------------------------------
/test/admin-app.test.ts:
--------------------------------------------------------------------------------
import { expect, test, describe } from 'vitest'
import { build } from '../src/server/admin-app.js'

describe('admin-app', () => {
  test('should register metrics endpoint', async () => {
    const app = build()

    // Test that the app can be started (this will trigger plugin registration)
    await app.ready()

    // Verify that metrics endpoint is available
    // (printRoutes renders the route tree as text; substring check suffices)
    const routes = app.printRoutes()
    expect(routes).toContain('metrics')

    await app.close()
  })
})
18 |
--------------------------------------------------------------------------------
/test/db/docker-compose.yml:
--------------------------------------------------------------------------------
1 | services:
2 | db:
3 | build: .
4 | ports:
5 | - 5432:5432
6 | volumes:
7 | - .:/docker-entrypoint-initdb.d
8 | environment:
9 | POSTGRES_PASSWORD: postgres
10 | command: postgres -c config_file=/etc/postgresql/postgresql.conf -c ssl=on -c ssl_cert_file=/var/lib/postgresql/server.crt -c ssl_key_file=/var/lib/postgresql/server.key
11 | healthcheck:
12 | test: ["CMD-SHELL", "pg_isready -U postgres"]
13 | interval: 1s
14 | timeout: 2s
15 | retries: 10
16 | start_period: 2s
17 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "include": ["src"],
3 | "compilerOptions": {
4 | "incremental": true,
5 | "declaration": true,
6 | "declarationMap": true,
7 | "module": "NodeNext",
8 | "outDir": "dist",
9 | "rootDir": "src",
10 | "sourceMap": true,
11 | "target": "esnext",
12 |
13 | "strict": true,
14 |
15 | "esModuleInterop": true,
16 | "moduleResolution": "NodeNext",
17 | "resolveJsonModule": true,
18 | "skipLibCheck": true,
19 | "forceConsistentCasingInFileNames": true,
20 | "stripInternal": true
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/src/lib/secrets.ts:
--------------------------------------------------------------------------------
1 | export const getSecret = async (key: string) => {
2 | if (!key) {
3 | return ''
4 | }
5 |
6 | const env = process.env[key]
7 | if (env) {
8 | return env
9 | }
10 |
11 | const file = process.env[key + '_FILE']
12 | if (!file) {
13 | return ''
14 | }
15 | // Use dynamic import to support module mock
16 | const fs = await import('node:fs/promises')
17 |
18 | return await fs.readFile(file, { encoding: 'utf8' }).catch((e) => {
19 | if (e.code == 'ENOENT') {
20 | return ''
21 | }
22 | throw e
23 | })
24 | }
25 |
--------------------------------------------------------------------------------
/src/lib/PostgresMetaVersion.ts:
--------------------------------------------------------------------------------
1 | import { VERSION_SQL } from './sql/version.sql.js'
2 | import { PostgresMetaResult, PostgresVersion } from './types.js'
3 |
4 | export default class PostgresMetaVersion {
5 | query: (sql: string) => Promise>
6 |
7 | constructor(query: (sql: string) => Promise>) {
8 | this.query = query
9 | }
10 |
11 | async retrieve(): Promise> {
12 | const { data, error } = await this.query(VERSION_SQL())
13 | if (error) {
14 | return { data, error }
15 | }
16 | return { data: data[0], error }
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/src/lib/PostgresMetaConfig.ts:
--------------------------------------------------------------------------------
1 | import { CONFIG_SQL } from './sql/config.sql.js'
2 | import { PostgresMetaResult, PostgresConfig } from './types.js'
3 |
4 | export default class PostgresMetaConfig {
5 | query: (sql: string) => Promise>
6 |
7 | constructor(query: (sql: string) => Promise>) {
8 | this.query = query
9 | }
10 |
11 | async list({
12 | limit,
13 | offset,
14 | }: {
15 | limit?: number
16 | offset?: number
17 | } = {}): Promise> {
18 | const sql = CONFIG_SQL({ limit, offset })
19 | return await this.query(sql)
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/src/lib/index.ts:
--------------------------------------------------------------------------------
1 | export { default as PostgresMeta } from './PostgresMeta.js'
2 | export {
3 | PostgresMetaOk,
4 | PostgresMetaErr,
5 | PostgresMetaResult,
6 | PostgresColumn,
7 | PostgresConfig,
8 | PostgresExtension,
9 | PostgresFunction,
10 | PostgresFunctionCreate,
11 | PostgresIndex,
12 | PostgresMaterializedView,
13 | PostgresPolicy,
14 | PostgresPrimaryKey,
15 | PostgresPublication,
16 | PostgresRelationship,
17 | PostgresRole,
18 | PostgresSchema,
19 | PostgresSchemaCreate,
20 | PostgresSchemaUpdate,
21 | PostgresTable,
22 | PostgresTrigger,
23 | PostgresType,
24 | PostgresVersion,
25 | PostgresView,
26 | } from './types.js'
27 |
--------------------------------------------------------------------------------
/src/lib/sql/extensions.sql.ts:
--------------------------------------------------------------------------------
import type { SQLQueryProps } from './common.js'

/**
 * Lists every available extension together with its installed version
 * (NULL when not installed) and the schema it is installed into.
 *
 * `nameFilter`, `limit` and `offset` are interpolated verbatim — callers are
 * expected to pass pre-escaped SQL fragments (see helpers.ts filter builders).
 */
export const EXTENSIONS_SQL = (props: SQLQueryProps & { nameFilter?: string }) => /* SQL */ `
SELECT
  e.name,
  n.nspname AS schema,
  e.default_version,
  x.extversion AS installed_version,
  e.comment
FROM
  pg_available_extensions() e(name, default_version, comment)
  LEFT JOIN pg_extension x ON e.name = x.extname
  LEFT JOIN pg_namespace n ON x.extnamespace = n.oid
WHERE
  true
  ${props.nameFilter ? `AND e.name ${props.nameFilter}` : ''}
  ${props.limit ? `limit ${props.limit}` : ''}
  ${props.offset ? `offset ${props.offset}` : ''}
`
20 |
--------------------------------------------------------------------------------
/src/lib/sql/config.sql.ts:
--------------------------------------------------------------------------------
import type { SQLQueryPropsWithSchemaFilterAndIdsFilter } from './common.js'

/**
 * Lists all settings from pg_settings, splitting `category` into group and
 * subgroup, ordered by category then name.
 *
 * NOTE(review): the prop type declares schemaFilter/idsFilter but this query
 * only uses limit/offset — the filters are accepted and ignored; confirm
 * whether the narrower SQLQueryProps was intended.
 */
export const CONFIG_SQL = (props: SQLQueryPropsWithSchemaFilterAndIdsFilter) => /* SQL */ `
SELECT
  name,
  setting,
  category,
  TRIM(split_part(category, '/', 1)) AS group,
  TRIM(split_part(category, '/', 2)) AS subgroup,
  unit,
  short_desc,
  extra_desc,
  context,
  vartype,
  source,
  min_val,
  max_val,
  enumvals,
  boot_val,
  reset_val,
  sourcefile,
  sourceline,
  pending_restart
FROM
  pg_settings
ORDER BY
  category,
  name
${props.limit ? `limit ${props.limit}` : ''}
${props.offset ? `offset ${props.offset}` : ''}
`
32 |
--------------------------------------------------------------------------------
/test/lib/version.ts:
--------------------------------------------------------------------------------
1 | import { expect, test } from 'vitest'
2 | import { pgMeta } from './utils'
3 |
4 | test('retrieve', async () => {
5 | const res = await pgMeta.version.retrieve()
6 | expect(res).toMatchInlineSnapshot(
7 | {
8 | data: {
9 | active_connections: expect.any(Number),
10 | max_connections: expect.any(Number),
11 | version: expect.stringMatching(/^PostgreSQL/),
12 | version_number: expect.any(Number),
13 | },
14 | },
15 | `
16 | {
17 | "data": {
18 | "active_connections": Any,
19 | "max_connections": Any,
20 | "version": StringMatching /\\^PostgreSQL/,
21 | "version_number": Any,
22 | },
23 | "error": null,
24 | }
25 | `
26 | )
27 | })
28 |
--------------------------------------------------------------------------------
/.github/workflows/publish-deps.yml:
--------------------------------------------------------------------------------
1 | name: Publish Dependencies
2 |
3 | on:
4 | workflow_dispatch:
5 |
6 | permissions:
7 | contents: read
8 | packages: write
9 | id-token: write
10 |
11 | jobs:
12 | publish:
13 |     # Must match glibc version in node:20
14 | runs-on: ubuntu-22.04
15 | steps:
16 | - uses: actions/checkout@v5
17 | with:
18 | repository: 'pyramation/libpg-query-node'
19 | ref: 'v15'
20 |
21 | - uses: actions/setup-node@v4
22 | with:
23 | node-version-file: '.nvmrc'
24 |
25 | - run: npm i
26 | - run: npm run binary:build
27 |
28 | - uses: aws-actions/configure-aws-credentials@v4
29 | with:
30 | role-to-assume: ${{ secrets.PROD_AWS_ROLE }}
31 | aws-region: us-east-1
32 |
33 | - run: npx node-pre-gyp publish
34 |
--------------------------------------------------------------------------------
/src/lib/sql/foreign_tables.sql.ts:
--------------------------------------------------------------------------------
import type { SQLQueryProps } from './common.js'

/**
 * Lists foreign tables (relkind = 'f') with their schema, name and comment.
 *
 * All filter props are pre-escaped SQL fragments (e.g. "IN ('public')")
 * interpolated verbatim; each conditionally prefixes the fixed
 * `c.relkind = 'f'` predicate with an extra `AND` clause.
 */
export const FOREIGN_TABLES_SQL = (
  props: SQLQueryProps & {
    schemaFilter?: string
    idsFilter?: string
    tableIdentifierFilter?: string
  }
) => /* SQL */ `
SELECT
  c.oid :: int8 AS id,
  n.nspname AS schema,
  c.relname AS name,
  obj_description(c.oid) AS comment
FROM
  pg_class c
  JOIN pg_namespace n ON n.oid = c.relnamespace
WHERE
  ${props.schemaFilter ? `n.nspname ${props.schemaFilter} AND` : ''}
  ${props.idsFilter ? `c.oid ${props.idsFilter} AND` : ''}
  ${props.tableIdentifierFilter ? `(n.nspname || '.' || c.relname) ${props.tableIdentifierFilter} AND` : ''}
  c.relkind = 'f'
  ${props.limit ? `limit ${props.limit}` : ''}
  ${props.offset ? `offset ${props.offset}` : ''}
`
26 |
--------------------------------------------------------------------------------
/test/lib/config.ts:
--------------------------------------------------------------------------------
import { expect, test } from 'vitest'
import { pgMeta } from './utils'

// Spot-check config.list() against one stable, always-present setting
// (autovacuum) instead of snapshotting the entire pg_settings output.
test('list', async () => {
  const res = await pgMeta.config.list()
  expect(res.data?.find(({ name }) => name === 'autovacuum')).toMatchInlineSnapshot(`
    {
      "boot_val": "on",
      "category": "Autovacuum",
      "context": "sighup",
      "enumvals": null,
      "extra_desc": null,
      "group": "Autovacuum",
      "max_val": null,
      "min_val": null,
      "name": "autovacuum",
      "pending_restart": false,
      "reset_val": "on",
      "setting": "on",
      "short_desc": "Starts the autovacuum subprocess.",
      "source": "default",
      "sourcefile": null,
      "sourceline": null,
      "subgroup": "",
      "unit": null,
      "vartype": "bool",
    }
  `)
})
30 |
--------------------------------------------------------------------------------
/test/server/utils.ts:
--------------------------------------------------------------------------------
import { build as buildApp } from '../../src/server/app'

// Shared Fastify instance reused across server tests (built once per run).
export const app = buildApp()
4 |
5 | /**
6 | * Normalizes UUIDs in test data to make snapshots resilient to UUID changes.
7 | * Replaces all UUID strings with a consistent placeholder.
8 | */
9 | export function normalizeUuids(data: unknown): unknown {
10 | const uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i
11 |
12 | if (typeof data === 'string' && uuidRegex.test(data)) {
13 | return '00000000-0000-0000-0000-000000000000'
14 | }
15 |
16 | if (Array.isArray(data)) {
17 | return data.map(normalizeUuids)
18 | }
19 |
20 | if (data !== null && typeof data === 'object') {
21 | const normalized: Record = {}
22 | for (const [key, value] of Object.entries(data)) {
23 | normalized[key] = normalizeUuids(value)
24 | }
25 | return normalized
26 | }
27 |
28 | return data
29 | }
30 |
--------------------------------------------------------------------------------
/src/lib/sql/materialized_views.sql.ts:
--------------------------------------------------------------------------------
import type { SQLQueryPropsWithSchemaFilterAndIdsFilter } from './common.js'

/**
 * Lists materialized views (relkind = 'm') with their population state.
 *
 * Filter props are pre-escaped SQL fragments interpolated verbatim; each
 * conditionally prefixes the fixed `c.relkind = 'm'` predicate.
 */
export const MATERIALIZED_VIEWS_SQL = (
  props: SQLQueryPropsWithSchemaFilterAndIdsFilter & {
    materializedViewIdentifierFilter?: string
  }
) => /* SQL */ `
select
  c.oid::int8 as id,
  n.nspname as schema,
  c.relname as name,
  c.relispopulated as is_populated,
  obj_description(c.oid) as comment
from
  pg_class c
  join pg_namespace n on n.oid = c.relnamespace
where
  ${props.schemaFilter ? `n.nspname ${props.schemaFilter} AND` : ''}
  ${props.idsFilter ? `c.oid ${props.idsFilter} AND` : ''}
  ${props.materializedViewIdentifierFilter ? `(n.nspname || '.' || c.relname) ${props.materializedViewIdentifierFilter} AND` : ''}
  c.relkind = 'm'
  ${props.limit ? `limit ${props.limit}` : ''}
  ${props.offset ? `offset ${props.offset}` : ''}
`
25 |
--------------------------------------------------------------------------------
/src/lib/sql/views.sql.ts:
--------------------------------------------------------------------------------
import type { SQLQueryPropsWithSchemaFilterAndIdsFilter } from './common.js'

/**
 * Lists plain views (relkind = 'v') with an `is_updatable` flag derived the
 * same way information_schema.views does it.
 *
 * Filter props are pre-escaped SQL fragments interpolated verbatim; each
 * conditionally prefixes the fixed `c.relkind = 'v'` predicate.
 */
export const VIEWS_SQL = (
  props: SQLQueryPropsWithSchemaFilterAndIdsFilter & {
    viewIdentifierFilter?: string
  }
) => /* SQL */ `
SELECT
  c.oid :: int8 AS id,
  n.nspname AS schema,
  c.relname AS name,
  -- See definition of information_schema.views
  (pg_relation_is_updatable(c.oid, false) & 20) = 20 AS is_updatable,
  obj_description(c.oid) AS comment
FROM
  pg_class c
  JOIN pg_namespace n ON n.oid = c.relnamespace
WHERE
  ${props.schemaFilter ? `n.nspname ${props.schemaFilter} AND` : ''}
  ${props.idsFilter ? `c.oid ${props.idsFilter} AND` : ''}
  ${props.viewIdentifierFilter ? `(n.nspname || '.' || c.relname) ${props.viewIdentifierFilter} AND` : ''}
  c.relkind = 'v'
  ${props.limit ? `limit ${props.limit}` : ''}
  ${props.offset ? `offset ${props.offset}` : ''}
`
26 |
--------------------------------------------------------------------------------
/src/lib/helpers.ts:
--------------------------------------------------------------------------------
import { literal } from 'pg-format'

/**
 * Builds a SQL fragment that aggregates rows of `source` matching `filter`
 * into a JSON array column named after `source` (empty set => '{}').
 * `source` and `filter` are trusted SQL fragments, interpolated verbatim.
 */
export const coalesceRowsToArray = (source: string, filter: string) => {
  return `
COALESCE(
  (
    SELECT
      array_agg(row_to_json(${source})) FILTER (WHERE ${filter})
    FROM
      ${source}
  ),
  '{}'
) AS ${source}`
}
15 |
16 | export const filterByList = (
17 | include?: (string | number)[],
18 | exclude?: (string | number)[],
19 | defaultExclude?: (string | number)[]
20 | ) => {
21 | if (defaultExclude) {
22 | exclude = defaultExclude.concat(exclude ?? [])
23 | }
24 | if (include?.length) {
25 | return `IN (${include.map(literal).join(',')})`
26 | } else if (exclude?.length) {
27 | return `NOT IN (${exclude.map(literal).join(',')})`
28 | }
29 | return ''
30 | }
31 |
32 | export const filterByValue = (ids?: (string | number)[]) => {
33 | if (ids?.length) {
34 | return `IN (${ids.map(literal).join(',')})`
35 | }
36 | return ''
37 | }
38 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM node:20 AS build
2 | WORKDIR /usr/src/app
3 | # Do `npm ci` separately so we can cache `node_modules`
4 | # https://nodejs.org/en/docs/guides/nodejs-docker-webapp/
5 | COPY package.json package-lock.json ./
6 | RUN npm clean-install
7 | COPY . .
8 | RUN npm run build && npm prune --omit=dev
9 |
10 | FROM node:20-slim
11 | RUN apt-get update && apt-get install -y \
12 | ca-certificates \
13 | && rm -rf /var/lib/apt/lists/*
14 | WORKDIR /usr/src/app
15 | COPY --from=build /usr/src/app/node_modules node_modules
16 | COPY --from=build /usr/src/app/dist dist
17 | COPY package.json ./
18 | ENV PG_META_PORT=8080
19 | # `npm run start` does not forward signals to child process
20 | CMD ["node", "dist/server/server.js"]
21 | EXPOSE 8080
22 | # --start-period defaults to 0s; it currently cannot be set to 0s explicitly
23 | HEALTHCHECK --interval=5s --timeout=5s --retries=3 CMD node -e "fetch('http://localhost:8080/health').then((r) => {if (r.status !== 200) throw new Error(r.status)})"
24 |
--------------------------------------------------------------------------------
/src/lib/sql/schemas.sql.ts:
--------------------------------------------------------------------------------
import type { SQLQueryProps } from './common.js'

/**
 * Lists schemas with their oid and owner role. Only schemas the current role
 * can use (owner-role membership or CREATE/USAGE privilege) are returned;
 * temp schemas are always excluded, and `pg_*` schemas are excluded unless
 * `includeSystemSchemas` is set. Filter props are pre-escaped SQL fragments.
 */
export const SCHEMAS_SQL = (
  props: SQLQueryProps & { nameFilter?: string; idsFilter?: string; includeSystemSchemas?: boolean }
) => /* SQL */ `
-- Adapted from information_schema.schemata
select
  n.oid::int8 as id,
  n.nspname as name,
  u.rolname as owner
from
  pg_namespace n,
  pg_roles u
where
  n.nspowner = u.oid
  ${props.idsFilter ? `and n.oid ${props.idsFilter}` : ''}
  ${props.nameFilter ? `and n.nspname ${props.nameFilter}` : ''}
  ${!props.includeSystemSchemas ? `and not pg_catalog.starts_with(n.nspname, 'pg_')` : ''}
  and (
    pg_has_role(n.nspowner, 'USAGE')
    or has_schema_privilege(n.oid, 'CREATE, USAGE')
  )
  and not pg_catalog.starts_with(n.nspname, 'pg_temp_')
  and not pg_catalog.starts_with(n.nspname, 'pg_toast_temp_')
  ${props.limit ? `limit ${props.limit}` : ''}
  ${props.offset ? `offset ${props.offset}` : ''}
`
28 |
--------------------------------------------------------------------------------
/test/app.test.ts:
--------------------------------------------------------------------------------
import { expect, test, describe } from 'vitest'
import { build } from '../src/server/app.js'

describe('server/app', () => {
  // Uses Fastify's inject() — an in-process request, no network socket needed.
  test('should handle root endpoint', async () => {
    const app = build()
    const response = await app.inject({
      method: 'GET',
      url: '/',
    })
    expect(response.statusCode).toBe(200)
    const data = JSON.parse(response.body)
    expect(data).toHaveProperty('status')
    expect(data).toHaveProperty('name')
    expect(data).toHaveProperty('version')
    expect(data).toHaveProperty('documentation')
    await app.close()
  })

  test('should handle health endpoint', async () => {
    const app = build()
    const response = await app.inject({
      method: 'GET',
      url: '/health',
    })
    expect(response.statusCode).toBe(200)
    const data = JSON.parse(response.body)
    expect(data).toHaveProperty('date')
    await app.close()
  })
})
32 |
--------------------------------------------------------------------------------
/.github/workflows/docs.yml:
--------------------------------------------------------------------------------
1 | name: Docs
2 |
3 | on:
4 | push:
5 | branches:
6 | - master
7 | workflow_dispatch:
8 |
9 | permissions:
10 | contents: write
11 | pages: write
12 |
13 | # Cancel old builds on new commit for same workflow + branch/PR
14 | concurrency:
15 | group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
16 | cancel-in-progress: true
17 |
18 | jobs:
19 | docs:
20 | name: Publish docs
21 | runs-on: ubuntu-22.04
22 | steps:
23 | - uses: actions/checkout@v5
24 |
25 | - uses: actions/setup-node@v4
26 | with:
27 | node-version-file: '.nvmrc'
28 |
29 | - run: |
30 | npm clean-install
31 | npm run docs:export
32 |
33 | - name: Generate Swagger UI
34 | uses: Legion2/swagger-ui-action@v1
35 | with:
36 | output: docs
37 | spec-file: openapi.json
38 |
39 | - name: Publish
40 | uses: peaceiris/actions-gh-pages@v4
41 | with:
42 | github_token: ${{ secrets.GITHUB_TOKEN }}
43 | publish_dir: docs
44 | force_orphan: true
45 | commit_message: 'docs: update'
46 |
--------------------------------------------------------------------------------
/.github/workflows/mirror.yml:
--------------------------------------------------------------------------------
1 | name: Mirror Image
2 |
3 | on:
4 | workflow_dispatch:
5 | inputs:
6 | version:
7 | description: 'Image tag'
8 | required: true
9 | type: string
10 |
11 | permissions:
12 | contents: read
13 |
14 | jobs:
15 | mirror:
16 | runs-on: ubuntu-latest
17 | permissions:
18 | contents: read
19 | packages: write
20 | steps:
21 | - name: configure aws credentials
22 | uses: aws-actions/configure-aws-credentials@v4
23 | with:
24 | role-to-assume: ${{ secrets.PROD_AWS_ROLE }}
25 | aws-region: us-east-1
26 | - uses: docker/login-action@v3
27 | with:
28 | registry: public.ecr.aws
29 | - uses: docker/login-action@v3
30 | with:
31 | registry: ghcr.io
32 | username: ${{ github.actor }}
33 | password: ${{ secrets.GITHUB_TOKEN }}
34 | - uses: akhilerm/tag-push-action@v2.2.0
35 | with:
36 | src: docker.io/supabase/postgres-meta:${{ inputs.version }}
37 | dst: |
38 | public.ecr.aws/supabase/postgres-meta:${{ inputs.version }}
39 | ghcr.io/supabase/postgres-meta:${{ inputs.version }}
40 |
--------------------------------------------------------------------------------
/scripts/generate-python-types-test.ts:
--------------------------------------------------------------------------------
#!/usr/bin/env node

/**
 * Script to generate Python types for CI validation
 * This script uses the test database setup to generate Python types
 */

import { build } from '../src/server/app.js'

// Matches the dockerized test database (see test/db/docker-compose.yml).
const TEST_CONNECTION_STRING = 'postgresql://postgres:postgres@localhost:5432'

async function generatePythonTypes() {
  const app = build()

  try {
    // inject() performs an in-process request — no HTTP listener required.
    const response = await app.inject({
      method: 'GET',
      url: '/generators/python',
      headers: {
        pg: TEST_CONNECTION_STRING,
      },
      query: {
        access_control: 'public',
      },
    })

    if (response.statusCode !== 200) {
      console.error(`Failed to generate types: ${response.statusCode}`)
      console.error(response.body)
      process.exit(1)
    }

    // Write to stdout so it can be captured
    process.stdout.write(response.body)
  } catch (error) {
    console.error('Error generating Python types:', error)
    process.exit(1)
  } finally {
    await app.close()
  }
}

generatePythonTypes()
44 |
45 |
--------------------------------------------------------------------------------
/.github/workflows/automerge.yml:
--------------------------------------------------------------------------------
1 | name: Dependabot auto-merge
2 | on: pull_request
3 |
4 | permissions:
5 | contents: write
6 | pull-requests: write
7 |
8 | jobs:
9 | dependabot:
10 | runs-on: ubuntu-latest
11 | if: ${{ github.actor == 'dependabot[bot]' }}
12 | steps:
13 | - name: Dependabot metadata
14 | id: metadata
15 | uses: dependabot/fetch-metadata@v2
16 | with:
17 | github-token: "${{ secrets.GITHUB_TOKEN }}"
18 |
19 | - name: Approve a PR
20 |         if: ${{ steps.metadata.outputs.update-type != 'version-update:semver-major' && !startsWith(steps.metadata.outputs.new-version, '0.') }}
21 | run: gh pr review --approve "$PR_URL"
22 | env:
23 | PR_URL: ${{ github.event.pull_request.html_url }}
24 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
25 |
26 | - name: Enable auto-merge for Dependabot PRs
27 |         if: ${{ steps.metadata.outputs.update-type != 'version-update:semver-major' && !startsWith(steps.metadata.outputs.new-version, '0.') }}
28 | run: gh pr merge --auto --squash "$PR_URL"
29 | env:
30 | PR_URL: ${{ github.event.pull_request.html_url }}
31 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
32 |
--------------------------------------------------------------------------------
/test/index.test.ts:
--------------------------------------------------------------------------------
1 | // TODO: Change lib tests to server tests.
2 | // https://github.com/supabase/postgres-meta/issues/397#issuecomment-1285078489
3 | import './lib/columns'
4 | import './lib/config'
5 | import './lib/extensions'
6 | import './lib/foreign-tables'
7 | import './lib/functions'
8 | import './lib/policies'
9 | import './lib/publications'
10 | import './lib/roles'
11 | import './lib/schemas'
12 | import './lib/secrets'
13 | import './lib/tables'
14 | import './lib/triggers'
15 | import './lib/types'
16 | import './lib/version'
17 | import './lib/views'
18 | import './server/column-privileges'
19 | import './server/indexes'
20 | import './server/materialized-views'
21 | import './server/query'
22 | import './server/ssl'
23 | import './server/table-privileges'
24 | import './server/typegen'
25 | import './server/result-size-limit'
26 | import './server/query-timeout'
27 | // New tests for increased coverage - commented out to avoid import issues
28 | // import './server/app'
29 | // import './server/utils'
30 | // import './server/functions'
31 | // import './server/config'
32 | // import './server/extensions'
33 | // import './server/publications'
34 | // import './server/schemas'
35 | // import './server/roles'
36 | // import './server/triggers'
37 | // import './server/types'
38 | // import './server/views'
39 |
--------------------------------------------------------------------------------
/src/lib/PostgresMetaTypes.ts:
--------------------------------------------------------------------------------
1 | import { DEFAULT_SYSTEM_SCHEMAS } from './constants.js'
2 | import { filterByList } from './helpers.js'
3 | import { PostgresMetaResult, PostgresType } from './types.js'
4 | import { TYPES_SQL } from './sql/types.sql.js'
5 |
6 | export default class PostgresMetaTypes {
7 | query: (sql: string) => Promise>
8 |
9 | constructor(query: (sql: string) => Promise>) {
10 | this.query = query
11 | }
12 |
13 | async list({
14 | includeTableTypes = false,
15 | includeArrayTypes = false,
16 | includeSystemSchemas = false,
17 | includedSchemas,
18 | excludedSchemas,
19 | limit,
20 | offset,
21 | }: {
22 | includeTableTypes?: boolean
23 | includeArrayTypes?: boolean
24 | includeSystemSchemas?: boolean
25 | includedSchemas?: string[]
26 | excludedSchemas?: string[]
27 | limit?: number
28 | offset?: number
29 | } = {}): Promise> {
30 | const schemaFilter = filterByList(
31 | includedSchemas,
32 | excludedSchemas,
33 | !includeSystemSchemas ? DEFAULT_SYSTEM_SCHEMAS : undefined
34 | )
35 | const sql = TYPES_SQL({ schemaFilter, limit, offset, includeTableTypes, includeArrayTypes })
36 | return await this.query(sql)
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | ### Install deps
4 |
5 | - docker
6 | - `npm install`
7 |
8 | ### Start services
9 |
10 | 1. Run `docker compose up` in `/test/db`
11 | 2. Run the tests: `npm run test:run`
12 | 3. Make changes in code (`/src`) and tests (`/test/lib` and `/test/server`)
13 | 4. Run the tests again: `npm run test:run`
14 | 5. Commit + PR
15 |
16 | ### Canary Deployments
17 |
18 | For testing your changes when they impact other things (like type generation and postgrest-js), you can deploy a canary version of postgres-meta:
19 |
20 | 1. **Create a Pull Request** targeting the `master` branch
21 | 2. **Add the `deploy-canary` label** to your PR
22 | 3. **Wait for the canary build** - GitHub Actions will automatically build and push a canary Docker image
23 | 4. **Use the canary image** - The bot will comment on your PR with the exact image tag and usage instructions
24 |
25 | The canary image will be tagged as:
26 |
27 | - `supabase/postgres-meta:canary-pr-{PR_NUMBER}-{COMMIT_SHA}`
28 | - `supabase/postgres-meta:canary-pr-{PR_NUMBER}`
29 |
30 | Example usage:
31 |
32 | ```bash
33 | docker pull supabase/postgres-meta:canary-pr-123-abc1234
34 | echo "canary-pr-123-abc1234" > supabase/.temp/pgmeta-version
35 | ```
36 |
37 | **Note:** Only maintainers can add the `deploy-canary` label for security reasons. The canary deployment requires access to production Docker registries.
38 |
--------------------------------------------------------------------------------
/src/lib/sql/publications.sql.ts:
--------------------------------------------------------------------------------
1 | import type { SQLQueryPropsWithIdsFilter } from './common.js'
2 |
/**
 * Builds the SQL that lists publications with their publish flags and owner.
 *
 * `tables` is NULL for FOR ALL TABLES publications (puballtables), and an
 * empty array when a publication has no explicit member tables; otherwise it
 * is an array of `{id, name, schema}` JSON objects.
 *
 * NOTE(review): `idsFilter`/`nameFilter`/`limit`/`offset` are interpolated
 * directly into the SQL — callers are expected to supply pre-sanitized
 * fragments. Verify against call sites before passing user input.
 */
export const PUBLICATIONS_SQL = (
  props: SQLQueryPropsWithIdsFilter & { nameFilter?: string }
) => /* SQL */ `
SELECT
  p.oid :: int8 AS id,
  p.pubname AS name,
  p.pubowner::regrole::text AS owner,
  p.pubinsert AS publish_insert,
  p.pubupdate AS publish_update,
  p.pubdelete AS publish_delete,
  p.pubtruncate AS publish_truncate,
  CASE
    WHEN p.puballtables THEN NULL
    ELSE pr.tables
  END AS tables
FROM
  pg_catalog.pg_publication AS p
  LEFT JOIN LATERAL (
    SELECT
      COALESCE(
        array_agg(
          json_build_object(
            'id',
            c.oid :: int8,
            'name',
            c.relname,
            'schema',
            nc.nspname
          )
        ),
        '{}'
      ) AS tables
    FROM
      pg_catalog.pg_publication_rel AS pr
      JOIN pg_class AS c ON pr.prrelid = c.oid
      join pg_namespace as nc on c.relnamespace = nc.oid
    WHERE
      pr.prpubid = p.oid
  ) AS pr ON 1 = 1
WHERE
  ${props.idsFilter ? `p.oid ${props.idsFilter}` : 'true'}
  ${props.nameFilter ? `AND p.pubname ${props.nameFilter}` : ''}
  ${props.limit ? `limit ${props.limit}` : ''}
  ${props.offset ? `offset ${props.offset}` : ''}
`
48 |
--------------------------------------------------------------------------------
/src/server/routes/generators/go.ts:
--------------------------------------------------------------------------------
1 | import type { FastifyInstance } from 'fastify'
2 | import { PostgresMeta } from '../../../lib/index.js'
3 | import { createConnectionConfig, extractRequestForLogging } from '../../utils.js'
4 | import { apply as applyGoTemplate } from '../../templates/go.js'
5 | import { getGeneratorMetadata } from '../../../lib/generators.js'
6 |
7 | export default async (fastify: FastifyInstance) => {
8 | fastify.get<{
9 | Headers: { pg: string; 'x-pg-application-name'?: string }
10 | Querystring: {
11 | excluded_schemas?: string
12 | included_schemas?: string
13 | }
14 | }>('/', async (request, reply) => {
15 | const config = createConnectionConfig(request)
16 | const excludedSchemas =
17 | request.query.excluded_schemas?.split(',').map((schema) => schema.trim()) ?? []
18 | const includedSchemas =
19 | request.query.included_schemas?.split(',').map((schema) => schema.trim()) ?? []
20 |
21 | const pgMeta: PostgresMeta = new PostgresMeta(config)
22 | const { data: generatorMeta, error: generatorMetaError } = await getGeneratorMetadata(pgMeta, {
23 | includedSchemas,
24 | excludedSchemas,
25 | })
26 | if (generatorMetaError) {
27 | request.log.error({ error: generatorMetaError, request: extractRequestForLogging(request) })
28 | reply.code(500)
29 | return { error: generatorMetaError.message }
30 | }
31 |
32 | return applyGoTemplate(generatorMeta)
33 | })
34 | }
35 |
--------------------------------------------------------------------------------
/src/server/routes/generators/python.ts:
--------------------------------------------------------------------------------
1 | import type { FastifyInstance } from 'fastify'
2 | import { PostgresMeta } from '../../../lib/index.js'
3 | import { createConnectionConfig, extractRequestForLogging } from '../../utils.js'
4 | import { apply as applyPyTemplate } from '../../templates/python.js'
5 | import { getGeneratorMetadata } from '../../../lib/generators.js'
6 |
7 | export default async (fastify: FastifyInstance) => {
8 | fastify.get<{
9 | Headers: { pg: string; 'x-pg-application-name'?: string }
10 | Querystring: {
11 | excluded_schemas?: string
12 | included_schemas?: string
13 | }
14 | }>('/', async (request, reply) => {
15 | const config = createConnectionConfig(request)
16 | const excludedSchemas =
17 | request.query.excluded_schemas?.split(',').map((schema) => schema.trim()) ?? []
18 | const includedSchemas =
19 | request.query.included_schemas?.split(',').map((schema) => schema.trim()) ?? []
20 | const pgMeta: PostgresMeta = new PostgresMeta(config)
21 | const { data: generatorMeta, error: generatorMetaError } = await getGeneratorMetadata(pgMeta, {
22 | includedSchemas,
23 | excludedSchemas,
24 | })
25 | if (generatorMetaError) {
26 | request.log.error({ error: generatorMetaError, request: extractRequestForLogging(request) })
27 | reply.code(500)
28 | return { error: generatorMetaError.message }
29 | }
30 |
31 | return applyPyTemplate(generatorMeta)
32 | })
33 | }
34 |
--------------------------------------------------------------------------------
/test/types.test.ts:
--------------------------------------------------------------------------------
1 | import { expect, test, describe } from 'vitest'
2 | import { build } from '../src/server/app.js'
3 | import { TEST_CONNECTION_STRING } from './lib/utils.js'
4 |
5 | describe('server/routes/types', () => {
6 | test('should list types', async () => {
7 | const app = build()
8 | const response = await app.inject({
9 | method: 'GET',
10 | url: '/types',
11 | headers: {
12 | pg: TEST_CONNECTION_STRING,
13 | },
14 | })
15 | expect(response.statusCode).toBe(200)
16 | expect(Array.isArray(JSON.parse(response.body))).toBe(true)
17 | await app.close()
18 | })
19 |
20 | test('should list types with query parameters', async () => {
21 | const app = build()
22 | const response = await app.inject({
23 | method: 'GET',
24 | url: '/types?include_system_schemas=true&limit=5&offset=0',
25 | headers: {
26 | pg: TEST_CONNECTION_STRING,
27 | },
28 | })
29 | expect(response.statusCode).toBe(200)
30 | expect(Array.isArray(JSON.parse(response.body))).toBe(true)
31 | await app.close()
32 | })
33 |
34 | test('should return 404 for non-existent type', async () => {
35 | const app = build()
36 | const response = await app.inject({
37 | method: 'GET',
38 | url: '/types/non-existent-type',
39 | headers: {
40 | pg: TEST_CONNECTION_STRING,
41 | },
42 | })
43 | expect(response.statusCode).toBe(404)
44 | await app.close()
45 | })
46 | })
47 |
--------------------------------------------------------------------------------
/src/server/sentry.ts:
--------------------------------------------------------------------------------
import * as Sentry from '@sentry/node'
import { nodeProfilingIntegration } from '@sentry/profiling-node'

// Sentry is only enabled when a DSN is configured; environment defaults to
// 'local' for development machines.
const sentryEnvironment = process.env.ENVIRONMENT ?? 'local'
const dsn = process.env.SENTRY_DSN ?? ''

// Sampling: keep prod overhead negligible, sample non-prod more heavily.
const captureOptions: Sentry.NodeOptions =
  sentryEnvironment === 'prod'
    ? {
        // Tracing
        tracesSampleRate: 0.00001, // trace 1 in 100k events
        // Set sampling rate for profiling - this is evaluated only once per SDK.init call
        profilesSampleRate: 0.00001, // profile 1 in 100k events
      }
    : {
        tracesSampleRate: 0.01, // trace 1% of the events
        profilesSampleRate: 0.01, // profile 1% of the events
      }

// Header/field names whose values must never reach Sentry (connection
// strings may embed credentials).
const sensitiveKeys = ['pg', 'x-connection-encrypted']

// Replaces the value of any sensitive key on `data` (mutated in place) with a
// redaction marker. No-op when `data` is not an object.
function redactSensitiveData(data: any) {
  if (data && typeof data === 'object') {
    for (const key of sensitiveKeys) {
      if (key in data) {
        data[key] = '[REDACTED]'
      }
    }
  }
}

// Initialize Sentry at module load; importing this module has that side effect.
export default Sentry.init({
  enabled: Boolean(dsn),
  dsn: dsn,
  environment: sentryEnvironment,
  integrations: [nodeProfilingIntegration()],
  // Scrub sensitive request data before transactions/spans leave the process.
  beforeSendTransaction(transaction) {
    if (transaction.contexts?.trace?.data) {
      redactSensitiveData(transaction.contexts.trace.data)
    }
    return transaction
  },
  beforeSendSpan(span) {
    if (span.data) {
      redactSensitiveData(span.data)
    }
    return span
  },
  ...captureOptions,
})
51 |
--------------------------------------------------------------------------------
/src/lib/sql/roles.sql.ts:
--------------------------------------------------------------------------------
1 | import type { SQLQueryPropsWithIdsFilter } from './common.js'
2 |
/**
 * Builds the SQL that lists roles from pg_roles.
 *
 * - `active_connections` counts pg_stat_activity rows whose usename matches
 *   the role name.
 * - `connection_limit` resolves -1 (unlimited) to the server's
 *   max_connections setting.
 * - Roles prefixed `pg_` (predefined/reserved) are excluded unless
 *   `includeDefaultRoles` is set.
 *
 * NOTE(review): `idsFilter`/`nameFilter`/`limit`/`offset` are interpolated
 * directly into the SQL — callers are expected to supply pre-sanitized
 * fragments. Verify against call sites before passing user input.
 */
export const ROLES_SQL = (
  props: SQLQueryPropsWithIdsFilter & {
    includeDefaultRoles?: boolean
    nameFilter?: string
  }
) => /* SQL */ `
-- TODO: Consider using pg_authid vs. pg_roles for unencrypted password field
SELECT
  oid :: int8 AS id,
  rolname AS name,
  rolsuper AS is_superuser,
  rolcreatedb AS can_create_db,
  rolcreaterole AS can_create_role,
  rolinherit AS inherit_role,
  rolcanlogin AS can_login,
  rolreplication AS is_replication_role,
  rolbypassrls AS can_bypass_rls,
  (
    SELECT
      COUNT(*)
    FROM
      pg_stat_activity
    WHERE
      pg_roles.rolname = pg_stat_activity.usename
  ) AS active_connections,
  CASE WHEN rolconnlimit = -1 THEN current_setting('max_connections') :: int8
       ELSE rolconnlimit
  END AS connection_limit,
  rolpassword AS password,
  rolvaliduntil AS valid_until,
  rolconfig AS config
FROM
  pg_roles
WHERE
  ${props.idsFilter ? `oid ${props.idsFilter}` : 'true'}
  -- All default/predefined roles start with pg_: https://www.postgresql.org/docs/15/predefined-roles.html
  -- The pg_ prefix is also reserved.
  ${!props.includeDefaultRoles ? `AND NOT pg_catalog.starts_with(rolname, 'pg_')` : ''}
  ${props.nameFilter ? `AND rolname ${props.nameFilter}` : ''}
  ${props.limit ? `limit ${props.limit}` : ''}
  ${props.offset ? `offset ${props.offset}` : ''}
`
45 |
--------------------------------------------------------------------------------
/src/server/routes/config.ts:
--------------------------------------------------------------------------------
1 | import { FastifyInstance } from 'fastify'
2 | import { PostgresMeta } from '../../lib/index.js'
3 | import { createConnectionConfig } from '../utils.js'
4 | import { extractRequestForLogging } from '../utils.js'
5 |
6 | export default async (fastify: FastifyInstance) => {
7 | fastify.get<{
8 | Headers: { pg: string; 'x-pg-application-name'?: string }
9 | Querystring: {
10 | limit?: number
11 | offset?: number
12 | }
13 | }>('/', async (request, reply) => {
14 | const config = createConnectionConfig(request)
15 | const limit = request.query.limit
16 | const offset = request.query.offset
17 |
18 | const pgMeta = new PostgresMeta(config)
19 | const { data, error } = await pgMeta.config.list({ limit, offset })
20 | await pgMeta.end()
21 | if (error) {
22 | request.log.error({ error, request: extractRequestForLogging(request) })
23 | reply.code(500)
24 | return { error: error.message }
25 | }
26 |
27 | return data
28 | })
29 |
30 | fastify.get<{
31 | Headers: { pg: string; 'x-pg-application-name'?: string }
32 | }>('/version', async (request, reply) => {
33 | const config = createConnectionConfig(request)
34 |
35 | const pgMeta = new PostgresMeta(config)
36 | const { data, error } = await pgMeta.version.retrieve()
37 | await pgMeta.end()
38 | if (error) {
39 | request.log.error({ error, request: extractRequestForLogging(request) })
40 | reply.code(500)
41 | return { error: error.message }
42 | }
43 |
44 | return data
45 | })
46 | }
47 |
--------------------------------------------------------------------------------
/test/views.test.ts:
--------------------------------------------------------------------------------
1 | import { expect, test, describe } from 'vitest'
2 | import { build } from '../src/server/app.js'
3 | import { TEST_CONNECTION_STRING } from './lib/utils.js'
4 |
describe('server/routes/views', () => {
  // Listing views against the test database yields a JSON array.
  test('should list views', async () => {
    const app = build()
    const response = await app.inject({
      method: 'GET',
      url: '/views',
      headers: {
        pg: TEST_CONNECTION_STRING,
      },
    })
    expect(response.statusCode).toBe(200)
    expect(Array.isArray(JSON.parse(response.body))).toBe(true)
    await app.close()
  })

  // Pagination and system-schema flags are accepted and still yield an array.
  test('should list views with query parameters', async () => {
    const app = build()
    const response = await app.inject({
      method: 'GET',
      url: '/views?include_system_schemas=true&limit=5&offset=0',
      headers: {
        pg: TEST_CONNECTION_STRING,
      },
    })
    expect(response.statusCode).toBe(200)
    expect(Array.isArray(JSON.parse(response.body))).toBe(true)
    await app.close()
  })

  // Unknown view ids answer 404 with a descriptive error payload.
  test('should return 404 for non-existent view', async () => {
    const app = build()
    const response = await app.inject({
      method: 'GET',
      url: '/views/1',
      headers: {
        pg: TEST_CONNECTION_STRING,
      },
    })
    expect(response.statusCode).toBe(404)
    expect(response.json()).toMatchInlineSnapshot(`
      {
        "error": "Cannot find a view with ID 1",
      }
    `)
    await app.close()
  })
})
52 |
--------------------------------------------------------------------------------
/src/server/routes/generators/swift.ts:
--------------------------------------------------------------------------------
1 | import type { FastifyInstance } from 'fastify'
2 | import { PostgresMeta } from '../../../lib/index.js'
3 | import { createConnectionConfig, extractRequestForLogging } from '../../utils.js'
4 | import { apply as applySwiftTemplate, AccessControl } from '../../templates/swift.js'
5 | import { getGeneratorMetadata } from '../../../lib/generators.js'
6 |
7 | export default async (fastify: FastifyInstance) => {
8 | fastify.get<{
9 | Headers: { pg: string; 'x-pg-application-name'?: string }
10 | Querystring: {
11 | excluded_schemas?: string
12 | included_schemas?: string
13 | access_control?: AccessControl
14 | }
15 | }>('/', async (request, reply) => {
16 | const config = createConnectionConfig(request)
17 | const excludedSchemas =
18 | request.query.excluded_schemas?.split(',').map((schema) => schema.trim()) ?? []
19 | const includedSchemas =
20 | request.query.included_schemas?.split(',').map((schema) => schema.trim()) ?? []
21 | const accessControl = request.query.access_control ?? 'internal'
22 |
23 | const pgMeta: PostgresMeta = new PostgresMeta(config)
24 | const { data: generatorMeta, error: generatorMetaError } = await getGeneratorMetadata(pgMeta, {
25 | includedSchemas,
26 | excludedSchemas,
27 | })
28 | if (generatorMetaError) {
29 | request.log.error({ error: generatorMetaError, request: extractRequestForLogging(request) })
30 | reply.code(500)
31 | return { error: generatorMetaError.message }
32 | }
33 |
34 | return applySwiftTemplate({
35 | ...generatorMeta,
36 | accessControl,
37 | })
38 | })
39 | }
40 |
--------------------------------------------------------------------------------
/src/server/utils.ts:
--------------------------------------------------------------------------------
1 | import pgcs from 'pg-connection-string'
2 | import { FastifyRequest } from 'fastify'
3 | import { DEFAULT_POOL_CONFIG } from './constants.js'
4 | import { PoolConfig } from '../lib/types.js'
5 |
6 | export const extractRequestForLogging = (request: FastifyRequest) => {
7 | let pg: string = 'unknown'
8 | try {
9 | if (request.headers.pg) {
10 | pg = pgcs.parse(request.headers.pg as string).host || pg
11 | }
12 | } catch (e: any) {
13 | console.warn('failed to parse PG connstring for ' + request.url)
14 | }
15 |
16 | const additional = request.headers['x-supabase-info']?.toString() || ''
17 |
18 | return {
19 | method: request.method,
20 | url: request.url,
21 | pg,
22 | opt: additional,
23 | }
24 | }
25 |
26 | export function createConnectionConfig(request: FastifyRequest): PoolConfig {
27 | const connectionString = request.headers.pg as string
28 | const config = { ...DEFAULT_POOL_CONFIG, connectionString }
29 |
30 | // Override application_name if custom one provided in header
31 | if (request.headers['x-pg-application-name']) {
32 | config.application_name = request.headers['x-pg-application-name'] as string
33 | }
34 |
35 | return config
36 | }
37 |
38 | export function translateErrorToResponseCode(
39 | error: { message: string },
40 | defaultResponseCode = 400
41 | ): number {
42 | if (error.message === 'Connection terminated due to connection timeout') {
43 | return 504
44 | } else if (error.message === 'sorry, too many clients already') {
45 | return 503
46 | } else if (error.message === 'Query read timeout') {
47 | return 408
48 | }
49 | return defaultResponseCode
50 | }
51 |
--------------------------------------------------------------------------------
/src/server/routes/types.ts:
--------------------------------------------------------------------------------
1 | import { FastifyInstance } from 'fastify'
2 | import { PostgresMeta } from '../../lib/index.js'
3 | import { createConnectionConfig } from '../utils.js'
4 | import { extractRequestForLogging } from '../utils.js'
5 |
6 | export default async (fastify: FastifyInstance) => {
7 | fastify.get<{
8 | Headers: { pg: string; 'x-pg-application-name'?: string }
9 | Querystring: {
10 | include_array_types?: string
11 | include_system_schemas?: string
12 | // Note: this only supports comma separated values (e.g., ".../types?included_schemas=public,core")
13 | included_schemas?: string
14 | excluded_schemas?: string
15 | limit?: number
16 | offset?: number
17 | }
18 | }>('/', async (request, reply) => {
19 | const config = createConnectionConfig(request)
20 | const includeArrayTypes = request.query.include_array_types === 'true'
21 | const includeSystemSchemas = request.query.include_system_schemas === 'true'
22 | const includedSchemas = request.query.included_schemas?.split(',')
23 | const excludedSchemas = request.query.excluded_schemas?.split(',')
24 | const limit = request.query.limit
25 | const offset = request.query.offset
26 |
27 | const pgMeta = new PostgresMeta(config)
28 | const { data, error } = await pgMeta.types.list({
29 | includeArrayTypes,
30 | includeSystemSchemas,
31 | includedSchemas,
32 | excludedSchemas,
33 | limit,
34 | offset,
35 | })
36 | await pgMeta.end()
37 | if (error) {
38 | request.log.error({ error, request: extractRequestForLogging(request) })
39 | reply.code(500)
40 | return { error: error.message }
41 | }
42 |
43 | return data
44 | })
45 | }
46 |
--------------------------------------------------------------------------------
/test/db/server.key:
--------------------------------------------------------------------------------
1 | -----BEGIN PRIVATE KEY-----
2 | MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDWSuwNQKixzejp
3 | FkGpa+1/xO5Otk6DWnw3gYz9kAfaV9MbkS93baGwOEgIAx93kWqROVQGhyAzwtkg
4 | 5AYV+Vn7DtsuoIHAbEf2vAAPB5o2qEzDYpdRMVNTUSrW/8rmz7KO14muK6QV7Xw1
5 | jlsmhLFNE3o+MqNWU8HomPJKA1ZTLtvHln7S3+rl18I1k2ENrwzALrSyorFaizj6
6 | 5hzHHiDYDrKX8oJrSh8njMHkY99CmuNsRnRG+/UOEtS5Es7cIt3wXG7jMU8a+t4x
7 | Fewqm2zqZ79n9xNEugFK3XYyqFmCE4HySG30XfBwoXvwvkY+ZTbu8y45AFIqAPPT
8 | g8lVVt2TAgMBAAECggEAHXE9I3OpzzF3pGbEGMSqZJlTFgoi7sCE5pTBy/4jsL0g
9 | /92fxEHngDBgvTETUWNFCApKCtI6phdJq8+IgoZi9YU3Wh2qwMcKetJJE8eQnvKF
10 | XCb0nAQx6vWbnt9AKnGI7+qZ5mM6moSplyt68eeIpgqyC0+mdMWck8TygnbDlTlP
11 | W+lfAZoCnrPDe6ptKTKtSy3AdGteAKk0pdaiUPHjtMdtOMwXCcHQkKopVIstfAib
12 | mvg2/3djn5OnYBmhOINIAZvNSoVr/s9I/yZc8V3z2/lPoLDRmEjCgIGba4zkG0Sr
13 | oaHdxJz8eTuSPwI+jcjto3gPkBdL2658l4JLxXYgQQKBgQD+VWv+jJsB01ijZsI9
14 | cV1aS6bqyb5sJEc1EFOZtkUYEr0RB6ww4FrRY7uryMPjXf+y47RvGsev0GvWkpRZ
15 | ijzGmfeqHMm9y+hjVxJ64nNOvxzpuVWG0s3JOBDnVY/4RmnW1qghlAI0QkwU7EHl
16 | O4ql3QS5PQEzudhNpQltDHmL4QKBgQDXsleHOzf32HCFR3EWAy+rosuiianGu3LI
17 | 2toAX0NxCSkNCPHksfcEryoyrgKLCSzNoBtQMQkvk9sgbQfRLPZi3Lcng+wzjBEv
18 | 4uR/a2xdOwnCMCYc9KMjnVukhf5WZ+hJBc49lCqJtc4Mhl89icgrXxUG8YwqUqNK
19 | qb9YMCH38wKBgE3JOnpj7pSkWxu+tfGs1mxjbu2oPkE85zpnf+onQQKX2JN40UUx
20 | mRUpd6CWirLjcOz5j5nbiu9Ow2yg8BZinSvwszqoC1utHaokW1aSI8oV0XX6ZRoT
21 | JzU/nIvkM2AvyPcYN9vtNK9fB33utEiz6TfJXUR6T//N+0XkD/n2MsaBAoGBALDY
22 | A3NYVhbaWcasQEdv7VGnc5WbkJrjbMTIyhur/ztZ61JIlyqNzp0EkHBkwqkDqLwe
23 | HMaurX1YmDwJqHMTjh6YH4JCYxIQMLc2K2lcxcfac7HGkDkFSgwVI+HMCi8Fmijk
24 | nadXJ1koufsC4Gsv3/HPTwoWWHkKr96zNbI0JGWJAoGAFFw+fjx4gI+VDayf4NMd
25 | feIpDF6O2uB9uKbTyNJjYoj9Jh0NkSHccgVb+j5BvnxBmAJHrMEr6Cz3bnlKlK0a
26 | 1+Oqyq8MaYRLk6J/xMGSUcfa3uRC5svq0s8ebbl84Kt23IW9NU+YycVnMzysMLsH
27 | xn4VooZdfd3oNm2lpYURz3I=
28 | -----END PRIVATE KEY-----
29 |
--------------------------------------------------------------------------------
/src/lib/Parser.ts:
--------------------------------------------------------------------------------
1 | import prettier from 'prettier/standalone.js'
2 | import SqlFormatter from 'prettier-plugin-sql'
3 | import { parse, deparse } from 'pgsql-parser'
4 | import { FormatterOptions } from './types.js'
5 |
// Shared prettier configuration for Format(): route SQL text through
// prettier-plugin-sql targeting the PostgreSQL dialect.
const DEFAULT_FORMATTER_OPTIONS = {
  plugins: [SqlFormatter],
  formatter: 'sql-formatter',
  language: 'postgresql',
  database: 'postgresql',
  parser: 'sql',
}
13 |
14 | /**
15 | * Parses a SQL string into an AST.
16 | */
17 | export function Parse(sql: string): ParseReturnValues {
18 | try {
19 | const data = parse(sql)
20 |
21 | return { data, error: null }
22 | } catch (error) {
23 | return { data: null, error: error as Error }
24 | }
25 | }
26 | interface ParseReturnValues {
27 | data: object | null
28 | error: null | Error
29 | }
30 |
31 | /**
32 | * Deparses an AST into SQL string.
33 | */
34 | export async function Deparse(parsedSql: object): Promise {
35 | try {
36 | const data = await deparse(parsedSql, {})
37 | return { data, error: null }
38 | } catch (error) {
39 | return { data: null, error: error as Error }
40 | }
41 | }
42 | interface DeparseReturnValues {
43 | data: string | null
44 | error: null | Error
45 | }
46 |
47 | /**
48 | * Formats a SQL string into a prettier-formatted SQL string.
49 | */
50 | export async function Format(
51 | sql: string,
52 | options: FormatterOptions = {}
53 | ): Promise {
54 | try {
55 | const formatted = await prettier.format(sql, {
56 | ...DEFAULT_FORMATTER_OPTIONS,
57 | ...options,
58 | })
59 |
60 | return { data: formatted, error: null }
61 | } catch (error) {
62 | return { data: null, error: error as Error }
63 | }
64 | }
65 | interface FormatReturnValues {
66 | data: string | null
67 | error: null | Error
68 | }
69 |
--------------------------------------------------------------------------------
/src/server/routes/generators/typescript.ts:
--------------------------------------------------------------------------------
1 | import type { FastifyInstance } from 'fastify'
2 | import { PostgresMeta } from '../../../lib/index.js'
3 | import { createConnectionConfig, extractRequestForLogging } from '../../utils.js'
4 | import { apply as applyTypescriptTemplate } from '../../templates/typescript.js'
5 | import { getGeneratorMetadata } from '../../../lib/generators.js'
6 |
7 | export default async (fastify: FastifyInstance) => {
8 | fastify.get<{
9 | Headers: { pg: string; 'x-pg-application-name'?: string }
10 | Querystring: {
11 | excluded_schemas?: string
12 | included_schemas?: string
13 | detect_one_to_one_relationships?: string
14 | postgrest_version?: string
15 | }
16 | }>('/', async (request, reply) => {
17 | const config = createConnectionConfig(request)
18 | const excludedSchemas =
19 | request.query.excluded_schemas?.split(',').map((schema) => schema.trim()) ?? []
20 | const includedSchemas =
21 | request.query.included_schemas?.split(',').map((schema) => schema.trim()) ?? []
22 | const detectOneToOneRelationships = request.query.detect_one_to_one_relationships === 'true'
23 | const postgrestVersion = request.query.postgrest_version
24 |
25 | const pgMeta: PostgresMeta = new PostgresMeta(config)
26 | const { data: generatorMeta, error: generatorMetaError } = await getGeneratorMetadata(pgMeta, {
27 | includedSchemas,
28 | excludedSchemas,
29 | })
30 | if (generatorMetaError) {
31 | request.log.error({ error: generatorMetaError, request: extractRequestForLogging(request) })
32 | reply.code(500)
33 | return { error: generatorMetaError.message }
34 | }
35 |
36 | return applyTypescriptTemplate({
37 | ...generatorMeta,
38 | detectOneToOneRelationships,
39 | postgrestVersion,
40 | })
41 | })
42 | }
43 |
--------------------------------------------------------------------------------
/src/lib/sql/policies.sql.ts:
--------------------------------------------------------------------------------
1 | import type { SQLQueryPropsWithSchemaFilterAndIdsFilter } from './common.js'
2 |
/**
 * Builds the SQL that lists row-level security policies joined to their table
 * and schema.
 *
 * - `roles` resolves to ['public'] when the policy applies to all roles
 *   (polroles = {0}); otherwise it is the sorted list of role names.
 * - `command` decodes polcmd ('r'/'a'/'w'/'d'/'*') into SELECT/INSERT/UPDATE/
 *   DELETE/ALL.
 * - `functionNameIdentifierFilter` matches against "<table>.<policy>".
 *
 * NOTE(review): filter/limit/offset fragments are interpolated directly into
 * the SQL — callers are expected to supply pre-sanitized fragments.
 */
export const POLICIES_SQL = (
  props: SQLQueryPropsWithSchemaFilterAndIdsFilter & { functionNameIdentifierFilter?: string }
) => /* SQL */ `
SELECT
  pol.oid :: int8 AS id,
  n.nspname AS schema,
  c.relname AS table,
  c.oid :: int8 AS table_id,
  pol.polname AS name,
  CASE
    WHEN pol.polpermissive THEN 'PERMISSIVE' :: text
    ELSE 'RESTRICTIVE' :: text
  END AS action,
  CASE
    WHEN pol.polroles = '{0}' :: oid [] THEN array_to_json(
      string_to_array('public' :: text, '' :: text) :: name []
    )
    ELSE array_to_json(
      ARRAY(
        SELECT
          pg_roles.rolname
        FROM
          pg_roles
        WHERE
          pg_roles.oid = ANY (pol.polroles)
        ORDER BY
          pg_roles.rolname
      )
    )
  END AS roles,
  CASE
    pol.polcmd
    WHEN 'r' :: "char" THEN 'SELECT' :: text
    WHEN 'a' :: "char" THEN 'INSERT' :: text
    WHEN 'w' :: "char" THEN 'UPDATE' :: text
    WHEN 'd' :: "char" THEN 'DELETE' :: text
    WHEN '*' :: "char" THEN 'ALL' :: text
    ELSE NULL :: text
  END AS command,
  pg_get_expr(pol.polqual, pol.polrelid) AS definition,
  pg_get_expr(pol.polwithcheck, pol.polrelid) AS check
FROM
  pg_policy pol
  JOIN pg_class c ON c.oid = pol.polrelid
  LEFT JOIN pg_namespace n ON n.oid = c.relnamespace
WHERE
  ${props.schemaFilter ? `n.nspname ${props.schemaFilter}` : 'true'}
  ${props.idsFilter ? `AND pol.oid ${props.idsFilter}` : ''}
  ${props.functionNameIdentifierFilter ? `AND (c.relname || '.' || pol.polname) ${props.functionNameIdentifierFilter}` : ''}
  ${props.limit ? `limit ${props.limit}` : ''}
  ${props.offset ? `offset ${props.offset}` : ''}
`
55 |
--------------------------------------------------------------------------------
/src/server/app.ts:
--------------------------------------------------------------------------------
1 | import './sentry.js'
2 | import * as Sentry from '@sentry/node'
3 | import cors from '@fastify/cors'
4 | import swagger from '@fastify/swagger'
5 | import { fastify, FastifyInstance, FastifyServerOptions } from 'fastify'
6 | import { PG_META_REQ_HEADER, MAX_BODY_LIMIT } from './constants.js'
7 | import routes from './routes/index.js'
8 | import { extractRequestForLogging } from './utils.js'
9 | // Pseudo package declared only for this module
10 | import pkg from '#package.json' with { type: 'json' }
11 |
/**
 * Builds the Fastify application: error/not-found handlers, Sentry wiring,
 * swagger metadata, CORS, root/health endpoints, and all API routes.
 * Plugin and handler registration order below is intentional.
 */
export const build = (opts: FastifyServerOptions = {}): FastifyInstance => {
  // Request logging is handled manually in the handlers below; request ids
  // are taken from the PG_META_REQ_HEADER header.
  const app = fastify({
    disableRequestLogging: true,
    requestIdHeader: PG_META_REQ_HEADER,
    bodyLimit: MAX_BODY_LIMIT,
    ...opts,
  })
  Sentry.setupFastifyErrorHandler(app)

  // Unhandled errors: log with redacted request info, answer 500.
  app.setErrorHandler((error, request, reply) => {
    app.log.error({ error: error.toString(), request: extractRequestForLogging(request) })
    reply.code(500).send({ error: error.message })
  })

  // Unknown routes: log and answer 404.
  app.setNotFoundHandler((request, reply) => {
    app.log.error({ error: 'Not found', request: extractRequestForLogging(request) })
    reply.code(404).send({ error: 'Not found' })
  })

  app.register(swagger, {
    openapi: {
      servers: [],
      info: {
        title: 'postgres-meta',
        description: 'A REST API to manage your Postgres database',
        version: pkg.version,
      },
    },
  })

  app.register(cors)

  // Service banner with name/version/docs link.
  app.get('/', async (_request, _reply) => {
    return {
      status: 200,
      name: pkg.name,
      version: pkg.version,
      documentation: 'https://github.com/supabase/postgres-meta',
    }
  })

  // Liveness probe; does not touch the database.
  app.get('/health', async (_request, _reply) => {
    return { date: new Date() }
  })

  app.register(routes)

  return app
}
61 |
--------------------------------------------------------------------------------
/test/lib/secrets.ts:
--------------------------------------------------------------------------------
1 | import { readFile } from 'node:fs/promises'
2 | import { afterEach, beforeEach, describe, expect, test, vi } from 'vitest'
3 | import { getSecret } from '../../src/lib/secrets'
4 |
5 | vi.mock('node:fs/promises', async (): Promise => {
6 | const originalModule =
7 | await vi.importActual('node:fs/promises')
8 | const readFile = vi.fn()
9 | return { ...originalModule, readFile }
10 | })
11 |
12 | describe('getSecret', () => {
13 | const value = 'dummy'
14 |
15 | beforeEach(() => {
16 | // Clears env var
17 | vi.resetModules()
18 | })
19 |
20 | afterEach(() => {
21 | delete process.env.SECRET
22 | delete process.env.SECRET_FILE
23 | })
24 |
25 | test('loads from env', async () => {
26 | process.env.SECRET = value
27 | const res = await getSecret('SECRET')
28 | expect(res).toBe(value)
29 | })
30 |
31 | test('loads from file', async () => {
32 | process.env.SECRET_FILE = '/run/secrets/db_password'
33 | vi.mocked(readFile).mockResolvedValueOnce(value)
34 | const res = await getSecret('SECRET')
35 | expect(res).toBe(value)
36 | })
37 |
38 | test('defaults to empty string', async () => {
39 | expect(await getSecret('')).toBe('')
40 | expect(await getSecret('SECRET')).toBe('')
41 | })
42 |
43 | test('default on file not found', async () => {
44 | process.env.SECRET_FILE = '/run/secrets/db_password'
45 | const e: NodeJS.ErrnoException = new Error('no such file or directory')
46 | e.code = 'ENOENT'
47 | vi.mocked(readFile).mockRejectedValueOnce(e)
48 | const res = await getSecret('SECRET')
49 | expect(res).toBe('')
50 | })
51 |
52 | test('throws on permission denied', async () => {
53 | process.env.SECRET_FILE = '/run/secrets/db_password'
54 | const e: NodeJS.ErrnoException = new Error('permission denied')
55 | e.code = 'EACCES'
56 | vi.mocked(readFile).mockRejectedValueOnce(e)
57 | await expect(getSecret('SECRET')).rejects.toThrow()
58 | })
59 | })
60 |
--------------------------------------------------------------------------------
/src/lib/PostgresMetaIndexes.ts:
--------------------------------------------------------------------------------
1 | import { DEFAULT_SYSTEM_SCHEMAS } from './constants.js'
2 | import { filterByList, filterByValue } from './helpers.js'
3 | import { PostgresMetaResult, PostgresIndex } from './types.js'
4 | import { INDEXES_SQL } from './sql/indexes.sql.js'
5 |
6 | export default class PostgresMetaIndexes {
7 | query: (sql: string) => Promise>
8 |
9 | constructor(query: (sql: string) => Promise>) {
10 | this.query = query
11 | }
12 |
13 | async list({
14 | includeSystemSchemas = false,
15 | includedSchemas,
16 | excludedSchemas,
17 | limit,
18 | offset,
19 | }: {
20 | includeSystemSchemas?: boolean
21 | includedSchemas?: string[]
22 | excludedSchemas?: string[]
23 | limit?: number
24 | offset?: number
25 | } = {}): Promise> {
26 | const schemaFilter = filterByList(
27 | includedSchemas,
28 | excludedSchemas,
29 | !includeSystemSchemas ? DEFAULT_SYSTEM_SCHEMAS : undefined
30 | )
31 | const sql = INDEXES_SQL({ schemaFilter, limit, offset })
32 | return await this.query(sql)
33 | }
34 |
35 | async retrieve({ id }: { id: number }): Promise>
36 | async retrieve({
37 | name,
38 | schema,
39 | args,
40 | }: {
41 | name: string
42 | schema: string
43 | args: string[]
44 | }): Promise>
45 | async retrieve({
46 | id,
47 | }: {
48 | id?: number
49 | args?: string[]
50 | }): Promise> {
51 | if (id) {
52 | const idsFilter = filterByValue([id])
53 | const sql = INDEXES_SQL({ idsFilter })
54 | const { data, error } = await this.query(sql)
55 | if (error) {
56 | return { data, error }
57 | } else if (data.length === 0) {
58 | return { data: null, error: { message: `Cannot find a index with ID ${id}` } }
59 | } else {
60 | return { data: data[0], error }
61 | }
62 | } else {
63 | return { data: null, error: { message: 'Invalid parameters on function retrieve' } }
64 | }
65 | }
66 | }
67 |
--------------------------------------------------------------------------------
/.github/workflows/validate-python-types.yml:
--------------------------------------------------------------------------------
# Workflow: generate Python (pydantic) types against a live test database and
# verify the generated module both imports at runtime and passes strict mypy.
name: Validate Python Type Generation

on:
  push:
    branches: [master]
  pull_request:
    branches: [master]

jobs:
  validate-python-types:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'
          cache: 'npm'

      - name: Install dependencies
        run: npm ci

      - name: Build project
        run: npm run build

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.11'

      - name: Install Python dependencies
        run: |
          pip install pydantic mypy

      # Bring up the Postgres test database defined in test/db.
      - name: Start test database
        working-directory: test/db
        run: |
          docker compose up -d --wait

      - name: Wait for database to be ready
        run: |
          # Install PostgreSQL client for health check
          sudo apt-get update && sudo apt-get install -y postgresql-client
          until pg_isready -h localhost -p 5432 -U postgres; do
            echo "Waiting for database..."
            sleep 1
          done
          echo "Database is ready!"

      - name: Generate Python types
        id: generate-types
        run: |
          node --loader ts-node/esm scripts/generate-python-types-test.ts > generated_types.py
          echo "Generated Python types (first 30 lines):"
          head -30 generated_types.py

      - name: Validate Python types runtime
        run: |
          python -c "import generated_types; print('✓ Generated Python types are valid and can be imported')"

      - name: Validate Python types with mypy
        run: |
          mypy generated_types.py --strict

      # Always tear the database down, even when validation failed.
      - name: Cleanup
        if: always()
        working-directory: test/db
        run: docker compose down
72 |
--------------------------------------------------------------------------------
/src/server/routes/indexes.ts:
--------------------------------------------------------------------------------
1 | import { FastifyInstance } from 'fastify'
2 | import { PostgresMeta } from '../../lib/index.js'
3 | import { createConnectionConfig, extractRequestForLogging } from '../utils.js'
4 |
5 | export default async (fastify: FastifyInstance) => {
6 | fastify.get<{
7 | Headers: { pg: string; 'x-pg-application-name'?: string }
8 | Querystring: {
9 | include_system_schemas?: string
10 | // Note: this only supports comma separated values (e.g., ".../functions?included_schemas=public,core")
11 | included_schemas?: string
12 | excluded_schemas?: string
13 | limit?: number
14 | offset?: number
15 | }
16 | }>('/', async (request, reply) => {
17 | const config = createConnectionConfig(request)
18 | const includeSystemSchemas = request.query.include_system_schemas === 'true'
19 | const includedSchemas = request.query.included_schemas?.split(',')
20 | const excludedSchemas = request.query.excluded_schemas?.split(',')
21 | const limit = request.query.limit
22 | const offset = request.query.offset
23 |
24 | const pgMeta = new PostgresMeta(config)
25 | const { data, error } = await pgMeta.indexes.list({
26 | includeSystemSchemas,
27 | includedSchemas,
28 | excludedSchemas,
29 | limit,
30 | offset,
31 | })
32 | await pgMeta.end()
33 | if (error) {
34 | request.log.error({ error, request: extractRequestForLogging(request) })
35 | reply.code(500)
36 | return { error: error.message }
37 | }
38 |
39 | return data
40 | })
41 |
42 | fastify.get<{
43 | Headers: { pg: string; 'x-pg-application-name'?: string }
44 | Params: {
45 | id: string
46 | }
47 | }>('/:id(\\d+)', async (request, reply) => {
48 | const config = createConnectionConfig(request)
49 | const id = Number(request.params.id)
50 |
51 | const pgMeta = new PostgresMeta(config)
52 | const { data, error } = await pgMeta.indexes.retrieve({ id })
53 | await pgMeta.end()
54 | if (error) {
55 | request.log.error({ error, request: extractRequestForLogging(request) })
56 | reply.code(404)
57 | return { error: error.message }
58 | }
59 |
60 | return data
61 | })
62 | }
63 |
--------------------------------------------------------------------------------
/src/lib/sql/types.sql.ts:
--------------------------------------------------------------------------------
1 | import type { SQLQueryPropsWithSchemaFilterAndIdsFilter } from './common.js'
2 |
/**
 * Builds the SQL used to list Postgres types (enums, composites, etc.).
 *
 * Options:
 * - includeTableTypes: also include row types of tables/views/matviews
 *   (relkinds r/v/m/p), not just composite types ('c').
 * - includeArrayTypes: when false (default), implicit array types
 *   (typelem/typarray pairs) are excluded.
 * - schemaFilter / idsFilter / limit / offset: caller-built SQL fragments
 *   interpolated verbatim into the query.
 */
export const TYPES_SQL = (
  props: SQLQueryPropsWithSchemaFilterAndIdsFilter & {
    includeTableTypes?: boolean
    includeArrayTypes?: boolean
  }
) => /* SQL */ `
select
  t.oid::int8 as id,
  t.typname as name,
  n.nspname as schema,
  format_type (t.oid, null) as format,
  coalesce(t_enums.enums, '[]') as enums,
  coalesce(t_attributes.attributes, '[]') as attributes,
  obj_description (t.oid, 'pg_type') as comment,
  nullif(t.typrelid::int8, 0) as type_relation_id
from
  pg_type t
  left join pg_namespace n on n.oid = t.typnamespace
  left join (
    select
      enumtypid,
      jsonb_agg(enumlabel order by enumsortorder) as enums
    from
      pg_enum
    group by
      enumtypid
  ) as t_enums on t_enums.enumtypid = t.oid
  left join (
    select
      oid,
      jsonb_agg(
        jsonb_build_object('name', a.attname, 'type_id', a.atttypid::int8)
        order by a.attnum asc
      ) as attributes
    from
      pg_class c
      join pg_attribute a on a.attrelid = c.oid
    where
      c.relkind = 'c' and not a.attisdropped
    group by
      c.oid
  ) as t_attributes on t_attributes.oid = t.typrelid
where
  (
    t.typrelid = 0
    or (
      select
        c.relkind ${props.includeTableTypes ? `in ('c', 'r', 'v', 'm', 'p')` : `= 'c'`}
      from
        pg_class c
      where
        c.oid = t.typrelid
    )
  )
  ${
    !props.includeArrayTypes
      ? `and not exists (
          select
          from
            pg_type el
          where
            el.oid = t.typelem
            and el.typarray = t.oid
        )`
      : ''
  }
  ${props.schemaFilter ? `and n.nspname ${props.schemaFilter}` : ''}
  ${props.idsFilter ? `and t.oid ${props.idsFilter}` : ''}
  ${props.limit ? `limit ${props.limit}` : ''}
  ${props.offset ? `offset ${props.offset}` : ''}
`
74 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 |
2 | # Logs
3 | logs
4 | *.log
5 | npm-debug.log*
6 | yarn-debug.log*
7 | yarn-error.log*
8 | lerna-debug.log*
9 |
10 | # Diagnostic reports (https://nodejs.org/api/report.html)
11 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
12 |
13 | # Runtime data
14 | pids
15 | *.pid
16 | *.seed
17 | *.pid.lock
18 |
19 | # Directory for instrumented libs generated by jscoverage/JSCover
20 | lib-cov
21 |
22 | # Coverage directory used by tools like istanbul
23 | coverage
24 | *.lcov
25 |
26 | # nyc test coverage
27 | .nyc_output
28 |
29 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
30 | .grunt
31 |
32 | # Bower dependency directory (https://bower.io/)
33 | bower_components
34 |
35 | # node-waf configuration
36 | .lock-wscript
37 |
38 | # Compiled binary addons (https://nodejs.org/api/addons.html)
39 | build/Release
40 |
41 | # Dependency directories
42 | node_modules/
43 | jspm_packages/
44 |
45 | # TypeScript v1 declaration files
46 | typings/
47 |
48 | # TypeScript cache
49 | *.tsbuildinfo
50 |
51 | # Optional npm cache directory
52 | .npm
53 |
54 | # Optional eslint cache
55 | .eslintcache
56 |
57 | # Microbundle cache
58 | .rpt2_cache/
59 | .rts2_cache_cjs/
60 | .rts2_cache_es/
61 | .rts2_cache_umd/
62 |
63 | # Optional REPL history
64 | .node_repl_history
65 |
66 | # Output of 'npm pack'
67 | *.tgz
68 |
69 | # Yarn Integrity file
70 | .yarn-integrity
71 |
72 | # dotenv environment variables file
73 | .env
74 | .env.test
75 |
76 | # sentry cli config
77 | .sentryclirc
78 |
79 | # parcel-bundler cache (https://parceljs.org/)
80 | .cache
81 |
82 | # Next.js build output
83 | .next
84 |
85 | # Nuxt.js build / generate output
86 | .nuxt
87 |
88 | # Gatsby files
89 | .cache/
# Uncomment the "public" line if your project uses Gatsby and *not* Next.js
91 | # https://nextjs.org/blog/next-9-1#public-directory-support
92 | # public
93 |
94 | # vuepress build output
95 | .vuepress/dist
96 |
97 | # Serverless directories
98 | .serverless/
99 |
100 | # FuseBox cache
101 | .fusebox/
102 |
103 | # DynamoDB Local files
104 | .dynamodb/
105 |
106 | # TernJS port file
107 | .tern-port
108 |
109 | # TypeScript output files
110 | dist/
111 |
112 | # Binaries
113 | bin/
114 |
115 | # postgres-meta
116 | openapi.json
117 |
--------------------------------------------------------------------------------
/src/lib/sql/triggers.sql.ts:
--------------------------------------------------------------------------------
1 | import type { SQLQueryPropsWithSchemaFilterAndIdsFilter } from './common.js'
2 |
/**
 * Builds the SQL used to list triggers, joining pg_trigger with
 * information_schema.triggers (one row per event is aggregated into the
 * `events` array) and resolving the trigger function via pg_proc.
 *
 * Filters (schemaFilter, tableNameFilter, nameFilter, idsFilter) are
 * caller-built SQL fragments interpolated verbatim; limit/offset paginate.
 */
export const TRIGGERS_SQL = (
  props: SQLQueryPropsWithSchemaFilterAndIdsFilter & {
    tableNameFilter?: string
    nameFilter?: string
  }
) => /* SQL */ `
SELECT
  pg_t.oid AS id,
  pg_t.tgrelid AS table_id,
  CASE
    WHEN pg_t.tgenabled = 'D' THEN 'DISABLED'
    WHEN pg_t.tgenabled = 'O' THEN 'ORIGIN'
    WHEN pg_t.tgenabled = 'R' THEN 'REPLICA'
    WHEN pg_t.tgenabled = 'A' THEN 'ALWAYS'
  END AS enabled_mode,
  (
    STRING_TO_ARRAY(
      ENCODE(pg_t.tgargs, 'escape'), '\\000'
    )
  )[:pg_t.tgnargs] AS function_args,
  is_t.trigger_name AS name,
  is_t.event_object_table AS table,
  is_t.event_object_schema AS schema,
  is_t.action_condition AS condition,
  is_t.action_orientation AS orientation,
  is_t.action_timing AS activation,
  ARRAY_AGG(is_t.event_manipulation)::text[] AS events,
  pg_p.proname AS function_name,
  pg_n.nspname AS function_schema
FROM
  pg_trigger AS pg_t
JOIN
  pg_class AS pg_c
  ON pg_t.tgrelid = pg_c.oid
JOIN pg_namespace AS table_ns
  ON pg_c.relnamespace = table_ns.oid
JOIN information_schema.triggers AS is_t
  ON is_t.trigger_name = pg_t.tgname
  AND pg_c.relname = is_t.event_object_table
  AND pg_c.relnamespace = (quote_ident(is_t.event_object_schema))::regnamespace
JOIN pg_proc AS pg_p
  ON pg_t.tgfoid = pg_p.oid
JOIN pg_namespace AS pg_n
  ON pg_p.pronamespace = pg_n.oid
WHERE
  ${props.schemaFilter ? `table_ns.nspname ${props.schemaFilter}` : 'true'}
  ${props.tableNameFilter ? `AND pg_c.relname ${props.tableNameFilter}` : ''}
  ${props.nameFilter ? `AND is_t.trigger_name ${props.nameFilter}` : ''}
  ${props.idsFilter ? `AND pg_t.oid ${props.idsFilter}` : ''}
GROUP BY
  pg_t.oid,
  pg_t.tgrelid,
  pg_t.tgenabled,
  pg_t.tgargs,
  pg_t.tgnargs,
  is_t.trigger_name,
  is_t.event_object_table,
  is_t.event_object_schema,
  is_t.action_condition,
  is_t.action_orientation,
  is_t.action_timing,
  pg_p.proname,
  pg_n.nspname
${props.limit ? `limit ${props.limit}` : ''}
${props.offset ? `offset ${props.offset}` : ''}
`
69 |
--------------------------------------------------------------------------------
/src/lib/sql/indexes.sql.ts:
--------------------------------------------------------------------------------
1 | import type { SQLQueryPropsWithSchemaFilterAndIdsFilter } from './common.js'
2 |
3 | export const INDEXES_SQL = (props: SQLQueryPropsWithSchemaFilterAndIdsFilter) => /* SQL */ `
4 | SELECT
5 | idx.indexrelid::int8 AS id,
6 | idx.indrelid::int8 AS table_id,
7 | n.nspname AS schema,
8 | idx.indnatts AS number_of_attributes,
9 | idx.indnkeyatts AS number_of_key_attributes,
10 | idx.indisunique AS is_unique,
11 | idx.indisprimary AS is_primary,
12 | idx.indisexclusion AS is_exclusion,
13 | idx.indimmediate AS is_immediate,
14 | idx.indisclustered AS is_clustered,
15 | idx.indisvalid AS is_valid,
16 | idx.indcheckxmin AS check_xmin,
17 | idx.indisready AS is_ready,
18 | idx.indislive AS is_live,
19 | idx.indisreplident AS is_replica_identity,
20 | idx.indkey::smallint[] AS key_attributes,
21 | idx.indcollation::integer[] AS collation,
22 | idx.indclass::integer[] AS class,
23 | idx.indoption::smallint[] AS options,
24 | idx.indpred AS index_predicate,
25 | obj_description(idx.indexrelid, 'pg_class') AS comment,
26 | ix.indexdef as index_definition,
27 | am.amname AS access_method,
28 | jsonb_agg(
29 | jsonb_build_object(
30 | 'attribute_number', a.attnum,
31 | 'attribute_name', a.attname,
32 | 'data_type', format_type(a.atttypid, a.atttypmod)
33 | )
34 | ORDER BY a.attnum
35 | ) AS index_attributes
36 | FROM
37 | pg_index idx
38 | JOIN pg_class c ON c.oid = idx.indexrelid
39 | JOIN pg_namespace n ON c.relnamespace = n.oid
40 | JOIN pg_am am ON c.relam = am.oid
41 | JOIN pg_attribute a ON a.attrelid = c.oid AND a.attnum = ANY(idx.indkey)
42 | JOIN pg_indexes ix ON c.relname = ix.indexname
43 | WHERE
44 | ${props.schemaFilter ? `n.nspname ${props.schemaFilter}` : 'true'}
45 | ${props.idsFilter ? `AND idx.indexrelid ${props.idsFilter}` : ''}
46 | GROUP BY
47 | idx.indexrelid, idx.indrelid, n.nspname, idx.indnatts, idx.indnkeyatts, idx.indisunique, idx.indisprimary, idx.indisexclusion, idx.indimmediate, idx.indisclustered, idx.indisvalid, idx.indcheckxmin, idx.indisready, idx.indislive, idx.indisreplident, idx.indkey, idx.indcollation, idx.indclass, idx.indoption, idx.indexprs, idx.indpred, ix.indexdef, am.amname
48 | ${props.limit ? `limit ${props.limit}` : ''}
49 | ${props.offset ? `offset ${props.offset}` : ''}
50 | `
51 |
--------------------------------------------------------------------------------
/src/lib/sql/table_relationships.sql.ts:
--------------------------------------------------------------------------------
1 | import type { SQLQueryPropsWithSchemaFilter } from './common.js'
2 |
/**
 * Builds the SQL used to list foreign-key relationships between tables,
 * including whether each FK is one-to-one (its columns are covered by a
 * primary-key or unique constraint on the referencing table).
 *
 * `schemaFilter` is a caller-built SQL fragment interpolated verbatim; it is
 * applied both to the referencing schema and inside the CTE for performance.
 */
export const TABLE_RELATIONSHIPS_SQL = (props: SQLQueryPropsWithSchemaFilter) => /* SQL */ `
-- Adapted from
-- https://github.com/PostgREST/postgrest/blob/f9f0f79fa914ac00c11fbf7f4c558e14821e67e2/src/PostgREST/SchemaCache.hs#L722
WITH
pks_uniques_cols AS (
  SELECT
    connamespace,
    conrelid,
    jsonb_agg(column_info.cols) as cols
  FROM pg_constraint
  JOIN lateral (
    SELECT array_agg(cols.attname order by cols.attnum) as cols
    FROM ( select unnest(conkey) as col) _
    JOIN pg_attribute cols on cols.attrelid = conrelid and cols.attnum = col
  ) column_info ON TRUE
  WHERE
    contype IN ('p', 'u') and
    connamespace::regnamespace::text <> 'pg_catalog'
    ${props.schemaFilter ? `and connamespace::regnamespace::text ${props.schemaFilter}` : ''}
  GROUP BY connamespace, conrelid
)
SELECT
  traint.conname AS foreign_key_name,
  ns1.nspname AS schema,
  tab.relname AS relation,
  column_info.cols AS columns,
  ns2.nspname AS referenced_schema,
  other.relname AS referenced_relation,
  column_info.refs AS referenced_columns,
  (column_info.cols IN (SELECT * FROM jsonb_array_elements(pks_uqs.cols))) AS is_one_to_one
FROM pg_constraint traint
JOIN LATERAL (
  SELECT
    jsonb_agg(cols.attname order by ord) AS cols,
    jsonb_agg(refs.attname order by ord) AS refs
  FROM unnest(traint.conkey, traint.confkey) WITH ORDINALITY AS _(col, ref, ord)
  JOIN pg_attribute cols ON cols.attrelid = traint.conrelid AND cols.attnum = col
  JOIN pg_attribute refs ON refs.attrelid = traint.confrelid AND refs.attnum = ref
  WHERE ${props.schemaFilter ? `traint.connamespace::regnamespace::text ${props.schemaFilter}` : 'true'}
) AS column_info ON TRUE
JOIN pg_namespace ns1 ON ns1.oid = traint.connamespace
JOIN pg_class tab ON tab.oid = traint.conrelid
JOIN pg_class other ON other.oid = traint.confrelid
JOIN pg_namespace ns2 ON ns2.oid = other.relnamespace
LEFT JOIN pks_uniques_cols pks_uqs ON pks_uqs.connamespace = traint.connamespace AND pks_uqs.conrelid = traint.conrelid
WHERE traint.contype = 'f'
AND traint.conparentid = 0
${props.schemaFilter ? `and ns1.nspname ${props.schemaFilter}` : ''}
`
52 |
--------------------------------------------------------------------------------
/test/lib/schemas.ts:
--------------------------------------------------------------------------------
1 | import { expect, test } from 'vitest'
2 | import { pgMeta } from './utils'
3 |
4 | test('list with system schemas', async () => {
5 | const res = await pgMeta.schemas.list({ includeSystemSchemas: true })
6 | expect(res.data?.find(({ name }) => name === 'pg_catalog')).toMatchInlineSnapshot(
7 | { id: expect.any(Number) },
8 | `
9 | {
10 | "id": Any,
11 | "name": "pg_catalog",
12 | "owner": "postgres",
13 | }
14 | `
15 | )
16 | })
17 |
18 | test('list without system schemas', async () => {
19 | const res = await pgMeta.schemas.list({ includeSystemSchemas: false })
20 | expect(res.data?.find(({ name }) => name === 'pg_catalog')).toMatchInlineSnapshot(`undefined`)
21 | expect(res.data?.find(({ name }) => name === 'public')).toMatchInlineSnapshot(
22 | { id: expect.any(Number) },
23 | `
24 | {
25 | "id": Any,
26 | "name": "public",
27 | "owner": "postgres",
28 | }
29 | `
30 | )
31 | })
32 |
33 | test('retrieve, create, update, delete', async () => {
34 | let res = await pgMeta.schemas.create({ name: 's' })
35 | expect(res).toMatchInlineSnapshot(
36 | { data: { id: expect.any(Number) } },
37 | `
38 | {
39 | "data": {
40 | "id": Any,
41 | "name": "s",
42 | "owner": "postgres",
43 | },
44 | "error": null,
45 | }
46 | `
47 | )
48 | res = await pgMeta.schemas.retrieve({ id: res.data!.id })
49 | expect(res).toMatchInlineSnapshot(
50 | { data: { id: expect.any(Number) } },
51 | `
52 | {
53 | "data": {
54 | "id": Any,
55 | "name": "s",
56 | "owner": "postgres",
57 | },
58 | "error": null,
59 | }
60 | `
61 | )
62 | res = await pgMeta.schemas.update(res.data!.id, { name: 'ss', owner: 'postgres' })
63 | expect(res).toMatchInlineSnapshot(
64 | { data: { id: expect.any(Number) } },
65 | `
66 | {
67 | "data": {
68 | "id": Any,
69 | "name": "ss",
70 | "owner": "postgres",
71 | },
72 | "error": null,
73 | }
74 | `
75 | )
76 | res = await pgMeta.schemas.remove(res.data!.id)
77 | expect(res).toMatchInlineSnapshot(
78 | { data: { id: expect.any(Number) } },
79 | `
80 | {
81 | "data": {
82 | "id": Any,
83 | "name": "ss",
84 | "owner": "postgres",
85 | },
86 | "error": null,
87 | }
88 | `
89 | )
90 | res = await pgMeta.schemas.retrieve({ id: res.data!.id })
91 | expect(res).toMatchObject({
92 | data: null,
93 | error: {
94 | message: expect.stringMatching(/^Cannot find a schema with ID \d+$/),
95 | },
96 | })
97 | })
98 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
# Main CI: test + coverage, prettier formatting check, and a docker build
# with a container health-check smoke test.
name: CI

on:
  pull_request:
  push:
    branches:
      - master
  workflow_dispatch:

permissions:
  contents: read

# Cancel old builds on new commit for same workflow + branch/PR
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  test:
    name: Test
    runs-on: ubuntu-22.04
    steps:
      - uses: actions/checkout@v5

      - uses: actions/setup-node@v4
        with:
          node-version-file: '.nvmrc'

      - run: |
          npm clean-install
          npm run check
          npm run test

      # Upload the lcov coverage produced by `npm run test` to Coveralls.
      - uses: coverallsapp/github-action@v2
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          path-to-lcov: coverage/lcov.info

  prettier-check:
    name: Prettier check
    runs-on: ubuntu-22.04
    steps:
      - uses: actions/checkout@v5

      - name: Setup node
        uses: actions/setup-node@v4
        with:
          node-version-file: '.nvmrc'

      # Installing all dependencies takes up to three minutes, hacking around to only installing prettier+deps
      - name: Download dependencies
        run: |
          rm package.json
          rm package-lock.json
          npm i prettier@3 prettier-plugin-sql@0.17.0
      - name: Run prettier
        run: |-
          npx prettier -c '{src,test}/**/*.ts'

  docker:
    name: Build with docker
    runs-on: ubuntu-22.04
    permissions:
      contents: read
      packages: write
    steps:
      - uses: actions/checkout@v5
        name: Checkout Repo

      - uses: docker/setup-buildx-action@v3
        name: Set up Docker Buildx

      - uses: docker/build-push-action@v5
        with:
          push: false
          tags: pg-meta:test
          load: true
          cache-from: type=gha
          cache-to: type=gha,mode=max

      # Smoke test: the container must report Health "starting" right after
      # launch and become "healthy" within ~10 seconds.
      - name: Check Health status
        run: |
          docker run -d --name pg-meta-test pg-meta:test
          state=$(docker inspect -f '{{ .State.Health.Status}}' pg-meta-test)
          if [ $state != "starting" ]; then
            exit 1
          fi
          sleep 10
          state=$(docker inspect -f '{{ .State.Health.Status}}' pg-meta-test)
          docker stop pg-meta-test
          if [ $state == "healthy" ]; then
            exit 0
          else
            exit 1
          fi
96 |
97 |
98 |
99 |
--------------------------------------------------------------------------------
/src/server/routes/foreign-tables.ts:
--------------------------------------------------------------------------------
1 | import { FastifyPluginAsyncTypebox } from '@fastify/type-provider-typebox'
2 | import { Type } from '@sinclair/typebox'
3 | import { PostgresMeta } from '../../lib/index.js'
4 | import { postgresForeignTableSchema } from '../../lib/types.js'
5 | import { createConnectionConfig } from '../utils.js'
6 | import { extractRequestForLogging } from '../utils.js'
7 |
8 | const route: FastifyPluginAsyncTypebox = async (fastify) => {
9 | fastify.get(
10 | '/',
11 | {
12 | schema: {
13 | headers: Type.Object({
14 | pg: Type.String(),
15 | 'x-pg-application-name': Type.Optional(Type.String()),
16 | }),
17 | querystring: Type.Object({
18 | limit: Type.Optional(Type.Integer()),
19 | offset: Type.Optional(Type.Integer()),
20 | include_columns: Type.Optional(Type.Boolean()),
21 | }),
22 | response: {
23 | 200: Type.Array(postgresForeignTableSchema),
24 | 500: Type.Object({
25 | error: Type.String(),
26 | }),
27 | },
28 | },
29 | },
30 | async (request, reply) => {
31 | const config = createConnectionConfig(request)
32 | const limit = request.query.limit
33 | const offset = request.query.offset
34 | const includeColumns = request.query.include_columns
35 |
36 | const pgMeta = new PostgresMeta(config)
37 | const { data, error } = await pgMeta.foreignTables.list({ limit, offset, includeColumns })
38 | await pgMeta.end()
39 | if (error) {
40 | request.log.error({ error, request: extractRequestForLogging(request) })
41 | reply.code(500)
42 | return { error: error.message }
43 | }
44 |
45 | return data
46 | }
47 | )
48 |
49 | fastify.get(
50 | '/:id(\\d+)',
51 | {
52 | schema: {
53 | headers: Type.Object({
54 | pg: Type.String(),
55 | 'x-pg-application-name': Type.Optional(Type.String()),
56 | }),
57 | params: Type.Object({
58 | id: Type.Integer(),
59 | }),
60 | response: {
61 | 200: postgresForeignTableSchema,
62 | 404: Type.Object({
63 | error: Type.String(),
64 | }),
65 | },
66 | },
67 | },
68 | async (request, reply) => {
69 | const config = createConnectionConfig(request)
70 | const id = request.params.id
71 |
72 | const pgMeta = new PostgresMeta(config)
73 | const { data, error } = await pgMeta.foreignTables.retrieve({ id })
74 | await pgMeta.end()
75 | if (error) {
76 | request.log.error({ error, request: extractRequestForLogging(request) })
77 | reply.code(404)
78 | return { error: error.message }
79 | }
80 |
81 | return data
82 | }
83 | )
84 | }
85 | export default route
86 |
--------------------------------------------------------------------------------
/src/lib/sql/table_privileges.sql.ts:
--------------------------------------------------------------------------------
1 | import type { SQLQueryPropsWithSchemaFilterAndIdsFilter } from './common.js'
2 |
/**
 * Builds the SQL used to list relation-level privileges (tables, views,
 * matviews, foreign and partitioned tables), with each relation's ACL
 * exploded into a JSON array of {grantor, grantee, privilege_type,
 * is_grantable} objects.
 *
 * Filters (schemaFilter, idsFilter, nameIdentifierFilter) are caller-built
 * SQL fragments interpolated verbatim; limit/offset paginate.
 */
export const TABLE_PRIVILEGES_SQL = (
  props: SQLQueryPropsWithSchemaFilterAndIdsFilter & {
    nameIdentifierFilter?: string
  }
) => /* SQL */ `
-- Despite the name \`table_privileges\`, this includes other kinds of relations:
-- views, matviews, etc. "Relation privileges" just doesn't roll off the tongue.
--
-- For each relation, get its relacl in a jsonb format,
-- e.g.
--
-- '{postgres=arwdDxt/postgres}'
--
-- becomes
--
-- [
--   {
--     "grantee": "postgres",
--     "grantor": "postgres",
--     "is_grantable": false,
--     "privilege_type": "INSERT"
--   },
--   ...
-- ]
select
  c.oid as relation_id,
  nc.nspname as schema,
  c.relname as name,
  case
    when c.relkind = 'r' then 'table'
    when c.relkind = 'v' then 'view'
    when c.relkind = 'm' then 'materialized_view'
    when c.relkind = 'f' then 'foreign_table'
    when c.relkind = 'p' then 'partitioned_table'
  end as kind,
  coalesce(
    jsonb_agg(
      jsonb_build_object(
        'grantor', grantor.rolname,
        'grantee', grantee.rolname,
        'privilege_type', _priv.privilege_type,
        'is_grantable', _priv.is_grantable
      )
    ) filter (where _priv is not null),
    '[]'
  ) as privileges
from pg_class c
join pg_namespace as nc
  on nc.oid = c.relnamespace
left join lateral (
  select grantor, grantee, privilege_type, is_grantable
  from aclexplode(coalesce(c.relacl, acldefault('r', c.relowner)))
) as _priv on true
left join pg_roles as grantor
  on grantor.oid = _priv.grantor
left join (
  select
    pg_roles.oid,
    pg_roles.rolname
  from pg_roles
  union all
  select
    (0)::oid as oid, 'PUBLIC'
) as grantee (oid, rolname)
  on grantee.oid = _priv.grantee
where c.relkind in ('r', 'v', 'm', 'f', 'p')
  ${props.schemaFilter ? `and nc.nspname ${props.schemaFilter}` : ''}
  ${props.idsFilter ? `and c.oid ${props.idsFilter}` : ''}
  ${props.nameIdentifierFilter ? `and (nc.nspname || '.' || c.relname) ${props.nameIdentifierFilter}` : ''}
  and not pg_is_other_temp_schema(c.relnamespace)
  and (
    pg_has_role(c.relowner, 'USAGE')
    or has_table_privilege(
      c.oid,
      'SELECT, INSERT, UPDATE, DELETE, TRUNCATE, REFERENCES, TRIGGER, MAINTAIN'
    )
    or has_any_column_privilege(c.oid, 'SELECT, INSERT, UPDATE, REFERENCES')
  )
group by
  c.oid,
  nc.nspname,
  c.relname,
  c.relkind
${props.limit ? `limit ${props.limit}` : ''}
${props.offset ? `offset ${props.offset}` : ''}
`
89 |
--------------------------------------------------------------------------------
/test/server/query-timeout.ts:
--------------------------------------------------------------------------------
1 | import { expect, test, describe } from 'vitest'
2 | import { app } from './utils'
3 | import { pgMeta } from '../lib/utils'
4 |
5 | const TIMEOUT = (Number(process.env.PG_QUERY_TIMEOUT_SECS) ?? 10) + 2
6 | const STATEMENT_TIMEOUT = (Number(process.env.PG_QUERY_TIMEOUT_SECS) ?? 10) + 1
7 |
// Exercises the /query read-timeout behavior and whether the server-side
// backend is cleaned up afterwards (it only is when statementTimeoutSecs
// is provided, so Postgres itself cancels the sleeping statement).
describe('test query timeout', () => {
  test(
    `query timeout after ${TIMEOUT}s and connection cleanup`,
    async () => {
      const query = `SELECT pg_sleep(${TIMEOUT + 10});`
      // Execute a query that sleeps longer than the configured read timeout
      const res = await app.inject({
        method: 'POST',
        path: '/query',
        query: `statementTimeoutSecs=${STATEMENT_TIMEOUT}`,
        payload: {
          query,
        },
      })

      // Check that we get the proper timeout error response
      expect(res.statusCode).toBe(408) // Request Timeout
      expect(res.json()).toMatchObject({
        error: expect.stringContaining('Query read timeout'),
      })
      // wait one second for the statement timeout to take effect
      await new Promise((resolve) => setTimeout(resolve, 1000))

      // Verify that the connection has been cleaned up by checking active connections
      const connectionsRes = await pgMeta.query(`
        SELECT * FROM pg_stat_activity where application_name = 'postgres-meta 0.0.0-automated' and query ILIKE '%${query}%';
      `)

      // Should have no active connections except for our current query
      expect(connectionsRes.data).toHaveLength(0)
    },
    TIMEOUT * 1000
  )

  test(
    'query without timeout parameter should not have timeout',
    async () => {
      const query = `SELECT pg_sleep(${TIMEOUT + 10});`
      // Execute the same long-running query, but without statementTimeoutSecs
      const res = await app.inject({
        method: 'POST',
        path: '/query',
        payload: {
          query,
        },
      })

      // Check that we get the proper timeout error response
      expect(res.statusCode).toBe(408) // Request Timeout
      expect(res.json()).toMatchObject({
        error: expect.stringContaining('Query read timeout'),
      })
      // wait one second
      await new Promise((resolve) => setTimeout(resolve, 1000))

      // Verify that the connection has NOT been cleaned up, since no
      // statement timeout was set on the server side
      const connectionsRes = await pgMeta.query(`
        SELECT * FROM pg_stat_activity where application_name = 'postgres-meta 0.0.0-automated' and query ILIKE '%${query}%';
      `)

      // The sleeping backend is still present because nothing cancelled it
      expect(connectionsRes.data).toHaveLength(1)
    },
    TIMEOUT * 1000
  )
})
74 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | name: Release
2 |
3 | on:
4 | push:
5 | branches:
6 | - master
7 | workflow_dispatch:
8 |
9 | jobs:
10 | semantic-release:
11 | name: Release
12 | runs-on: ubuntu-24.04
13 | outputs:
14 | new-release-published: ${{ steps.semantic-release.outputs.new_release_published }}
15 | new-release-version: ${{ steps.semantic-release.outputs.new_release_version }}
16 | permissions:
17 | contents: write
18 | id-token: write
19 | steps:
20 | - uses: actions/checkout@v5
21 |
22 | - uses: actions/setup-node@v6
23 | with:
24 | node-version-file: '.nvmrc'
25 |
26 | - name: Update npm
27 | run: npm install -g npm@latest
28 |
29 | - run: |
30 | npm clean-install
31 | npm run build
32 |
33 | - id: semantic-release
34 | uses: cycjimmy/semantic-release-action@v6
35 | with:
36 | semantic_version: 25.0.1 # version with latest npm and support for trusted publishing
37 | env:
38 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
39 |
40 | docker-hub:
41 | name: Release on Docker Hub
42 | needs:
43 | - semantic-release
44 | if: needs.semantic-release.outputs.new-release-published == 'true'
45 | runs-on: ubuntu-latest
46 | permissions:
47 | id-token: write # This is required for requesting the JWT from AWS
48 | contents: read
49 | packages: write
50 | steps:
51 | - id: meta
52 | uses: docker/metadata-action@v5
53 | with:
54 | images: |
55 | supabase/postgres-meta
56 | public.ecr.aws/supabase/postgres-meta
57 | ghcr.io/supabase/postgres-meta
58 | tags: |
59 | type=raw,value=v${{ needs.semantic-release.outputs.new-release-version }}
60 |
61 | - uses: docker/setup-qemu-action@v3
62 | with:
63 | platforms: amd64,arm64
64 | - uses: docker/setup-buildx-action@v3
65 |
66 | - name: Login to DockerHub
67 | uses: docker/login-action@v3
68 | with:
69 | username: ${{ secrets.DOCKER_USERNAME }}
70 | password: ${{ secrets.DOCKER_PASSWORD }}
71 |
72 | - name: configure aws credentials
73 | uses: aws-actions/configure-aws-credentials@v4
74 | with:
75 | role-to-assume: ${{ secrets.PROD_AWS_ROLE }}
76 | aws-region: us-east-1
77 |
78 | - name: Login to ECR
79 | uses: docker/login-action@v3
80 | with:
81 | registry: public.ecr.aws
82 |
83 | - name: Login to GHCR
84 | uses: docker/login-action@v3
85 | with:
86 | registry: ghcr.io
87 | username: ${{ github.actor }}
88 | password: ${{ secrets.GITHUB_TOKEN }}
89 |
90 | - uses: docker/build-push-action@v6
91 | with:
92 | push: true
93 | platforms: linux/amd64,linux/arm64
94 | tags: ${{ steps.meta.outputs.tags }}
95 |
--------------------------------------------------------------------------------
/src/server/routes/query.ts:
--------------------------------------------------------------------------------
1 | import { FastifyInstance, FastifyRequest } from 'fastify'
2 | import { PostgresMeta } from '../../lib/index.js'
3 | import * as Parser from '../../lib/Parser.js'
4 | import {
5 | createConnectionConfig,
6 | extractRequestForLogging,
7 | translateErrorToResponseCode,
8 | } from '../utils.js'
9 |
10 | const errorOnEmptyQuery = (request: FastifyRequest) => {
11 | if (!(request.body as any).query) {
12 | throw new Error('query not found')
13 | }
14 | }
15 |
16 | export default async (fastify: FastifyInstance) => {
17 | fastify.post<{
18 | Headers: { pg: string; 'x-pg-application-name'?: string }
19 | Body: { query: string; parameters?: unknown[] }
20 | Querystring: { statementTimeoutSecs?: number }
21 | }>('/', async (request, reply) => {
22 | const statementTimeoutSecs = request.query.statementTimeoutSecs
23 | errorOnEmptyQuery(request)
24 | const config = createConnectionConfig(request)
25 | const pgMeta = new PostgresMeta(config)
26 | const { data, error } = await pgMeta.query(request.body.query, {
27 | trackQueryInSentry: true,
28 | statementQueryTimeout: statementTimeoutSecs,
29 | parameters: request.body.parameters,
30 | })
31 | await pgMeta.end()
32 | if (error) {
33 | request.log.error({ error, request: extractRequestForLogging(request) })
34 | reply.code(translateErrorToResponseCode(error))
35 | return { error: error.formattedError ?? error.message, ...error }
36 | }
37 |
38 | return data || []
39 | })
40 |
41 | fastify.post<{
42 | Headers: { pg: string; 'x-pg-application-name'?: string }
43 | Body: { query: string }
44 | }>('/format', async (request, reply) => {
45 | errorOnEmptyQuery(request)
46 | const { data, error } = await Parser.Format(request.body.query)
47 |
48 | if (error) {
49 | request.log.error({ error, request: extractRequestForLogging(request) })
50 | reply.code(translateErrorToResponseCode(error))
51 | return { error: error.message }
52 | }
53 |
54 | return data
55 | })
56 |
57 | fastify.post<{
58 | Headers: { pg: string; 'x-pg-application-name'?: string }
59 | Body: { query: string }
60 | }>('/parse', async (request, reply) => {
61 | errorOnEmptyQuery(request)
62 | const { data, error } = Parser.Parse(request.body.query)
63 |
64 | if (error) {
65 | request.log.error({ error, request: extractRequestForLogging(request) })
66 | reply.code(translateErrorToResponseCode(error))
67 | return { error: error.message }
68 | }
69 |
70 | return data
71 | })
72 |
73 | fastify.post<{
74 | Headers: { pg: string; 'x-pg-application-name'?: string }
75 | Body: { ast: object }
76 | }>('/deparse', async (request, reply) => {
77 | const { data, error } = await Parser.Deparse(request.body.ast)
78 |
79 | if (error) {
80 | request.log.error({ error, request: extractRequestForLogging(request) })
81 | reply.code(translateErrorToResponseCode(error))
82 | return { error: error.message }
83 | }
84 |
85 | return data
86 | })
87 | }
88 |
--------------------------------------------------------------------------------
/test/server/indexes.ts:
--------------------------------------------------------------------------------
1 | import { expect, test } from 'vitest'
2 | import { PostgresIndex } from '../../src/lib/types'
3 | import { app } from './utils'
4 |
5 | test('list indexes', async () => {
6 | const res = await app.inject({ method: 'GET', path: '/indexes' })
7 | const index = res
8 | .json()
9 | .find(
10 | ({ index_definition }) =>
11 | index_definition === 'CREATE UNIQUE INDEX users_pkey ON public.users USING btree (id)'
12 | )!
13 | expect(index).toMatchInlineSnapshot(
14 | `
15 | {
16 | "access_method": "btree",
17 | "check_xmin": false,
18 | "class": [
19 | 3124,
20 | ],
21 | "collation": [
22 | 0,
23 | ],
24 | "comment": null,
25 | "id": 16400,
26 | "index_attributes": [
27 | {
28 | "attribute_name": "id",
29 | "attribute_number": 1,
30 | "data_type": "bigint",
31 | },
32 | ],
33 | "index_definition": "CREATE UNIQUE INDEX users_pkey ON public.users USING btree (id)",
34 | "index_predicate": null,
35 | "is_clustered": false,
36 | "is_exclusion": false,
37 | "is_immediate": true,
38 | "is_live": true,
39 | "is_primary": true,
40 | "is_ready": true,
41 | "is_replica_identity": false,
42 | "is_unique": true,
43 | "is_valid": true,
44 | "key_attributes": [
45 | 1,
46 | ],
47 | "number_of_attributes": 1,
48 | "number_of_key_attributes": 1,
49 | "options": [
50 | 0,
51 | ],
52 | "schema": "public",
53 | "table_id": 16393,
54 | }
55 | `
56 | )
57 | })
58 |
59 | test('retrieve index', async () => {
60 | const res = await app.inject({ method: 'GET', path: '/indexes/16400' })
61 | const index = res.json()
62 | expect(index).toMatchInlineSnapshot(
63 | `
64 | {
65 | "access_method": "btree",
66 | "check_xmin": false,
67 | "class": [
68 | 3124,
69 | ],
70 | "collation": [
71 | 0,
72 | ],
73 | "comment": null,
74 | "id": 16400,
75 | "index_attributes": [
76 | {
77 | "attribute_name": "id",
78 | "attribute_number": 1,
79 | "data_type": "bigint",
80 | },
81 | ],
82 | "index_definition": "CREATE UNIQUE INDEX users_pkey ON public.users USING btree (id)",
83 | "index_predicate": null,
84 | "is_clustered": false,
85 | "is_exclusion": false,
86 | "is_immediate": true,
87 | "is_live": true,
88 | "is_primary": true,
89 | "is_ready": true,
90 | "is_replica_identity": false,
91 | "is_unique": true,
92 | "is_valid": true,
93 | "key_attributes": [
94 | 1,
95 | ],
96 | "number_of_attributes": 1,
97 | "number_of_key_attributes": 1,
98 | "options": [
99 | 0,
100 | ],
101 | "schema": "public",
102 | "table_id": 16393,
103 | }
104 | `
105 | )
106 | })
107 |
--------------------------------------------------------------------------------
/src/server/routes/materialized-views.ts:
--------------------------------------------------------------------------------
1 | import { FastifyPluginAsyncTypebox } from '@fastify/type-provider-typebox'
2 | import { Type } from '@sinclair/typebox'
3 | import { PostgresMeta } from '../../lib/index.js'
4 | import { postgresMaterializedViewSchema } from '../../lib/types.js'
5 | import { createConnectionConfig } from '../utils.js'
6 | import { extractRequestForLogging } from '../utils.js'
7 |
8 | const route: FastifyPluginAsyncTypebox = async (fastify) => {
9 | fastify.get(
10 | '/',
11 | {
12 | schema: {
13 | headers: Type.Object({
14 | pg: Type.String(),
15 | 'x-pg-application-name': Type.Optional(Type.String()),
16 | }),
17 | querystring: Type.Object({
18 | included_schemas: Type.Optional(Type.String()),
19 | excluded_schemas: Type.Optional(Type.String()),
20 | limit: Type.Optional(Type.Integer()),
21 | offset: Type.Optional(Type.Integer()),
22 | include_columns: Type.Optional(Type.Boolean()),
23 | }),
24 | response: {
25 | 200: Type.Array(postgresMaterializedViewSchema),
26 | 500: Type.Object({
27 | error: Type.String(),
28 | }),
29 | },
30 | },
31 | },
32 | async (request, reply) => {
33 | const config = createConnectionConfig(request)
34 | const includedSchemas = request.query.included_schemas?.split(',')
35 | const excludedSchemas = request.query.excluded_schemas?.split(',')
36 | const limit = request.query.limit
37 | const offset = request.query.offset
38 | const includeColumns = request.query.include_columns
39 |
40 | const pgMeta = new PostgresMeta(config)
41 | const { data, error } = await pgMeta.materializedViews.list({
42 | includedSchemas,
43 | excludedSchemas,
44 | limit,
45 | offset,
46 | includeColumns,
47 | })
48 | await pgMeta.end()
49 | if (error) {
50 | request.log.error({ error, request: extractRequestForLogging(request) })
51 | reply.code(500)
52 | return { error: error.message }
53 | }
54 |
55 | return data
56 | }
57 | )
58 |
59 | fastify.get(
60 | '/:id(\\d+)',
61 | {
62 | schema: {
63 | headers: Type.Object({
64 | pg: Type.String(),
65 | 'x-pg-application-name': Type.Optional(Type.String()),
66 | }),
67 | params: Type.Object({
68 | id: Type.Integer(),
69 | }),
70 | response: {
71 | 200: postgresMaterializedViewSchema,
72 | 404: Type.Object({
73 | error: Type.String(),
74 | }),
75 | },
76 | },
77 | },
78 | async (request, reply) => {
79 | const config = createConnectionConfig(request)
80 | const id = request.params.id
81 |
82 | const pgMeta = new PostgresMeta(config)
83 | const { data, error } = await pgMeta.materializedViews.retrieve({ id })
84 | await pgMeta.end()
85 | if (error) {
86 | request.log.error({ error, request: extractRequestForLogging(request) })
87 | reply.code(404)
88 | return { error: error.message }
89 | }
90 |
91 | return data
92 | }
93 | )
94 | }
95 | export default route
96 |
--------------------------------------------------------------------------------
/test/server/materialized-views.ts:
--------------------------------------------------------------------------------
1 | import { expect, test } from 'vitest'
2 | import { app } from './utils'
3 |
4 | const cleanNondetFromBody = (x: T) => {
5 | const cleanNondet = ({ id, columns, ...rest }: any) => {
6 | const cleaned = rest
7 | if (columns) {
8 | cleaned.columns = columns.map(({ id, table_id, ...rest }: any) => rest)
9 | }
10 | return cleaned
11 | }
12 |
13 | return (Array.isArray(x) ? x.map(cleanNondet) : cleanNondet(x)) as T
14 | }
15 |
16 | test('materialized views', async () => {
17 | const { body } = await app.inject({ method: 'GET', path: '/materialized-views' })
18 | expect(cleanNondetFromBody(JSON.parse(body))).toMatchInlineSnapshot(`
19 | [
20 | {
21 | "comment": null,
22 | "is_populated": true,
23 | "name": "todos_matview",
24 | "schema": "public",
25 | },
26 | ]
27 | `)
28 | })
29 |
30 | test('materialized views with columns', async () => {
31 | const { body } = await app.inject({
32 | method: 'GET',
33 | path: '/materialized-views',
34 | query: { include_columns: 'true' },
35 | })
36 | expect(cleanNondetFromBody(JSON.parse(body))).toMatchInlineSnapshot(`
37 | [
38 | {
39 | "columns": [
40 | {
41 | "check": null,
42 | "comment": null,
43 | "data_type": "bigint",
44 | "default_value": null,
45 | "enums": [],
46 | "format": "int8",
47 | "identity_generation": null,
48 | "is_generated": false,
49 | "is_identity": false,
50 | "is_nullable": true,
51 | "is_unique": false,
52 | "is_updatable": false,
53 | "name": "id",
54 | "ordinal_position": 1,
55 | "schema": "public",
56 | "table": "todos_matview",
57 | },
58 | {
59 | "check": null,
60 | "comment": null,
61 | "data_type": "text",
62 | "default_value": null,
63 | "enums": [],
64 | "format": "text",
65 | "identity_generation": null,
66 | "is_generated": false,
67 | "is_identity": false,
68 | "is_nullable": true,
69 | "is_unique": false,
70 | "is_updatable": false,
71 | "name": "details",
72 | "ordinal_position": 2,
73 | "schema": "public",
74 | "table": "todos_matview",
75 | },
76 | {
77 | "check": null,
78 | "comment": null,
79 | "data_type": "bigint",
80 | "default_value": null,
81 | "enums": [],
82 | "format": "int8",
83 | "identity_generation": null,
84 | "is_generated": false,
85 | "is_identity": false,
86 | "is_nullable": true,
87 | "is_unique": false,
88 | "is_updatable": false,
89 | "name": "user-id",
90 | "ordinal_position": 3,
91 | "schema": "public",
92 | "table": "todos_matview",
93 | },
94 | ],
95 | "comment": null,
96 | "is_populated": true,
97 | "name": "todos_matview",
98 | "schema": "public",
99 | },
100 | ]
101 | `)
102 | })
103 |
--------------------------------------------------------------------------------
/test/lib/types.ts:
--------------------------------------------------------------------------------
1 | import { expect, test } from 'vitest'
2 | import { pgMeta } from './utils'
3 |
4 | test('list', async () => {
5 | const res = await pgMeta.types.list()
6 | expect(res.data?.find(({ name }) => name === 'user_status')).toMatchInlineSnapshot(
7 | { id: expect.any(Number) },
8 | `
9 | {
10 | "attributes": [],
11 | "comment": null,
12 | "enums": [
13 | "ACTIVE",
14 | "INACTIVE",
15 | ],
16 | "format": "user_status",
17 | "id": Any,
18 | "name": "user_status",
19 | "schema": "public",
20 | "type_relation_id": null,
21 | }
22 | `
23 | )
24 | })
25 |
26 | test('list types with included schemas', async () => {
27 | let res = await pgMeta.types.list({
28 | includedSchemas: ['public'],
29 | })
30 |
31 | expect(res.data?.length).toBeGreaterThan(0)
32 |
33 | res.data?.forEach((type) => {
34 | expect(type.schema).toBe('public')
35 | })
36 | })
37 |
38 | test('list types with excluded schemas', async () => {
39 | let res = await pgMeta.types.list({
40 | excludedSchemas: ['public'],
41 | })
42 |
43 | res.data?.forEach((type) => {
44 | expect(type.schema).not.toBe('public')
45 | })
46 | })
47 |
48 | test('list types with excluded schemas and include System Schemas', async () => {
49 | let res = await pgMeta.types.list({
50 | excludedSchemas: ['public'],
51 | includeSystemSchemas: true,
52 | })
53 |
54 | expect(res.data?.length).toBeGreaterThan(0)
55 |
56 | res.data?.forEach((type) => {
57 | expect(type.schema).not.toBe('public')
58 | })
59 | })
60 |
61 | test('list types with include Table Types', async () => {
62 | const res = await pgMeta.types.list({
63 | includeTableTypes: true,
64 | })
65 |
66 | expect(res.data?.find(({ name }) => name === 'todos')).toMatchInlineSnapshot(
67 | { id: expect.any(Number) },
68 | `
69 | {
70 | "attributes": [],
71 | "comment": null,
72 | "enums": [],
73 | "format": "todos",
74 | "id": Any,
75 | "name": "todos",
76 | "schema": "public",
77 | "type_relation_id": 16403,
78 | }
79 | `
80 | )
81 | })
82 |
83 | test('list types without Table Types', async () => {
84 | const res = await pgMeta.types.list({
85 | includeTableTypes: false,
86 | })
87 |
88 | res.data?.forEach((type) => {
89 | expect(type.name).not.toBe('todos')
90 | })
91 | })
92 |
93 | test('composite type attributes', async () => {
94 | await pgMeta.query(`create type test_composite as (id int8, data text);`)
95 |
96 | const res = await pgMeta.types.list()
97 | expect(res.data?.find(({ name }) => name === 'test_composite')).toMatchInlineSnapshot(
98 | { id: expect.any(Number), type_relation_id: expect.any(Number) },
99 | `
100 | {
101 | "attributes": [
102 | {
103 | "name": "id",
104 | "type_id": 20,
105 | },
106 | {
107 | "name": "data",
108 | "type_id": 25,
109 | },
110 | ],
111 | "comment": null,
112 | "enums": [],
113 | "format": "test_composite",
114 | "id": Any,
115 | "name": "test_composite",
116 | "schema": "public",
117 | "type_relation_id": Any,
118 | }
119 | `
120 | )
121 |
122 | await pgMeta.query(`drop type test_composite;`)
123 | })
124 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@supabase/postgres-meta",
3 | "version": "0.0.0-automated",
4 | "description": "A RESTful API for managing your Postgres.",
5 | "homepage": "https://github.com/supabase/postgres-meta",
6 | "bugs": "https://github.com/supabase/postgres-meta/issues",
7 | "license": "MIT",
8 | "author": "Supabase",
9 | "files": [
10 | "dist"
11 | ],
12 | "type": "module",
13 | "main": "dist/lib/index.js",
14 | "types": "dist/lib/index.d.ts",
15 | "imports": {
16 | "#package.json": "./package.json"
17 | },
18 | "repository": {
19 | "url": "git+https://github.com/supabase/postgres-meta.git"
20 | },
21 | "scripts": {
22 | "check": "tsc -p tsconfig.json --noEmit",
23 | "clean": "rimraf dist tsconfig.tsbuildinfo",
24 | "format": "prettier --write '{src,test}/**/*.ts' '*.ts'",
25 | "build": "tsc -p tsconfig.json && cpy 'src/lib/sql/*.sql' dist/lib/sql",
26 | "docs:export": "PG_META_EXPORT_DOCS=true node --loader ts-node/esm src/server/server.ts > openapi.json",
27 | "gen:types:typescript": "PG_META_GENERATE_TYPES=typescript node --loader ts-node/esm src/server/server.ts",
28 | "gen:types:go": "PG_META_GENERATE_TYPES=go node --loader ts-node/esm src/server/server.ts",
29 | "gen:types:swift": "PG_META_GENERATE_TYPES=swift node --loader ts-node/esm src/server/server.ts",
30 | "gen:types:python": "PG_META_GENERATE_TYPES=python node --loader ts-node/esm src/server/server.ts",
31 | "start": "node dist/server/server.js",
32 | "dev": "trap 'npm run db:clean' INT && run-s db:clean db:run && run-s dev:code",
33 | "dev:code": "nodemon --exec node --loader ts-node/esm src/server/server.ts | pino-pretty --colorize",
34 | "test": "run-s db:clean db:run test:run db:clean",
35 | "db:clean": "cd test/db && docker compose down",
36 | "db:run": "cd test/db && docker compose up --detach --wait",
37 | "test:run": "PG_META_MAX_RESULT_SIZE_MB=20 PG_QUERY_TIMEOUT_SECS=5 PG_CONN_TIMEOUT_SECS=30 vitest run --coverage",
38 | "test:update": "run-s db:clean db:run && PG_META_MAX_RESULT_SIZE_MB=20 PG_QUERY_TIMEOUT_SECS=5 PG_CONN_TIMEOUT_SECS=30 vitest run --update && run-s db:clean"
39 | },
40 | "engines": {
41 | "node": ">=20",
42 | "npm": ">=9"
43 | },
44 | "dependencies": {
45 | "@fastify/cors": "^9.0.1",
46 | "@fastify/swagger": "^8.2.1",
47 | "@fastify/type-provider-typebox": "^3.5.0",
48 | "@sentry/node": "^9.12.0",
49 | "@sentry/profiling-node": "^9.12.0",
50 | "@sinclair/typebox": "^0.31.25",
51 | "close-with-grace": "^2.1.0",
52 | "crypto-js": "^4.0.0",
53 | "fastify": "^4.24.3",
54 | "fastify-metrics": "^10.0.0",
55 | "pg": "npm:@supabase/pg@0.0.3",
56 | "pg-connection-string": "^2.7.0",
57 | "pg-format": "^1.0.4",
58 | "pg-protocol": "npm:@supabase/pg-protocol@0.0.2",
59 | "pgsql-parser": "^17.8.2",
60 | "pino": "^9.5.0",
61 | "postgres-array": "^3.0.1",
62 | "prettier": "^3.3.3",
63 | "prettier-plugin-sql": "0.17.1"
64 | },
65 | "devDependencies": {
66 | "@types/crypto-js": "^4.1.1",
67 | "@types/node": "^20.11.14",
68 | "@types/pg": "^8.11.10",
69 | "@types/pg-format": "^1.0.1",
70 | "@vitest/coverage-v8": "^3.0.5",
71 | "cpy-cli": "^5.0.0",
72 | "nodemon": "^3.1.7",
73 | "npm-run-all": "^4.1.5",
74 | "pino-pretty": "^13.1.1",
75 | "rimraf": "^6.0.1",
76 | "ts-node": "^10.9.1",
77 | "typescript": "^5.6.3",
78 | "vitest": "^3.0.5"
79 | }
80 | }
81 |
--------------------------------------------------------------------------------
/src/lib/PostgresMetaExtensions.ts:
--------------------------------------------------------------------------------
1 | import { ident, literal } from 'pg-format'
2 | import { PostgresMetaResult, PostgresExtension } from './types.js'
3 | import { EXTENSIONS_SQL } from './sql/extensions.sql.js'
4 | import { filterByValue } from './helpers.js'
5 |
6 | export default class PostgresMetaExtensions {
7 | query: (sql: string) => Promise>
8 |
9 | constructor(query: (sql: string) => Promise>) {
10 | this.query = query
11 | }
12 |
13 | async list({
14 | limit,
15 | offset,
16 | }: {
17 | limit?: number
18 | offset?: number
19 | } = {}): Promise> {
20 | const sql = EXTENSIONS_SQL({ limit, offset })
21 | return await this.query(sql)
22 | }
23 |
24 | async retrieve({ name }: { name: string }): Promise> {
25 | const nameFilter = filterByValue([name])
26 | const sql = EXTENSIONS_SQL({ nameFilter })
27 | const { data, error } = await this.query(sql)
28 | if (error) {
29 | return { data, error }
30 | } else if (data.length === 0) {
31 | return { data: null, error: { message: `Cannot find an extension named ${name}` } }
32 | } else {
33 | return { data: data[0], error }
34 | }
35 | }
36 |
37 | async create({
38 | name,
39 | schema,
40 | version,
41 | cascade = false,
42 | }: {
43 | name: string
44 | schema?: string
45 | version?: string
46 | cascade?: boolean
47 | }): Promise> {
48 | const sql = `
49 | CREATE EXTENSION ${ident(name)}
50 | ${schema === undefined ? '' : `SCHEMA ${ident(schema)}`}
51 | ${version === undefined ? '' : `VERSION ${literal(version)}`}
52 | ${cascade ? 'CASCADE' : ''};`
53 | const { error } = await this.query(sql)
54 | if (error) {
55 | return { data: null, error }
56 | }
57 | return await this.retrieve({ name })
58 | }
59 |
60 | async update(
61 | name: string,
62 | {
63 | update = false,
64 | version,
65 | schema,
66 | }: {
67 | update?: boolean
68 | version?: string
69 | schema?: string
70 | }
71 | ): Promise> {
72 | let updateSql = ''
73 | if (update) {
74 | updateSql = `ALTER EXTENSION ${ident(name)} UPDATE ${
75 | version === undefined ? '' : `TO ${literal(version)}`
76 | };`
77 | }
78 | const schemaSql =
79 | schema === undefined ? '' : `ALTER EXTENSION ${ident(name)} SET SCHEMA ${ident(schema)};`
80 |
81 | const sql = `BEGIN; ${updateSql} ${schemaSql} COMMIT;`
82 | const { error } = await this.query(sql)
83 | if (error) {
84 | return { data: null, error }
85 | }
86 | return await this.retrieve({ name })
87 | }
88 |
89 | async remove(
90 | name: string,
91 | { cascade = false } = {}
92 | ): Promise> {
93 | const { data: extension, error } = await this.retrieve({ name })
94 | if (error) {
95 | return { data: null, error }
96 | }
97 | const sql = `DROP EXTENSION ${ident(name)} ${cascade ? 'CASCADE' : 'RESTRICT'};`
98 | {
99 | const { error } = await this.query(sql)
100 | if (error) {
101 | return { data: null, error }
102 | }
103 | }
104 | return { data: extension!, error: null }
105 | }
106 | }
107 |
--------------------------------------------------------------------------------
/src/server/routes/views.ts:
--------------------------------------------------------------------------------
1 | import { FastifyPluginAsyncTypebox } from '@fastify/type-provider-typebox'
2 | import { Type } from '@sinclair/typebox'
3 | import { PostgresMeta } from '../../lib/index.js'
4 | import { postgresViewSchema } from '../../lib/types.js'
5 | import { createConnectionConfig } from '../utils.js'
6 | import { extractRequestForLogging } from '../utils.js'
7 |
8 | const route: FastifyPluginAsyncTypebox = async (fastify) => {
9 | fastify.get(
10 | '/',
11 | {
12 | schema: {
13 | headers: Type.Object({
14 | pg: Type.String(),
15 | 'x-pg-application-name': Type.Optional(Type.String()),
16 | }),
17 | querystring: Type.Object({
18 | include_system_schemas: Type.Optional(Type.Boolean()),
19 | // Note: this only supports comma separated values (e.g., ".../views?included_schemas=public,core")
20 | included_schemas: Type.Optional(Type.String()),
21 | excluded_schemas: Type.Optional(Type.String()),
22 | limit: Type.Optional(Type.Integer()),
23 | offset: Type.Optional(Type.Integer()),
24 | include_columns: Type.Optional(Type.Boolean()),
25 | }),
26 | response: {
27 | 200: Type.Array(postgresViewSchema),
28 | 500: Type.Object({
29 | error: Type.String(),
30 | }),
31 | },
32 | },
33 | },
34 | async (request, reply) => {
35 | const config = createConnectionConfig(request)
36 | const includeSystemSchemas = request.query.include_system_schemas
37 | const includedSchemas = request.query.included_schemas?.split(',')
38 | const excludedSchemas = request.query.excluded_schemas?.split(',')
39 | const limit = request.query.limit
40 | const offset = request.query.offset
41 | const includeColumns = request.query.include_columns
42 |
43 | const pgMeta = new PostgresMeta(config)
44 | const { data, error } = await pgMeta.views.list({
45 | includeSystemSchemas,
46 | includedSchemas,
47 | excludedSchemas,
48 | limit,
49 | offset,
50 | includeColumns,
51 | })
52 | await pgMeta.end()
53 | if (error) {
54 | request.log.error({ error, request: extractRequestForLogging(request) })
55 | reply.code(500)
56 | return { error: error.message }
57 | }
58 |
59 | return data
60 | }
61 | )
62 |
63 | fastify.get(
64 | '/:id(\\d+)',
65 | {
66 | schema: {
67 | headers: Type.Object({
68 | pg: Type.String(),
69 | 'x-pg-application-name': Type.Optional(Type.String()),
70 | }),
71 | params: Type.Object({
72 | id: Type.Integer(),
73 | }),
74 | response: {
75 | 200: postgresViewSchema,
76 | 404: Type.Object({
77 | error: Type.String(),
78 | }),
79 | },
80 | },
81 | },
82 | async (request, reply) => {
83 | const config = createConnectionConfig(request)
84 | const id = request.params.id
85 |
86 | const pgMeta = new PostgresMeta(config)
87 | const { data, error } = await pgMeta.views.retrieve({ id })
88 | await pgMeta.end()
89 | if (error) {
90 | request.log.error({ error, request: extractRequestForLogging(request) })
91 | reply.code(404)
92 | return { error: error.message }
93 | }
94 |
95 | return data
96 | }
97 | )
98 | }
99 | export default route
100 |
--------------------------------------------------------------------------------
/src/lib/PostgresMeta.ts:
--------------------------------------------------------------------------------
1 | import * as Parser from './Parser.js'
2 | import PostgresMetaColumnPrivileges from './PostgresMetaColumnPrivileges.js'
3 | import PostgresMetaColumns from './PostgresMetaColumns.js'
4 | import PostgresMetaConfig from './PostgresMetaConfig.js'
5 | import PostgresMetaExtensions from './PostgresMetaExtensions.js'
6 | import PostgresMetaForeignTables from './PostgresMetaForeignTables.js'
7 | import PostgresMetaFunctions from './PostgresMetaFunctions.js'
8 | import PostgresMetaIndexes from './PostgresMetaIndexes.js'
9 | import PostgresMetaMaterializedViews from './PostgresMetaMaterializedViews.js'
10 | import PostgresMetaPolicies from './PostgresMetaPolicies.js'
11 | import PostgresMetaPublications from './PostgresMetaPublications.js'
12 | import PostgresMetaRelationships from './PostgresMetaRelationships.js'
13 | import PostgresMetaRoles from './PostgresMetaRoles.js'
14 | import PostgresMetaSchemas from './PostgresMetaSchemas.js'
15 | import PostgresMetaTablePrivileges from './PostgresMetaTablePrivileges.js'
16 | import PostgresMetaTables from './PostgresMetaTables.js'
17 | import PostgresMetaTriggers from './PostgresMetaTriggers.js'
18 | import PostgresMetaTypes from './PostgresMetaTypes.js'
19 | import PostgresMetaVersion from './PostgresMetaVersion.js'
20 | import PostgresMetaViews from './PostgresMetaViews.js'
21 | import { init } from './db.js'
22 | import { PostgresMetaResult, PoolConfig } from './types.js'
23 |
24 | export default class PostgresMeta {
25 | query: (
26 | sql: string,
27 | opts?: { statementQueryTimeout?: number; trackQueryInSentry?: boolean; parameters?: unknown[] }
28 | ) => Promise>
29 | end: () => Promise
30 | columnPrivileges: PostgresMetaColumnPrivileges
31 | columns: PostgresMetaColumns
32 | config: PostgresMetaConfig
33 | extensions: PostgresMetaExtensions
34 | foreignTables: PostgresMetaForeignTables
35 | functions: PostgresMetaFunctions
36 | indexes: PostgresMetaIndexes
37 | materializedViews: PostgresMetaMaterializedViews
38 | policies: PostgresMetaPolicies
39 | publications: PostgresMetaPublications
40 | relationships: PostgresMetaRelationships
41 | roles: PostgresMetaRoles
42 | schemas: PostgresMetaSchemas
43 | tablePrivileges: PostgresMetaTablePrivileges
44 | tables: PostgresMetaTables
45 | triggers: PostgresMetaTriggers
46 | types: PostgresMetaTypes
47 | version: PostgresMetaVersion
48 | views: PostgresMetaViews
49 |
50 | parse = Parser.Parse
51 | deparse = Parser.Deparse
52 | format = Parser.Format
53 |
  // Builds the full metadata API surface from a single pool configuration.
  // One pooled connection (see `init`) backs every sub-API: they all receive
  // the same `query` function, so they share the pool and its limits.
  constructor(config: PoolConfig) {
    const { query, end } = init(config)
    this.query = query
    this.end = end
    // Each sub-API below is a thin facade over `this.query`; construction
    // performs no I/O — the first query opens the actual connection.
    this.columnPrivileges = new PostgresMetaColumnPrivileges(this.query)
    this.columns = new PostgresMetaColumns(this.query)
    this.config = new PostgresMetaConfig(this.query)
    this.extensions = new PostgresMetaExtensions(this.query)
    this.foreignTables = new PostgresMetaForeignTables(this.query)
    this.functions = new PostgresMetaFunctions(this.query)
    this.indexes = new PostgresMetaIndexes(this.query)
    this.materializedViews = new PostgresMetaMaterializedViews(this.query)
    this.policies = new PostgresMetaPolicies(this.query)
    this.publications = new PostgresMetaPublications(this.query)
    this.relationships = new PostgresMetaRelationships(this.query)
    this.roles = new PostgresMetaRoles(this.query)
    this.schemas = new PostgresMetaSchemas(this.query)
    this.tablePrivileges = new PostgresMetaTablePrivileges(this.query)
    this.tables = new PostgresMetaTables(this.query)
    this.triggers = new PostgresMetaTriggers(this.query)
    this.types = new PostgresMetaTypes(this.query)
    this.version = new PostgresMetaVersion(this.query)
    this.views = new PostgresMetaViews(this.query)
  }
78 | }
79 |
--------------------------------------------------------------------------------
/src/lib/PostgresMetaViews.ts:
--------------------------------------------------------------------------------
1 | import { DEFAULT_SYSTEM_SCHEMAS } from './constants.js'
2 | import { coalesceRowsToArray, filterByList, filterByValue } from './helpers.js'
3 | import { PostgresMetaResult, PostgresView } from './types.js'
4 | import { VIEWS_SQL } from './sql/views.sql.js'
5 | import { COLUMNS_SQL } from './sql/columns.sql.js'
6 |
7 | export default class PostgresMetaViews {
8 | query: (sql: string) => Promise>
9 |
10 | constructor(query: (sql: string) => Promise>) {
11 | this.query = query
12 | }
13 |
14 | async list({
15 | includeSystemSchemas = false,
16 | includedSchemas,
17 | excludedSchemas,
18 | limit,
19 | offset,
20 | includeColumns = true,
21 | }: {
22 | includeSystemSchemas?: boolean
23 | includedSchemas?: string[]
24 | excludedSchemas?: string[]
25 | limit?: number
26 | offset?: number
27 | includeColumns?: boolean
28 | } = {}): Promise> {
29 | const schemaFilter = filterByList(
30 | includedSchemas,
31 | excludedSchemas,
32 | !includeSystemSchemas ? DEFAULT_SYSTEM_SCHEMAS : undefined
33 | )
34 | const sql = generateEnrichedViewsSql({ includeColumns, schemaFilter, limit, offset })
35 | return await this.query(sql)
36 | }
37 |
38 | async retrieve({ id }: { id: number }): Promise>
39 | async retrieve({
40 | name,
41 | schema,
42 | }: {
43 | name: string
44 | schema: string
45 | }): Promise>
46 | async retrieve({
47 | id,
48 | name,
49 | schema = 'public',
50 | }: {
51 | id?: number
52 | name?: string
53 | schema?: string
54 | }): Promise> {
55 | if (id) {
56 | const idsFilter = filterByValue([id])
57 | const sql = generateEnrichedViewsSql({
58 | includeColumns: true,
59 | idsFilter,
60 | })
61 | const { data, error } = await this.query(sql)
62 | if (error) {
63 | return { data, error }
64 | } else if (data.length === 0) {
65 | return { data: null, error: { message: `Cannot find a view with ID ${id}` } }
66 | } else {
67 | return { data: data[0], error }
68 | }
69 | } else if (name) {
70 | const viewIdentifierFilter = filterByValue([`${schema}.${name}`])
71 | const sql = generateEnrichedViewsSql({
72 | includeColumns: true,
73 | viewIdentifierFilter,
74 | })
75 | const { data, error } = await this.query(sql)
76 | if (error) {
77 | return { data, error }
78 | } else if (data.length === 0) {
79 | return {
80 | data: null,
81 | error: { message: `Cannot find a view named ${name} in schema ${schema}` },
82 | }
83 | } else {
84 | return { data: data[0], error }
85 | }
86 | } else {
87 | return { data: null, error: { message: 'Invalid parameters on view retrieve' } }
88 | }
89 | }
90 | }
91 |
92 | const generateEnrichedViewsSql = ({
93 | includeColumns,
94 | schemaFilter,
95 | idsFilter,
96 | viewIdentifierFilter,
97 | limit,
98 | offset,
99 | }: {
100 | includeColumns: boolean
101 | schemaFilter?: string
102 | idsFilter?: string
103 | viewIdentifierFilter?: string
104 | limit?: number
105 | offset?: number
106 | }) => `
107 | with views as (${VIEWS_SQL({ schemaFilter, limit, offset, viewIdentifierFilter, idsFilter })})
108 | ${includeColumns ? `, columns as (${COLUMNS_SQL({ schemaFilter, tableIdentifierFilter: viewIdentifierFilter, tableIdFilter: idsFilter })})` : ''}
109 | select
110 | *
111 | ${includeColumns ? `, ${coalesceRowsToArray('columns', 'columns.table_id = views.id')}` : ''}
112 | from views`
113 |
--------------------------------------------------------------------------------
/src/server/constants.ts:
--------------------------------------------------------------------------------
1 | import crypto from 'crypto'
2 | import { PoolConfig } from '../lib/types.js'
3 | import { getSecret } from '../lib/secrets.js'
4 | import { AccessControl } from './templates/swift.js'
5 | import pkg from '#package.json' with { type: 'json' }
6 |
// Interface and port the HTTP server binds to.
export const PG_META_HOST = process.env.PG_META_HOST || '0.0.0.0'
export const PG_META_PORT = Number(process.env.PG_META_PORT || 1337)
// Key used to decrypt the `x-connection-encrypted` header. `getSecret` is
// async, so this module relies on top-level await; the fallback is a
// development-only placeholder.
export const CRYPTO_KEY = (await getSecret('CRYPTO_KEY')) || 'SAMPLE_KEY'

// Individual connection parameters; only consulted when PG_META_DB_URL is
// not provided (see PG_CONNECTION below).
const PG_META_DB_HOST = process.env.PG_META_DB_HOST || 'localhost'
const PG_META_DB_NAME = process.env.PG_META_DB_NAME || 'postgres'
const PG_META_DB_USER = process.env.PG_META_DB_USER || 'postgres'
const PG_META_DB_PORT = process.env.PG_META_DB_PORT || '5432'
const PG_META_DB_PASSWORD = (await getSecret('PG_META_DB_PASSWORD')) || 'postgres'
const PG_META_DB_SSL_MODE = process.env.PG_META_DB_SSL_MODE || 'disable'

// Timeouts are configured in seconds and converted to milliseconds in
// DEFAULT_POOL_CONFIG below.
const PG_CONN_TIMEOUT_SECS = Number(process.env.PG_CONN_TIMEOUT_SECS || 15)
const PG_QUERY_TIMEOUT_SECS = Number(process.env.PG_QUERY_TIMEOUT_SECS || 55)

// Default connection string. When PG_META_DB_URL is absent it is assembled
// from the parts above; the URL class percent-encodes each component, so
// special characters in e.g. the password are handled correctly.
export let PG_CONNECTION = process.env.PG_META_DB_URL
if (!PG_CONNECTION) {
  const pgConn = new URL('postgresql://')
  pgConn.hostname = PG_META_DB_HOST
  pgConn.port = PG_META_DB_PORT
  pgConn.username = PG_META_DB_USER
  pgConn.password = PG_META_DB_PASSWORD
  pgConn.pathname = encodeURIComponent(PG_META_DB_NAME)
  pgConn.searchParams.set('sslmode', PG_META_DB_SSL_MODE)
  PG_CONNECTION = `${pgConn}`
}

// Optional CA certificate for verifying the database server's TLS cert.
export const PG_META_DB_SSL_ROOT_CERT = process.env.PG_META_DB_SSL_ROOT_CERT
if (PG_META_DB_SSL_ROOT_CERT) {
  // validate cert
  // X509Certificate throws on malformed input, so an invalid cert fails fast
  // at startup rather than on the first connection attempt.
  new crypto.X509Certificate(PG_META_DB_SSL_ROOT_CERT)
}

// Feature flags for one-shot CLI-style modes (docs export / type generation).
export const EXPORT_DOCS = process.env.PG_META_EXPORT_DOCS === 'true'
export const GENERATE_TYPES = process.env.PG_META_GENERATE_TYPES
export const GENERATE_TYPES_INCLUDED_SCHEMAS = GENERATE_TYPES
  ? (process.env.PG_META_GENERATE_TYPES_INCLUDED_SCHEMAS?.split(',') ?? [])
  : []
export const GENERATE_TYPES_DEFAULT_SCHEMA =
  process.env.PG_META_GENERATE_TYPES_DEFAULT_SCHEMA || 'public'
export const GENERATE_TYPES_DETECT_ONE_TO_ONE_RELATIONSHIPS =
  process.env.PG_META_GENERATE_TYPES_DETECT_ONE_TO_ONE_RELATIONSHIPS === 'true'
export const POSTGREST_VERSION = process.env.PG_META_POSTGREST_VERSION
// NOTE(review): the cast trusts the env var to be a valid AccessControl value;
// an invalid value is passed through unvalidated — confirm downstream handling.
export const GENERATE_TYPES_SWIFT_ACCESS_CONTROL = process.env
  .PG_META_GENERATE_TYPES_SWIFT_ACCESS_CONTROL
  ? (process.env.PG_META_GENERATE_TYPES_SWIFT_ACCESS_CONTROL as AccessControl)
  : 'internal'

// json/jsonb/text types
export const VALID_UNNAMED_FUNCTION_ARG_TYPES = new Set([114, 3802, 25])
export const VALID_FUNCTION_ARGS_MODE = new Set(['in', 'inout', 'variadic'])

export const PG_META_MAX_RESULT_SIZE = process.env.PG_META_MAX_RESULT_SIZE_MB
  ? // Node-postgres get a maximum size in bytes make the conversion from the env variable
    // from MB to Bytes
    parseInt(process.env.PG_META_MAX_RESULT_SIZE_MB, 10) * 1024 * 1024
  : 2 * 1024 * 1024 * 1024 // default to 2GB max query size result

export const MAX_BODY_LIMIT = process.env.PG_META_MAX_BODY_LIMIT_MB
  ? // Fastify server max body size allowed, is in bytes, convert from MB to Bytes
    parseInt(process.env.PG_META_MAX_BODY_LIMIT_MB, 10) * 1024 * 1024
  : 3 * 1024 * 1024

// Pool settings shared by every per-request PostgresMeta instance.
export const DEFAULT_POOL_CONFIG: PoolConfig = {
  max: 1,
  connectionTimeoutMillis: PG_CONN_TIMEOUT_SECS * 1000,
  query_timeout: PG_QUERY_TIMEOUT_SECS * 1000,
  ssl: PG_META_DB_SSL_ROOT_CERT ? { ca: PG_META_DB_SSL_ROOT_CERT } : undefined,
  application_name: `postgres-meta ${pkg.version}`,
  maxResultSize: PG_META_MAX_RESULT_SIZE,
}

// Header whose value is echoed into logs to correlate requests.
export const PG_META_REQ_HEADER = process.env.PG_META_REQ_HEADER || 'request-id'
79 |
--------------------------------------------------------------------------------
/src/server/routes/publications.ts:
--------------------------------------------------------------------------------
1 | import { FastifyInstance } from 'fastify'
2 | import { PostgresMeta } from '../../lib/index.js'
3 | import { createConnectionConfig } from '../utils.js'
4 | import { extractRequestForLogging } from '../utils.js'
5 |
6 | export default async (fastify: FastifyInstance) => {
7 | fastify.get<{
8 | Headers: { pg: string; 'x-pg-application-name'?: string }
9 | Querystring: {
10 | limit?: number
11 | offset?: number
12 | }
13 | }>('/', async (request, reply) => {
14 | const config = createConnectionConfig(request)
15 | const limit = request.query.limit
16 | const offset = request.query.offset
17 |
18 | const pgMeta = new PostgresMeta(config)
19 | const { data, error } = await pgMeta.publications.list({ limit, offset })
20 | await pgMeta.end()
21 | if (error) {
22 | request.log.error({ error, request: extractRequestForLogging(request) })
23 | reply.code(500)
24 | return { error: error.message }
25 | }
26 |
27 | return data
28 | })
29 |
30 | fastify.get<{
31 | Headers: { pg: string; 'x-pg-application-name'?: string }
32 | Params: {
33 | id: string
34 | }
35 | }>('/:id(\\d+)', async (request, reply) => {
36 | const config = createConnectionConfig(request)
37 | const id = Number(request.params.id)
38 |
39 | const pgMeta = new PostgresMeta(config)
40 | const { data, error } = await pgMeta.publications.retrieve({ id })
41 | await pgMeta.end()
42 | if (error) {
43 | request.log.error({ error, request: extractRequestForLogging(request) })
44 | reply.code(404)
45 | return { error: error.message }
46 | }
47 |
48 | return data
49 | })
50 |
51 | fastify.post<{
52 | Headers: { pg: string; 'x-pg-application-name'?: string }
53 | Body: any
54 | }>('/', async (request, reply) => {
55 | const config = createConnectionConfig(request)
56 |
57 | const pgMeta = new PostgresMeta(config)
58 | const { data, error } = await pgMeta.publications.create(request.body as any)
59 | await pgMeta.end()
60 | if (error) {
61 | request.log.error({ error, request: extractRequestForLogging(request) })
62 | reply.code(400)
63 | return { error: error.message }
64 | }
65 |
66 | return data
67 | })
68 |
69 | fastify.patch<{
70 | Headers: { pg: string; 'x-pg-application-name'?: string }
71 | Params: {
72 | id: string
73 | }
74 | Body: any
75 | }>('/:id(\\d+)', async (request, reply) => {
76 | const config = createConnectionConfig(request)
77 | const id = Number(request.params.id)
78 |
79 | const pgMeta = new PostgresMeta(config)
80 | const { data, error } = await pgMeta.publications.update(id, request.body as any)
81 | await pgMeta.end()
82 | if (error) {
83 | request.log.error({ error, request: extractRequestForLogging(request) })
84 | reply.code(400)
85 | if (error.message.startsWith('Cannot find')) reply.code(404)
86 | return { error: error.message }
87 | }
88 |
89 | return data
90 | })
91 |
92 | fastify.delete<{
93 | Headers: { pg: string; 'x-pg-application-name'?: string }
94 | Params: {
95 | id: string
96 | }
97 | }>('/:id(\\d+)', async (request, reply) => {
98 | const config = createConnectionConfig(request)
99 | const id = Number(request.params.id)
100 |
101 | const pgMeta = new PostgresMeta(config)
102 | const { data, error } = await pgMeta.publications.remove(id)
103 | await pgMeta.end()
104 | if (error) {
105 | request.log.error({ error, request: extractRequestForLogging(request) })
106 | reply.code(400)
107 | if (error.message.startsWith('Cannot find')) reply.code(404)
108 | return { error: error.message }
109 | }
110 |
111 | return data
112 | })
113 | }
114 |
--------------------------------------------------------------------------------
/src/lib/PostgresMetaColumnPrivileges.ts:
--------------------------------------------------------------------------------
1 | import { ident, literal } from 'pg-format'
2 | import { DEFAULT_SYSTEM_SCHEMAS } from './constants.js'
3 | import { filterByValue, filterByList } from './helpers.js'
4 | import { COLUMN_PRIVILEGES_SQL } from './sql/column_privileges.sql.js'
5 | import {
6 | PostgresMetaResult,
7 | PostgresColumnPrivileges,
8 | PostgresColumnPrivilegesGrant,
9 | PostgresColumnPrivilegesRevoke,
10 | } from './types.js'
11 |
12 | export default class PostgresMetaColumnPrivileges {
13 | query: (sql: string) => Promise>
14 |
15 | constructor(query: (sql: string) => Promise>) {
16 | this.query = query
17 | }
18 |
19 | async list({
20 | includeSystemSchemas = false,
21 | includedSchemas,
22 | excludedSchemas,
23 | limit,
24 | offset,
25 | }: {
26 | includeSystemSchemas?: boolean
27 | includedSchemas?: string[]
28 | excludedSchemas?: string[]
29 | limit?: number
30 | offset?: number
31 | } = {}): Promise> {
32 | const schemaFilter = filterByList(
33 | includedSchemas,
34 | excludedSchemas,
35 | !includeSystemSchemas ? DEFAULT_SYSTEM_SCHEMAS : undefined
36 | )
37 | const sql = COLUMN_PRIVILEGES_SQL({ schemaFilter, limit, offset })
38 | return await this.query(sql)
39 | }
40 |
41 | async grant(
42 | grants: PostgresColumnPrivilegesGrant[]
43 | ): Promise> {
44 | let sql = `
45 | do $$
46 | declare
47 | col record;
48 | begin
49 | ${grants
50 | .map(({ privilege_type, column_id, grantee, is_grantable }) => {
51 | const [relationId, columnNumber] = column_id.split('.')
52 | return `
53 | select *
54 | from pg_attribute a
55 | where a.attrelid = ${literal(relationId)}
56 | and a.attnum = ${literal(columnNumber)}
57 | into col;
58 | execute format(
59 | 'grant ${privilege_type} (%I) on %s to ${
60 | grantee.toLowerCase() === 'public' ? 'public' : ident(grantee)
61 | } ${is_grantable ? 'with grant option' : ''}',
62 | col.attname,
63 | col.attrelid::regclass
64 | );`
65 | })
66 | .join('\n')}
67 | end $$;
68 | `
69 | const { data, error } = await this.query(sql)
70 | if (error) {
71 | return { data, error }
72 | }
73 |
74 | // Return the updated column privileges for modified columns.
75 | const columnIds = [...new Set(grants.map(({ column_id }) => column_id))]
76 | const columnIdsFilter = filterByValue(columnIds)
77 | sql = COLUMN_PRIVILEGES_SQL({ columnIdsFilter })
78 | return await this.query(sql)
79 | }
80 |
81 | async revoke(
82 | revokes: PostgresColumnPrivilegesRevoke[]
83 | ): Promise> {
84 | let sql = `
85 | do $$
86 | declare
87 | col record;
88 | begin
89 | ${revokes
90 | .map(({ privilege_type, column_id, grantee }) => {
91 | const [relationId, columnNumber] = column_id.split('.')
92 | return `
93 | select *
94 | from pg_attribute a
95 | where a.attrelid = ${literal(relationId)}
96 | and a.attnum = ${literal(columnNumber)}
97 | into col;
98 | execute format(
99 | 'revoke ${privilege_type} (%I) on %s from ${
100 | grantee.toLowerCase() === 'public' ? 'public' : ident(grantee)
101 | }',
102 | col.attname,
103 | col.attrelid::regclass
104 | );`
105 | })
106 | .join('\n')}
107 | end $$;
108 | `
109 | const { data, error } = await this.query(sql)
110 | if (error) {
111 | return { data, error }
112 | }
113 |
114 | // Return the updated column privileges for modified columns.
115 | const columnIds = [...new Set(revokes.map(({ column_id }) => column_id))]
116 | const columnIdsFilter = filterByValue(columnIds)
117 | sql = COLUMN_PRIVILEGES_SQL({ columnIdsFilter })
118 | return await this.query(sql)
119 | }
120 | }
121 |
--------------------------------------------------------------------------------
/src/server/routes/extensions.ts:
--------------------------------------------------------------------------------
1 | import { FastifyInstance } from 'fastify'
2 | import { PostgresMeta } from '../../lib/index.js'
3 | import { createConnectionConfig } from '../utils.js'
4 | import { extractRequestForLogging } from '../utils.js'
5 |
6 | export default async (fastify: FastifyInstance) => {
7 | fastify.get<{
8 | Headers: { pg: string; 'x-pg-application-name'?: string }
9 | Querystring: {
10 | limit?: number
11 | offset?: number
12 | }
13 | }>('/', async (request, reply) => {
14 | const config = createConnectionConfig(request)
15 | const limit = request.query.limit
16 | const offset = request.query.offset
17 |
18 | const pgMeta = new PostgresMeta(config)
19 | const { data, error } = await pgMeta.extensions.list({ limit, offset })
20 | await pgMeta.end()
21 | if (error) {
22 | request.log.error({ error, request: extractRequestForLogging(request) })
23 | reply.code(500)
24 | return { error: error.message }
25 | }
26 |
27 | return data
28 | })
29 |
30 | fastify.get<{
31 | Headers: { pg: string; 'x-pg-application-name'?: string }
32 | Params: {
33 | name: string
34 | }
35 | }>('/:name', async (request, reply) => {
36 | const config = createConnectionConfig(request)
37 |
38 | const pgMeta = new PostgresMeta(config)
39 | const { data, error } = await pgMeta.extensions.retrieve({ name: request.params.name })
40 | await pgMeta.end()
41 | if (error) {
42 | request.log.error({ error, request: extractRequestForLogging(request) })
43 | reply.code(404)
44 | return { error: error.message }
45 | }
46 |
47 | return data
48 | })
49 |
50 | fastify.post<{
51 | Headers: { pg: string; 'x-pg-application-name'?: string }
52 | Body: any
53 | }>('/', async (request, reply) => {
54 | const config = createConnectionConfig(request)
55 |
56 | const pgMeta = new PostgresMeta(config)
57 | const { data, error } = await pgMeta.extensions.create(request.body as any)
58 | await pgMeta.end()
59 | if (error) {
60 | request.log.error({ error, request: extractRequestForLogging(request) })
61 | reply.code(400)
62 | return { error: error.message }
63 | }
64 |
65 | return data
66 | })
67 |
68 | fastify.patch<{
69 | Headers: { pg: string; 'x-pg-application-name'?: string }
70 | Params: {
71 | name: string
72 | }
73 | Body: any
74 | }>('/:name', async (request, reply) => {
75 | const config = createConnectionConfig(request)
76 |
77 | const pgMeta = new PostgresMeta(config)
78 | const { data, error } = await pgMeta.extensions.update(request.params.name, request.body as any)
79 | await pgMeta.end()
80 | if (error) {
81 | request.log.error({ error, request: extractRequestForLogging(request) })
82 | reply.code(400)
83 | if (error.message.startsWith('Cannot find')) reply.code(404)
84 | return { error: error.message }
85 | }
86 |
87 | return data
88 | })
89 |
90 | fastify.delete<{
91 | Headers: { pg: string; 'x-pg-application-name'?: string }
92 | Params: {
93 | name: string
94 | }
95 | Querystring: {
96 | cascade?: string
97 | }
98 | }>('/:name', async (request, reply) => {
99 | const config = createConnectionConfig(request)
100 | const cascade = request.query.cascade === 'true'
101 |
102 | const pgMeta = new PostgresMeta(config)
103 | const { data, error } = await pgMeta.extensions.remove(request.params.name, { cascade })
104 | await pgMeta.end()
105 | if (error) {
106 | request.log.error({ error, request: extractRequestForLogging(request) })
107 | reply.code(400)
108 | if (error.message.startsWith('Cannot find')) reply.code(404)
109 | return { error: error.message }
110 | }
111 |
112 | return data
113 | })
114 | }
115 |
--------------------------------------------------------------------------------
/src/lib/PostgresMetaMaterializedViews.ts:
--------------------------------------------------------------------------------
1 | import { filterByList, coalesceRowsToArray, filterByValue } from './helpers.js'
2 | import { PostgresMetaResult, PostgresMaterializedView } from './types.js'
3 | import { MATERIALIZED_VIEWS_SQL } from './sql/materialized_views.sql.js'
4 | import { COLUMNS_SQL } from './sql/columns.sql.js'
5 |
6 | export default class PostgresMetaMaterializedViews {
7 | query: (sql: string) => Promise>
8 |
9 | constructor(query: (sql: string) => Promise>) {
10 | this.query = query
11 | }
12 |
13 | async list({
14 | includedSchemas,
15 | excludedSchemas,
16 | limit,
17 | offset,
18 | includeColumns = false,
19 | }: {
20 | includedSchemas?: string[]
21 | excludedSchemas?: string[]
22 | limit?: number
23 | offset?: number
24 | includeColumns?: boolean
25 | } = {}): Promise> {
26 | const schemaFilter = filterByList(includedSchemas, excludedSchemas, undefined)
27 | let sql = generateEnrichedMaterializedViewsSql({ includeColumns, schemaFilter, limit, offset })
28 | return await this.query(sql)
29 | }
30 |
31 | async retrieve({ id }: { id: number }): Promise>
32 | async retrieve({
33 | name,
34 | schema,
35 | }: {
36 | name: string
37 | schema: string
38 | }): Promise>
39 | async retrieve({
40 | id,
41 | name,
42 | schema = 'public',
43 | }: {
44 | id?: number
45 | name?: string
46 | schema?: string
47 | }): Promise> {
48 | if (id) {
49 | const idsFilter = filterByValue([id])
50 | const sql = generateEnrichedMaterializedViewsSql({
51 | includeColumns: true,
52 | idsFilter,
53 | })
54 | const { data, error } = await this.query(sql)
55 | if (error) {
56 | return { data, error }
57 | } else if (data.length === 0) {
58 | return { data: null, error: { message: `Cannot find a materialized view with ID ${id}` } }
59 | } else {
60 | return { data: data[0], error }
61 | }
62 | } else if (name) {
63 | const materializedViewIdentifierFilter = filterByValue([`${schema}.${name}`])
64 | const sql = generateEnrichedMaterializedViewsSql({
65 | includeColumns: true,
66 | materializedViewIdentifierFilter,
67 | })
68 | const { data, error } = await this.query(sql)
69 | if (error) {
70 | return { data, error }
71 | } else if (data.length === 0) {
72 | return {
73 | data: null,
74 | error: { message: `Cannot find a materialized view named ${name} in schema ${schema}` },
75 | }
76 | } else {
77 | return { data: data[0], error }
78 | }
79 | } else {
80 | return { data: null, error: { message: 'Invalid parameters on materialized view retrieve' } }
81 | }
82 | }
83 | }
84 |
85 | const generateEnrichedMaterializedViewsSql = ({
86 | includeColumns,
87 | schemaFilter,
88 | materializedViewIdentifierFilter,
89 | idsFilter,
90 | limit,
91 | offset,
92 | }: {
93 | includeColumns: boolean
94 | schemaFilter?: string
95 | materializedViewIdentifierFilter?: string
96 | idsFilter?: string
97 | limit?: number
98 | offset?: number
99 | }) => `
100 | with materialized_views as (${MATERIALIZED_VIEWS_SQL({ schemaFilter, limit, offset, materializedViewIdentifierFilter, idsFilter })})
101 | ${includeColumns ? `, columns as (${COLUMNS_SQL({ schemaFilter, limit, offset, tableIdentifierFilter: materializedViewIdentifierFilter, tableIdFilter: idsFilter })})` : ''}
102 | select
103 | *
104 | ${
105 | includeColumns
106 | ? `, ${coalesceRowsToArray('columns', 'columns.table_id = materialized_views.id')}`
107 | : ''
108 | }
109 | from materialized_views`
110 |
--------------------------------------------------------------------------------
/.github/workflows/canary-deploy.yml:
--------------------------------------------------------------------------------
name: Canary Deploy

# Minimal permissions: read code/PR metadata, publish packages, and obtain an
# OIDC token for the AWS role assumption below.
permissions:
  contents: read
  pull-requests: read
  packages: write
  id-token: write

# `labeled` is included so adding the 'deploy-canary' label retriggers a build
# without a new push.
on:
  pull_request:
    types: [opened, synchronize, labeled]
    paths:
      - 'src/**'
      - 'package.json'
      - 'package-lock.json'
      - 'tsconfig.json'
      - 'Dockerfile'

jobs:
  build-canary:
    # Only run if PR has the 'deploy-canary' label, is on the correct repository, and targets master branch
    if: |
      github.repository == 'supabase/postgres-meta' &&
      github.event.pull_request.base.ref == 'master' &&
      contains(github.event.pull_request.labels.*.name, 'deploy-canary')
    runs-on: ubuntu-22.04
    timeout-minutes: 30
    outputs:
      canary-tag: ${{ steps.meta.outputs.tags }}
      pr-number: ${{ github.event.pull_request.number }}
    steps:
      # Checkout fork code - safe because no secrets are available for building
      # NOTE(review): for fork PRs the pull_request event runs without repository
      # secrets, so the registry logins below would fail there — confirm canary
      # deploys are only expected from same-repo branches.
      - name: Checkout code
        uses: actions/checkout@v5

      # Log PR author for auditing
      - name: Log PR author
        run: |
          echo "Canary deploy triggered by: ${{ github.event.pull_request.user.login }}"
          echo "PR #${{ github.event.pull_request.number }} from fork: ${{ github.event.pull_request.head.repo.full_name }}"

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version-file: '.nvmrc'
          cache: 'npm'

      - name: Install dependencies and build
        run: |
          npm clean-install
          npm run build

      # Generate canary tag
      # Two tags per registry: one pinned to the head SHA, one mutable per-PR
      # tag that always points at the latest build for that PR.
      - id: meta
        uses: docker/metadata-action@v5
        with:
          images: |
            supabase/postgres-meta
            public.ecr.aws/supabase/postgres-meta
            ghcr.io/supabase/postgres-meta
          tags: |
            type=raw,value=canary-pr-${{ github.event.pull_request.number }}-${{ github.event.pull_request.head.sha }}
            type=raw,value=canary-pr-${{ github.event.pull_request.number }}

      # QEMU + buildx enable the multi-arch (amd64/arm64) build below.
      - uses: docker/setup-qemu-action@v3
        with:
          platforms: amd64,arm64
      - uses: docker/setup-buildx-action@v3

      - name: Login to DockerHub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      # ECR auth comes from the assumed role (OIDC), not static credentials.
      - name: configure aws credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          role-to-assume: ${{ secrets.PROD_AWS_ROLE }}
          aws-region: us-east-1

      - name: Login to ECR
        uses: docker/login-action@v3
        with:
          registry: public.ecr.aws

      - name: Login to GHCR
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build and push canary image
        uses: docker/build-push-action@v6
        with:
          context: .
          push: true
          platforms: linux/amd64,linux/arm64
          tags: ${{ steps.meta.outputs.tags }}
          labels: |
            org.opencontainers.image.title=postgres-meta-canary
            org.opencontainers.image.description=Canary build for PR #${{ github.event.pull_request.number }}
            org.opencontainers.image.source=${{ github.event.pull_request.head.repo.html_url }}
            org.opencontainers.image.revision=${{ github.event.pull_request.head.sha }}
            canary.pr.number=${{ github.event.pull_request.number }}
            canary.pr.author=${{ github.event.pull_request.user.login }}
--------------------------------------------------------------------------------
/test/lib/extensions.ts:
--------------------------------------------------------------------------------
1 | import { expect, test } from 'vitest'
2 | import { pgMeta } from './utils'
3 |
// hstore ships with Postgres but is not installed by default, so list()
// reports it with a null installed_version and schema.
test('list', async () => {
  const res = await pgMeta.extensions.list()
  expect(res.data?.find(({ name }) => name === 'hstore')).toMatchInlineSnapshot(
    { default_version: expect.stringMatching(/^\d+.\d+$/) },
    `
    {
      "comment": "data type for storing sets of (key, value) pairs",
      "default_version": StringMatching /\\^\\\\d\\+\\.\\\\d\\+\\$/,
      "installed_version": null,
      "name": "hstore",
      "schema": null,
    }
  `
  )
})

// Full lifecycle: install at a pinned version, retrieve, update (upgrade to
// the default version and relocate into another schema), then drop and
// confirm the extension reads as uninstalled again.
test('retrieve, create, update, delete', async () => {
  // Target schema for the `ALTER EXTENSION ... SET SCHEMA` step below.
  const { data: testSchema } = await pgMeta.schemas.create({ name: 'extensions' })

  // Install pinned to 1.4 so the later `update: true` has something to upgrade.
  let res = await pgMeta.extensions.create({ name: 'hstore', version: '1.4' })
  expect(res).toMatchInlineSnapshot(
    {
      data: {
        default_version: expect.stringMatching(/^\d+.\d+$/),
      },
    },
    `
    {
      "data": {
        "comment": "data type for storing sets of (key, value) pairs",
        "default_version": StringMatching /\\^\\\\d\\+\\.\\\\d\\+\\$/,
        "installed_version": "1.4",
        "name": "hstore",
        "schema": "public",
      },
      "error": null,
    }
  `
  )
  res = await pgMeta.extensions.retrieve({ name: res.data!.name })
  expect(res).toMatchInlineSnapshot(
    {
      data: {
        default_version: expect.stringMatching(/^\d+.\d+$/),
      },
    },
    `
    {
      "data": {
        "comment": "data type for storing sets of (key, value) pairs",
        "default_version": StringMatching /\\^\\\\d\\+\\.\\\\d\\+\\$/,
        "installed_version": "1.4",
        "name": "hstore",
        "schema": "public",
      },
      "error": null,
    }
  `
  )
  // Upgrade and relocate; installed_version is now only known to match the
  // version pattern, not a fixed literal.
  res = await pgMeta.extensions.update(res.data!.name, { update: true, schema: 'extensions' })
  expect(res).toMatchInlineSnapshot(
    {
      data: {
        default_version: expect.stringMatching(/^\d+.\d+$/),
        installed_version: expect.stringMatching(/^\d+.\d+$/),
      },
    },
    `
    {
      "data": {
        "comment": "data type for storing sets of (key, value) pairs",
        "default_version": StringMatching /\\^\\\\d\\+\\.\\\\d\\+\\$/,
        "installed_version": StringMatching /\\^\\\\d\\+\\.\\\\d\\+\\$/,
        "name": "hstore",
        "schema": "extensions",
      },
      "error": null,
    }
  `
  )
  // remove() returns the state of the extension as it was before the drop.
  res = await pgMeta.extensions.remove(res.data!.name)
  expect(res).toMatchInlineSnapshot(
    {
      data: {
        default_version: expect.stringMatching(/^\d+.\d+$/),
        installed_version: expect.stringMatching(/^\d+.\d+$/),
      },
    },
    `
    {
      "data": {
        "comment": "data type for storing sets of (key, value) pairs",
        "default_version": StringMatching /\\^\\\\d\\+\\.\\\\d\\+\\$/,
        "installed_version": StringMatching /\\^\\\\d\\+\\.\\\\d\\+\\$/,
        "name": "hstore",
        "schema": "extensions",
      },
      "error": null,
    }
  `
  )
  // After the drop, retrieve() still succeeds (the extension is available)
  // but installed_version/schema are back to null.
  res = await pgMeta.extensions.retrieve({ name: res.data!.name })
  expect(res).toMatchInlineSnapshot(
    { data: { default_version: expect.stringMatching(/^\d+.\d+$/) } },
    `
    {
      "data": {
        "comment": "data type for storing sets of (key, value) pairs",
        "default_version": StringMatching /\\^\\\\d\\+\\.\\\\d\\+\\$/,
        "installed_version": null,
        "name": "hstore",
        "schema": null,
      },
      "error": null,
    }
  `
  )

  await pgMeta.schemas.remove(testSchema!.id)
})
124 |
--------------------------------------------------------------------------------
/test/server/ssl.ts:
--------------------------------------------------------------------------------
1 | import CryptoJS from 'crypto-js'
2 | import fs from 'node:fs'
3 | import path from 'node:path'
4 | import { fileURLToPath } from 'node:url'
5 | import { expect, test } from 'vitest'
6 | import { app } from './utils'
7 | import { CRYPTO_KEY, DEFAULT_POOL_CONFIG } from '../../src/server/constants'
8 |
// @ts-ignore: Harmless type error on import.meta.
const cwd = path.dirname(fileURLToPath(import.meta.url))
// Self-signed root certificate used by the test database (see test/db).
const sslRootCertPath = path.join(cwd, '../db/server.crt')
const sslRootCert = fs.readFileSync(sslRootCertPath, { encoding: 'utf8' })

// Baseline: a plain (non-SSL) encrypted connection string works end to end.
test('query with no ssl', async () => {
  const res = await app.inject({
    method: 'POST',
    path: '/query',
    headers: {
      'x-connection-encrypted': CryptoJS.AES.encrypt(
        'postgresql://postgres:postgres@localhost:5432/postgres',
        CRYPTO_KEY
      ).toString(),
    },
    payload: { query: 'select 1;' },
  })
  expect(res.json()).toMatchInlineSnapshot(`
    [
      {
        "?column?": 1,
      },
    ]
  `)
})

// verify-full without a trusted root cert must be rejected: the test DB uses
// a self-signed certificate. (Regex covers both Node error spellings,
// "self signed certificate" and "self-signed certificate".)
test('query with ssl w/o root cert', async () => {
  const res = await app.inject({
    method: 'POST',
    path: '/query',
    headers: {
      'x-connection-encrypted': CryptoJS.AES.encrypt(
        'postgresql://postgres:postgres@localhost:5432/postgres?sslmode=verify-full',
        CRYPTO_KEY
      ).toString(),
    },
    payload: { query: 'select 1;' },
  })
  expect(res.json()?.error).toMatch(/^self[ -]signed certificate$/)
})

// With the test DB's root cert installed in the pool config, verify-full
// succeeds. The global DEFAULT_POOL_CONFIG is mutated and restored at the
// end, so this test must not run concurrently with others relying on it.
test('query with ssl with root cert', async () => {
  const defaultSsl = DEFAULT_POOL_CONFIG.ssl
  DEFAULT_POOL_CONFIG.ssl = { ca: sslRootCert }

  const res = await app.inject({
    method: 'POST',
    path: '/query',
    headers: {
      'x-connection-encrypted': CryptoJS.AES.encrypt(
        `postgresql://postgres:postgres@localhost:5432/postgres?sslmode=verify-full`,
        CRYPTO_KEY
      ).toString(),
    },
    payload: { query: 'select 1;' },
  })
  expect(res.json()).toMatchInlineSnapshot(`
    [
      {
        "?column?": 1,
      },
    ]
  `)

  DEFAULT_POOL_CONFIG.ssl = defaultSsl
})

// Malformed connection strings are rejected with a generic 500 and no
// detail leaked about the upstream connection.
test('query with invalid space empty encrypted connection string', async () => {
  const res = await app.inject({
    method: 'POST',
    path: '/query',
    headers: {
      'x-connection-encrypted': CryptoJS.AES.encrypt(` `, CRYPTO_KEY).toString(),
    },
    payload: { query: 'select 1;' },
  })
  expect(res.statusCode).toBe(500)
  expect(res.json()).toMatchInlineSnapshot(`
    {
      "error": "failed to get upstream connection details",
    }
  `)
})

test('query with invalid empty encrypted connection string', async () => {
  const res = await app.inject({
    method: 'POST',
    path: '/query',
    headers: {
      'x-connection-encrypted': CryptoJS.AES.encrypt(``, CRYPTO_KEY).toString(),
    },
    payload: { query: 'select 1;' },
  })
  expect(res.statusCode).toBe(500)
  expect(res.json()).toMatchInlineSnapshot(`
    {
      "error": "failed to get upstream connection details",
    }
  `)
})

// A URL that parses but lacks a host fails later in processing — note the
// different error message from the unparseable cases above.
test('query with missing host connection string encrypted connection string', async () => {
  const res = await app.inject({
    method: 'POST',
    path: '/query',
    headers: {
      'x-connection-encrypted': CryptoJS.AES.encrypt(
        `postgres://name:password@:5432/postgres?sslmode=prefer`,
        CRYPTO_KEY
      ).toString(),
    },
    payload: { query: 'select 1;' },
  })
  expect(res.statusCode).toBe(500)
  expect(res.json()).toMatchInlineSnapshot(`
    {
      "error": "failed to process upstream connection details",
    }
  `)
})
129 |
--------------------------------------------------------------------------------
/test/utils.test.ts:
--------------------------------------------------------------------------------
1 | import { expect, test, describe } from 'vitest'
2 | import { FastifyRequest } from 'fastify'
3 | import {
4 | extractRequestForLogging,
5 | createConnectionConfig,
6 | translateErrorToResponseCode,
7 | } from '../src/server/utils.js'
8 |
describe('server/utils', () => {
  describe('extractRequestForLogging', () => {
    test('should extract request information for logging', () => {
      // Minimal stand-in for FastifyRequest carrying only the fields the
      // helper reads; the cast narrows the literal to the full type.
      const mockRequest = {
        method: 'GET',
        url: '/test',
        headers: {
          'user-agent': 'test-agent',
          'x-supabase-info': 'test-info',
        },
        query: { param: 'value' },
      } as FastifyRequest

      const result = extractRequestForLogging(mockRequest)
      expect(result).toHaveProperty('method')
      expect(result).toHaveProperty('url')
      expect(result).toHaveProperty('pg')
      expect(result).toHaveProperty('opt')
    })

    test('should handle request with minimal properties', () => {
      const mockRequest = {
        method: 'POST',
        url: '/api/test',
        headers: {},
      } as FastifyRequest

      const result = extractRequestForLogging(mockRequest)
      expect(result.method).toBe('POST')
      expect(result.url).toBe('/api/test')
      // With no `pg` header, the logged connection info falls back to 'unknown'.
      expect(result.pg).toBe('unknown')
    })
  })

  describe('createConnectionConfig', () => {
    test('should create connection config from request headers', () => {
      const mockRequest = {
        headers: {
          pg: 'postgresql://user:pass@localhost:5432/db',
          'x-pg-application-name': 'test-app',
        },
      } as FastifyRequest

      const result = createConnectionConfig(mockRequest)
      expect(result).toHaveProperty('connectionString')
      expect(result).toHaveProperty('application_name')
      expect(result.connectionString).toBe('postgresql://user:pass@localhost:5432/db')
      expect(result.application_name).toBe('test-app')
    })

    test('should handle request without application name', () => {
      const mockRequest = {
        headers: {
          pg: 'postgresql://user:pass@localhost:5432/db',
        },
      } as FastifyRequest

      const result = createConnectionConfig(mockRequest)
      expect(result).toHaveProperty('connectionString')
      expect(result.connectionString).toBe('postgresql://user:pass@localhost:5432/db')
      // application_name should have default value if not provided
      // NOTE(review): this pins the version string baked into the default
      // ('postgres-meta 0.0.0-automated'); confirm it tracks package.json.
      expect(result.application_name).toBe('postgres-meta 0.0.0-automated')
    })
  })

  describe('translateErrorToResponseCode', () => {
    // Each case matches on the error message text the pg driver emits.
    test('should return 504 for connection timeout errors', () => {
      const error = { message: 'Connection terminated due to connection timeout' }
      const result = translateErrorToResponseCode(error)
      expect(result).toBe(504)
    })

    test('should return 503 for too many clients errors', () => {
      const error = { message: 'sorry, too many clients already' }
      const result = translateErrorToResponseCode(error)
      expect(result).toBe(503)
    })

    test('should return 408 for query timeout errors', () => {
      const error = { message: 'Query read timeout' }
      const result = translateErrorToResponseCode(error)
      expect(result).toBe(408)
    })

    test('should return default 400 for other errors', () => {
      const error = { message: 'database connection failed' }
      const result = translateErrorToResponseCode(error)
      expect(result).toBe(400)
    })

    test('should return custom default for other errors', () => {
      // The second argument overrides the fallback status code.
      const error = { message: 'some other error' }
      const result = translateErrorToResponseCode(error, 500)
      expect(result).toBe(500)
    })
  })
})
106 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # `postgres-meta`
2 |
3 | A RESTful API for managing your Postgres. Fetch tables, add roles, and run queries (and more).
4 |
5 | ## Documentation
6 |
7 | https://supabase.github.io/postgres-meta/
8 |
9 | ## Progress
10 |
11 | Schema:
12 |
13 | - [X] `POST /query` (Execute SQL query)
14 | - [x] `POST /format` (Format SQL query)
15 | - [x] `POST /parse` (Parse SQL query into AST)
16 | - [ ] `POST /explain` (Explain SQL query)
17 | - [X] `/columns`
18 | - [X] GET (List)
19 | - [X] POST (`alter table add column`)
20 | - [X] PATCH (`alter table alter/rename column`)
21 | - [X] DELETE (`alter table drop column`)
22 | - [X] `/extensions`
23 | - [X] GET (List)
24 | - [X] POST (`create extension`)
25 | - [X] PATCH (`alter extension`)
26 | - [X] DELETE (`drop extension`)
27 | - [X] `/functions`
28 | - [X] GET (List)
29 | - [X] POST (`create function`)
30 | - [X] PATCH (`alter function`)
31 | - [X] DELETE (`drop function`)
32 | - [X] `/publications`
33 | - [X] GET (List)
34 | - [X] POST (`create publication`)
35 | - [X] PATCH (`alter publication`)
36 | - [X] DELETE (`drop publication`)
37 | - [X] `/roles`
38 | - [X] GET (List)
39 | - [X] POST (`create role`)
40 | - [X] PATCH (`alter role`)
41 | - [X] DELETE (`drop role`)
42 | - [X] `/schemas`
43 | - [X] GET (List)
44 | - [X] POST (`create schema`)
45 | - [X] PATCH (`alter schema`)
46 | - [X] DELETE (`drop schema`)
47 | - [X] `/tables`
48 | - [X] GET (List)
49 | - [X] POST (`create table`)
50 | - [X] PATCH (`alter table`)
51 | - [X] DELETE (`drop table`)
52 | - [X] `/triggers`
53 | - [X] GET (List)
54 | - [X] POST (`create trigger`)
55 | - [X] PATCH (`alter trigger`)
56 | - [X] DELETE (`drop trigger`)
57 | - [ ] `/types`
58 | - [X] GET (List)
59 | - [ ] POST (`create type`)
60 | - [ ] PATCH (`alter type`)
61 | - [ ] DELETE (`drop type`)
62 |
63 | Helpers:
64 |
65 | - [ ] `/config`
66 | - [ ] GET `/version`: Postgres version
67 | - [ ] `/generators`
68 | - [ ] GET `/openapi`: Generate Open API
69 | - [ ] GET `/typescript`: Generate Typescript types
70 | - [ ] GET `/swift`: Generate Swift types (beta)
71 |
72 | ## Quickstart
73 |
74 | Set the following ENV VARS:
75 |
76 | ```bash
77 | PG_META_HOST="0.0.0.0"
78 | PG_META_PORT=8080
79 | PG_META_DB_HOST="postgres"
80 | PG_META_DB_NAME="postgres"
81 | PG_META_DB_USER="postgres"
82 | PG_META_DB_PORT=5432
83 | PG_META_DB_PASSWORD="postgres"
84 | ```
85 |
86 | Then run any of the binaries in the releases.
87 |
88 |
89 | ## FAQs
90 |
91 | **Why?**
92 |
93 | This serves as a light-weight connection pooler. It also normalises the Postgres system catalog into a more readable format. While there is a lot of re-inventing right now, this server will eventually provide helpers (such as type generators). The server is multi-tenant, so it can support multiple Postgres databases from a single server.
94 |
95 | **What security does this use?**
96 |
97 | None. Please don't use this as a standalone server. This should be used behind a proxy in a trusted environment, on your local machine, or using this internally with no access to the outside world.
98 |
99 | ## Developers
100 |
101 | To start developing, run `npm run dev`. It will set up the database with Docker for you. The server will restart on file change.
102 |
103 | If you are fixing a bug, you should create a new test case. To test your changes, add the `-u` flag to `vitest` on the `test:run` script, run `npm run test`, and then review the git diff of the snapshots. Depending on your change, you may see `id` fields being changed - this is expected and you are free to commit it, as long as it passes the CI. Don't forget to remove the `-u` flag when committing.
104 |
To make changes to the type generation, run `npm run gen:types:<lang>` while you have `npm run dev` running,
where `<lang>` is one of:
107 |
108 | - `typescript`
109 | - `go`
110 | - `swift` (beta)
111 |
To use your own database connection string instead of the provided test database, run:
`PG_META_DB_URL=postgresql://postgres:postgres@localhost:5432/postgres npm run gen:types:<lang>`
114 |
115 | ## Licence
116 |
117 | Apache 2.0
118 |
119 |
--------------------------------------------------------------------------------
/src/server/routes/index.ts:
--------------------------------------------------------------------------------
1 | import CryptoJS from 'crypto-js'
2 | import { FastifyInstance } from 'fastify'
3 | import ColumnPrivilegesRoute from './column-privileges.js'
4 | import ColumnRoute from './columns.js'
5 | import ConfigRoute from './config.js'
6 | import ExtensionsRoute from './extensions.js'
7 | import ForeignTablesRoute from './foreign-tables.js'
8 | import FunctionsRoute from './functions.js'
9 | import IndexesRoute from './indexes.js'
10 | import MaterializedViewsRoute from './materialized-views.js'
11 | import PoliciesRoute from './policies.js'
12 | import PublicationsRoute from './publications.js'
13 | import QueryRoute from './query.js'
14 | import SchemasRoute from './schemas.js'
15 | import RolesRoute from './roles.js'
16 | import TablePrivilegesRoute from './table-privileges.js'
17 | import TablesRoute from './tables.js'
18 | import TriggersRoute from './triggers.js'
19 | import TypesRoute from './types.js'
20 | import ViewsRoute from './views.js'
21 | import TypeScriptTypeGenRoute from './generators/typescript.js'
22 | import GoTypeGenRoute from './generators/go.js'
23 | import SwiftTypeGenRoute from './generators/swift.js'
24 | import PythonTypeGenRoute from './generators/python.js'
25 | import { PG_CONNECTION, CRYPTO_KEY } from '../constants.js'
26 |
27 | export default async (fastify: FastifyInstance) => {
28 | // Adds a "pg" object to the request if it doesn't exist
29 | fastify.addHook('onRequest', (request, _reply, done) => {
30 | try {
31 | // Node converts headers to lowercase
32 | const encryptedHeader = request.headers['x-connection-encrypted']?.toString()
33 | if (encryptedHeader) {
34 | try {
35 | request.headers.pg = CryptoJS.AES.decrypt(encryptedHeader, CRYPTO_KEY)
36 | .toString(CryptoJS.enc.Utf8)
37 | .trim()
38 | } catch (e: any) {
39 | request.log.warn({
40 | message: 'failed to parse encrypted connstring',
41 | error: e.toString(),
42 | })
43 | throw new Error('failed to process upstream connection details')
44 | }
45 | } else {
46 | request.headers.pg = PG_CONNECTION
47 | }
48 | if (!request.headers.pg) {
49 | request.log.error({ message: 'failed to get connection string' })
50 | throw new Error('failed to get upstream connection details')
51 | }
52 | // Ensure the resulting connection string is a valid URL
53 | try {
54 | new URL(request.headers.pg)
55 | } catch (error) {
56 | request.log.error({ message: 'pg connection string is invalid url' })
57 | throw new Error('failed to process upstream connection details')
58 | }
59 | return done()
60 | } catch (err) {
61 | return done(err as Error)
62 | }
63 | })
64 |
65 | fastify.register(ColumnPrivilegesRoute, { prefix: '/column-privileges' })
66 | fastify.register(ColumnRoute, { prefix: '/columns' })
67 | fastify.register(ConfigRoute, { prefix: '/config' })
68 | fastify.register(ExtensionsRoute, { prefix: '/extensions' })
69 | fastify.register(ForeignTablesRoute, { prefix: '/foreign-tables' })
70 | fastify.register(FunctionsRoute, { prefix: '/functions' })
71 | fastify.register(IndexesRoute, { prefix: '/indexes' })
72 | fastify.register(MaterializedViewsRoute, { prefix: '/materialized-views' })
73 | fastify.register(PoliciesRoute, { prefix: '/policies' })
74 | fastify.register(PublicationsRoute, { prefix: '/publications' })
75 | fastify.register(QueryRoute, { prefix: '/query' })
76 | fastify.register(SchemasRoute, { prefix: '/schemas' })
77 | fastify.register(RolesRoute, { prefix: '/roles' })
78 | fastify.register(TablePrivilegesRoute, { prefix: '/table-privileges' })
79 | fastify.register(TablesRoute, { prefix: '/tables' })
80 | fastify.register(TriggersRoute, { prefix: '/triggers' })
81 | fastify.register(TypesRoute, { prefix: '/types' })
82 | fastify.register(ViewsRoute, { prefix: '/views' })
83 | fastify.register(TypeScriptTypeGenRoute, { prefix: '/generators/typescript' })
84 | fastify.register(GoTypeGenRoute, { prefix: '/generators/go' })
85 | fastify.register(SwiftTypeGenRoute, { prefix: '/generators/swift' })
86 | fastify.register(PythonTypeGenRoute, { prefix: '/generators/python' })
87 | }
88 |
--------------------------------------------------------------------------------
/test/schemas.test.ts:
--------------------------------------------------------------------------------
1 | import { expect, test, describe } from 'vitest'
2 | import { build } from '../src/server/app.js'
3 | import { TEST_CONNECTION_STRING } from './lib/utils.js'
4 |
// Integration tests for the /schemas routes, driven through fastify's
// inject() so no real HTTP listener is needed.
describe('server/routes/schemas', () => {
  test('should list schemas', async () => {
    const app = build()
    const response = await app.inject({
      method: 'GET',
      url: '/schemas',
      headers: {
        // Plaintext connection string; consumed by the onRequest hook.
        pg: TEST_CONNECTION_STRING,
      },
    })
    expect(response.statusCode).toBe(200)
    expect(Array.isArray(JSON.parse(response.body))).toBe(true)
    // NOTE(review): app.close() is skipped if an expect above throws —
    // consider try/finally or an afterEach hook.
    await app.close()
  })

  test('should list schemas with query parameters', async () => {
    const app = build()
    const response = await app.inject({
      method: 'GET',
      url: '/schemas?include_system_schemas=true&limit=5&offset=0',
      headers: {
        pg: TEST_CONNECTION_STRING,
      },
    })
    expect(response.statusCode).toBe(200)
    expect(Array.isArray(JSON.parse(response.body))).toBe(true)
    await app.close()
  })

  test('should return 404 for non-existent schema', async () => {
    const app = build()
    const response = await app.inject({
      method: 'GET',
      url: '/schemas/non-existent-schema',
      headers: {
        pg: TEST_CONNECTION_STRING,
      },
    })
    expect(response.statusCode).toBe(404)
    await app.close()
  })

  // Full lifecycle: create, retrieve by id, rename, then drop.
  test('should create schema, retrieve, update, delete', async () => {
    const app = build()
    const response = await app.inject({
      method: 'POST',
      url: '/schemas',
      headers: {
        pg: TEST_CONNECTION_STRING,
      },
      payload: {
        name: 'test_schema',
      },
    })
    expect(response.statusCode).toBe(200)
    const responseData = response.json()
    expect(responseData).toMatchObject({
      id: expect.any(Number),
      name: 'test_schema',
      owner: 'postgres',
    })

    // The schema's OID, used as the path parameter for the remaining calls.
    const { id } = responseData

    const retrieveResponse = await app.inject({
      method: 'GET',
      url: `/schemas/${id}`,
      headers: {
        pg: TEST_CONNECTION_STRING,
      },
    })
    expect(retrieveResponse.statusCode).toBe(200)
    const retrieveData = retrieveResponse.json()
    expect(retrieveData).toMatchObject({
      id: expect.any(Number),
      name: 'test_schema',
      owner: 'postgres',
    })

    const updateResponse = await app.inject({
      method: 'PATCH',
      url: `/schemas/${id}`,
      headers: {
        pg: TEST_CONNECTION_STRING,
      },
      payload: {
        name: 'test_schema_updated',
      },
    })
    expect(updateResponse.statusCode).toBe(200)
    const updateData = updateResponse.json()
    expect(updateData).toMatchObject({
      id: expect.any(Number),
      name: 'test_schema_updated',
      owner: 'postgres',
    })

    // DELETE echoes the dropped schema's details back.
    const deleteResponse = await app.inject({
      method: 'DELETE',
      url: `/schemas/${id}`,
      headers: {
        pg: TEST_CONNECTION_STRING,
      },
    })
    expect(deleteResponse.statusCode).toBe(200)
    const deleteData = deleteResponse.json()
    expect(deleteData).toMatchObject({
      id: expect.any(Number),
      name: 'test_schema_updated',
      owner: 'postgres',
    })
  })

  // Postgres reserves the 'pg_' prefix for system schemas.
  test('should return 400 for invalid payload', async () => {
    const app = build()
    const response = await app.inject({
      method: 'POST',
      url: '/schemas',
      headers: {
        pg: TEST_CONNECTION_STRING,
      },
      payload: {
        name: 'pg_',
      },
    })
    expect(response.statusCode).toBe(400)
    expect(response.json()).toMatchInlineSnapshot(`
      {
        "error": "unacceptable schema name "pg_"",
      }
    `)
  })
})
138 |
--------------------------------------------------------------------------------
/src/lib/PostgresMetaForeignTables.ts:
--------------------------------------------------------------------------------
import { coalesceRowsToArray, filterByList, filterByValue } from './helpers.js'
import { COLUMNS_SQL } from './sql/columns.sql.js'
import { FOREIGN_TABLES_SQL } from './sql/foreign_tables.sql.js'
import { PostgresColumn, PostgresForeignTable, PostgresMetaResult } from './types.js'
5 |
6 | export default class PostgresMetaForeignTables {
7 | query: (sql: string) => Promise>
8 |
9 | constructor(query: (sql: string) => Promise>) {
10 | this.query = query
11 | }
12 |
13 | async list(options: {
14 | includedSchemas?: string[]
15 | excludedSchemas?: string[]
16 | limit?: number
17 | offset?: number
18 | includeColumns: false
19 | }): Promise>
20 | async list(options?: {
21 | includedSchemas?: string[]
22 | excludedSchemas?: string[]
23 | limit?: number
24 | offset?: number
25 | includeColumns?: boolean
26 | }): Promise>
27 | async list({
28 | includedSchemas,
29 | excludedSchemas,
30 | limit,
31 | offset,
32 | includeColumns = true,
33 | }: {
34 | includedSchemas?: string[]
35 | excludedSchemas?: string[]
36 | limit?: number
37 | offset?: number
38 | includeColumns?: boolean
39 | } = {}): Promise> {
40 | const schemaFilter = filterByList(includedSchemas, excludedSchemas)
41 | const sql = generateEnrichedForeignTablesSql({ includeColumns, schemaFilter, limit, offset })
42 | return await this.query(sql)
43 | }
44 |
45 | async retrieve({ id }: { id: number }): Promise>
46 | async retrieve({
47 | name,
48 | schema,
49 | }: {
50 | name: string
51 | schema: string
52 | }): Promise>
53 | async retrieve({
54 | id,
55 | name,
56 | schema = 'public',
57 | }: {
58 | id?: number
59 | name?: string
60 | schema?: string
61 | }): Promise> {
62 | if (id) {
63 | const idsFilter = filterByValue([`${id}`])
64 | const sql = generateEnrichedForeignTablesSql({
65 | includeColumns: true,
66 | idsFilter,
67 | })
68 | const { data, error } = await this.query(sql)
69 | if (error) {
70 | return { data, error }
71 | } else if (data.length === 0) {
72 | return { data: null, error: { message: `Cannot find a foreign table with ID ${id}` } }
73 | } else {
74 | return { data: data[0], error }
75 | }
76 | } else if (name) {
77 | const nameFilter = filterByValue([`${schema}.${name}`])
78 | const sql = generateEnrichedForeignTablesSql({
79 | includeColumns: true,
80 | tableIdentifierFilter: nameFilter,
81 | })
82 | const { data, error } = await this.query(sql)
83 | if (error) {
84 | return { data, error }
85 | } else if (data.length === 0) {
86 | return {
87 | data: null,
88 | error: { message: `Cannot find a foreign table named ${name} in schema ${schema}` },
89 | }
90 | } else {
91 | return { data: data[0], error }
92 | }
93 | } else {
94 | return { data: null, error: { message: 'Invalid parameters on foreign table retrieve' } }
95 | }
96 | }
97 | }
98 |
// Builds the SQL that lists foreign tables; when includeColumns is set, a
// second CTE over the columns catalog is joined in so each foreign table row
// carries its columns aggregated into an array.
const generateEnrichedForeignTablesSql = ({
  includeColumns,
  schemaFilter,
  idsFilter,
  tableIdentifierFilter,
  limit,
  offset,
}: {
  includeColumns: boolean
  schemaFilter?: string // SQL fragment from filterByList — TODO confirm shape
  idsFilter?: string // SQL fragment from filterByValue over table OIDs
  tableIdentifierFilter?: string // SQL fragment over "schema.name" identifiers
  limit?: number
  offset?: number
}) => `
with foreign_tables as (${FOREIGN_TABLES_SQL({ schemaFilter, tableIdentifierFilter, limit, offset })})
${includeColumns ? `, columns as (${COLUMNS_SQL({ schemaFilter, tableIdentifierFilter, tableIdFilter: idsFilter })})` : ''}
select
  *
${
  includeColumns
    ? `, ${coalesceRowsToArray('columns', 'columns.table_id = foreign_tables.id')}`
    : ''
}
from foreign_tables`
124 |
--------------------------------------------------------------------------------
/test/db/server.crt:
--------------------------------------------------------------------------------
1 | Certificate:
2 | Data:
3 | Version: 3 (0x2)
4 | Serial Number:
5 | 41:a3:0e:d5:2f:07:82:95:c1:cc:c8:62:02:04:eb:7b:25:dc:3e:6b
6 | Signature Algorithm: sha256WithRSAEncryption
7 | Issuer: CN = localhost
8 | Validity
9 | Not Before: Aug 2 10:31:43 2023 GMT
10 | Not After : Jul 30 10:31:43 2033 GMT
11 | Subject: CN = localhost
12 | Subject Public Key Info:
13 | Public Key Algorithm: rsaEncryption
14 | Public-Key: (2048 bit)
15 | Modulus:
16 | 00:d6:4a:ec:0d:40:a8:b1:cd:e8:e9:16:41:a9:6b:
17 | ed:7f:c4:ee:4e:b6:4e:83:5a:7c:37:81:8c:fd:90:
18 | 07:da:57:d3:1b:91:2f:77:6d:a1:b0:38:48:08:03:
19 | 1f:77:91:6a:91:39:54:06:87:20:33:c2:d9:20:e4:
20 | 06:15:f9:59:fb:0e:db:2e:a0:81:c0:6c:47:f6:bc:
21 | 00:0f:07:9a:36:a8:4c:c3:62:97:51:31:53:53:51:
22 | 2a:d6:ff:ca:e6:cf:b2:8e:d7:89:ae:2b:a4:15:ed:
23 | 7c:35:8e:5b:26:84:b1:4d:13:7a:3e:32:a3:56:53:
24 | c1:e8:98:f2:4a:03:56:53:2e:db:c7:96:7e:d2:df:
25 | ea:e5:d7:c2:35:93:61:0d:af:0c:c0:2e:b4:b2:a2:
26 | b1:5a:8b:38:fa:e6:1c:c7:1e:20:d8:0e:b2:97:f2:
27 | 82:6b:4a:1f:27:8c:c1:e4:63:df:42:9a:e3:6c:46:
28 | 74:46:fb:f5:0e:12:d4:b9:12:ce:dc:22:dd:f0:5c:
29 | 6e:e3:31:4f:1a:fa:de:31:15:ec:2a:9b:6c:ea:67:
30 | bf:67:f7:13:44:ba:01:4a:dd:76:32:a8:59:82:13:
31 | 81:f2:48:6d:f4:5d:f0:70:a1:7b:f0:be:46:3e:65:
32 | 36:ee:f3:2e:39:00:52:2a:00:f3:d3:83:c9:55:56:
33 | dd:93
34 | Exponent: 65537 (0x10001)
35 | X509v3 extensions:
36 | X509v3 Subject Key Identifier:
37 | 79:57:F3:18:B8:6B:FB:64:39:B0:E8:CC:24:18:ED:C0:C1:37:E2:0D
38 | X509v3 Authority Key Identifier:
39 | 79:57:F3:18:B8:6B:FB:64:39:B0:E8:CC:24:18:ED:C0:C1:37:E2:0D
40 | X509v3 Basic Constraints: critical
41 | CA:TRUE
42 | Signature Algorithm: sha256WithRSAEncryption
43 | Signature Value:
44 | 2b:d1:37:75:b5:92:9a:c9:ed:45:a6:46:ac:97:93:b9:bf:c0:
45 | f3:7f:47:c3:bd:fd:bd:6b:58:ad:49:79:9d:31:18:3c:b9:94:
46 | 4b:aa:ca:49:c9:04:c4:71:1f:62:9b:ce:3f:5a:24:ec:82:68:
47 | a7:74:45:dd:b1:02:8a:f0:f2:4f:7f:3d:28:94:b0:5b:47:51:
48 | f3:12:a5:ce:1b:32:9f:f8:c6:6a:61:c6:99:4c:f6:99:9e:44:
49 | e4:e9:01:0c:45:1c:a4:5f:f3:69:2e:3d:a7:5d:62:ab:fb:e4:
50 | ea:d2:56:0f:56:df:00:5d:fa:9e:62:2a:77:00:cd:cd:b4:d8:
51 | b6:47:4b:84:73:85:3e:eb:4c:3e:2b:67:46:84:b1:22:1a:04:
52 | 47:02:ca:a0:74:a5:97:28:89:56:aa:c6:4a:ce:97:9b:14:14:
53 | 96:d7:26:60:38:fd:ec:ae:7d:ea:47:68:16:1c:ee:47:19:10:
54 | 69:6a:25:67:71:ac:0b:f0:4a:b0:b3:e6:9b:5f:89:e8:e7:64:
55 | f7:92:37:0c:72:8c:d0:32:c5:10:79:c1:2e:22:05:65:50:db:
56 | d8:0e:bf:b6:d9:f1:7b:88:82:0e:be:06:9b:8c:96:e2:53:03:
57 | 1f:de:86:39:d8:7e:4b:48:bb:11:d9:5d:41:68:82:49:e4:2b:
58 | 33:79:1b:78
59 | -----BEGIN CERTIFICATE-----
60 | MIIDCTCCAfGgAwIBAgIUQaMO1S8HgpXBzMhiAgTreyXcPmswDQYJKoZIhvcNAQEL
61 | BQAwFDESMBAGA1UEAwwJbG9jYWxob3N0MB4XDTIzMDgwMjEwMzE0M1oXDTMzMDcz
62 | MDEwMzE0M1owFDESMBAGA1UEAwwJbG9jYWxob3N0MIIBIjANBgkqhkiG9w0BAQEF
63 | AAOCAQ8AMIIBCgKCAQEA1krsDUCosc3o6RZBqWvtf8TuTrZOg1p8N4GM/ZAH2lfT
64 | G5Evd22hsDhICAMfd5FqkTlUBocgM8LZIOQGFflZ+w7bLqCBwGxH9rwADweaNqhM
65 | w2KXUTFTU1Eq1v/K5s+yjteJriukFe18NY5bJoSxTRN6PjKjVlPB6JjySgNWUy7b
66 | x5Z+0t/q5dfCNZNhDa8MwC60sqKxWos4+uYcxx4g2A6yl/KCa0ofJ4zB5GPfQprj
67 | bEZ0Rvv1DhLUuRLO3CLd8Fxu4zFPGvreMRXsKpts6me/Z/cTRLoBSt12MqhZghOB
68 | 8kht9F3wcKF78L5GPmU27vMuOQBSKgDz04PJVVbdkwIDAQABo1MwUTAdBgNVHQ4E
69 | FgQUeVfzGLhr+2Q5sOjMJBjtwME34g0wHwYDVR0jBBgwFoAUeVfzGLhr+2Q5sOjM
70 | JBjtwME34g0wDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAK9E3
71 | dbWSmsntRaZGrJeTub/A839Hw739vWtYrUl5nTEYPLmUS6rKSckExHEfYpvOP1ok
72 | 7IJop3RF3bECivDyT389KJSwW0dR8xKlzhsyn/jGamHGmUz2mZ5E5OkBDEUcpF/z
73 | aS49p11iq/vk6tJWD1bfAF36nmIqdwDNzbTYtkdLhHOFPutMPitnRoSxIhoERwLK
74 | oHSllyiJVqrGSs6XmxQUltcmYDj97K596kdoFhzuRxkQaWolZ3GsC/BKsLPmm1+J
75 | 6Odk95I3DHKM0DLFEHnBLiIFZVDb2A6/ttnxe4iCDr4Gm4yW4lMDH96GOdh+S0i7
76 | EdldQWiCSeQrM3kbeA==
77 | -----END CERTIFICATE-----
78 |
--------------------------------------------------------------------------------
/src/server/routes/functions.ts:
--------------------------------------------------------------------------------
1 | import { FastifyInstance } from 'fastify'
2 | import { PostgresMeta } from '../../lib/index.js'
3 | import { createConnectionConfig } from '../utils.js'
4 | import { extractRequestForLogging } from '../utils.js'
5 |
6 | export default async (fastify: FastifyInstance) => {
7 | fastify.get<{
8 | Headers: { pg: string; 'x-pg-application-name'?: string }
9 | Querystring: {
10 | include_system_schemas?: string
11 | // Note: this only supports comma separated values (e.g., ".../functions?included_schemas=public,core")
12 | included_schemas?: string
13 | excluded_schemas?: string
14 | limit?: number
15 | offset?: number
16 | }
17 | }>('/', async (request, reply) => {
18 | const config = createConnectionConfig(request)
19 | const includeSystemSchemas = request.query.include_system_schemas === 'true'
20 | const includedSchemas = request.query.included_schemas?.split(',')
21 | const excludedSchemas = request.query.excluded_schemas?.split(',')
22 | const limit = request.query.limit
23 | const offset = request.query.offset
24 |
25 | const pgMeta = new PostgresMeta(config)
26 | const { data, error } = await pgMeta.functions.list({
27 | includeSystemSchemas,
28 | includedSchemas,
29 | excludedSchemas,
30 | limit,
31 | offset,
32 | })
33 | await pgMeta.end()
34 | if (error) {
35 | request.log.error({ error, request: extractRequestForLogging(request) })
36 | reply.code(500)
37 | return { error: error.message }
38 | }
39 |
40 | return data
41 | })
42 |
43 | fastify.get<{
44 | Headers: { pg: string; 'x-pg-application-name'?: string }
45 | Params: {
46 | id: string
47 | }
48 | }>('/:id(\\d+)', async (request, reply) => {
49 | const config = createConnectionConfig(request)
50 | const id = Number(request.params.id)
51 |
52 | const pgMeta = new PostgresMeta(config)
53 | const { data, error } = await pgMeta.functions.retrieve({ id })
54 | await pgMeta.end()
55 | if (error) {
56 | request.log.error({ error, request: extractRequestForLogging(request) })
57 | reply.code(404)
58 | return { error: error.message }
59 | }
60 |
61 | return data
62 | })
63 |
64 | fastify.post<{
65 | Headers: { pg: string; 'x-pg-application-name'?: string }
66 | Body: any
67 | }>('/', async (request, reply) => {
68 | const config = createConnectionConfig(request)
69 |
70 | const pgMeta = new PostgresMeta(config)
71 | const { data, error } = await pgMeta.functions.create(request.body as any)
72 | await pgMeta.end()
73 | if (error) {
74 | request.log.error({ error, request: extractRequestForLogging(request) })
75 | reply.code(400)
76 | return { error: error.message }
77 | }
78 | return data
79 | })
80 |
81 | fastify.patch<{
82 | Headers: { pg: string; 'x-pg-application-name'?: string }
83 | Params: {
84 | id: string
85 | }
86 | Body: any
87 | }>('/:id(\\d+)', async (request, reply) => {
88 | const config = createConnectionConfig(request)
89 | const id = Number(request.params.id)
90 |
91 | const pgMeta = new PostgresMeta(config)
92 | const { data, error } = await pgMeta.functions.update(id, request.body as any)
93 | await pgMeta.end()
94 | if (error) {
95 | request.log.error({ error, request: extractRequestForLogging(request) })
96 | reply.code(400)
97 | if (error.message.startsWith('Cannot find')) reply.code(404)
98 | return { error: error.message }
99 | }
100 | return data
101 | })
102 |
103 | fastify.delete<{
104 | Headers: { pg: string; 'x-pg-application-name'?: string }
105 | Params: {
106 | id: string
107 | }
108 | }>('/:id(\\d+)', async (request, reply) => {
109 | const config = createConnectionConfig(request)
110 | const id = Number(request.params.id)
111 |
112 | const pgMeta = new PostgresMeta(config)
113 | const { data, error } = await pgMeta.functions.remove(id)
114 | await pgMeta.end()
115 | if (error) {
116 | request.log.error({ error, request: extractRequestForLogging(request) })
117 | reply.code(400)
118 | if (error.message.startsWith('Cannot find')) reply.code(404)
119 | return { error: error.message }
120 | }
121 | return data
122 | })
123 | }
124 |
--------------------------------------------------------------------------------
/src/server/routes/policies.ts:
--------------------------------------------------------------------------------
1 | import { FastifyInstance } from 'fastify'
2 | import { PostgresMeta } from '../../lib/index.js'
3 | import { createConnectionConfig } from '../utils.js'
4 | import { extractRequestForLogging } from '../utils.js'
5 |
6 | export default async (fastify: FastifyInstance) => {
7 | fastify.get<{
8 | Headers: { pg: string; 'x-pg-application-name'?: string }
9 | Querystring: {
10 | include_system_schemas?: string
11 | // Note: this only supports comma separated values (e.g., ".../policies?included_schemas=public,core")
12 | included_schemas?: string
13 | excluded_schemas?: string
14 | limit?: number
15 | offset?: number
16 | }
17 | }>('/', async (request, reply) => {
18 | const config = createConnectionConfig(request)
19 | const includeSystemSchemas = request.query.include_system_schemas === 'true'
20 | const includedSchemas = request.query.included_schemas?.split(',')
21 | const excludedSchemas = request.query.excluded_schemas?.split(',')
22 | const limit = request.query.limit
23 | const offset = request.query.offset
24 |
25 | const pgMeta = new PostgresMeta(config)
26 | const { data, error } = await pgMeta.policies.list({
27 | includeSystemSchemas,
28 | includedSchemas,
29 | excludedSchemas,
30 | limit,
31 | offset,
32 | })
33 | await pgMeta.end()
34 | if (error) {
35 | request.log.error({ error, request: extractRequestForLogging(request) })
36 | reply.code(500)
37 | return { error: error.message }
38 | }
39 |
40 | return data
41 | })
42 |
43 | fastify.get<{
44 | Headers: { pg: string; 'x-pg-application-name'?: string }
45 | Params: {
46 | id: string
47 | }
48 | }>('/:id(\\d+)', async (request, reply) => {
49 | const config = createConnectionConfig(request)
50 | const id = Number(request.params.id)
51 |
52 | const pgMeta = new PostgresMeta(config)
53 | const { data, error } = await pgMeta.policies.retrieve({ id })
54 | await pgMeta.end()
55 | if (error) {
56 | request.log.error({ error, request: extractRequestForLogging(request) })
57 | reply.code(404)
58 | return { error: error.message }
59 | }
60 |
61 | return data
62 | })
63 |
64 | fastify.post<{
65 | Headers: { pg: string; 'x-pg-application-name'?: string }
66 | Body: any
67 | }>('/', async (request, reply) => {
68 | const config = createConnectionConfig(request)
69 |
70 | const pgMeta = new PostgresMeta(config)
71 | const { data, error } = await pgMeta.policies.create(request.body as any)
72 | await pgMeta.end()
73 | if (error) {
74 | request.log.error({ error, request: extractRequestForLogging(request) })
75 | reply.code(400)
76 | return { error: error.message }
77 | }
78 |
79 | return data
80 | })
81 |
82 | fastify.patch<{
83 | Headers: { pg: string; 'x-pg-application-name'?: string }
84 | Params: {
85 | id: string
86 | }
87 | Body: any
88 | }>('/:id(\\d+)', async (request, reply) => {
89 | const config = createConnectionConfig(request)
90 | const id = Number(request.params.id)
91 |
92 | const pgMeta = new PostgresMeta(config)
93 | const { data, error } = await pgMeta.policies.update(id, request.body as any)
94 | await pgMeta.end()
95 | if (error) {
96 | request.log.error({ error, request: extractRequestForLogging(request) })
97 | reply.code(400)
98 | if (error.message.startsWith('Cannot find')) reply.code(404)
99 | return { error: error.message }
100 | }
101 |
102 | return data
103 | })
104 |
105 | fastify.delete<{
106 | Headers: { pg: string; 'x-pg-application-name'?: string }
107 | Params: {
108 | id: string
109 | }
110 | }>('/:id(\\d+)', async (request, reply) => {
111 | const config = createConnectionConfig(request)
112 | const id = Number(request.params.id)
113 |
114 | const pgMeta = new PostgresMeta(config)
115 | const { data, error } = await pgMeta.policies.remove(id)
116 | await pgMeta.end()
117 | if (error) {
118 | request.log.error({ error, request: extractRequestForLogging(request) })
119 | reply.code(400)
120 | if (error.message.startsWith('Cannot find')) reply.code(404)
121 | return { error: error.message }
122 | }
123 |
124 | return data
125 | })
126 | }
127 |
--------------------------------------------------------------------------------
/src/lib/PostgresMetaSchemas.ts:
--------------------------------------------------------------------------------
1 | import { ident } from 'pg-format'
2 | import { SCHEMAS_SQL } from './sql/schemas.sql.js'
3 | import {
4 | PostgresMetaResult,
5 | PostgresSchema,
6 | PostgresSchemaCreate,
7 | PostgresSchemaUpdate,
8 | } from './types.js'
9 | import { filterByList, filterByValue } from './helpers.js'
10 | import { DEFAULT_SYSTEM_SCHEMAS } from './constants.js'
11 |
12 | export default class PostgresMetaSchemas {
13 | query: (sql: string) => Promise>
14 |
15 | constructor(query: (sql: string) => Promise>) {
16 | this.query = query
17 | }
18 |
19 | async list({
20 | includedSchemas,
21 | excludedSchemas,
22 | includeSystemSchemas = false,
23 | limit,
24 | offset,
25 | }: {
26 | includedSchemas?: string[]
27 | excludedSchemas?: string[]
28 | includeSystemSchemas?: boolean
29 | limit?: number
30 | offset?: number
31 | } = {}): Promise> {
32 | const schemaFilter = filterByList(
33 | includedSchemas,
34 | excludedSchemas,
35 | !includeSystemSchemas ? DEFAULT_SYSTEM_SCHEMAS : undefined
36 | )
37 | const sql = SCHEMAS_SQL({ limit, offset, includeSystemSchemas, nameFilter: schemaFilter })
38 | return await this.query(sql)
39 | }
40 |
41 | async retrieve({ id }: { id: number }): Promise>
42 | async retrieve({ name }: { name: string }): Promise>
43 | async retrieve({
44 | id,
45 | name,
46 | }: {
47 | id?: number
48 | name?: string
49 | }): Promise> {
50 | if (id) {
51 | const idsFilter = filterByValue([id])
52 | const sql = SCHEMAS_SQL({ idsFilter })
53 | const { data, error } = await this.query(sql)
54 | if (error) {
55 | return { data, error }
56 | } else if (data.length === 0) {
57 | return { data: null, error: { message: `Cannot find a schema with ID ${id}` } }
58 | } else {
59 | return { data: data[0], error }
60 | }
61 | } else if (name) {
62 | const nameFilter = filterByValue([name])
63 | const sql = SCHEMAS_SQL({ nameFilter })
64 | const { data, error } = await this.query(sql)
65 | if (error) {
66 | return { data, error }
67 | } else if (data.length === 0) {
68 | return { data: null, error: { message: `Cannot find a schema named ${name}` } }
69 | } else {
70 | return { data: data[0], error }
71 | }
72 | } else {
73 | return { data: null, error: { message: 'Invalid parameters on schema retrieve' } }
74 | }
75 | }
76 |
77 | async create({
78 | name,
79 | owner = 'postgres',
80 | }: PostgresSchemaCreate): Promise> {
81 | const sql = `CREATE SCHEMA ${ident(name)} AUTHORIZATION ${ident(owner)};`
82 | const { error } = await this.query(sql)
83 | if (error) {
84 | return { data: null, error }
85 | }
86 | return await this.retrieve({ name })
87 | }
88 |
89 | async update(
90 | id: number,
91 | { name, owner }: PostgresSchemaUpdate
92 | ): Promise> {
93 | const { data: old, error } = await this.retrieve({ id })
94 | if (error) {
95 | return { data: null, error }
96 | }
97 | const nameSql =
98 | name === undefined ? '' : `ALTER SCHEMA ${ident(old!.name)} RENAME TO ${ident(name)};`
99 | const ownerSql =
100 | owner === undefined ? '' : `ALTER SCHEMA ${ident(old!.name)} OWNER TO ${ident(owner)};`
101 | const sql = `BEGIN; ${ownerSql} ${nameSql} COMMIT;`
102 | {
103 | const { error } = await this.query(sql)
104 | if (error) {
105 | return { data: null, error }
106 | }
107 | }
108 | return await this.retrieve({ id })
109 | }
110 |
111 | async remove(id: number, { cascade = false } = {}): Promise> {
112 | const { data: schema, error } = await this.retrieve({ id })
113 | if (error) {
114 | return { data: null, error }
115 | }
116 | const sql = `DROP SCHEMA ${ident(schema!.name)} ${cascade ? 'CASCADE' : 'RESTRICT'};`
117 | {
118 | const { error } = await this.query(sql)
119 | if (error) {
120 | return { data: null, error }
121 | }
122 | }
123 | return { data: schema!, error: null }
124 | }
125 | }
126 |
--------------------------------------------------------------------------------
/src/lib/sql/table.sql.ts:
--------------------------------------------------------------------------------
1 | import type { SQLQueryPropsWithSchemaFilterAndIdsFilter } from './common.js'
2 |
3 | export const TABLES_SQL = (
4 | props: SQLQueryPropsWithSchemaFilterAndIdsFilter & { tableIdentifierFilter?: string }
5 | ) => /* SQL */ `
6 | SELECT
7 | c.oid :: int8 AS id,
8 | nc.nspname AS schema,
9 | c.relname AS name,
10 | c.relrowsecurity AS rls_enabled,
11 | c.relforcerowsecurity AS rls_forced,
12 | CASE
13 | WHEN c.relreplident = 'd' THEN 'DEFAULT'
14 | WHEN c.relreplident = 'i' THEN 'INDEX'
15 | WHEN c.relreplident = 'f' THEN 'FULL'
16 | ELSE 'NOTHING'
17 | END AS replica_identity,
18 | pg_total_relation_size(format('%I.%I', nc.nspname, c.relname)) :: int8 AS bytes,
19 | pg_size_pretty(
20 | pg_total_relation_size(format('%I.%I', nc.nspname, c.relname))
21 | ) AS size,
22 | pg_stat_get_live_tuples(c.oid) AS live_rows_estimate,
23 | pg_stat_get_dead_tuples(c.oid) AS dead_rows_estimate,
24 | obj_description(c.oid) AS comment,
25 | coalesce(pk.primary_keys, '[]') as primary_keys,
26 | coalesce(
27 | jsonb_agg(relationships) filter (where relationships is not null),
28 | '[]'
29 | ) as relationships
30 | FROM
31 | pg_namespace nc
32 | JOIN pg_class c ON nc.oid = c.relnamespace
33 | left join (
34 | select
35 | c.oid::int8 as table_id,
36 | jsonb_agg(
37 | jsonb_build_object(
38 | 'table_id', c.oid::int8,
39 | 'schema', n.nspname,
40 | 'table_name', c.relname,
41 | 'name', a.attname
42 | )
43 | order by array_position(i.indkey, a.attnum)
44 | ) as primary_keys
45 | from
46 | pg_index i
47 | join pg_class c on i.indrelid = c.oid
48 | join pg_namespace n on c.relnamespace = n.oid
49 | join pg_attribute a on a.attrelid = c.oid and a.attnum = any(i.indkey)
50 | where
51 | ${props.schemaFilter ? `n.nspname ${props.schemaFilter} AND` : ''}
52 | ${props.tableIdentifierFilter ? `n.nspname || '.' || c.relname ${props.tableIdentifierFilter} AND` : ''}
53 | i.indisprimary
54 | group by c.oid
55 | ) as pk
56 | on pk.table_id = c.oid
57 | left join (
58 | select
59 | c.oid :: int8 as id,
60 | c.conname as constraint_name,
61 | nsa.nspname as source_schema,
62 | csa.relname as source_table_name,
63 | sa.attname as source_column_name,
64 | nta.nspname as target_table_schema,
65 | cta.relname as target_table_name,
66 | ta.attname as target_column_name
67 | from
68 | pg_constraint c
69 | join (
70 | pg_attribute sa
71 | join pg_class csa on sa.attrelid = csa.oid
72 | join pg_namespace nsa on csa.relnamespace = nsa.oid
73 | ) on sa.attrelid = c.conrelid and sa.attnum = any (c.conkey)
74 | join (
75 | pg_attribute ta
76 | join pg_class cta on ta.attrelid = cta.oid
77 | join pg_namespace nta on cta.relnamespace = nta.oid
78 | ) on ta.attrelid = c.confrelid and ta.attnum = any (c.confkey)
79 | where
80 | ${props.schemaFilter ? `nsa.nspname ${props.schemaFilter} OR nta.nspname ${props.schemaFilter} AND` : ''}
81 | ${props.tableIdentifierFilter ? `(nsa.nspname || '.' || csa.relname) ${props.tableIdentifierFilter} OR (nta.nspname || '.' || cta.relname) ${props.tableIdentifierFilter} AND` : ''}
82 | c.contype = 'f'
83 | ) as relationships
84 | on (relationships.source_schema = nc.nspname and relationships.source_table_name = c.relname)
85 | or (relationships.target_table_schema = nc.nspname and relationships.target_table_name = c.relname)
86 | WHERE
87 | ${props.schemaFilter ? `nc.nspname ${props.schemaFilter} AND` : ''}
88 | ${props.idsFilter ? `c.oid ${props.idsFilter} AND` : ''}
89 | ${props.tableIdentifierFilter ? `nc.nspname || '.' || c.relname ${props.tableIdentifierFilter} AND` : ''}
90 | c.relkind IN ('r', 'p')
91 | AND NOT pg_is_other_temp_schema(nc.oid)
92 | AND (
93 | pg_has_role(c.relowner, 'USAGE')
94 | OR has_table_privilege(
95 | c.oid,
96 | 'SELECT, INSERT, UPDATE, DELETE, TRUNCATE, REFERENCES, TRIGGER'
97 | )
98 | OR has_any_column_privilege(c.oid, 'SELECT, INSERT, UPDATE, REFERENCES')
99 | )
100 | group by
101 | c.oid,
102 | c.relname,
103 | c.relrowsecurity,
104 | c.relforcerowsecurity,
105 | c.relreplident,
106 | nc.nspname,
107 | pk.primary_keys
108 | ${props.limit ? `limit ${props.limit}` : ''}
109 | ${props.offset ? `offset ${props.offset}` : ''}
110 | `
111 |
--------------------------------------------------------------------------------
/src/server/routes/triggers.ts:
--------------------------------------------------------------------------------
1 | import { FastifyInstance } from 'fastify'
2 | import { PostgresMeta } from '../../lib/index.js'
3 | import { createConnectionConfig } from '../utils.js'
4 | import { extractRequestForLogging } from '../utils.js'
5 |
6 | export default async (fastify: FastifyInstance) => {
7 | fastify.get<{
8 | Headers: { pg: string; 'x-pg-application-name'?: string }
9 | Querystring: {
10 | include_system_schemas?: string
11 | // Note: this only supports comma separated values (e.g., ".../columns?included_schemas=public,core")
12 | included_schemas?: string
13 | excluded_schemas?: string
14 | limit?: number
15 | offset?: number
16 | }
17 | }>('/', async (request, reply) => {
18 | const config = createConnectionConfig(request)
19 | const includeSystemSchemas = request.query.include_system_schemas === 'true'
20 | const includedSchemas = request.query.included_schemas?.split(',')
21 | const excludedSchemas = request.query.excluded_schemas?.split(',')
22 | const limit = request.query.limit
23 | const offset = request.query.offset
24 |
25 | const pgMeta = new PostgresMeta(config)
26 | const { data, error } = await pgMeta.triggers.list({
27 | includeSystemSchemas,
28 | includedSchemas,
29 | excludedSchemas,
30 | limit,
31 | offset,
32 | })
33 | await pgMeta.end()
34 | if (error) {
35 | request.log.error({ error, request: extractRequestForLogging(request) })
36 | reply.code(500)
37 | return { error: error.message }
38 | }
39 |
40 | return data
41 | })
42 |
43 | fastify.get<{
44 | Headers: { pg: string; 'x-pg-application-name'?: string }
45 | Params: {
46 | id: string
47 | }
48 | }>('/:id(\\d+)', async (request, reply) => {
49 | const config = createConnectionConfig(request)
50 | const id = Number(request.params.id)
51 |
52 | const pgMeta = new PostgresMeta(config)
53 | const { data, error } = await pgMeta.triggers.retrieve({ id })
54 | await pgMeta.end()
55 | if (error) {
56 | request.log.error({ error, request: extractRequestForLogging(request) })
57 | reply.code(404)
58 | return { error: error.message }
59 | }
60 |
61 | return data
62 | })
63 |
64 | fastify.post<{
65 | Headers: { pg: string; 'x-pg-application-name'?: string }
66 | Body: any
67 | }>('/', async (request, reply) => {
68 | const config = createConnectionConfig(request)
69 |
70 | const pgMeta = new PostgresMeta(config)
71 | const { data, error } = await pgMeta.triggers.create(request.body as any)
72 | await pgMeta.end()
73 | if (error) {
74 | request.log.error({ error, request: extractRequestForLogging(request) })
75 | reply.code(400)
76 | return { error: error.message }
77 | }
78 |
79 | return data
80 | })
81 |
82 | fastify.patch<{
83 | Headers: { pg: string; 'x-pg-application-name'?: string }
84 | Params: {
85 | id: string
86 | }
87 | Body: any
88 | }>('/:id(\\d+)', async (request, reply) => {
89 | const config = createConnectionConfig(request)
90 | const id = Number(request.params.id)
91 |
92 | const pgMeta = new PostgresMeta(config)
93 | const { data, error } = await pgMeta.triggers.update(id, request.body as any)
94 | await pgMeta.end()
95 | if (error) {
96 | request.log.error({ error, request: extractRequestForLogging(request) })
97 | reply.code(400)
98 | if (error.message.startsWith('Cannot find')) reply.code(404)
99 | return { error: error.message }
100 | }
101 |
102 | return data
103 | })
104 |
105 | fastify.delete<{
106 | Headers: { pg: string; 'x-pg-application-name'?: string }
107 | Params: {
108 | id: string
109 | }
110 | Querystring: {
111 | cascade?: string
112 | }
113 | }>('/:id(\\d+)', async (request, reply) => {
114 | const config = createConnectionConfig(request)
115 | const id = Number(request.params.id)
116 | const cascade = request.query.cascade === 'true'
117 |
118 | const pgMeta = new PostgresMeta(config)
119 | const { data, error } = await pgMeta.triggers.remove(id, { cascade })
120 | await pgMeta.end()
121 | if (error) {
122 | request.log.error({ error, request: extractRequestForLogging(request) })
123 | reply.code(400)
124 | if (error.message.startsWith('Cannot find')) reply.code(404)
125 | return { error: error.message }
126 | }
127 |
128 | return data
129 | })
130 | }
131 |
--------------------------------------------------------------------------------
/test/config.test.ts:
--------------------------------------------------------------------------------
1 | import { expect, test, describe } from 'vitest'
2 | import { build } from '../src/server/app.js'
3 | import { TEST_CONNECTION_STRING } from './lib/utils.js'
4 |
// Integration tests for the GET /config route (server settings listing).
describe('server/routes/config', () => {
  test('should list config with query parameters', async () => {
    const app = build()
    // `limit`/`offset` page through the settings; here: first five entries.
    const response = await app.inject({
      method: 'GET',
      url: '/config?limit=5&offset=0',
      headers: {
        pg: TEST_CONNECTION_STRING,
      },
    })
    expect(response.statusCode).toBe(200)
    // NOTE(review): this snapshot pins the first five "Autovacuum" settings and
    // their default values — presumably tied to the Postgres build used by the
    // test database; confirm before upgrading that image.
    expect(response.json()).toMatchInlineSnapshot(`
      [
        {
          "boot_val": "on",
          "category": "Autovacuum",
          "context": "sighup",
          "enumvals": null,
          "extra_desc": null,
          "group": "Autovacuum",
          "max_val": null,
          "min_val": null,
          "name": "autovacuum",
          "pending_restart": false,
          "reset_val": "on",
          "setting": "on",
          "short_desc": "Starts the autovacuum subprocess.",
          "source": "default",
          "sourcefile": null,
          "sourceline": null,
          "subgroup": "",
          "unit": null,
          "vartype": "bool",
        },
        {
          "boot_val": "0.1",
          "category": "Autovacuum",
          "context": "sighup",
          "enumvals": null,
          "extra_desc": null,
          "group": "Autovacuum",
          "max_val": "100",
          "min_val": "0",
          "name": "autovacuum_analyze_scale_factor",
          "pending_restart": false,
          "reset_val": "0.1",
          "setting": "0.1",
          "short_desc": "Number of tuple inserts, updates, or deletes prior to analyze as a fraction of reltuples.",
          "source": "default",
          "sourcefile": null,
          "sourceline": null,
          "subgroup": "",
          "unit": null,
          "vartype": "real",
        },
        {
          "boot_val": "50",
          "category": "Autovacuum",
          "context": "sighup",
          "enumvals": null,
          "extra_desc": null,
          "group": "Autovacuum",
          "max_val": "2147483647",
          "min_val": "0",
          "name": "autovacuum_analyze_threshold",
          "pending_restart": false,
          "reset_val": "50",
          "setting": "50",
          "short_desc": "Minimum number of tuple inserts, updates, or deletes prior to analyze.",
          "source": "default",
          "sourcefile": null,
          "sourceline": null,
          "subgroup": "",
          "unit": null,
          "vartype": "integer",
        },
        {
          "boot_val": "200000000",
          "category": "Autovacuum",
          "context": "postmaster",
          "enumvals": null,
          "extra_desc": null,
          "group": "Autovacuum",
          "max_val": "2000000000",
          "min_val": "100000",
          "name": "autovacuum_freeze_max_age",
          "pending_restart": false,
          "reset_val": "200000000",
          "setting": "200000000",
          "short_desc": "Age at which to autovacuum a table to prevent transaction ID wraparound.",
          "source": "default",
          "sourcefile": null,
          "sourceline": null,
          "subgroup": "",
          "unit": null,
          "vartype": "integer",
        },
        {
          "boot_val": "3",
          "category": "Autovacuum",
          "context": "postmaster",
          "enumvals": null,
          "extra_desc": null,
          "group": "Autovacuum",
          "max_val": "262143",
          "min_val": "1",
          "name": "autovacuum_max_workers",
          "pending_restart": false,
          "reset_val": "3",
          "setting": "3",
          "short_desc": "Sets the maximum number of simultaneously running autovacuum worker processes.",
          "source": "default",
          "sourcefile": null,
          "sourceline": null,
          "subgroup": "",
          "unit": null,
          "vartype": "integer",
        },
      ]
    `)
    await app.close()
  })
})
128 |
--------------------------------------------------------------------------------
/src/lib/sql/columns.sql.ts:
--------------------------------------------------------------------------------
1 | import type { SQLQueryPropsWithSchemaFilter } from './common.js'
2 |
/**
 * Builds the SQL that lists columns, adapted from information_schema.columns.
 *
 * Each filter prop, when present, must be a pre-rendered SQL operator fragment
 * (e.g. "IN (...)"); it is appended directly after the expression it filters
 * on. Absent filters contribute nothing to the WHERE clause.
 */
export const COLUMNS_SQL = (
  props: SQLQueryPropsWithSchemaFilter & {
    tableIdFilter?: string // applied to the owning table's oid (c.oid)
    tableIdentifierFilter?: string // applied to 'schema.table'
    columnNameFilter?: string // applied to 'table.column'
    idsFilter?: string // applied to the synthetic column id 'tableOid.attnum'
  }
) => /* SQL */ `
-- Adapted from information_schema.columns

SELECT
  c.oid :: int8 AS table_id,
  nc.nspname AS schema,
  c.relname AS table,
  (c.oid || '.' || a.attnum) AS id,
  a.attnum AS ordinal_position,
  a.attname AS name,
  CASE
    WHEN a.atthasdef THEN pg_get_expr(ad.adbin, ad.adrelid)
    ELSE NULL
  END AS default_value,
  CASE
    WHEN t.typtype = 'd' THEN CASE
      WHEN bt.typelem <> 0 :: oid
      AND bt.typlen = -1 THEN 'ARRAY'
      WHEN nbt.nspname = 'pg_catalog' THEN format_type(t.typbasetype, NULL)
      ELSE 'USER-DEFINED'
    END
    ELSE CASE
      WHEN t.typelem <> 0 :: oid
      AND t.typlen = -1 THEN 'ARRAY'
      WHEN nt.nspname = 'pg_catalog' THEN format_type(a.atttypid, NULL)
      ELSE 'USER-DEFINED'
    END
  END AS data_type,
  COALESCE(bt.typname, t.typname) AS format,
  a.attidentity IN ('a', 'd') AS is_identity,
  CASE
    a.attidentity
    WHEN 'a' THEN 'ALWAYS'
    WHEN 'd' THEN 'BY DEFAULT'
    ELSE NULL
  END AS identity_generation,
  a.attgenerated IN ('s') AS is_generated,
  NOT (
    a.attnotnull
    OR t.typtype = 'd' AND t.typnotnull
  ) AS is_nullable,
  (
    c.relkind IN ('r', 'p')
    OR c.relkind IN ('v', 'f') AND pg_column_is_updatable(c.oid, a.attnum, FALSE)
  ) AS is_updatable,
  uniques.table_id IS NOT NULL AS is_unique,
  check_constraints.definition AS "check",
  array_to_json(
    array(
      SELECT
        enumlabel
      FROM
        pg_catalog.pg_enum enums
      WHERE
        enums.enumtypid = coalesce(bt.oid, t.oid)
        OR enums.enumtypid = coalesce(bt.typelem, t.typelem)
      ORDER BY
        enums.enumsortorder
    )
  ) AS enums,
  col_description(c.oid, a.attnum) AS comment
FROM
  pg_attribute a
  LEFT JOIN pg_attrdef ad ON a.attrelid = ad.adrelid
  AND a.attnum = ad.adnum
  JOIN (
    pg_class c
    JOIN pg_namespace nc ON c.relnamespace = nc.oid
  ) ON a.attrelid = c.oid
  JOIN (
    pg_type t
    JOIN pg_namespace nt ON t.typnamespace = nt.oid
  ) ON a.atttypid = t.oid
  LEFT JOIN (
    pg_type bt
    JOIN pg_namespace nbt ON bt.typnamespace = nbt.oid
  ) ON t.typtype = 'd'
  AND t.typbasetype = bt.oid
  LEFT JOIN (
    SELECT DISTINCT ON (table_id, ordinal_position)
      conrelid AS table_id,
      conkey[1] AS ordinal_position
    FROM pg_catalog.pg_constraint
    WHERE contype = 'u' AND cardinality(conkey) = 1
  ) AS uniques ON uniques.table_id = c.oid AND uniques.ordinal_position = a.attnum
  LEFT JOIN (
    -- We only select the first column check
    SELECT DISTINCT ON (table_id, ordinal_position)
      conrelid AS table_id,
      conkey[1] AS ordinal_position,
      substring(
        pg_get_constraintdef(pg_constraint.oid, true),
        8,
        length(pg_get_constraintdef(pg_constraint.oid, true)) - 8
      ) AS "definition"
    FROM pg_constraint
    WHERE contype = 'c' AND cardinality(conkey) = 1
    ORDER BY table_id, ordinal_position, oid asc
  ) AS check_constraints ON check_constraints.table_id = c.oid AND check_constraints.ordinal_position = a.attnum
WHERE
  ${props.schemaFilter ? `nc.nspname ${props.schemaFilter} AND` : ''}
  ${props.idsFilter ? `(c.oid || '.' || a.attnum) ${props.idsFilter} AND` : ''}
  ${props.columnNameFilter ? `(c.relname || '.' || a.attname) ${props.columnNameFilter} AND` : ''}
  ${props.tableIdFilter ? `c.oid ${props.tableIdFilter} AND` : ''}
  ${props.tableIdentifierFilter ? `nc.nspname || '.' || c.relname ${props.tableIdentifierFilter} AND` : ''}
  NOT pg_is_other_temp_schema(nc.oid)
  AND a.attnum > 0
  AND NOT a.attisdropped
  AND (c.relkind IN ('r', 'v', 'm', 'f', 'p'))
  AND (
    pg_has_role(c.relowner, 'USAGE')
    OR has_column_privilege(
      c.oid,
      a.attnum,
      'SELECT, INSERT, UPDATE, REFERENCES'
    )
  )
${props.limit ? `limit ${props.limit}` : ''}
${props.offset ? `offset ${props.offset}` : ''}
`
130 |
--------------------------------------------------------------------------------
/test/extensions.test.ts:
--------------------------------------------------------------------------------
1 | import { expect, test, describe } from 'vitest'
2 | import { build } from '../src/server/app.js'
3 | import { TEST_CONNECTION_STRING } from './lib/utils.js'
4 |
// Integration tests for the /extensions routes: list, retrieve, create
// (CREATE EXTENSION), update, and delete.
describe('server/routes/extensions', () => {
  test('should list extensions', async () => {
    const app = build()
    const response = await app.inject({
      method: 'GET',
      url: '/extensions',
      headers: {
        pg: TEST_CONNECTION_STRING,
      },
    })
    expect(response.statusCode).toBe(200)
    // Only the response shape is asserted here, not specific extensions.
    expect(Array.isArray(JSON.parse(response.body))).toBe(true)
    await app.close()
  })

  test('should list extensions with query parameters', async () => {
    const app = build()
    const response = await app.inject({
      method: 'GET',
      url: '/extensions?limit=5&offset=0',
      headers: {
        pg: TEST_CONNECTION_STRING,
      },
    })
    expect(response.statusCode).toBe(200)
    // Only the response shape is asserted here, not specific extensions.
    expect(Array.isArray(JSON.parse(response.body))).toBe(true)
    await app.close()
  })

  test('should return 404 for non-existent extension', async () => {
    const app = build()
    const response = await app.inject({
      method: 'GET',
      url: '/extensions/non-existent-extension',
      headers: {
        pg: TEST_CONNECTION_STRING,
      },
    })
    expect(response.statusCode).toBe(404)
    await app.close()
  })

  // Full lifecycle: create pgcrypto, retrieve it, patch its schema, drop it.
  test('should create extension, retrieve, update, delete', async () => {
    const app = build()
    const response = await app.inject({
      method: 'POST',
      url: '/extensions',
      headers: {
        pg: TEST_CONNECTION_STRING,
      },
      payload: { name: 'pgcrypto', version: '1.3' },
    })
    expect(response.statusCode).toBe(200)
    expect(response.json()).toMatchInlineSnapshot(`
      {
        "comment": "cryptographic functions",
        "default_version": "1.3",
        "installed_version": "1.3",
        "name": "pgcrypto",
        "schema": "public",
      }
    `)

    const retrieveResponse = await app.inject({
      method: 'GET',
      url: '/extensions/pgcrypto',
      headers: {
        pg: TEST_CONNECTION_STRING,
      },
    })
    expect(retrieveResponse.statusCode).toBe(200)
    expect(retrieveResponse.json()).toMatchInlineSnapshot(`
      {
        "comment": "cryptographic functions",
        "default_version": "1.3",
        "installed_version": "1.3",
        "name": "pgcrypto",
        "schema": "public",
      }
    `)

    // No-op schema move ('public' -> 'public'); the row is unchanged.
    const updateResponse = await app.inject({
      method: 'PATCH',
      url: '/extensions/pgcrypto',
      headers: {
        pg: TEST_CONNECTION_STRING,
      },
      payload: { schema: 'public' },
    })
    expect(updateResponse.statusCode).toBe(200)
    expect(updateResponse.json()).toMatchInlineSnapshot(`
      {
        "comment": "cryptographic functions",
        "default_version": "1.3",
        "installed_version": "1.3",
        "name": "pgcrypto",
        "schema": "public",
      }
    `)

    const deleteResponse = await app.inject({
      method: 'DELETE',
      url: '/extensions/pgcrypto',
      headers: {
        pg: TEST_CONNECTION_STRING,
      },
    })
    expect(deleteResponse.statusCode).toBe(200)
    expect(deleteResponse.json()).toMatchInlineSnapshot(`
      {
        "comment": "cryptographic functions",
        "default_version": "1.3",
        "installed_version": "1.3",
        "name": "pgcrypto",
        "schema": "public",
      }
    `)

    await app.close()
  })

  test('should return 400 for invalid extension name', async () => {
    const app = build()
    const response = await app.inject({
      method: 'POST',
      url: '/extensions',
      headers: {
        pg: TEST_CONNECTION_STRING,
      },
      payload: { name: 'invalid-extension', version: '1.3' },
    })
    expect(response.statusCode).toBe(400)
    // NOTE(review): the snapshot embeds the server's Postgres-14 extension
    // directory path — re-snapshot when the test database image changes.
    expect(response.json()).toMatchInlineSnapshot(`
      {
        "error": "could not open extension control file "/usr/share/postgresql/14/extension/invalid-extension.control": No such file or directory",
      }
    `)
    await app.close()
  })
})
145 |
--------------------------------------------------------------------------------