├── vite-env.d.ts
├── shared
│   ├── main_props.ts
│   └── types.ts
├── client
│   ├── assets
│   │   └── dbpill.png
│   ├── utils
│   │   ├── formatNumber.ts
│   │   ├── sqlHighlighter.tsx
│   │   └── HttpApi.ts
│   ├── main.tsx
│   ├── App.css
│   ├── context
│   │   └── AppContext.tsx
│   ├── components
│   │   ├── About.tsx
│   │   ├── QuerySuggestions.tsx
│   │   ├── QueryList.tsx
│   │   ├── SuggestionBox.tsx
│   │   └── Configs.tsx
│   ├── App.tsx
│   └── styles
│       └── Styled.tsx
├── landing
│   ├── client
│   │   ├── preview.png
│   │   ├── dbpill_web.png
│   │   └── downloads.html
│   ├── package.json
│   ├── .gitignore
│   ├── tsconfig.json
│   └── email.ts
├── node-sea.ts
├── tsconfig.node.json
├── .env_example
├── server
│   ├── apis
│   │   └── sockets.ts
│   ├── main_props.ts
│   ├── ssr.tsx
│   ├── database_helper.ts
│   ├── config_manager.ts
│   ├── args.ts
│   ├── llm.ts
│   ├── prompt_generator.ts
│   ├── query_analyzer.ts
│   └── query_logger.ts
├── make_executable.sh
├── sea-config.json
├── vite.config.ts
├── sea.entitlements
├── .gitignore
├── index.html
├── README.md
├── tsconfig.json
├── credentials
│   ├── proxy.crt
│   ├── proxy.key
│   └── index.ts
├── LICENSE
├── package.json
├── DIST.md
├── run.ts
├── proxy-standalone.ts
├── run_executable.ts
└── make_all_executables.sh
/vite-env.d.ts:
--------------------------------------------------------------------------------
1 | /// <reference types="vite/client" />
--------------------------------------------------------------------------------
/shared/main_props.ts:
--------------------------------------------------------------------------------
1 |
2 | export type MainProps = {
3 | args: any;
4 | };
--------------------------------------------------------------------------------
/client/assets/dbpill.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mayfer/dbpill/HEAD/client/assets/dbpill.png
--------------------------------------------------------------------------------
/landing/client/preview.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mayfer/dbpill/HEAD/landing/client/preview.png
--------------------------------------------------------------------------------
/landing/client/dbpill_web.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mayfer/dbpill/HEAD/landing/client/dbpill_web.png
--------------------------------------------------------------------------------
/shared/types.ts:
--------------------------------------------------------------------------------
1 | export type Greeting = { text: string }
2 |
3 | export type SocketTester = { counter: number }
--------------------------------------------------------------------------------
/node-sea.ts:
--------------------------------------------------------------------------------
1 | /*
2 | * Minimal ambient type declarations for the experimental `node:sea` module.
3 | */
4 |
5 | declare module 'node:sea' {
6 | export function isSea(): boolean;
7 | export function getAsset(key: string, encoding?: string): any;
8 | }
--------------------------------------------------------------------------------
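The declarations above cover the two `node:sea` calls a packaged single-executable build relies on (see `sea-config.json` and DIST.md). A minimal sketch of reading an embedded asset at runtime, falling back to disk during normal development; the helper name is illustrative, not part of the repo:

```ts
import { isSea, getAsset } from 'node:sea';
import fs from 'fs';

// Inside the SEA binary, assets come from the injected blob;
// under plain node/tsx they are read from dist/ on disk.
export function readIndexHtml(): string {
  if (isSea()) {
    // 'dist/index.html' is one of the keys declared in sea-config.json
    return getAsset('dist/index.html', 'utf8');
  }
  return fs.readFileSync('./dist/index.html', 'utf-8');
}
```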
/tsconfig.node.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "composite": true,
4 | "skipLibCheck": true,
5 | "module": "ESNext",
6 | "moduleResolution": "bundler",
7 | "allowSyntheticDefaultImports": true,
8 | "strict": true
9 | }
10 | }
11 |
--------------------------------------------------------------------------------
/.env_example:
--------------------------------------------------------------------------------
1 | AWS_ACCESS_KEY_ID=
2 | AWS_SECRET_ACCESS_KEY=
3 | AWS_DEFAULT_REGION=us-west-x
4 | S3_BUCKET=dbpill-releases
5 |
6 | MAC_CODESIGN_IDENTITY="Developer ID Application: Xyz Abc (1233454ASD)"
7 | MAC_NOTARIZE_PROFILE=dbpill # created via `xcrun notarytool store-credentials`
8 |
9 |
--------------------------------------------------------------------------------
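The notarization profile referenced by `MAC_NOTARIZE_PROFILE` is created once on the build machine with `notarytool`, roughly as below; the Apple ID, team ID, and app-specific password are placeholders:

```bash
# store a reusable keychain profile named "dbpill" for notarization
xcrun notarytool store-credentials dbpill \
  --apple-id "you@example.com" \
  --team-id "1233454ASD" \
  --password "app-specific-password"
```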
/client/utils/formatNumber.ts:
--------------------------------------------------------------------------------
1 | export const formatNumber = (num: number) => {
2 | if (!num) return '?';
3 | return num > 10
4 | ? Math.round(num).toLocaleString('en-US')
5 | : num.toLocaleString('en-US', {
6 | minimumFractionDigits: 2,
7 | maximumFractionDigits: 2,
8 | });
9 | };
--------------------------------------------------------------------------------
/client/main.tsx:
--------------------------------------------------------------------------------
1 | import React from 'react'
2 | import ReactDOM from 'react-dom/client'
3 | import App from './App.tsx'
4 |
5 | // @ts-ignore
6 | const initial_state = window.__INITIAL_STATE__
7 |
8 | ReactDOM.createRoot(document.getElementById('root')!).render(
9 |   <App {...initial_state} />
10 | )
11 |
--------------------------------------------------------------------------------
/landing/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "landing",
3 | "private": true,
4 | "devDependencies": {
5 | "@types/bun": "latest"
6 | },
7 | "peerDependencies": {
8 | "typescript": "^5"
9 | },
10 | "dependencies": {
11 | "@aws-sdk/client-ses": "^3.840.0",
12 | "stripe": "^18.3.0"
13 | }
14 | }
--------------------------------------------------------------------------------
/server/apis/sockets.ts:
--------------------------------------------------------------------------------
1 | import { SocketTester } from 'shared/types';
2 |
3 | export async function setup_sockets(io) {
4 | io.on('connection', (socket) => {
5 | let counter = 0;
6 |
7 | socket.on('disconnect', () => {
8 | });
9 |
10 | setInterval(() => {
11 | socket.emit('test', { counter: counter++ } as SocketTester);
12 | }, 500);
13 | });
14 | }
--------------------------------------------------------------------------------
/client/App.css:
--------------------------------------------------------------------------------
1 | @import url('https://fonts.googleapis.com/css2?family=Inconsolata:wght@200..900&display=swap');
2 |
3 | body, html {
4 | margin: 0;
5 | padding: 0;
6 | background-color: rgba(40, 40, 40, 1);
7 | font-family: "Inconsolata", monospace;
8 | font-optical-sizing: auto;
9 | font-weight: 400;
10 | font-style: normal;
11 | font-variation-settings: "wdth" 100;
12 | color: #fff;
13 | }
--------------------------------------------------------------------------------
/make_executable.sh:
--------------------------------------------------------------------------------
1 | npm run build
2 |
3 | npx esbuild run_executable.ts \
4 | --bundle --platform=node --format=cjs \
5 | --outfile=server.bundle.cjs
6 | node --experimental-sea-config sea-config.json
7 | cp $(command -v node) dbpill
8 | codesign --remove-signature dbpill
9 | npx postject dbpill NODE_SEA_BLOB sea-prep.blob --sentinel-fuse NODE_SEA_FUSE_fce680ab2cc467b6e072b8b5df1996b2 --macho-segment-name NODE_SEA
10 | codesign --sign - dbpill
--------------------------------------------------------------------------------
/sea-config.json:
--------------------------------------------------------------------------------
1 | {
2 | "main": "./server.bundle.cjs",
3 | "disableExperimentalSEAWarning": true,
4 | "output": "sea-prep.blob",
5 | "assets": {
6 | "dist/index.html": "./dist/index.html",
7 | "dist/index.js": "./dist/index.js",
8 | "dist/assets/index.css": "./dist/assets/index.css",
9 | "credentials/proxy.key": "./credentials/proxy.key",
10 | "credentials/proxy.crt": "./credentials/proxy.crt",
11 | "dist/assets/dbpill_diagram.svg": "./dist/assets/dbpill_diagram.svg"
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/vite.config.ts:
--------------------------------------------------------------------------------
1 | import { defineConfig } from 'vite'
2 | import react from '@vitejs/plugin-react'
3 |
4 | // this file is needed for React hot reloads
5 | const mode = "production";
6 |
7 | // https://vitejs.dev/config/
8 | export default defineConfig({
9 | plugins: [react()],
10 | base: '/client/',
11 | build: {
12 | rollupOptions: {
13 | output: {
14 | entryFileNames: `[name].js`,
15 | chunkFileNames: `[name].js`,
16 | assetFileNames: `assets/[name].[ext]`
17 | }
18 | }
19 | }
20 | })
21 |
--------------------------------------------------------------------------------
/landing/.gitignore:
--------------------------------------------------------------------------------
1 | # dependencies (bun install)
2 | node_modules
3 |
4 | # output
5 | out
6 | dist
7 | *.tgz
8 |
9 | # code coverage
10 | coverage
11 | *.lcov
12 |
13 | # logs
14 | logs
15 | _.log
16 | report.[0-9]_.[0-9]_.[0-9]_.[0-9]_.json
17 |
18 | # dotenv environment variable files
19 | .env
20 | .env.development.local
21 | .env.test.local
22 | .env.production.local
23 | .env.local
24 |
25 | # caches
26 | .eslintcache
27 | .cache
28 | *.tsbuildinfo
29 |
30 | # IntelliJ based IDEs
31 | .idea
32 |
33 | # Finder (MacOS) folder config
34 | .DS_Store
35 |
--------------------------------------------------------------------------------
/server/main_props.ts:
--------------------------------------------------------------------------------
1 | import { Greeting } from 'shared/types';
2 | import { MainProps } from 'shared/main_props';
3 | import argv from './args';
4 | import { ConfigManager } from './config_manager';
5 |
6 | let configManager: ConfigManager | null = null;
7 |
8 | export async function getMainProps(req) {
9 | // Initialize ConfigManager if not already done
10 | if (!configManager) {
11 | configManager = new ConfigManager('dbpill.sqlite.db');
12 | await configManager.initialize();
13 | }
14 |
15 | return {
16 | args: argv,
17 | } as MainProps;
18 | }
--------------------------------------------------------------------------------
/sea.entitlements:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 |     <key>com.apple.security.cs.allow-jit</key>
6 |     <true/>
7 |     <key>com.apple.security.cs.allow-unsigned-executable-memory</key>
8 |     <true/>
9 |     <key>com.apple.security.cs.allow-dyld-environment-variables</key>
10 |     <true/>
11 |     <key>com.apple.security.cs.disable-library-validation</key>
12 |     <true/>
13 | </dict>
14 | </plist>
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 | pnpm-debug.log*
8 | lerna-debug.log*
9 |
10 | node_modules
11 | dist
12 | dist-esm
13 | dist-ssr
14 | *.local
15 |
16 | # Editor directories and files
17 | .vscode/*
18 | !.vscode/extensions.json
19 | .idea
20 | .DS_Store
21 | *.suo
22 | *.ntvs*
23 | *.njsproj
24 | *.sln
25 | *.sw?
26 |
27 | bun.lockb
28 | **/credentials/*.json
29 |
30 | queries.db
31 | build
32 | server.bundle.*
33 |
34 | dbpill.sqlite.db
35 | purchases.db
36 | purchases.db-*
37 |
38 | package-lock.json
39 | bun.lock
40 |
41 | dbpill
42 | sea-prep.blob
43 | cache
44 |
45 | keys/
46 | .env
47 |
--------------------------------------------------------------------------------
/server/ssr.tsx:
--------------------------------------------------------------------------------
1 | import ReactDOMServer from 'react-dom/server'
2 | import App from '../client/App'
3 |
4 | // if you need to integrate other styling frameworks,
5 | // implement them here & add to the head tags for SSR
6 | import { ServerStyleSheet } from "styled-components";
7 | import {getMainProps} from "./main_props";
8 |
9 | const mainProps = await getMainProps({});
10 |
11 | export function render(url, context) {
12 | const sheet = new ServerStyleSheet();
13 | const body = ReactDOMServer.renderToString(
14 | sheet.collectStyles(
15 |       <App {...mainProps} />
16 | )
17 | )
18 | return {
19 | body,
20 | head: sheet.getStyleTags()
21 | }
22 | }
--------------------------------------------------------------------------------
/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | dbpill
8 |
14 |
15 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # dbpill
2 |
3 | This is a PostgreSQL proxy that intercepts all queries & provides a web interface to profile them, sort them, auto-suggest indexes to improve performance, and immediately apply changes & measure improvements, with instant rollback when performance isn't improved. See https://dbpill.com for more info
4 |
5 | # Quick run
6 |
7 | ```
8 | npm install
9 | npm run dev postgresql://user:pass@host:5432/dbname
10 | ```
11 |
12 | There are two main components:
13 |
14 | * The PostgreSQL `proxy` that intercepts & logs every query
15 | * The `webapp` which displays, analyzes & optimizes the queries
16 |
17 | # Requirements
18 |
19 | Node version 22+ is required (for node:sqlite built-in package)
20 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "target": "ES2020",
4 | "useDefineForClassFields": true,
5 | "lib": ["ES2020", "DOM", "DOM.Iterable"],
6 | "module": "ESNext",
7 | "skipLibCheck": true,
8 |
9 | /* Bundler mode */
10 | "moduleResolution": "bundler",
11 | "allowImportingTsExtensions": true,
12 | "resolveJsonModule": true,
13 | "isolatedModules": true,
14 | "noEmit": true,
15 | "jsx": "react-jsx",
16 |
17 | /* Linting */
18 | "strict": false,
19 | "noUnusedLocals": false,
20 | "noUnusedParameters": false,
21 | "noFallthroughCasesInSwitch": true,
22 | "baseUrl": "./"
23 | },
24 | "include": ["server", "client", "shared", "*.ts", "vite.config.ts", "credentials"]
25 | }
26 |
--------------------------------------------------------------------------------
/landing/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | // Enable latest features
4 | "lib": ["ESNext", "DOM"],
5 | "target": "ESNext",
6 | "module": "ESNext",
7 | "moduleDetection": "force",
8 | "jsx": "react-jsx",
9 | "allowJs": true,
10 |
11 | // Bundler mode
12 | "moduleResolution": "bundler",
13 | "allowImportingTsExtensions": true,
14 | "verbatimModuleSyntax": true,
15 | "noEmit": true,
16 |
17 | // Best practices
18 | "strict": true,
19 | "skipLibCheck": true,
20 | "noFallthroughCasesInSwitch": true,
21 |
22 | // Some stricter flags (disabled by default)
23 | "noUnusedLocals": false,
24 | "noUnusedParameters": false,
25 | "noPropertyAccessFromIndexSignature": false
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/credentials/proxy.crt:
--------------------------------------------------------------------------------
1 | -----BEGIN CERTIFICATE-----
2 | MIICpDCCAYwCCQCyhX6+r72uGjANBgkqhkiG9w0BAQsFADAUMRIwEAYDVQQDDAls
3 | b2NhbGhvc3QwHhcNMjUwNjI1MDgwMzA0WhcNMjYwNjI1MDgwMzA0WjAUMRIwEAYD
4 | VQQDDAlsb2NhbGhvc3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDR
5 | iEZwvPGt0hrYKruSCCOecSCDy6lvXWSwUALhevhCvcBSzBnq09H+eYA/EM5g/Qoy
6 | 1NLzta9w0A59daxluPcZx6ICX4RavPY3rCgepyGQBKzdOZvi9/+jc8cILCCi4TD/
7 | 0WX72MPdUMo58zfqUqjYmeoniIDmeAn8Q7bCVBmLY9RTM2Y2YeyJUzG5n06vCWW/
8 | eNimkIFUUlKmD24Jwa0+5f/3xYb7wfr6ibgibXhuYlq005O54AHDNZ8j8JhTzrXt
9 | U0eGiLK4seZuOkLQZCpbz0fDrL/BmIUnOcB+DEX1zo58zMTzqiqF4IHKJM9hBWUd
10 | j5fEjWKLCykUggvISsRLAgMBAAEwDQYJKoZIhvcNAQELBQADggEBANDT4I0njeJU
11 | i+bJPzeEu93QLGk3dmeUU2cdRafqHIUFDhAkGhVn3vJhVtk/a4OGUbSf9npjn1rW
12 | R2WuvcSOyG6pDpmPK2s6Z041sOamOD3Q+6tuRpZouJ7OSnVcq8nbmI+g4Fw2BNY1
13 | cJeXI6mH3gSOW6tai+1CZ1p2eq1g3Q5YkuyJcz/bgOSun6TuJ+SUgHNc9iSZA/VK
14 | vWLIJC90LLU8EclMp2HWWONVXw045b6+5iNj+eNKgPB85m+4PzuJCa/Pws3t0/6/
15 | c1md20nmAfpYv0YCRqNX673k8dGJVEeF0N92JYMuay+wztoSp6wFR05hDDR6dH41
16 | j9xSQqgsOtk=
17 | -----END CERTIFICATE-----
18 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2025 Murat Ayfer
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in
13 | all copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
21 | THE SOFTWARE.
22 |
23 | Commercial users are encouraged to support development via sponsorship or donation.
24 |
--------------------------------------------------------------------------------
/landing/email.ts:
--------------------------------------------------------------------------------
1 | import { SESClient, SendEmailCommand } from "@aws-sdk/client-ses";
2 |
3 | // AWS region defaults to us-east-1 but can be overridden via the AWS_REGION env var
4 | const sesRegion = process.env.AWS_REGION || "us-east-1";
5 | const sesClient = new SESClient({ region: sesRegion });
6 |
7 | /**
8 | * Send an email using AWS SES.
9 | *
10 | * The email is sent from help@dbpill.com which must be a verified identity
11 | * in the configured AWS SES account.
12 | */
13 | export async function sendEmail(to: string, subject: string, body: string) {
14 | const params = {
15 | Destination: {
16 | ToAddresses: [to],
17 | },
18 | Message: {
19 | Body: {
20 | Text: {
21 | Data: body,
22 | },
23 | },
24 | Subject: {
25 | Data: subject,
26 | },
27 | },
28 | Source: "help@dbpill.com",
29 | } as const;
30 | try {
31 | await sesClient.send(new SendEmailCommand({
32 | ...params,
33 | Destination: {
34 | ToAddresses: [...params.Destination.ToAddresses],
35 | },
36 | }));
37 | console.log(`[SES] Email sent to ${to}`);
38 | } catch (err) {
39 | console.error("[SES] Failed to send email", err);
40 | throw err;
41 | }
42 | }
--------------------------------------------------------------------------------
/credentials/proxy.key:
--------------------------------------------------------------------------------
1 | -----BEGIN PRIVATE KEY-----
2 | MIIEwAIBADANBgkqhkiG9w0BAQEFAASCBKowggSmAgEAAoIBAQDRiEZwvPGt0hrY
3 | KruSCCOecSCDy6lvXWSwUALhevhCvcBSzBnq09H+eYA/EM5g/Qoy1NLzta9w0A59
4 | daxluPcZx6ICX4RavPY3rCgepyGQBKzdOZvi9/+jc8cILCCi4TD/0WX72MPdUMo5
5 | 8zfqUqjYmeoniIDmeAn8Q7bCVBmLY9RTM2Y2YeyJUzG5n06vCWW/eNimkIFUUlKm
6 | D24Jwa0+5f/3xYb7wfr6ibgibXhuYlq005O54AHDNZ8j8JhTzrXtU0eGiLK4seZu
7 | OkLQZCpbz0fDrL/BmIUnOcB+DEX1zo58zMTzqiqF4IHKJM9hBWUdj5fEjWKLCykU
8 | ggvISsRLAgMBAAECggEBALnPNFAjbdpr2HxboMyGRTfrMB9QCugEFRc5TOSxjNh6
9 | tsN11YHl53HYQz8p4yolGtcq3gta0cKj7kWZhZELbq1A2E1scM2ahmmmVEmigmPZ
10 | kYX44WFr9Rn4wMZhg2uNcUtaYSrt9fsb+slbAvnb5Xl3PCtkpJI8XvaDAd3ggGqX
11 | NvJvphGkwOgXVprUjhQNHAoNQhNCNdlKBboNYeHN74AHrP0jq5DZc+/9++9Wfhn4
12 | 6Jwq9qLO3IJ+WaPDezwlR4CzB4bksT9HKq/QZipCWjefNOopWqiwzdvEIYBpKfB6
13 | WQTm+vFPZBwad+I+WNcSFCCWe74J9Hgr9NWbLlqxeykCgYEA/xIkM9tTrpNxvHao
14 | XD06OG+EmSMx4DuATRbyNbArJjaRYTIxpJKXU7vC2S8jw883VMW6nqsI4ilPL7IL
15 | RoGT0HaZFHXcNtzFsJtzDcItATIqHieJWDHyU65SIdnvujG0WGynCZw7wHoLd2ZS
16 | oGv/DknvrHkKpSvAMVmeNA+HO00CgYEA0kurDIVJSHHgtMZzToopxtg7V74GLaNC
17 | Jggcr/pEQFM7FtrS9Iahhmr6/XYzKwltdtg5hOoYyBZaRCToAbF0jcDcDfnlIDuT
18 | 7WA8qgikF5Cv8qKSm3JDE3WKGjJCZDGkcZymlbfKpZFLPw+IOGLdqvP6/lgJAgSL
19 | o/j/x1zKQfcCgYEAzOQhuJYh7kX6Qs1+CDoWnmbWOL0Qv3KMaWBZR88vmg6TUWQk
20 | hHJMpLJanmNDc3Lsm2gJtJLnG+Ta+xEmBYjo17es18alElzdYDQ+g3rK8/773iUc
21 | KoBUi+unhhGug/6RQ8Zjl1CsnrxqoYrMLJkT1MKHiB/i8TLnu7ylXGRs2LECgYEA
22 | vThaw1xW0TmuP9rku8Gzz3iXBN81vfDNP11DOtDMgrihTXI+KMnPIxy1lES7y7ii
23 | 5zGtp7vf6oG6XOB8ZbGft0DWgDjfgr4433kdfL1jsNR+jKrdkcMhOawkRGf+txq3
24 | 5YZXmhWS4KE4fJhkMKA85mq76AmcqVUftL4CoAcM8ocCgYEA52mD/9yyEpaBa9ov
25 | 4f/GkOvDWsS82t93Dd5CDcY2JJKY0sqw0sSHnY4DJCaBxAlb0Vjg5a4Rj1x3xac/
26 | STPlXTuclPHSmyZm1hzbwuIPkd10LPMZ5DtnyhlpCyPDXQwdDyaUwUHcbNxtFOgr
27 | 74Q7WlT0ziqxl/SJZWKyNxT1de4=
28 | -----END PRIVATE KEY-----
29 |
--------------------------------------------------------------------------------
/client/context/AppContext.tsx:
--------------------------------------------------------------------------------
1 | import { createContext, useState, useEffect, ReactNode } from 'react';
2 | import { configApi } from '../utils/HttpApi';
3 |
4 | interface VendorApiKeys {
5 | anthropic?: string;
6 | openai?: string;
7 | xai?: string;
8 | google?: string;
9 | }
10 |
11 | interface LLMConfig {
12 | id: number;
13 | llm_endpoint: string;
14 | llm_model: string;
15 | llm_api_key: string | null;
16 | created_at: string;
17 | updated_at: string;
18 | apiKeys?: VendorApiKeys;
19 | }
20 |
21 | export interface AppContextShape {
22 | args: any;
23 | config: LLMConfig | null;
24 |   updateConfig: (newConfig: Partial<LLMConfig>) => Promise<LLMConfig>;
25 | }
26 |
27 | export const AppContext = createContext<AppContextShape>({
28 | args: {},
29 | config: null,
30 | updateConfig: async () => ({} as LLMConfig)
31 | });
32 |
33 | export function AppProvider({ children, args }: { children: ReactNode; args: any }) {
34 |   const [config, setConfig] = useState<LLMConfig | null>(null);
35 |
36 | const loadConfig = async () => {
37 | try {
38 | const data = await configApi.getConfig();
39 | setConfig(data);
40 | } catch (error) {
41 | console.error('Error loading config:', error);
42 | }
43 | };
44 |
45 |   const updateConfig = async (newConfig: Partial<LLMConfig>) => {
46 | try {
47 | const updatedConfig = await configApi.updateConfig(newConfig);
48 | setConfig(updatedConfig);
49 | return updatedConfig;
50 | } catch (error) {
51 | console.error('Error updating config:', error);
52 | throw error;
53 | }
54 | };
55 |
56 | useEffect(() => {
57 | loadConfig();
58 | }, []);
59 |
60 | return (
61 |     <AppContext.Provider value={{ args, config, updateConfig }}>
62 |       {children}
63 |     </AppContext.Provider>
64 | );
65 | }
--------------------------------------------------------------------------------
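A component consumes this provider through `useContext`; a minimal sketch (the `ModelPicker` component is illustrative, not part of the repo):

```tsx
import { useContext } from 'react';
import { AppContext } from '../context/AppContext';

function ModelPicker() {
  const { config, updateConfig } = useContext(AppContext);

  // config is null until loadConfig() resolves in the provider
  if (!config) return <span>Loading config…</span>;

  return (
    <button onClick={() => updateConfig({ llm_model: 'claude-sonnet-4' })}>
      Current model: {config.llm_model}
    </button>
  );
}
```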
/credentials/index.ts:
--------------------------------------------------------------------------------
1 | import fs from 'fs';
2 | import path from 'path';
3 |
4 | export type Credentials = {
5 | [key: string]: any;
6 | };
7 |
8 | // Credentials files that are needed will be auto-generated in this folder when you run the app.
9 | // You can edit them as needed.
10 | export default function getCredentials(namespace: String, defaults: Credentials = {}): Credentials {
11 | const current_file_dir = __dirname;
12 | const filename = path.join(current_file_dir, `${namespace}.json`);
13 | const file_exists = fs.existsSync(filename);
14 |
15 | let credentials: Credentials = {};
16 |
17 | if (file_exists) {
18 | const file_contents = fs.readFileSync(filename);
19 |
20 | try {
21 | credentials = JSON.parse(file_contents.toString()) as Credentials;
22 | } catch (e) {
23 | if (e instanceof SyntaxError) {
24 | console.error(`** Error parsing credentials file: ${filename}`);
25 | console.error(e);
26 | return {};
27 | }
28 | }
29 | }
30 |
31 |     const handler: ProxyHandler<Credentials> = {
32 | get(target: Credentials, prop: string, receiver: any): any {
33 | if (target[prop] === undefined) {
34 | if(defaults[prop] !== undefined) {
35 | console.log(`*** Using default credential: "${prop}" in ${filename}`);
36 | target[prop] = defaults[prop];
37 | } else {
38 | console.log(`*** Missing credential: "${prop}" in ${filename}`);
39 | target[prop] = "";
40 | }
41 | fs.writeFileSync(filename, JSON.stringify(credentials, null, 4));
42 | }
43 | return Reflect.get(target, prop, receiver);
44 | },
45 | };
46 | const proxy: Credentials = new Proxy(credentials, handler);
47 |
48 | return proxy;
49 | }
50 |
--------------------------------------------------------------------------------
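How this module is meant to be consumed, per the comments above: missing keys are written back to `credentials/<namespace>.json` with their defaults on first read. A sketch; the `stripe` namespace and key names here are made-up examples, not ones the repo necessarily uses:

```ts
import getCredentials from './credentials';

// First access creates/updates credentials/stripe.json with any missing keys.
const stripe = getCredentials('stripe', { secret_key: '', webhook_secret: '' });

if (!stripe.secret_key) {
  console.warn('Fill in secret_key in credentials/stripe.json');
}
```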
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "spiel-bun",
3 | "private": true,
4 | "version": "1.0.1",
5 | "nodeArgs": [
6 | "--disable-warning=ExperimentalWarning"
7 | ],
8 | "scripts": {
9 | "dev": "npx tsx run.ts --mode development --web-port 3000",
10 | "dev-bun": "nodemon --exec 'bun run.ts' --watch server --watch shared --watch *.ts --ext ts,tsx,js,mjs,json",
11 | "build": "vite build",
12 | "prod": "vite build && bun run.ts --mode=production",
13 | "lint": "eslint . --ext ts,tsx --report-unused-disable-directives --max-warnings 0"
14 | },
15 | "dependencies": {
16 | "@aws-sdk/client-ses": "^3.840.0",
17 | "axios": "^1.7.5",
18 | "babel-plugin-styled-components": "^2.1.4",
19 | "compression": "^1.7.4",
20 | "cookie-parser": "^1.4.6",
21 | "express": "^4.21.2",
22 | "mime": "^4.0.1",
23 | "nocache": "^4.0.0",
24 | "openai": "^5.8.2",
25 | "pg": "^8.16.2",
26 | "pg-server": "^0.1.8",
27 | "pgwire": "^0.6.0",
28 | "react": "^18.2.0",
29 | "react-dom": "^18.2.0",
30 | "react-markdown": "^9.0.1",
31 | "react-router-dom": "^6.26.1",
32 | "socket.io": "^4.8.1",
33 | "socket.io-client": "^4.8.1",
34 | "sql-formatter": "^15.4.1",
35 | "styled-components": "^6.1.8",
36 | "yargs": "^17.7.2"
37 | },
38 | "devDependencies": {
39 | "@types/express": "^4.17.23",
40 | "@types/mime": "^4.0.0",
41 | "@types/node": "^24.0.10",
42 | "@types/react": "^18.2.66",
43 | "@types/react-dom": "^18.2.22",
44 | "@types/styled-components": "^5.1.34",
45 | "@typescript-eslint/eslint-plugin": "^7.2.0",
46 | "@typescript-eslint/parser": "^7.2.0",
47 | "@vitejs/plugin-react": "^4.2.1",
48 | "esbuild": "^0.25.5",
49 | "eslint": "^8.57.0",
50 | "eslint-plugin-react-hooks": "^4.6.0",
51 | "eslint-plugin-react-refresh": "^0.4.6",
52 | "nodemon": "^3.1.0",
53 | "postject": "^1.0.0-alpha.6",
54 | "tsx": "^4.20.3",
55 | "typescript": "^5.2.2",
56 | "vite": "^5.2.8"
57 | }
58 | }
59 |
--------------------------------------------------------------------------------
/client/components/About.tsx:
--------------------------------------------------------------------------------
1 | import styled from 'styled-components';
2 |
3 | const AboutContainer = styled.div`
4 | max-width: 600px;
5 | margin: 0 auto;
6 | line-height: 1.6;
7 |
8 | h1 {
9 | margin-bottom: 20px;
10 | color: color(display-p3 0.964 0.7613 0.3253);
11 | }
12 |
13 | h2 {
14 | margin-top: 30px;
15 | margin-bottom: 15px;
16 | color: #fff;
17 | }
18 |
19 | p {
20 | margin-bottom: 15px;
21 | color: rgba(255, 255, 255, 0.9);
22 | }
23 |
24 | a {
25 | color: color(display-p3 0.964 0.7613 0.3253);
26 | text-decoration: none;
27 |
28 | &:hover {
29 | text-decoration: underline;
30 | }
31 | }
32 |
33 | ul {
34 | margin-bottom: 15px;
35 | padding-left: 20px;
36 |
37 | li {
38 | margin-bottom: 8px;
39 | color: rgba(255, 255, 255, 0.9);
40 | }
41 | }
42 | `;
43 |
44 | export function About() {
45 | return (
46 |
47 | About dbpill
48 |
49 |
50 | dbpill is a PostgreSQL query performance monitoring and optimization tool
51 | that uses AI to automatically suggest database indexes to improve query performance.
52 |
53 |
54 | Features
55 |
56 | - Real-time query performance monitoring
57 | - AI-powered index suggestions using large language models
58 | - One-click index application and reversion
59 | - Query execution time comparison
60 | - Transparent proxy between your application and PostgreSQL
61 |
62 |
63 | How it works
64 |
65 | dbpill acts as a transparent proxy between your application and PostgreSQL database.
66 | It captures and analyzes SQL queries, then uses AI to suggest optimal indexes
67 | that can significantly improve query performance.
68 |
69 |
70 | Contact & Support
71 |
72 | Website: dbpill.com
73 | Email: help@dbpill.com
74 | Author: @mayfer
75 |
76 |
77 |
78 | For bug reports, feature requests, or general questions, please reach out via email.
79 |
80 |
81 | );
82 | }
--------------------------------------------------------------------------------
/server/database_helper.ts:
--------------------------------------------------------------------------------
1 | import { Client } from 'pg';
2 | import args from 'server/args';
3 |
4 | // We import `node:sqlite` lazily inside `initialize()` so that the module can
5 | // set up any warning-suppression logic **before** Node executes the
6 | // experimental SQLite code path.
7 | // eslint-disable-next-line @typescript-eslint/ban-types
8 | type NodeSqliteDatabase = any;
9 | type Statement = any;
10 |
11 | // Test initial database connectivity and log the outcome
12 | export async function testDbConnection(connectionString: string): Promise<void> {
13 | const client = new Client({ connectionString });
14 | try {
15 | await client.connect();
16 | await client.query('SELECT 1');
17 | console.log(`Database connection successful. Starting proxy on port ${args.proxyPort}\n`);
18 | } catch (error) {
19 | console.error(`❌ Failed to connect to database: ${connectionString}`);
20 | console.error(error);
21 | } finally {
22 | try { await client.end(); } catch (_) { /* ignore */ }
23 | }
24 | }
25 |
26 | export class DatabaseHelper {
27 | private db: NodeSqliteDatabase | null = null;
28 |
29 | constructor(private dbPath: string) {
30 | this.initialize();
31 | }
32 |
33 |   async initialize(): Promise<void> {
34 | // Lazily require the experimental built-in so callers can install any
35 | // warning filters (e.g. process.emitWarning override) beforehand.
36 | if (!this.db) {
37 | const { DatabaseSync } = await import('node:sqlite');
38 | this.db = new DatabaseSync(this.dbPath);
39 | }
40 | }
41 |
42 | private checkDb(): void {
43 | if (!this.db) {
44 | throw new Error('Database not initialized. Call initialize() first.');
45 | }
46 | }
47 |
48 |   async exec(sql: string): Promise<void> {
49 | this.checkDb();
50 | this.db!.exec(sql);
51 | }
52 |
53 |   async run(sql: string, params?: any[]): Promise<any> {
54 | this.checkDb();
55 | const stmt: Statement = this.db!.prepare(sql);
56 | const result = params ? stmt.run(...params) : stmt.run();
57 | return result;
58 | }
59 |
60 |   async get(sql: string, params?: any[]): Promise<any> {
61 | this.checkDb();
62 | const stmt: Statement = this.db!.prepare(sql);
63 | const row = params ? stmt.get(...params) : stmt.get();
64 | return row;
65 | }
66 |
67 |   async all(sql: string, params?: any[]): Promise<any[]> {
68 | this.checkDb();
69 | const stmt: Statement = this.db!.prepare(sql);
70 | const rows = params ? stmt.all(...params) : stmt.all();
71 | return rows;
72 | }
73 |
74 |   async close(): Promise<void> {
75 | if (this.db) {
76 | this.db.close();
77 | this.db = null;
78 | }
79 | }
80 | }
--------------------------------------------------------------------------------
/DIST.md:
--------------------------------------------------------------------------------
1 | # murat notes
2 |
3 | npx tsx run.ts --mode development --port 3000 --db=postgresql://cashorbit@localhost:5432/cashorbit
4 |
5 |
6 | # Distribution & Development Guide
7 |
8 | This document explains two workflows:
9 |
10 | 1. Local development with plain `node`/TypeScript.
11 | 2. Producing a **single-file executable** using Node SEA for shipping to end-users.
12 |
13 | ---
14 | ## 1 · Local development
15 |
16 | 1. **Install deps** (once):
17 | ```bash
18 | npm install
19 | ```
20 |
21 | 2. **Start the server in development mode** (Vite middleware + HMR):
22 | ```bash
23 |    npx tsx run.ts --mode development --web-port 3000   # or: npm run dev
24 | ```
25 |
26 | * `run.ts` boots Vite in middleware mode, so it automatically handles HMR
27 | and transforms your React/Vue/Svelte pages on the fly—no manual `vite build` step required.
28 | * Requests to `/client/**` are proxied to Vite; API/Socket.IO routes work exactly as in production.
29 |
30 | Open `http://localhost:3000` in your browser.
31 |
32 | ---
33 | ## 2 · Shipping a single executable (Node SEA)
34 |
35 | SEA lets you bundle **one CommonJS file + static assets** into a copy of
36 | `node`, creating an app that runs on machines **without Node installed**.
37 |
38 | ### 2.1 Prerequisites (once)
39 | ```bash
40 | npm install --save-dev esbuild postject
41 | ```
42 |
43 | ### 2.2 Build steps
44 |
45 | > All commands assume the project root (`dbpill`) as CWD.
46 |
47 | 1. **Build the client**
48 | ```bash
49 | npm run build # runs Vite → dist/**
50 | ```
51 |
52 | 2. **Bundle & transpile the server** (TypeScript → CJS)
53 | ```bash
54 | npx esbuild run_executable.ts \
55 | --bundle --platform=node --format=cjs \
56 | --outfile=server.bundle.cjs
57 | ```
58 |
59 | 3. **Create `sea-config.json`**
60 | ```jsonc
61 | {
62 | "main": "./server.bundle.cjs",
63 | "disableExperimentalSEAWarning": true,
64 | "output": "sea-prep.blob",
65 | "assets": {
66 | "dist/index.html": "./dist/index.html",
67 | "dist/index.js.txt": "./dist/index.js.txt",
68 | "dist/assets/index.css": "./dist/assets/index.css",
69 | "dbpill.sqlite.db": "./dbpill.sqlite.db"
70 | }
71 | }
72 | ```
73 |
74 | 4. **Generate the SEA blob**
75 | ```bash
76 | node --experimental-sea-config sea-config.json
77 | ```
78 |
79 | 5. **Create a copy of the Node binary & inject the blob**
80 |
81 | ```bash
82 | # macOS example – adjust flags for Linux/Windows
83 |
84 | cp $(command -v node) dbpill # final executable name
85 | codesign --remove-signature dbpill # mac only
86 |
87 | npx postject dbpill NODE_SEA_BLOB sea-prep.blob \
88 | --sentinel-fuse NODE_SEA_FUSE_fce680ab2cc467b6e072b8b5df1996b2 \
89 | --macho-segment-name NODE_SEA # omit on Linux
90 |
91 | codesign --sign - dbpill # re-sign on mac
92 | ```
93 |
94 | ### 2.3 Run the binary
95 | ```bash
96 | ./dbpill --port 3000
97 | ```
98 |
99 | ### 2.4 What's inside vs. outside
100 | * **Inside**: bundled server CJS, `dist/**` assets, `dbpill.sqlite.db`.
101 | * The executable is now completely self-contained using Node.js 24's built-in SQLite.
102 |
103 | ---
104 | ## 3 Convenient npm scripts
105 | Add these to `package.json` if you like:
106 | ```jsonc
107 | {
108 | "scripts": {
109 |     "dev": "tsx run.ts --mode development --web-port 3000",
110 | "build:client": "vite build",
111 |
112 | "sea:bundle": "esbuild run_executable.ts --bundle --platform=node --format=cjs --outfile=server.bundle.cjs",
113 | "sea:prep": "node --experimental-sea-config sea-config.json",
114 | "sea:build": "npm run build:client && npm run sea:bundle && npm run sea:prep",
115 | "sea:inject": "postject dbpill NODE_SEA_BLOB sea-prep.blob --sentinel-fuse NODE_SEA_FUSE_fce680ab2cc467b6e072b8b5df1996b2 --macho-segment-name NODE_SEA"
116 | }
117 | }
118 | ```
119 |
120 | Then:
121 | ```bash
122 | npm run dev # dev server with live reload
123 | npm run sea:build # produce sea-prep.blob
124 | npm run sea:inject # inject into ./dbpill
125 | ```
126 |
127 | Happy hacking & shipping! 🚀
128 |
129 |
--------------------------------------------------------------------------------
/landing/client/downloads.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | dbpill • Purchase complete
6 |
7 |
8 |
59 |
60 |
61 |
Purchase complete
62 |
Your downloads are ready for __PURCHASE_EMAIL__:
63 |
64 |
65 |
66 |
81 |
82 |
83 |
98 |
99 |
100 |
115 |
116 |
117 |
118 |
119 |
--------------------------------------------------------------------------------
/server/config_manager.ts:
--------------------------------------------------------------------------------
1 | import { DatabaseHelper } from './database_helper';
2 |
3 | export interface LLMConfig {
4 | id: number;
5 | llm_endpoint: string;
6 | llm_model: string;
7 | llm_api_key: string | null;
8 | created_at: string;
9 | updated_at: string;
10 | }
11 |
12 | export interface ApiKey {
13 | id: number;
14 | vendor: string;
15 | api_key: string;
16 | created_at: string;
17 | updated_at: string;
18 | }
19 |
20 | export interface VendorApiKeys {
21 | anthropic?: string;
22 | openai?: string;
23 | xai?: string;
24 | google?: string;
25 | }
26 |
27 | export class ConfigManager {
28 | private dbHelper: DatabaseHelper;
29 |
30 | constructor(private dbPath: string) {
31 | this.dbHelper = new DatabaseHelper(dbPath);
32 | this.initialize();
33 | }
34 |
35 |   async initialize(): Promise<void> {
36 | await this.dbHelper.initialize();
37 |
38 | await this.dbHelper.exec(`
39 | CREATE TABLE IF NOT EXISTS configs (
40 | id INTEGER PRIMARY KEY AUTOINCREMENT,
41 | llm_endpoint TEXT NOT NULL DEFAULT 'anthropic',
42 | llm_model TEXT NOT NULL DEFAULT 'claude-sonnet-4',
43 | llm_api_key TEXT,
44 | created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
45 | updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
46 | )
47 | `);
48 |
49 | // Create API keys table for vendor-specific keys
50 | await this.dbHelper.exec(`
51 | CREATE TABLE IF NOT EXISTS api_keys (
52 | id INTEGER PRIMARY KEY AUTOINCREMENT,
53 | vendor TEXT NOT NULL UNIQUE,
54 | api_key TEXT NOT NULL,
55 | created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
56 | updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
57 | )
58 | `);
59 |
60 | // Ensure we have at least one config row
61 | const existingConfig = await this.dbHelper.get('SELECT COUNT(*) as count FROM configs');
62 | if (existingConfig.count === 0) {
63 | await this.dbHelper.run(`
64 | INSERT INTO configs (llm_endpoint, llm_model, llm_api_key)
65 | VALUES ('anthropic', 'claude-sonnet-4', NULL)
66 | `);
67 | }
68 | }
69 |
70 |   async getConfig(): Promise<LLMConfig> {
71 | const config = await this.dbHelper.get('SELECT * FROM configs ORDER BY updated_at DESC LIMIT 1');
72 | return config;
73 | }
74 |
75 | async updateConfig({
76 | llm_endpoint,
77 | llm_model,
78 | llm_api_key
79 | }: {
80 | llm_endpoint: string;
81 | llm_model: string;
82 | llm_api_key: string | null;
83 |   }): Promise<void> {
84 | await this.dbHelper.run(`
85 | UPDATE configs
86 | SET llm_endpoint = ?, llm_model = ?, llm_api_key = ?, updated_at = CURRENT_TIMESTAMP
87 | WHERE id = (SELECT id FROM configs ORDER BY updated_at DESC LIMIT 1)
88 | `, [llm_endpoint, llm_model, llm_api_key]);
89 | }
90 |
91 |   async getApiKeys(): Promise<VendorApiKeys> {
92 | const keys = await this.dbHelper.all('SELECT vendor, api_key FROM api_keys');
93 | const result: VendorApiKeys = {};
94 |
95 | keys.forEach((key: any) => {
96 | result[key.vendor as keyof VendorApiKeys] = key.api_key;
97 | });
98 |
99 | return result;
100 | }
101 |
102 |   async updateApiKey(vendor: string, apiKey: string | null): Promise<void> {
103 | if (apiKey === null || apiKey === '') {
104 | // Delete the key if null or empty
105 | await this.dbHelper.run('DELETE FROM api_keys WHERE vendor = ?', [vendor]);
106 | } else {
107 | // Upsert the key
108 | await this.dbHelper.run(`
109 | INSERT INTO api_keys (vendor, api_key, updated_at)
110 | VALUES (?, ?, CURRENT_TIMESTAMP)
111 | ON CONFLICT(vendor) DO UPDATE SET
112 | api_key = excluded.api_key,
113 | updated_at = excluded.updated_at
114 | `, [vendor, apiKey]);
115 | }
116 | }
117 |
118 |   async updateApiKeys(apiKeys: VendorApiKeys): Promise<void> {
119 | for (const [vendor, apiKey] of Object.entries(apiKeys)) {
120 | await this.updateApiKey(vendor, apiKey || null);
121 | }
122 | }
123 |
124 |   async getApiKeyForVendor(vendor: string): Promise<string | null> {
125 | const result = await this.dbHelper.get('SELECT api_key FROM api_keys WHERE vendor = ?', [vendor]);
126 | return result?.api_key || null;
127 | }
128 |
129 |   async close(): Promise<void> {
130 | await this.dbHelper.close();
131 | }
132 | }
--------------------------------------------------------------------------------
/server/args.ts:
--------------------------------------------------------------------------------
1 | import yargs from 'yargs';
2 | import { hideBin } from 'yargs/helpers';
3 | import { createRequire } from 'node:module';
4 |
5 | // Use a try/catch to handle both regular Node.js and SEA contexts
6 | let require;
7 | try {
8 | // This works in regular Node.js ES modules
9 | require = createRequire(import.meta.url);
10 | } catch (err) {
11 | // Fallback for SEA context - use the current working directory
12 | require = createRequire(process.cwd() + '/package.json');
13 | }
14 | // Try to load package.json from different locations
15 | let packageVersion = '1.0.0'; // fallback version
16 | try {
17 | const pkg = require('../package.json');
18 | packageVersion = pkg.version;
19 | } catch (err) {
20 | try {
21 | const pkg = require('./package.json');
22 | packageVersion = pkg.version;
23 | } catch (err2) {
24 | // Use fallback version
25 | }
26 | }
27 |
28 | // Validation function for PostgreSQL connection strings
29 | function validateConnectionString(connectionString: string): boolean {
30 | try {
31 | // Check if it starts with postgres:// or postgresql://
32 | if (!connectionString.match(/^postgres(ql)?:\/\//)) {
33 | throw new Error('Connection string must start with postgres:// or postgresql://');
34 | }
35 |
36 | // Try to parse as URL to validate basic structure
37 | const url = new URL(connectionString);
38 |
39 | // Validate protocol
40 | if (!['postgres:', 'postgresql:'].includes(url.protocol)) {
41 | throw new Error('Protocol must be postgres: or postgresql:');
42 | }
43 |
44 | // Validate hostname (required)
45 | if (!url.hostname) {
46 | throw new Error('Hostname is required');
47 | }
48 |
49 | // Validate port (if provided, must be a valid number between 1-65535)
50 | if (url.port) {
51 | const portNum = parseInt(url.port, 10);
52 | if (isNaN(portNum) || portNum < 1 || portNum > 65535) {
53 | throw new Error('Port must be a valid number between 1 and 65535');
54 | }
55 | }
56 |
57 | // Validate database name (if provided, must not be empty after removing leading slashes)
58 | const dbName = url.pathname.replace(/^\/+/, '');
59 | if (url.pathname && url.pathname !== '/' && !dbName) {
60 | throw new Error('Database name cannot be empty');
61 | }
62 |
63 | // Validate username (if provided, must not be empty)
64 | if (url.username === '') {
65 | throw new Error('Username cannot be empty if specified');
66 | }
67 |
68 | return true;
69 | } catch (error) {
70 | if (error instanceof TypeError) {
71 | throw new Error('Invalid connection string format');
72 | }
73 | throw error;
74 | }
75 | }
76 |
77 | const _yargs = yargs(hideBin(process.argv))
78 | // Treat the first positional argument as the database connection string
79 | .usage('$0 [options]')
80 | .example('$0 postgres://user:pass@host:5432/db', 'Set up a proxy to intercept queries to the database.')
81 | .demandCommand(1, 'A PostgreSQL connection string is required as the first argument.')
82 | .check((argv) => {
83 | // Validate the database connection string
84 | if (argv._ && argv._.length > 0) {
85 | const connectionString = String(argv._[0]);
86 | try {
87 | validateConnectionString(connectionString);
88 | return true;
89 | } catch (error) {
90 | throw new Error(`Invalid database connection string: ${error.message}\n\nExpected format: postgres://[user[:password]@]host[:port]/database\nExamples:\n postgres://user:pass@localhost:5432/mydb\n postgresql://user@localhost/mydb\n postgres://localhost:5432/mydb`);
91 | }
92 | }
93 | return true;
94 | })
95 | .option('web-port', {
96 | default: 3000,
97 | type: 'number',
98 | describe: 'Web server port'
99 | })
100 | .option('proxy-port', {
101 | default: 5433,
102 | type: 'number',
103 | describe: 'Port to run the SQL proxy on'
104 | })
105 | .option('verbose', {
106 | default: false,
107 | type: 'boolean',
108 | describe: 'Enable verbose debug logging'
109 | })
110 |
111 | _yargs.version(packageVersion);
112 |
113 | const argv = _yargs.argv;
114 |
115 | // Map first positional argument to argv.db for downstream consumption
116 | if (argv._ && argv._.length > 0) {
117 | argv.db = String(argv._[0]);
118 | }
119 |
120 |
121 | process.env.NODE_ENV = argv.mode;
122 |
123 | // const options = _yargs.getOptions();
124 | // const hiddenOptions = options.hiddenOptions;
125 | // const defaultOptions = ['help', 'version', '_', '$0'];
126 | // const visibleOptions = Object.keys(options.key).filter(key => !hiddenOptions.includes(key) && !defaultOptions.includes(key));
127 | // console.log('Running with args:');
128 | // for (const key of visibleOptions) {
129 | // const value = argv[key];
130 | // console.log(` --${key} ${value}`);
131 | // }
132 |
133 | export default argv as Record<string, any>;
134 |
--------------------------------------------------------------------------------
/client/utils/sqlHighlighter.tsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 |
3 | interface HighlightedToken {
4 | type: 'keyword' | 'string' | 'comment' | 'number' | 'operator' | 'identifier' | 'whitespace';
5 | value: string;
6 | }
7 |
8 | const SQL_KEYWORDS = [
9 | 'SELECT', 'FROM', 'WHERE', 'JOIN', 'INNER', 'LEFT', 'RIGHT', 'OUTER', 'FULL',
10 | 'ON', 'AS', 'AND', 'OR', 'NOT', 'IN', 'EXISTS', 'BETWEEN', 'LIKE', 'IS',
11 | 'NULL', 'TRUE', 'FALSE', 'INSERT', 'INTO', 'VALUES', 'UPDATE', 'SET',
12 | 'DELETE', 'CREATE', 'DROP', 'ALTER', 'TABLE', 'INDEX', 'VIEW', 'DATABASE',
13 | 'SCHEMA', 'PRIMARY', 'KEY', 'FOREIGN', 'REFERENCES', 'CONSTRAINT', 'UNIQUE',
14 | 'CHECK', 'DEFAULT', 'COLUMN', 'ADD', 'MODIFY', 'RENAME', 'TO',
15 | 'ORDER', 'BY', 'GROUP', 'HAVING', 'LIMIT', 'OFFSET', 'DISTINCT', 'ALL',
16 | 'UNION', 'INTERSECT', 'EXCEPT', 'CASE', 'WHEN', 'THEN', 'ELSE', 'END',
17 | 'IF', 'COALESCE', 'NULLIF', 'CAST', 'CONVERT', 'SUBSTRING', 'CONCAT',
18 | 'COUNT', 'SUM', 'AVG', 'MIN', 'MAX', 'ROUND', 'FLOOR', 'CEIL',
19 | 'VARCHAR', 'CHAR', 'TEXT', 'INTEGER', 'INT', 'BIGINT', 'SMALLINT',
20 | 'DECIMAL', 'NUMERIC', 'FLOAT', 'REAL', 'DOUBLE', 'BOOLEAN', 'BOOL',
21 | 'DATE', 'TIME', 'TIMESTAMP', 'DATETIME', 'YEAR', 'MONTH', 'DAY',
22 | 'HOUR', 'MINUTE', 'SECOND', 'INTERVAL', 'ZONE', 'WITH', 'WITHOUT',
23 | 'PRECISION', 'VARYING', 'SERIAL', 'BIGSERIAL', 'SMALLSERIAL',
24 | 'EXPLAIN', 'ANALYZE', 'VERBOSE', 'COSTS', 'BUFFERS', 'FORMAT', 'JSON'
25 | ];
26 |
27 | const tokenize = (sql: string): HighlightedToken[] => {
28 | const tokens: HighlightedToken[] = [];
29 | let i = 0;
30 |
31 | while (i < sql.length) {
32 | const char = sql[i];
33 |
34 | // Skip whitespace but preserve it
35 | if (/\s/.test(char)) {
36 | let whitespace = '';
37 | while (i < sql.length && /\s/.test(sql[i])) {
38 | whitespace += sql[i];
39 | i++;
40 | }
41 | tokens.push({ type: 'whitespace', value: whitespace });
42 | continue;
43 | }
44 |
45 | // Comments
46 | if (char === '-' && sql[i + 1] === '-') {
47 | let comment = '';
48 | while (i < sql.length && sql[i] !== '\n') {
49 | comment += sql[i];
50 | i++;
51 | }
52 | tokens.push({ type: 'comment', value: comment });
53 | continue;
54 | }
55 |
56 | if (char === '/' && sql[i + 1] === '*') {
57 | let comment = '';
58 | while (i < sql.length - 1) {
59 | comment += sql[i];
60 | if (sql[i] === '*' && sql[i + 1] === '/') {
61 | comment += sql[i + 1];
62 | i += 2;
63 | break;
64 | }
65 | i++;
66 | }
67 | tokens.push({ type: 'comment', value: comment });
68 | continue;
69 | }
70 |
71 | // String literals
72 | if (char === "'" || char === '"') {
73 | const quote = char;
74 | let string = quote;
75 | i++;
76 | while (i < sql.length) {
77 | if (sql[i] === quote) {
78 | string += sql[i];
79 | i++;
80 | // Handle escaped quotes
81 | if (sql[i] === quote) {
82 | string += sql[i];
83 | i++;
84 | continue;
85 | }
86 | break;
87 | }
88 | string += sql[i];
89 | i++;
90 | }
91 | tokens.push({ type: 'string', value: string });
92 | continue;
93 | }
94 |
95 | // Numbers
96 | if (/\d/.test(char)) {
97 | let number = '';
98 | while (i < sql.length && /[\d.]/.test(sql[i])) {
99 | number += sql[i];
100 | i++;
101 | }
102 | tokens.push({ type: 'number', value: number });
103 | continue;
104 | }
105 |
106 | // Operators and punctuation
107 | if (/[+\-*/%=<>!(),.;]/.test(char)) {
108 | let operator = char;
109 | i++;
110 | // Handle multi-character operators
111 | if ((char === '<' || char === '>' || char === '!' || char === '=') &&
112 | sql[i] === '=') {
113 | operator += sql[i];
114 | i++;
115 | } else if (char === '<' && sql[i] === '>') {
116 | operator += sql[i];
117 | i++;
118 | }
119 | tokens.push({ type: 'operator', value: operator });
120 | continue;
121 | }
122 |
123 | // Identifiers and keywords
124 | if (/[a-zA-Z_]/.test(char)) {
125 | let identifier = '';
126 | while (i < sql.length && /[a-zA-Z0-9_]/.test(sql[i])) {
127 | identifier += sql[i];
128 | i++;
129 | }
130 |
131 | const isKeyword = SQL_KEYWORDS.includes(identifier.toUpperCase());
132 | tokens.push({
133 | type: isKeyword ? 'keyword' : 'identifier',
134 | value: identifier
135 | });
136 | continue;
137 | }
138 |
139 | // Any other character
140 | tokens.push({ type: 'identifier', value: char });
141 | i++;
142 | }
143 |
144 | return tokens;
145 | };
146 |
147 | const getTokenStyle = (type: HighlightedToken['type']): React.CSSProperties => {
148 | switch (type) {
149 | case 'keyword':
150 | return { color: '#569CD6', fontWeight: 'bold' }; // Blue
151 | case 'string':
152 | return { color: '#CE9178' }; // Orange
153 | case 'comment':
154 | return { color: '#6A9955', fontStyle: 'italic' }; // Green
155 | case 'number':
156 | return { color: '#B5CEA8' }; // Light green
157 | case 'operator':
158 | return { color: '#D4D4D4' }; // Light gray
159 | case 'identifier':
160 | return { color: '#9CDCFE' }; // Light blue
161 | case 'whitespace':
162 | return {};
163 | default:
164 | return { color: '#D4D4D4' }; // Default light gray
165 | }
166 | };
167 |
168 | export const highlightSQL = (sql: string): JSX.Element => {
169 | const tokens = tokenize(sql);
170 |
171 | return (
172 | <>
173 | {tokens.map((token, index) => (
174 |         <span key={index} style={getTokenStyle(token.type)}>
175 |           {token.value}
176 |         </span>
177 |       ))}
178 |     </>
179 | );
180 | };
--------------------------------------------------------------------------------
/run.ts:
--------------------------------------------------------------------------------
1 | import fs from 'fs'
2 | import path from 'path'
3 | import { fileURLToPath } from 'url'
4 | import express from "express";
5 | import http from "http";
6 | import { Server as SocketIOServer } from "socket.io";
7 | import { createServer as createViteServer } from 'vite'
8 | // Override emitWarning so the default stderr printing is bypassed for the one SQLite ExperimentalWarning.
9 | // Keep original behaviour for everything else.
10 | const originalEmitWarning = process.emitWarning;
11 | process.emitWarning = function (warning: any, ...args: any[]) {
12 | // Debugging line removed to avoid noisy console output.
13 | // If the first argument is the message string
14 | if (typeof warning === 'string' && warning.includes('SQLite')) {
15 | return;
16 | }
17 | // If the first argument is an Error object
18 | if (warning instanceof Error && warning.name === 'ExperimentalWarning' && /SQLite/.test(warning.message)) {
19 | return;
20 | }
21 | // @ts-ignore – preserve Node's original signature
22 | return originalEmitWarning.call(this, warning, ...args);
23 | };
24 |
25 | // app specific imports
26 | import args from "server/args";
27 | import { setup_routes } from "server/apis/http";
28 | import { setup_sockets } from "server/apis/sockets";
29 | import { getMainProps } from "server/main_props";
30 | import { buildProxyUrl, listener, startListener } from "server/proxy";
31 | import { testDbConnection } from "server/database_helper";
32 |
33 | const port = args.webPort;
34 | const mode: 'development' | 'production' = 'development'; // development or production
35 | const ssr_enabled = false;
36 |
37 | const __dirname = path.dirname(fileURLToPath(import.meta.url))
38 |
39 | async function createServer() {
40 | // Quickly verify database connectivity before starting the web server
41 | await testDbConnection(args.db);
42 |
43 | const app = express()
44 |
45 | const http_server = http.createServer(app);
46 | const io = new SocketIOServer(http_server, {});
47 | setup_sockets(io);
48 | setup_routes(app, io);
49 |
50 | const vite = await createViteServer({
51 | appType: 'custom',
52 | server: {
53 | middlewareMode: true,
54 | hmr: mode === 'development',
55 | },
56 | base: '/',
57 | clearScreen: false,
58 | })
59 |
60 | if (mode === 'production') {
61 | app.use('/client', express.static(path.resolve(__dirname, 'dist')))
62 | }
63 |
64 | // vite exposes all files at root by default, this is to prevent accessing server files
65 | app.use(async (req, res, next) => {
66 | const url = req.originalUrl
67 | let cleaned_url = url.split('?')[0]
68 | // remove leading slashes
69 | cleaned_url = cleaned_url.replace(/^\/+/, '')
70 |
71 | const allowed_prefixes = ['client', 'shared', 'node_modules', 'socket.io'];
72 | if (cleaned_url == '' || allowed_prefixes.some(prefix => cleaned_url.startsWith(prefix))) {
73 | return next();
74 | } else {
75 | // check if file with exact path exists
76 | const file_path = path.join(__dirname, cleaned_url);
77 | const exists = fs.existsSync(file_path);
78 | if(exists) {
79 | res.status(404).send('Not found');
80 | } else {
81 | next();
82 | }
83 | }
84 | })
85 |
86 |
87 | app.get('*', async (req, res, next) => {
88 | const url = req.originalUrl
89 |
90 | const skip_prefixes = ['/client', '/node_modules', '/@vite', '/@react-refresh'];
91 | if (skip_prefixes.some(prefix => url.startsWith(prefix))) {
92 | return next();
93 | }
94 |
95 | const initial_state = await getMainProps(req);
96 |
97 | try {
98 | let template = fs.readFileSync(
99 | path.resolve(__dirname, mode === 'production' ? './dist/index.html' : './index.html'),
100 | 'utf-8',
101 | )
102 |
103 | // Apply Vite HTML transforms. This injects the Vite HMR client,
104 | // and also applies HTML transforms from Vite plugins, e.g. global
105 | // preambles from @vitejs/plugin-react
106 | if(mode === 'development') {
107 | template = await vite.transformIndexHtml(url, template)
108 | }
109 |
110 | let html = '';
111 |
112 | if(ssr_enabled) {
113 | // ssr.tsx exports render() which returns a string body (and other metadata)
114 | const { render } = await vite.ssrLoadModule('/server/ssr.tsx')
115 |
116 | const ssr_parts = await render(url, initial_state);
117 | const {body, head} = ssr_parts;
118 |
119 | // Inject the app-rendered HTML & head tags (for css etc) into the template.
120 | html = template.replace(``, body)
121 | .replace(``, head)
122 | .replace(`''`, JSON.stringify(initial_state))
123 | } else {
124 | html = template.replace(``, '')
125 | .replace(``, '')
126 | .replace(`''`, JSON.stringify(initial_state))
127 | }
128 |
129 | res.status(200).set({ 'Content-Type': 'text/html' }).end(html)
130 |
131 | } catch (e) {
132 | // If an error is caught, let Vite fix the stack trace so it maps back
133 | // to your actual source code.
134 | vite.ssrFixStacktrace(e)
135 | next(e)
136 | }
137 | })
138 |
139 | if(mode === 'development') {
140 | app.use(vite.middlewares)
141 | }
142 |
143 | http_server.listen(port, async () => {
144 | // Log proxy URL using the helper function
145 | const listener = await startListener();
146 | const proxyUrl = buildProxyUrl(listener);
147 | console.log(`→ Connect to dbpill SQL proxy at ${proxyUrl} to intercept queries.`);
148 | console.log(`→ Go to dbpill web UI at http://localhost:${port} to manage the results.`)
149 | })
150 |
151 | return app
152 | }
153 |
154 | (async () => {
155 | const app = await createServer();
156 |
157 | process.on('uncaughtException', (error) => {
158 | console.error('Uncaught Exception:', error);
159 | });
160 |
161 | process.on('unhandledRejection', (reason, promise) => {
162 | console.error('Unhandled Rejection at:', promise, 'reason:', reason);
163 | });
164 | })();
165 |
--------------------------------------------------------------------------------
/client/components/QuerySuggestions.tsx:
--------------------------------------------------------------------------------
1 | import { useState } from 'react';
2 | import { queryApi } from '../utils/HttpApi';
3 | import {
4 | ActionButton,
5 | LoadingIndicator,
6 | } from '../styles/Styled';
7 | import { SuggestionBox } from './SuggestionBox';
8 |
9 | interface QuerySuggestionsProps {
10 | stat: any;
11 | loadingSuggestions: { [key: string]: boolean };
12 | setLoadingSuggestions: (fn: (prev: { [key: string]: boolean }) => { [key: string]: boolean }) => void;
13 | setStats: (fn: (prevStats: any[]) => any[]) => void;
14 |   getSuggestions: (queryId: string) => Promise<void>;
15 | }
16 |
17 | export function QuerySuggestions({
18 | stat,
19 | loadingSuggestions,
20 | setLoadingSuggestions,
21 | setStats,
22 | getSuggestions,
23 | }: QuerySuggestionsProps) {
24 | const createManualSuggestion = async () => {
25 | setLoadingSuggestions(prev => ({ ...prev, [stat.query_id]: true }));
26 |
27 | try {
28 | const data = await queryApi.createManualSuggestion(stat.query_id);
29 | setStats(prevStats => {
30 | const newStats = [...prevStats];
31 | const index = newStats.findIndex(s => s.query_id === stat.query_id);
32 | if (index !== -1) {
33 | newStats[index] = data;
34 | }
35 | return newStats;
36 | });
37 | } catch (error: any) {
38 | alert(error.message || 'Error creating manual suggestion');
39 | } finally {
40 | setLoadingSuggestions(prev => ({ ...prev, [stat.query_id]: false }));
41 | }
42 | };
43 |
44 | const handleSuggestionUpdate = (updatedStat: any) => {
45 | setStats(prevStats => {
46 | const newStats = [...prevStats];
47 | const index = newStats.findIndex(s => s.query_id === stat.query_id);
48 | if (index !== -1) {
49 | newStats[index] = updatedStat;
50 | }
51 | return newStats;
52 | });
53 | };
54 |
55 | const handleSuggestionDelete = (suggestionIndex: number) => {
56 | // This will be handled by the API response in handleSuggestionUpdate
57 | // No additional action needed here
58 | };
59 |
60 | // No LLM response yet - show initial buttons
61 | if (!stat.llm_response) {
62 | return (
63 | <>
64 | getSuggestions(stat.query_id)}
67 | disabled={loadingSuggestions[stat.query_id]}
68 | >
69 | {loadingSuggestions[stat.query_id] ? (
70 | Getting suggestions...
71 | ) : (
72 | '🤖 Get AI suggested indexes'
73 | )}
74 |
75 |
81 | {loadingSuggestions[stat.query_id] ? (
82 | Creating...
83 | ) : (
84 | '✎ Add custom index'
85 | )}
86 |
87 | >
88 | );
89 | }
90 |
91 | // Has LLM response - render suggestions
92 | return (
93 | <>
94 | {/* Render list of suggestions if available */}
95 | {stat.suggestions && Array.isArray(stat.suggestions) && stat.suggestions.length > 0 ? (
96 | stat.suggestions.slice().reverse().map((suggestion: any, reverseIdx: number) => {
97 | // Since we reversed the array, reverseIdx 0 = oldest suggestion, should be numbered 1
98 | const suggestionNumber = reverseIdx + 1;
99 | const statusText = stat.suggestions.length > 1 ? `Suggestion ${suggestionNumber}` : 'Suggestion';
100 | // Use the original index for the key (newest suggestions have lower original indexes)
101 | const originalIdx = stat.suggestions.length - 1 - reverseIdx;
102 |
103 | return (
104 |
113 | );
114 | })
115 | ) : stat.suggested_indexes && (
116 | // Legacy single suggestion - convert to same format and use unified renderer
117 |
131 | )}
132 |
133 | {!stat.suggested_indexes && (
134 | No new index suggestions
135 | )}
136 |
137 | {!stat.applied_indexes && (
138 |
139 |
getSuggestions(stat.query_id)}
143 | disabled={loadingSuggestions[stat.query_id]}
144 | >
145 | {loadingSuggestions[stat.query_id] ? (
146 | Getting more suggestions...
147 | ) : (
148 | '🤖 Ask for more'
149 | )}
150 |
151 |
157 | {loadingSuggestions[stat.query_id] ? (
158 | Creating...
159 | ) : (
160 | '✎ Add custom'
161 | )}
162 |
163 |
164 | )}
165 | >
166 | );
167 | }
--------------------------------------------------------------------------------
/client/utils/HttpApi.ts:
--------------------------------------------------------------------------------
1 | interface VendorApiKeys {
2 | anthropic?: string;
3 | openai?: string;
4 | xai?: string;
5 | google?: string;
6 | }
7 |
8 | interface LLMConfig {
9 | id: number;
10 | llm_endpoint: string;
11 | llm_model: string;
12 | llm_api_key: string | null;
13 | created_at: string;
14 | updated_at: string;
15 | apiKeys?: VendorApiKeys;
16 | }
17 |
18 | interface QueryGroup {
19 | query_id: number;
20 | query: string;
21 | avg_exec_time: number;
22 | total_time: number;
23 | max_exec_time: number;
24 | min_exec_time: number;
25 | last_exec_time: number;
26 | num_instances: number;
27 | llm_response?: string;
28 | suggested_indexes?: string;
29 | applied_indexes?: string;
30 | prev_exec_time?: number;
31 | new_exec_time?: number;
32 | hidden?: boolean;
33 | instances?: any[];
34 | prompt_preview?: string;
35 | suggestions?: any[];
36 | }
37 |
38 | interface QueryStatsResponse {
39 | stats: QueryGroup[];
40 | orderBy: string;
41 | orderDirection: string;
42 | }
43 |
44 | // Base fetch wrapper with error handling
45 | async function apiRequest<T = any>(
46 | endpoint: string,
47 | options: RequestInit = {}
48 | ): Promise<T> {
49 | const defaultOptions: RequestInit = {
50 | headers: {
51 | 'Content-Type': 'application/json',
52 | ...options.headers,
53 | },
54 | ...options,
55 | };
56 |
57 | const response = await fetch(endpoint, defaultOptions);
58 |
59 | if (!response.ok) {
60 | const errorData = await response.json().catch(() => ({ error: 'Network error' }));
61 | throw new Error(errorData.error || `HTTP ${response.status}`);
62 | }
63 |
64 | return response.json();
65 | }
66 |
67 | // Configuration API
68 | export const configApi = {
69 | async getConfig(): Promise<LLMConfig> {
70 | return apiRequest<LLMConfig>('/api/config');
71 | },
72 |
73 | async updateConfig(config: Partial<LLMConfig>): Promise<LLMConfig> {
74 | return apiRequest<LLMConfig>('/api/config', {
75 | method: 'POST',
76 | body: JSON.stringify(config),
77 | });
78 | },
79 | };
80 |
81 | // Query API
82 | export const queryApi = {
83 | async getAllQueries(orderBy: string = 'avg_exec_time', orderDirection: string = 'desc'): Promise<QueryStatsResponse> {
84 | return apiRequest<QueryStatsResponse>(`/api/all_queries?orderBy=${orderBy}&direction=${orderDirection}`);
85 | },
86 |
87 | async getQuery(queryId: string | number, instanceType?: 'slowest' | 'fastest' | 'latest'): Promise {
88 | const params = instanceType ? `?instance_type=${instanceType}` : '';
89 | return apiRequest(`/api/query/${queryId}${params}`);
90 | },
91 |
92 | async analyzeQuery(queryId: string | number): Promise {
93 | return apiRequest(`/api/analyze_query?query_id=${queryId}`);
94 | },
95 |
96 | async analyzeQueryWithParams(queryId: string | number, params: string): Promise {
97 | const encodedParams = encodeURIComponent(params);
98 | return apiRequest(`/api/analyze_query_with_params?query_id=${queryId}&params=${encodedParams}`);
99 | },
100 |
101 | async getSuggestions(queryId: string | number, prompt?: string): Promise {
102 | const body: Record<string, any> = { query_id: queryId };
103 | if (prompt !== undefined) {
104 | body.prompt = prompt;
105 | }
106 |
107 | return apiRequest('/api/suggest', {
108 | method: 'POST',
109 | body: JSON.stringify(body),
110 | });
111 | },
112 |
113 | async saveEditedIndexes(queryId: string | number, editedIndexes: string, suggestionId?: string | number): Promise {
114 | const requestBody: any = {
115 | query_id: queryId,
116 | suggested_indexes: editedIndexes
117 | };
118 |
119 | if (suggestionId) {
120 | requestBody.suggestion_id = suggestionId;
121 | }
122 |
123 | return apiRequest('/api/save_edited_indexes', {
124 | method: 'POST',
125 | body: JSON.stringify(requestBody),
126 | });
127 | },
128 |
129 | async createManualSuggestion(queryId: string | number): Promise {
130 | return apiRequest('/api/create_manual_suggestion', {
131 | method: 'POST',
132 | body: JSON.stringify({ query_id: queryId }),
133 | });
134 | },
135 |
136 | async deleteSuggestion(suggestionId: string | number): Promise {
137 | return apiRequest(`/api/suggestion/${suggestionId}`, {
138 | method: 'DELETE',
139 | });
140 | },
141 |
142 | async applySuggestions(queryId: string | number, suggestionId?: string | number): Promise {
143 | const url = suggestionId
144 | ? `/api/apply_suggestions?query_id=${queryId}&suggestion_id=${suggestionId}`
145 | : `/api/apply_suggestions?query_id=${queryId}`;
146 | return apiRequest(url);
147 | },
148 |
149 | async revertSuggestions(queryId: string | number, suggestionId?: string | number): Promise {
150 | const url = suggestionId
151 | ? `/api/revert_suggestions?query_id=${queryId}&suggestion_id=${suggestionId}`
152 | : `/api/revert_suggestions?query_id=${queryId}`;
153 | return apiRequest(url);
154 | },
155 |
156 | async getRelevantTables(queryId: string | number): Promise> {
157 | return apiRequest(`/api/relevant_tables?query_id=${queryId}`);
158 | },
159 |
160 | async ignoreQuery(queryId: string | number): Promise {
161 | return apiRequest(`/api/ignore_query?query_id=${queryId}`);
162 | },
163 | };
164 |
165 | // Admin/maintenance API
166 | export const adminApi = {
167 | async resetQueryLogs(): Promise<{ success: boolean; message: string }> {
168 | return apiRequest<{ success: boolean; message: string }>('/api/reset_query_logs', {
169 | method: 'POST',
170 | });
171 | },
172 |
173 | async revertAllSuggestions(): Promise {
174 | return apiRequest('/api/revert_all_suggestions');
175 | },
176 |
177 | async getAllAppliedIndexes(): Promise {
178 | return apiRequest('/api/get_all_applied_indexes');
179 | },
180 | };
181 |
182 | // Export all APIs as a single object for convenience
183 | export const httpApi = {
184 | config: configApi,
185 | query: queryApi,
186 | admin: adminApi,
187 | };
--------------------------------------------------------------------------------
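The wrapper above keeps all HTTP details out of the components; a minimal usage sketch is shown below (the call sequence and sort values are illustrative, not taken from the app):

```ts
// Illustrative consumer of the HttpApi wrapper above; the ordering/filter values are arbitrary examples.
import { configApi, queryApi, adminApi } from './HttpApi';

async function example(): Promise<void> {
  // Read the stored LLM configuration.
  const config = await configApi.getConfig();
  console.log(config.llm_endpoint, config.llm_model);

  // Fetch captured query groups, slowest first by total time.
  const { stats } = await queryApi.getAllQueries('total_time', 'desc');

  if (stats.length > 0) {
    // Request LLM index suggestions for the first group.
    await queryApi.getSuggestions(stats[0].query_id);
  }

  // Clear all captured query logs.
  await adminApi.resetQueryLogs();
}
```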
/client/App.tsx:
--------------------------------------------------------------------------------
1 | import { useEffect, useContext } from 'react';
2 | import {
3 | BrowserRouter as Router,
4 | Routes,
5 | Route,
6 | NavLink as RouterNavLink,
7 | } from 'react-router-dom';
8 | import io from 'socket.io-client';
9 | import styled from 'styled-components';
10 |
11 | import './App.css';
12 |
13 | import { QueryList } from './components/QueryList';
14 | import { Configs } from './components/Configs';
15 | import { About } from './components/About';
16 |
17 | import { AppContext, AppProvider } from './context/AppContext';
18 | import { MainProps } from 'shared/main_props';
19 |
20 | /* -------------------------------------------------------------------------- */
21 | /* Styles */
22 | /* -------------------------------------------------------------------------- */
23 |
24 | const Container = styled.div`
25 | font-family: 'Inconsolata', monospace;
26 | display: flex;
27 | flex-direction: column;
28 | height: 100vh;
29 | overflow: auto;
30 | background-color: rgba(40, 40, 40, 1);
31 | color: #fff;
32 |
33 | & code {
34 | background-color: rgba(255, 255, 255, 0.1);
35 | padding: 2px 4px;
36 | border-radius: 4px;
37 | }
38 | & pre > code {
39 | display: block;
40 | padding: 5px 7px;
41 | border-radius: 0;
42 | }
43 | `;
44 |
45 | const TextLogo = styled.div`
46 | font-size: 30px;
47 | font-weight: 700;
48 | text-transform: lowercase;
49 | letter-spacing: 2px;
50 | border: 1px solid color(display-p3 0.964 0.7613 0.3253);
51 | color: color(display-p3 0.964 0.7613 0.3253);
52 | background: linear-gradient(to right, rgba(86, 65, 9, 0.8) 25%, rgba(59, 40, 7, 0.8) 75%);
53 | display: inline-block;
54 | padding: 0 20px;
55 | margin-right: 10px;
56 | border-radius: 30px;
57 | position: relative;
58 | `;
59 |
60 | const NavBar = styled.div`
61 | display: flex;
62 | align-items: center;
63 | gap: 10px;
64 | padding: 10px 20px;
65 | background-color: rgba(0, 0, 0, 1);
66 | border-bottom: 1px solid rgba(255, 255, 255, 0.1);
67 | `;
68 |
69 | const DbInfo = styled.div`
70 | margin-left: auto;
71 | font-size: 14px;
72 | `;
73 |
74 | const InfoTable = styled.table`
75 | font-size: 12px;
76 | color: rgba(255, 255, 255, 0.8);
77 | border-collapse: collapse;
78 |
79 | th, td {
80 | padding: 2px 6px;
81 | }
82 |
83 | th {
84 | opacity: 0.5;
85 | }
86 |
87 | th:first-child, td:first-child {
88 | text-align: right;
89 | font-weight: 600;
90 | }
91 | `;
92 |
93 | const StyledNavLink = styled(RouterNavLink)`
94 | cursor: pointer;
95 | text-decoration: none;
96 | padding: 8px 12px;
97 | color: #fff;
98 | border: 2px solid transparent;
99 |
100 | &:hover {
101 | border-bottom-color: #ffffff77;
102 | }
103 |
104 | &.active {
105 | border-bottom-color: #fff;
106 | }
107 | `;
108 |
109 | const MainContent = styled.div`
110 | flex-grow: 1;
111 | padding: 20px;
112 | background-color: rgb(74, 73, 71);
113 | `;
114 |
115 | /* -------------------------------------------------------------------------- */
116 |
117 | function NavBarContent({ args }: { args: MainProps['args'] }) {
118 | const { config } = useContext(AppContext);
119 |
120 | return (
121 | <>
122 | dbpill
123 | {/* RouterNavLink automatically adds the `active` class */}
124 | Queries
125 | Config
126 | About
127 |
128 | {/* Show current DB connection info and LLM info */}
129 | {(() => {
130 | try {
131 | const dbUrl = new URL(args.db);
132 | const host = dbUrl.hostname;
133 | const port = dbUrl.port || '5432';
134 | const dbName = dbUrl.pathname.replace(/^\/+/, '');
135 | const proxyPort = args.proxyPort || 5433;
136 |
137 | // Get LLM info - only use config values
138 | const llmEndpoint = config?.llm_endpoint || 'anthropic';
139 | const llmModel = config?.llm_model || 'claude-sonnet-4';
140 |
141 | // Format LLM provider name for display
142 | let llmProvider = llmEndpoint;
143 | if (llmEndpoint === 'anthropic') {
144 | llmProvider = 'Anthropic';
145 | } else if (llmEndpoint === 'openai') {
146 | llmProvider = 'OpenAI';
147 | } else if (llmEndpoint.startsWith('http')) {
148 | // Custom URL - extract domain for display
149 | try {
150 | const url = new URL(llmEndpoint);
151 | llmProvider = url.hostname;
152 | } catch {
153 | llmProvider = 'Custom';
154 | }
155 | }
156 |
157 | return (
158 |
159 |
160 |
161 |
162 | | Proxy |
163 | {`:${proxyPort} → ${host}:${port}/${dbName}`} |
164 |
165 |
166 | | LLM |
167 | {`${llmProvider} • ${llmModel}`} |
168 |
169 |
170 |
171 |
172 | );
173 | } catch (_) {
174 | return null;
175 | }
176 | })()}
177 | >
178 | );
179 | }
180 |
181 | function App({ args }: MainProps) {
182 | // Establish socket connection (same behaviour as before)
183 | useEffect(() => {
184 | const socket = io();
185 | socket.on('connect', () => {
186 | console.log('connected to socket.io');
187 | });
188 |
189 | return () => {
190 | socket.disconnect();
191 | };
192 | }, []);
193 |
194 | return (
195 |
196 |
197 |
198 |
199 |
200 |
201 |
202 |
203 |
204 | } />
205 | } />
206 | } />
207 |
208 |
209 |
210 |
211 |
212 | );
213 | }
214 |
215 | export default App;
--------------------------------------------------------------------------------
/server/llm.ts:
--------------------------------------------------------------------------------
1 | import OpenAI from 'openai';
2 |
3 | import { ConfigManager } from './config_manager';
4 |
5 | // Map logical endpoint identifiers to their corresponding OpenAI-compatible base URLs
6 | function resolveBaseURL(endpoint: string): string {
7 | switch (endpoint) {
8 | case 'anthropic':
9 | // Anthropic OpenAI-compat layer
10 | return 'https://api.anthropic.com/v1/';
11 | case 'gemini':
12 | // Google Gemini compat endpoint
13 | return 'https://generativelanguage.googleapis.com/v1beta/openai/';
14 | case 'grok':
15 | // xAI Grok compat endpoint
16 | return 'https://api.x.ai/v1/';
17 | case 'openai':
18 | // Native OpenAI
19 | return 'https://api.openai.com/v1/';
20 | default:
21 | // Assume custom URL already contains protocol
22 | return endpoint;
23 | }
24 | }
25 |
26 | let configManager: ConfigManager | null = null;
27 |
28 | async function getConfigManager(): Promise<ConfigManager> {
29 | if (!configManager) {
30 | configManager = new ConfigManager('dbpill.sqlite.db');
31 | await configManager.initialize();
32 | }
33 | return configManager;
34 | }
35 |
36 | async function getCredentials(endpoint: string): Promise<string | null> {
37 | const cm = await getConfigManager();
38 | const config = await cm.getConfig();
39 |
40 | // Map endpoint to vendor for API key lookup
41 | let vendor: string | null = endpoint;
42 | switch (endpoint) {
43 | case 'anthropic':
44 | vendor = 'anthropic';
45 | break;
46 | case 'openai':
47 | vendor = 'openai';
48 | break;
49 | case 'gemini':
50 | vendor = 'google';
51 | break;
52 | case 'grok':
53 | vendor = 'xai';
54 | break;
55 | default:
56 | // For custom endpoints, try to get the API key from general config
57 | vendor = null;
58 | break;
59 | }
60 |
61 | // Try vendor-specific API key first
62 | if (vendor) {
63 | const vendorApiKey = await cm.getApiKeyForVendor(vendor);
64 | if (vendorApiKey) {
65 | return vendorApiKey;
66 | }
67 | }
68 |
69 | // Fall back to general config
70 | return config.llm_api_key;
71 | }
72 |
73 | export interface Completion {
74 | text: string;
75 | input_tokens: number;
76 | output_tokens: number;
77 | stopSequence: string | undefined;
78 | }
79 |
80 | // Helper to decide which parameter name to use for specifying the number of
81 | // completion tokens. Some providers/models (e.g. OpenAI reasoning models like
82 | // o1, o3, o4) have migrated to `max_completion_tokens` while the majority still expect `max_tokens`.
83 | function resolveMaxTokensParam(endpoint: string, model: string): 'max_tokens' | 'max_completion_tokens' {
84 | const m = model?.toLowerCase() || '';
85 |
86 | // OpenAI reasoning models (o1, o3, o4, etc. and their variants like mini)
87 | // require the newer parameter
88 | if (endpoint === 'openai' && /^o\d+/.test(m)) {
89 | return 'max_completion_tokens';
90 | }
91 |
92 | // Default – legacy OpenAI-style parameter.
93 | return 'max_tokens';
94 | }
95 |
96 | // Helper to choose a sensible default for the maximum number of tokens the
97 | // model is allowed to generate. Most contemporary chat models comfortably
98 | // support ≥8k completion tokens, so we default to 8192 unless explicitly
99 | // overridden at runtime.
100 | function resolveDefaultMaxTokens(endpoint: string, model: string): number {
101 | // In future this could consult per-model limits. For now, follow the user
102 | // guidance of using ~8k across the board.
103 | return 8192;
104 | }
105 |
106 | export async function prompt_llm({
107 | prompt,
108 | // Temperature is accepted for API compatibility but deliberately ignored
109 | // because certain models (e.g. o3) only support the default value (1).
110 | temperature: _ignoredTemperature,
111 | stop,
112 | streamHandler,
113 | }: {
114 | prompt: string;
115 | temperature?: number;
116 | stop?: string[];
117 | streamHandler?: (stream: any, text: string, stopSequence?: string) => void
118 | }): Promise<Completion> {
119 |
120 | const cm = await getConfigManager();
121 | const config = await cm.getConfig();
122 |
123 | const endpoint = config.llm_endpoint || 'anthropic';
124 | const baseURL = resolveBaseURL(endpoint);
125 | const model = config.llm_model || 'claude-sonnet-4-0';
126 |
127 | const API_KEY = await getCredentials(endpoint);
128 |
129 | const openai = new OpenAI({
130 | apiKey: API_KEY,
131 | baseURL,
132 | });
133 |
134 | // Determine parameter name & sensible default for completion length based
135 | // on the provider/model.
136 | const tokenParamName = resolveMaxTokensParam(endpoint, model);
137 | const maxTokens = resolveDefaultMaxTokens(endpoint, model);
138 |
139 | const completionParams: any = {
140 | model,
141 | messages: [{ role: 'user', content: prompt }],
142 | temperature: 1,
143 | stop,
144 | stream: true,
145 | };
146 | completionParams[tokenParamName] = maxTokens;
147 |
148 | // Attempt the request with the selected parameter. If the provider rejects
149 | // it, retry once with the alternative parameter name for maximum
150 | // compatibility.
151 | const altTokenParamName = tokenParamName === 'max_tokens' ? 'max_completion_tokens' : 'max_tokens';
152 |
153 | let stream: any;
154 | try {
155 | stream = await openai.chat.completions.create(completionParams as any);
156 | } catch (err: any) {
157 | const msg: string | undefined = err?.message || err?.error?.message;
158 | const shouldRetry = msg && msg.includes(`Unsupported parameter`) && msg.includes(tokenParamName);
159 |
160 | if (shouldRetry) {
161 | // Swap the parameter name and try once more.
162 | delete completionParams[tokenParamName];
163 | completionParams[altTokenParamName] = maxTokens;
164 | stream = await openai.chat.completions.create(completionParams as any);
165 | } else {
166 | throw err; // Propagate unknown errors
167 | }
168 | }
169 |
170 | let text = '';
171 | let stopSequence: string | undefined;
172 |
173 | for await (const chunk of stream) {
174 | // Different providers surface streaming deltas differently.
175 | // - OpenAI-compatible: chunk.choices[0].delta.content
176 | // - Anthropic: chunk.content OR chunk.completion
177 | // - Others (e.g. o3): may vary but generally expose `.content` too.
178 |
179 | const choice = (chunk as any)?.choices?.[0];
180 |
181 | const delta: string =
182 | choice?.delta?.content ??
183 | (chunk as any)?.content ??
184 | (chunk as any)?.completion ??
185 | '';
186 |
187 | text += delta;
188 |
189 | // Capture finish/stop information if present.
190 | const finishReason: string | undefined =
191 | choice?.finish_reason ??
192 | (chunk as any)?.stop_reason ??
193 | (chunk as any)?.finish_reason;
194 |
195 | if (finishReason && !stopSequence) {
196 | stopSequence = finishReason;
197 | }
198 |
199 | if (streamHandler && (delta || stopSequence)) {
200 | streamHandler(stream, delta, stopSequence);
201 | }
202 | }
203 |
204 | return {
205 | text,
206 | input_tokens: 0,
207 | output_tokens: 0,
208 | stopSequence,
209 | } as Completion;
210 | };
211 |
--------------------------------------------------------------------------------
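For reference, a minimal caller of `prompt_llm` might look like the sketch below; the prompt text is made up, and the endpoint, model and API key come from the stored config exactly as shown above:

```ts
// Illustrative call site for prompt_llm(); the prompt string is a placeholder.
import { prompt_llm } from './llm';

async function demo(): Promise<void> {
  const completion = await prompt_llm({
    prompt: 'Suggest one index for: SELECT * FROM users WHERE lower(email) = $1',
    streamHandler: (_stream, delta) => {
      // Incremental text chunks arrive here as the model streams.
      if (delta) process.stdout.write(delta);
    },
  });

  console.log('\nfinish reason:', completion.stopSequence);
  console.log('full text length:', completion.text.length);
}
```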
/proxy-standalone.ts:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env tsx
2 |
3 | import { createAdvancedProxy, IAdvancedProxySession, bindSocket, DbRawCommand, ResponseCode } from 'pg-server';
4 | import { DbResponseParser } from 'pg-server/protocol/response-parser';
5 | import { CommandWriter } from 'pg-server/protocol/command-writer';
6 | import fs from 'fs';
7 | import path from 'path';
8 | import * as net from 'net';
9 | import * as tls from 'tls';
10 | import { fileURLToPath } from 'url';
11 |
12 | // Fix __dirname for ES modules
13 | const __filename = fileURLToPath(import.meta.url);
14 | const __dirname = path.dirname(__filename);
15 |
16 | // Get database URL and proxy port from command line
17 | const dbUrl = process.argv[2] || 'postgresql://cashorbit@localhost:5432/cashorbit';
18 | const proxyPort = parseInt(process.argv[3]) || 5433;
19 |
20 | console.log('PostgreSQL TLS Proxy (Node.js)');
21 | console.log('Database:', dbUrl);
22 | console.log('Proxy port:', proxyPort);
23 |
24 | // === TLS MITM settings ===
25 | const keyPath = path.resolve(__dirname, 'credentials/proxy.key');
26 | const certPath = path.resolve(__dirname, 'credentials/proxy.crt');
27 |
28 | if (!fs.existsSync(keyPath) || !fs.existsSync(certPath)) {
29 | console.error('[proxy] TLS key or certificate not found. Expected at:');
30 | console.error(` key : ${keyPath}`);
31 | console.error(` cert: ${certPath}`);
32 | console.error('Please create or place a valid certificate pair before starting the proxy.');
33 | process.exit(1);
34 | }
35 |
36 | const TLS_SERVER_OPTS: tls.TlsOptions = {
37 | key: fs.readFileSync(keyPath),
38 | cert: fs.readFileSync(certPath),
39 | requestCert: false,
40 | rejectUnauthorized: false,
41 | secureProtocol: 'TLS_method', // Support all TLS versions
42 | honorCipherOrder: true,
43 | ciphers: 'HIGH:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!SRP:!CAMELLIA',
44 | };
45 |
46 | console.log('[proxy] Certificate loaded successfully');
47 | console.log('[proxy] Key file size:', fs.statSync(keyPath).size, 'bytes');
48 | console.log('[proxy] Cert file size:', fs.statSync(certPath).size, 'bytes');
49 |
50 | // Parse backend details
51 | const parsedUrl = new URL(dbUrl);
52 | const backendHost = parsedUrl.hostname || 'localhost';
53 | const backendPort = Number(parsedUrl.port) || 5432;
54 |
55 | // Helper to create backend socket (plain TCP)
56 | function createBackendSocket(): net.Socket {
57 | return net.connect({ host: backendHost, port: backendPort });
58 | }
59 |
60 | // Very simple passthrough proxy for now
61 | function startPgProxy(clientSock: net.Socket, pending?: Buffer) {
62 | console.log('[proxy] Initialising pg-server bindSocket layer');
63 |
64 | const dbSock = createBackendSocket();
65 |
66 | let parties: { client: any; db: CommandWriter };
67 |
68 | const { writer } = bindSocket(clientSock, (cmd, _wrt) => {
69 | dbSock.write(cmd.getRawData() as Uint8Array);
70 | });
71 |
72 | const parser = new DbResponseParser();
73 | dbSock.on('data', (buf) => {
74 | parser.parse(buf, (res) => {
75 | writer.socket.write(res.getRawData() as Uint8Array);
76 | });
77 | });
78 |
79 | parties = { client: writer, db: new CommandWriter(dbSock) };
80 |
81 | dbSock.on('error', (e) => console.error('[proxy] Backend DB error:', e));
82 | dbSock.on('close', () => clientSock.destroy());
83 |
84 | // feed any pending bytes
85 | if (pending && pending.length) {
86 | clientSock.emit('data', pending);
87 | }
88 | }
89 |
90 | // Simple proxy session that just passes through commands
91 | class SimpleProxySession implements IAdvancedProxySession {
92 | onConnect(socket: net.Socket) {
93 | console.log('[proxy] Client connected via pg-server');
94 | }
95 | }
96 |
97 | // === Listener that understands PostgreSQL SSLRequest handshake ===
98 | const listener = net.createServer((rawClient) => {
99 | console.log('[proxy] TCP connection accepted from', rawClient.remoteAddress, 'port', rawClient.remotePort);
100 |
101 | rawClient.on('error', (err) => {
102 | console.error('[proxy] rawClient error (pre-TLS):', err);
103 | });
104 |
105 | let hasReceivedData = false;
106 |
107 | const handleFirstData = (first8: Buffer) => {
108 | if (hasReceivedData) return;
109 | hasReceivedData = true;
110 |
111 | console.log('[proxy] Received first data chunk:', first8.slice(0, Math.min(first8.length, 8)).toString('hex'));
112 |
113 | const isSSLRequest = first8.length >= 8 && first8.readUInt32BE(4) === 0x04d2162f;
114 |
115 | if (isSSLRequest) {
116 | console.log('[proxy] Detected SSLRequest, upgrading to TLS');
117 | rawClient.write('S');
118 | console.log('[proxy] Sent S response, creating TLS socket...');
119 |
120 | const tlsClient = new tls.TLSSocket(rawClient, { ...TLS_SERVER_OPTS, isServer: true });
121 | console.log('[proxy] TLS socket created, waiting for handshake...');
122 |
123 | // Add comprehensive TLS event logging
124 | tlsClient.on('keylog', (line) => {
125 | console.log('[proxy] TLS keylog:', line.toString());
126 | });
127 |
128 | tlsClient.on('session', (session) => {
129 | console.log('[proxy] TLS session established, length:', session.length);
130 | });
131 |
132 | tlsClient.on('secureConnect', () => {
133 | console.log('[proxy] secureConnect fired!');
134 | console.log('[proxy] TLS version:', tlsClient.getProtocol());
135 | console.log('[proxy] TLS cipher:', tlsClient.getCipher());
136 | console.log('[proxy] TLS authorized:', tlsClient.authorized);
137 | console.log('[proxy] TLS server name:', (tlsClient as any).servername || 'none');
138 | });
139 |
140 | tlsClient.on('OCSPResponse', (response) => {
141 | console.log('[proxy] OCSP response received');
142 | });
143 |
144 | tlsClient.on('error', (err) => {
145 | console.error('[proxy] tlsClient error during/after handshake:', err);
146 | console.error('[proxy] Error code:', err.code);
147 | console.error('[proxy] Error errno:', (err as any).errno);
148 | });
149 |
150 | tlsClient.on('close', (hadError) => {
151 | console.log(`[proxy] tlsClient closed during/after handshake. Had error: ${hadError}`);
152 | });
153 |
154 | // Once we receive the first decrypted Postgres packet, start full proxying
155 | tlsClient.once('data', (firstPg) => {
156 | console.log('[proxy] Received first Postgres bytes after TLS handshake:', firstPg.slice(0, 32).toString('hex'));
157 | startPgProxy(tlsClient, firstPg);
158 | });
159 |
160 | // Safety timeout – if we never get Postgres data, terminate
161 | const tlsTimeout = setTimeout(() => {
162 | console.warn('[proxy] TLS handshake appeared to stall (no Postgres data within 10s)');
163 | tlsClient.destroy();
164 | }, 10000);
165 |
166 | tlsClient.on('data', () => clearTimeout(tlsTimeout));
167 | tlsClient.on('close', () => clearTimeout(tlsTimeout));
168 |
169 | } else {
170 | console.log('[proxy] No SSLRequest detected, proceeding with plain text');
171 | startPgProxy(rawClient, first8);
172 | }
173 | };
174 |
175 | rawClient.on('data', handleFirstData);
176 | rawClient.on('readable', () => {
177 | if (hasReceivedData) return;
178 | const chunk = rawClient.read();
179 | if (chunk) {
180 | console.log('[proxy] Got data via readable event');
181 | handleFirstData(chunk);
182 | }
183 | });
184 |
185 | const timeout = setTimeout(() => {
186 | if (!hasReceivedData) {
187 | console.log('[proxy] No data received after 5 seconds, checking if readable...');
188 | const chunk = rawClient.read();
189 | if (chunk) {
190 | console.log('[proxy] Found buffered data on timeout');
191 | handleFirstData(chunk);
192 | } else {
193 | console.log('[proxy] No buffered data found, connection may be stalled');
194 | rawClient.destroy();
195 | }
196 | }
197 | }, 5000);
198 |
199 | rawClient.on('close', () => {
200 | clearTimeout(timeout);
201 | console.log('[proxy] rawClient closed');
202 | });
203 | });
204 |
205 | listener.listen(proxyPort, () => {
206 | console.log(`PostgreSQL proxy listening on port ${proxyPort} (TLS MITM ready with Node.js)`);
207 | });
208 |
209 | // Handle graceful shutdown
210 | process.on('SIGINT', () => {
211 | console.log('\n[proxy] Shutting down...');
212 | listener.close(() => {
213 | process.exit(0);
214 | });
215 | });
--------------------------------------------------------------------------------
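The magic number checked above, `0x04d2162f` (80877103), is PostgreSQL's SSLRequest code: `1234` in the high 16 bits and `5679` in the low 16 bits of the 8-byte startup packet. A standalone sketch of the same detection:

```ts
// Standalone sketch of the SSLRequest detection used by the proxy above.
// An SSLRequest packet is 8 bytes: int32 length (8) followed by int32 code 80877103.
const SSL_REQUEST_CODE = 0x04d2162f; // (1234 << 16) | 5679

function isSSLRequest(first8: Buffer): boolean {
  return first8.length >= 8 && first8.readUInt32BE(4) === SSL_REQUEST_CODE;
}

// On a match the proxy answers with a single 'S' byte and upgrades the socket to TLS;
// otherwise it treats the bytes as the start of a plain-text Postgres startup message.
```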
/run_executable.ts:
--------------------------------------------------------------------------------
1 |
2 | // ---------------------------------------------------------------------------
3 | // SEA bootstrap: restore a real, file-system-backed `require` BEFORE anything
4 | // else is evaluated. We do it with plain-CommonJS so esbuild keeps the code
5 | // right at the top of the output file.
6 | // ---------------------------------------------------------------------------
7 | const { createRequire } = require('node:module');
8 | // Build a real file-system aware require without shadowing esbuild's internal
9 | // helper (which is also called `requireX`).
10 | // In SEA context, use a simple fallback approach
11 | let realRequire;
12 | try {
13 | // Try using __filename first (regular Node.js)
14 | realRequire = createRequire(__filename);
15 | } catch (err) {
16 | // Fallback for SEA context - use a valid file path
17 | realRequire = createRequire(process.cwd() + '/package.json');
18 | }
19 |
20 | // Expose it globally so libraries that call plain `require()` (e.g. inside the
21 | // esbuild bundle) still succeed, but don't overwrite the per-module helpers
22 | // that esbuild generates (require2, require3...).
23 | // eslint-disable-next-line @typescript-eslint/ban-ts-comment
24 | // @ts-ignore
25 | if (typeof global.require !== 'function') {
26 | global.require = realRequire;
27 | }
28 |
29 | import fs from 'fs'
30 | import path from 'path'
31 | import express from "express";
32 | import http from "http";
33 | import { Server as SocketIOServer } from "socket.io";
34 |
35 | // app specific imports
36 | import args from "server/args";
37 | import { setup_routes } from "server/apis/http";
38 | import { setup_sockets } from "server/apis/sockets";
39 | import { getMainProps } from "server/main_props";
40 | import { buildProxyUrl, startListener } from "server/proxy";
41 | import { testDbConnection } from "server/database_helper";
42 |
43 | // Node SEA (Single Executable Application) allows bundling assets at build time.
44 | // We import the helper APIs so that, when the application is built as a SEA,
45 | // we can read those embedded assets. When running in development mode the files
46 | // will be read from the real file-system instead.
47 | import { getAsset, isSea } from "node:sea";
48 |
49 | // Override emitWarning so the default stderr printing is bypassed for SQLite and url.parse() warnings.
50 | // Keep original behaviour for everything else.
51 | const originalEmitWarning = process.emitWarning;
52 | process.emitWarning = function (warning: any, ...args: any[]) {
53 | // Debugging line removed to avoid noisy console output.
54 | // If the first argument is the message string
55 | if (typeof warning === 'string' && (warning.includes('SQLite') || warning.includes('url.parse()'))) {
56 | return;
57 | }
58 | // If the first argument is an Error object
59 | if (warning instanceof Error) {
60 | if (warning.name === 'ExperimentalWarning' && /SQLite/.test(warning.message)) {
61 | return;
62 | }
63 | if (warning.name === 'DeprecationWarning' && /url\.parse\(\)/.test(warning.message)) {
64 | return;
65 | }
66 | }
67 | // @ts-ignore – preserve Node's original signature
68 | return originalEmitWarning.call(this, warning, ...args);
69 | };
70 |
71 | // Convenience helper for loading a UTF-8 text asset — either from the real
72 | // file-system (during local development) or from the SEA bundle (when
73 | // isSea() === true).
74 | function readAssetTextSync(relativePath: string): string {
75 | const absolute = path.resolve(__dirname, relativePath);
76 |
77 | if (fs.existsSync(absolute)) {
78 | return fs.readFileSync(absolute, "utf8");
79 | }
80 |
81 | if (isSea()) {
82 | // When packaged as a SEA the file will have been listed under the
83 | // `assets` field of sea-config.json using exactly the same key that we
84 | // pass here. If it does not exist an error will be thrown so let it
85 | // propagate up.
86 | return getAsset(relativePath, "utf8");
87 | }
88 |
89 | throw new Error(`Asset not found: ${relativePath}`);
90 | }
91 |
92 | const port = args.webPort;
93 | async function createServer() {
94 | // Quickly verify database connectivity before starting the web server
95 | await testDbConnection(args.db);
96 |
97 | const app = express()
98 |
99 | const http_server = http.createServer(app);
100 | const io = new SocketIOServer(http_server, {});
101 | setup_sockets(io);
102 | setup_routes(app, io);
103 |
104 | // Serve static assets from /client/* by mapping to /dist/*
105 | app.get('/client/*', (req, res, next) => {
106 | // Extract the path after /client/
107 | const assetPath = req.path.replace('/client/', '');
108 |
109 | // Handle special cases first
110 | if (assetPath === 'index.css') {
111 | res.setHeader('Content-Type', 'text/css');
112 | try {
113 | const output = readAssetTextSync('dist/assets/index.css');
114 | return res.send(output);
115 | } catch (err) {
116 | return res.status(404).send('CSS file not found');
117 | }
118 | }
119 |
120 | if (assetPath === 'index.js') {
121 | res.setHeader('Content-Type', 'text/javascript');
122 | // Prefer the vanilla file name first (Vite's default). Fallback to the
123 | // legacy `.js.txt` name so existing builds keep working.
124 | try {
125 | const output = readAssetTextSync('dist/index.js');
126 | return res.send(output);
127 | } catch (_) {
128 | try {
129 | const output = readAssetTextSync('dist/index.js.txt');
130 | return res.send(output);
131 | } catch (err) {
132 | return res.status(404).send('JS file not found');
133 | }
134 | }
135 | }
136 |
137 | // For all other assets, try to serve them from dist/
138 | const distPath = `dist/${assetPath}`;
139 |
140 | try {
141 | // Try to read the asset
142 | const asset = readAssetTextSync(distPath);
143 |
144 | // Set appropriate content type based on file extension
145 | const ext = path.extname(assetPath).toLowerCase();
146 | const contentTypes: { [key: string]: string } = {
147 | '.js': 'text/javascript',
148 | '.css': 'text/css',
149 | '.html': 'text/html',
150 | '.json': 'application/json',
151 | '.svg': 'image/svg+xml',
152 | '.png': 'image/png',
153 | '.jpg': 'image/jpeg',
154 | '.jpeg': 'image/jpeg',
155 | '.gif': 'image/gif',
156 | '.ico': 'image/x-icon',
157 | '.woff': 'font/woff',
158 | '.woff2': 'font/woff2',
159 | '.ttf': 'font/ttf',
160 | '.eot': 'application/vnd.ms-fontobject'
161 | };
162 |
163 | if (contentTypes[ext]) {
164 | res.setHeader('Content-Type', contentTypes[ext]);
165 | }
166 |
167 | res.send(asset);
168 | } catch (err) {
169 | // Asset not found, continue to next middleware
170 | next();
171 | }
172 | });
173 |
174 | // Vite serves every project file from the root by default; this guard blocks direct access to server files.
175 | app.use(async (req, res, next) => {
176 | const url = req.originalUrl
177 | let cleaned_url = url.split('?')[0]
178 | // remove leading slashes
179 | cleaned_url = cleaned_url.replace(/^\/+/, '')
180 |
181 | const allowed_prefixes = ['client', 'shared', 'node_modules', 'socket.io'];
182 | if (cleaned_url == '' || allowed_prefixes.some(prefix => cleaned_url.startsWith(prefix))) {
183 | return next();
184 | } else {
185 | // check if file with exact path exists
186 | const file_path = path.join(__dirname, cleaned_url);
187 | const exists = fs.existsSync(file_path);
188 | if(exists) {
189 | res.status(404).send('Not found');
190 | } else {
191 | next();
192 | }
193 | }
194 | })
195 |
196 |
197 | app.get('*', async (req, res, next) => {
198 | const url = req.originalUrl
199 |
200 | const skip_prefixes = ['/client', '/node_modules', '/@vite', '/@react-refresh'];
201 | if (skip_prefixes.some(prefix => url.startsWith(prefix))) {
202 | return next();
203 | }
204 |
205 | const initial_state = await getMainProps(req);
206 |
207 | try {
208 | const template = readAssetTextSync('dist/index.html');
209 |
210 | let html = '';
211 |
212 | html = template.replace(``, '')
213 | .replace(``, '')
214 | .replace(`''`, JSON.stringify(initial_state))
215 |
216 | res.status(200).set({ 'Content-Type': 'text/html' }).end(html)
217 |
218 | } catch (e) {
219 | // If an error is caught, forward it to Express's error-handling middleware
220 | // (no Vite dev server is available in the packaged build).
221 | next(e)
222 | }
223 | })
224 |
225 | http_server.listen(port, async () => {
226 |
227 | // Log proxy URL using the helper function
228 | const listener = await startListener();
229 | const proxyUrl = buildProxyUrl(listener);
230 | console.log(`→ Connect to dbpill SQL proxy at ${proxyUrl} to intercept queries.`);
231 | console.log(`→ Go to dbpill web UI at http://localhost:${port} to manage the results.`)
232 | })
233 |
234 | return app
235 | }
236 |
237 | // Initialize the server (wrapped in a promise chain to avoid top-level await)
238 | createServer().catch((err) => {
239 | console.error('Failed to start server:', err);
240 | process.exit(1);
241 | });
242 |
243 | process.on('uncaughtException', (error) => {
244 | console.error('Uncaught Exception:', error);
245 | });
246 |
247 | process.on('unhandledRejection', (reason, promise) => {
248 | console.error('Unhandled Rejection at:', promise, 'reason:', reason);
249 | });
250 |
251 | // Node:sea types are declared in `node-sea.ts`.
--------------------------------------------------------------------------------
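`readAssetTextSync()` above resolves keys like `dist/index.html` either from disk or via `getAsset()`, which only works when the same keys are declared under the `assets` field of `sea-config.json`. A hypothetical sketch of that mapping, written as a TypeScript object for illustration only (the entries are assumptions, not the project's actual `sea-config.json`):

```ts
// Hypothetical shape of the sea-config.json "assets" map; values here are assumptions.
// Every key passed to getAsset()/readAssetTextSync() must appear verbatim in this map.
const seaConfigAssets: Record<string, string> = {
  'dist/index.html': 'dist/index.html',
  'dist/index.js': 'dist/index.js',
  'dist/assets/index.css': 'dist/assets/index.css',
};
```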
/server/prompt_generator.ts:
--------------------------------------------------------------------------------
1 | export interface SuggestionPromptParams {
2 | queryText: string;
3 | queryPlanJson: any;
4 | tableDefinitions: string[];
5 | appliedIndexes?: string | null;
6 | suggestionHistory?: any[];
7 | }
8 |
9 | /**
10 | * Generate the full prompt sent to the LLM for index suggestions based on
11 | * the query, its plan and table metadata.
12 | */
13 | export function generateSuggestionPrompt({
14 | queryText,
15 | queryPlanJson,
16 | tableDefinitions,
17 | appliedIndexes,
18 | suggestionHistory = [],
19 | }: SuggestionPromptParams): string {
20 | // Generate suggestion history section
21 | let historySection = '';
22 | if (suggestionHistory && suggestionHistory.length > 0) {
23 | historySection = `\n\n`;
24 | historySection += `The following index suggestions have been tried previously for this query. Please learn from these attempts and suggest something different that might work better:\n\n`;
25 |
26 | // Reverse the array to show chronological order (oldest first)
27 | const chronologicalHistory = [...suggestionHistory].reverse();
28 |
29 | chronologicalHistory.forEach((suggestion, index) => {
30 | const suggestionNum = index + 1; // Start from 1 for oldest attempt
31 | historySection += `\n`;
32 | historySection += `${suggestion.reverted ? 'Applied and then REVERTED (did not help)' : suggestion.applied ? 'Currently APPLIED' : 'Suggested but not applied'}\n`;
33 | historySection += `\n\`\`\`sql\n${suggestion.suggested_indexes || 'None'}\n\`\`\`\n\n`;
34 |
35 | if (suggestion.prev_exec_time && suggestion.new_exec_time) {
36 | const improvement = suggestion.prev_exec_time / suggestion.new_exec_time;
37 | historySection += `${suggestion.prev_exec_time.toFixed(2)}ms → ${suggestion.new_exec_time.toFixed(2)}ms (${improvement.toFixed(2)}x ${improvement > 1 ? 'improvement' : 'degradation'})\n`;
38 |
39 | if (suggestion.reverted) {
40 | historySection += `This suggestion was reverted because it ${improvement < 1 ? 'made the query slower' : 'did not provide sufficient improvement'}\n`;
41 | }
42 | } else {
43 | historySection += `Not measured\n`;
44 | }
45 |
46 | if (suggestion.llm_response && suggestion.llm_response !== 'Manual suggestion') {
47 | // Extract reasoning from LLM response if available
48 | const reasoning = suggestion.llm_response.split('```')[0].trim();
49 | if (reasoning.length > 50) {
50 | historySection += `${reasoning.substring(0, 300)}${reasoning.length > 300 ? '...' : ''}\n`;
51 | }
52 | }
53 |
54 | historySection += `\n\n`;
55 | });
56 |
57 | historySection += `Based on this history, please suggest a completely different approach. If previous attempts focused on certain columns or index types, try a different strategy.\n\n\n`;
58 | }
59 |
60 | return `
61 |
62 | Given the following PostgreSQL query, query plan & table definitions, suggest only one index improvement that would result in significantly faster query execution. Generally avoid partial indexes unless you're *certain* it will lead to orders-of-magnitude improvements. Think through the query, the query plan, the indexes the plan used, the indexes already present on the tables, and come up with a plan. Then, provide a single code block with all the index proposals together at the end. i.e.:
63 | \u0060\u0060\u0060sql
64 | CREATE INDEX dbpill_index_name_upper ON table_name (column_name1, some_function(column_name2));
65 | \u0060\u0060\u0060
66 |
67 | Make sure the suggested index is to improve the provided query specifically, not other hypothetical queries. Pay close attention to the query, and make sure any data transformation in the where clause is also applied to the index declaration.
68 |
69 | Always prefix the index name with dbpill_ to avoid conflicts with existing indexes.
70 |
71 |
72 |
73 | PostgreSQL Index-Tuning Heuristics
74 | (Optimized for automated review of EXPLAIN (ANALYZE, BUFFERS) plans, table DDL and statistics)
75 |
76 |
77 | 1 / Scan Patterns
78 |
79 | Seq Scan touching ≫ 5–10 % of a large relation
80 | Likely missing index on filter predicates
81 | Create (partial) index on columns in WHERE clause
82 |
83 |
84 | Index Scan with many "Rows Removed by Filter"
85 | Index is not covering or not selective
86 | Add INCLUDE columns or switch to composite/partial index
87 |
88 |
89 | Index Scan reading ≫ 10 % of pages
90 | Low selectivity—index may be useless
91 | Consider dropping or replacing with composite/partial index
92 |
93 |
94 | Index Only Scan not chosen (shows Heap Fetches)
95 | Key columns are in index but query still hits heap
96 | Add remaining output columns with INCLUDE, or VACUUM so visibility map is up-to-date
97 |
98 |
99 | Bitmap Index Scan → Bitmap Heap Scan
100 | Acceptable for medium result sets; if repeated or expensive, consider better index
101 | Create composite index that matches all bitmap conditions or make index more selective
102 |
103 |
104 | Multiple Index Scans on the same table under one node
105 | Optimiser intersecting results instead of single probe
106 | Build composite index with columns ordered by equality → range → sort columns
107 |
108 |
109 |
110 |
111 | 2 / Filter & Predicate Clues
112 | Filter executed after scan (Filter: line) ⇒ predicate not in index; evaluate partial/composite index.
113 | Expression filters (WHERE lower(col) = …, JSONB operators, date trunc, etc.) ⇒ consider expression or functional index.
114 | High-cardinality boolean or enum used in filter ⇒ partial index … WHERE flag = 'Y'.
115 |
116 |
117 |
118 | 3 / Join Indicators
119 |
120 | Hash Join building large hash on a big table
121 | No usable B-tree on join key
122 |
123 |
124 | Merge Join performing explicit sort on input
125 | Add index that matches join key and order
126 |
127 |
128 | Nested Loop with high actual rows on inner side
129 | Inner table needs index on join key to avoid repeated scans
130 |
131 |
132 |
133 |
134 | 4 / Sort & Aggregate
135 | Sort node with external or disk method ⇒ add index that matches ORDER BY keys (or keys + filter for partial sort).
136 | GroupAggregate doing explicit sort ⇒ same as above or consider index-only aggregate (ordered DISTINCT).
137 | Aggregate scanning full table for COUNT/ SUM with selective filter ⇒ index on filtered column(s) may be faster.
138 |
139 |
140 |
141 | 5 / Parallelism Hints
142 | Parallel Seq Scan on a small (< 1 GB) table usually means no selective index exists; add one.
143 | Parallel Index Scan rarely appears; if Postgres parallelises a query but falls back to serial index probes, check whether composite/covering index could avoid that.
144 |
145 |
146 |
147 | 6 / Index Design Checks
148 | Match access pattern. Equality columns first, then range, then ordering/grouping columns.
149 | Cover what you return. Use INCLUDE for non-filter, non-order columns to promote index-only scan.
150 | Use the right type.
151 | • Pattern search with %suffix ⇒ B-tree not useful; use pg_trgm GIN.
152 | • Full-text ⇒ GIN/GiST on to_tsvector.
153 | • @>/JSONB containment ⇒ GIN.
154 | • Large monotonically increasing key ⇒ consider BRIN.
155 | Partial indexes: Perfect when predicate value appears in ≪ 20 % of rows.
156 | Do not over-index. Each added index costs space & write-amplification; prefer composite or partial over many singles.
157 | Eliminate duplicates. Drop overlapping or unused indexes (check pg_stat_user_indexes.idx_scan = 0).
158 |
159 |
160 |
161 | 7 / Health & Maintenance Signals
162 | High avg_leaf_density or idx_scan ≪ idx_tup_fetch ⇒ index bloat; consider REINDEX or pg_repack.
163 | Stale n_dead_tup or many Heap Fetches on supposed index-only path ⇒ run VACUUM (ANALYZE) more often.
164 | random_page_cost vs. seq_page_cost: if custom settings are skewing plans, validate them.
165 |
166 |
167 |
168 | 8 / Rule-of-Thumb Creation Checklist (for suggestion engines)
169 |
170 | IF column appears in (JOIN OR WHERE OR ORDER BY OR GROUP BY)
171 | AND table.rows > 10 000
172 | AND condition is selective (estimated_rows < 5 % of reltuples)
173 | THEN propose index on those columns
174 |
175 |
176 | - Prefer composite over separate indexes when query touches ≥ 2 columns together.
177 | - For ad-hoc predicates that hit small slice of large table, suggest partial index.
178 | - When multiple plans share identical expensive node, recommend clustering or covering index to serve all.
179 |
180 |
181 |
182 |
183 | Use the above cues to generate candidate CREATE INDEX statements, explain why they help (selectivity, covering, ordering) and estimate improvement based on plan cost and actual time/rows metrics.
184 |
185 |
186 |
187 | ${historySection}
188 |
189 | ${queryText}
190 |
191 |
192 |
193 | ${JSON.stringify(queryPlanJson, null, 2)}
194 |
195 |
196 |
197 | ${tableDefinitions.join('\n\n')}
198 |
199 |
200 | ${appliedIndexes ? `\nThe following indexes are currently applied to the database:\n${appliedIndexes}\n` : ``}
201 | `;
202 | }
--------------------------------------------------------------------------------
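A small illustration of how this generator can be fed; the query text, plan JSON and DDL below are invented for the example, whereas real callers pass the captured query, its `EXPLAIN (ANALYZE)` output and live table definitions:

```ts
// Illustrative input for generateSuggestionPrompt(); all values below are made up.
import { generateSuggestionPrompt } from './prompt_generator';

const prompt = generateSuggestionPrompt({
  queryText: "SELECT * FROM orders WHERE customer_id = $1 AND created_at > now() - interval '30 days'",
  queryPlanJson: { Plan: { 'Node Type': 'Seq Scan', 'Relation Name': 'orders' } },
  tableDefinitions: [
    'CREATE TABLE orders (id bigint PRIMARY KEY, customer_id bigint, created_at timestamptz);',
  ],
  appliedIndexes: null,
  suggestionHistory: [],
});

// The resulting string is what gets sent to the LLM (e.g. via prompt_llm in server/llm.ts)
// to obtain a single CREATE INDEX proposal prefixed with dbpill_.
```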
/client/components/QueryList.tsx:
--------------------------------------------------------------------------------
1 | import { useEffect, useState, useContext } from 'react';
2 | import { useNavigate } from 'react-router-dom';
3 | import { AppContext } from '../context/AppContext';
4 | import { queryApi } from '../utils/HttpApi';
5 | import styled from 'styled-components';
6 | import { QuerySuggestions } from './QuerySuggestions';
7 | import dbpillDiagram from '../assets/dbpill_diagram.svg';
8 |
9 | import {
10 | QuerySort,
11 | QuerySortOption,
12 | TableContainer,
13 | QueryCard,
14 | QueryContentSection,
15 | QueryText,
16 | QueryStatsSection,
17 | ActionButton,
18 | QueryActionsSection,
19 | LoadingIndicator,
20 | StatsTable,
21 | StatsTableBody,
22 | StatsTableRow,
23 | StatsTableLabelCell,
24 | StatsTableValueCell,
25 | NumUnit,
26 | } from '../styles/Styled';
27 |
28 | import { QueryDetailsBar } from './QueryDetailsBar';
29 |
30 | import { formatNumber } from '../utils/formatNumber';
31 | import { highlightSQL } from '../utils/sqlHighlighter';
32 |
33 | // --- Local styled components for the new bottom tab bar redesign ---
34 |
35 | const CardWrapper = styled.div`
36 | display: flex;
37 | flex-direction: column;
38 | `;
39 |
40 | const StatsHeader = styled.div`
41 | margin-bottom: 10px;
42 | padding-left: 5px;
43 | `;
44 |
45 | const StatsText = styled.div`
46 | color: rgba(255, 255, 255, 0.5);
47 | font-size: 14px;
48 | `;
49 |
50 | const InstructionsContainer = styled.div`
51 | max-width: 600px;
52 | margin: 0 auto;
53 | line-height: 1.6;
54 |
55 | h1 {
56 | margin-bottom: 20px;
57 | color: color(display-p3 0.964 0.7613 0.3253);
58 | }
59 |
60 | p {
61 | margin-bottom: 15px;
62 | color: rgba(255, 255, 255, 0.9);
63 | }
64 | `;
65 |
66 | export function QueryList() {
67 | const [stats, setStats] = useState<any[]>([]);
68 | const [orderBy, setOrderBy] = useState('avg_exec_time');
69 | const [orderDirection, setOrderDirection] = useState<'asc' | 'desc'>('desc');
70 | const [loadingSuggestions, setLoadingSuggestions] = useState<{ [key: string]: boolean }>({});
71 | const [rerunning, setRerunning] = useState<{ [key: string]: boolean }>({});
72 | const [expandedQueries, setExpandedQueries] = useState<{ [key: string]: boolean }>({});
73 | const navigate = useNavigate();
74 | const { args } = useContext(AppContext);
75 |
76 | const toggleQueryExpansion = (queryId: string) => {
77 | setExpandedQueries(prev => ({
78 | ...prev,
79 | [queryId]: !prev[queryId],
80 | }));
81 | };
82 |
83 |
84 |
85 | const order = (column: string) => {
86 | if (orderBy === column) {
87 | setOrderDirection(orderDirection === 'desc' ? 'asc' : 'desc');
88 | } else {
89 | setOrderDirection('desc');
90 | }
91 | setOrderBy(column);
92 | };
93 |
94 | const handleRerun = async (queryId: string) => {
95 | setRerunning(prev => ({ ...prev, [queryId]: true }));
96 | try {
97 | const data = await queryApi.analyzeQuery(queryId);
98 | setStats(prevStats => {
99 | const newStats = [...prevStats];
100 | const idx = newStats.findIndex(s => s.query_id === parseInt(queryId));
101 | if (idx !== -1) newStats[idx] = { ...newStats[idx], ...data };
102 | return newStats;
103 | });
104 | } catch (error) {
105 | console.error('Error rerunning query:', error);
106 | } finally {
107 | setRerunning(prev => ({ ...prev, [queryId]: false }));
108 | }
109 | };
110 |
111 | const getSuggestions = async (query_id: string) => {
112 | if (loadingSuggestions[query_id]) {
113 | return;
114 | }
115 |
116 | // Find any custom prompt stored in the current stats array for this query
117 | const currentStat = stats.find((s) => s.query_id === parseInt(query_id));
118 | const promptOverride = currentStat?.prompt_preview;
119 |
120 | setLoadingSuggestions(prev => ({ ...prev, [query_id]: true }));
121 | try {
122 | const data = await queryApi.getSuggestions(query_id, promptOverride);
123 | setStats((prevStats) => {
124 | const newStats = [...prevStats];
125 | const index = newStats.findIndex((stat) => stat.query_id === parseInt(query_id));
126 | if (index !== -1) {
127 | newStats[index] = {
128 | ...newStats[index],
129 | ...data,
130 | };
131 | }
132 | return newStats;
133 | });
134 | } catch (err: any) {
135 | alert(err.message);
136 | } finally {
137 | setLoadingSuggestions(prev => ({ ...prev, [query_id]: false }));
138 | }
139 | };
140 |
141 |
142 |
143 | useEffect(() => {
144 | const loadQueries = async () => {
145 | try {
146 | const data = await queryApi.getAllQueries(orderBy, orderDirection);
147 | setStats(data.stats);
148 | setOrderBy(data.orderBy);
149 | setOrderDirection(data.orderDirection as 'asc' | 'desc');
150 | } catch (error) {
151 | console.error('Error loading queries:', error);
152 | }
153 | };
154 |
155 | loadQueries();
156 | }, [orderBy, orderDirection]);
157 |
158 | const columns = stats[0] ? Object.keys(stats[0]) : [];
159 | if (columns.includes('query_id')) {
160 | columns.splice(columns.indexOf('query_id'), 1);
161 | }
162 |
163 | const dbUrl = new URL(args.db);
164 | const dbUser = dbUrl.username;
165 | const dbName = dbUrl.pathname.replace(/^\/+/, '');
166 |
167 | // Show instructions if no queries are available
168 | if (stats.length === 0) {
169 | return (
170 |
171 | Instructions
172 | {args && (
173 | <>
174 |
175 | dbpill is running on port {args.proxyPort}
→ postgresql://{dbUser}@localhost:{args.proxyPort}/{dbName}
176 |
177 |
178 | Change your app's PostgreSQL connection to port {args.proxyPort} to start intercepting queries.
179 |
180 | Once you start using your app & running queries through dbpill, they will appear here for analysis and optimization.
181 |
182 | >
183 | )}
184 |
185 | );
186 | }
187 |
188 | return (
189 |
190 |
191 |
192 | {stats.length} unique queries captured{' '}
193 | {stats.reduce((acc, stat) => acc + stat.num_instances, 0)} times
194 |
195 |
196 |
197 |
198 | order('avg_exec_time')}
200 | $active={orderBy === 'avg_exec_time' ? 'true' : undefined}
201 | >
202 | {orderBy === 'avg_exec_time' && (orderDirection === 'asc' ? '▲' : '▼')} Avg time
203 |
204 | order('total_time')}
206 | $active={orderBy === 'total_time' ? 'true' : undefined}
207 | >
208 | {orderBy === 'total_time' && (orderDirection === 'asc' ? '▲' : '▼')} Total time
209 |
210 |
211 | order('max_exec_time')}
213 | $active={orderBy === 'max_exec_time' ? 'true' : undefined}
214 | >
215 | {orderBy === 'max_exec_time' && (orderDirection === 'asc' ? '▲' : '▼')} Max time
216 |
217 | order('num_instances')}
219 | $active={orderBy === 'num_instances' ? 'true' : undefined}
220 | >
221 | {orderBy === 'num_instances' && (orderDirection === 'asc' ? '▲' : '▼')} Run count
222 |
223 | order('prev_exec_time/new_exec_time')}
225 | $active={orderBy === 'prev_exec_time/new_exec_time' ? 'true' : undefined}
226 | >
227 | {orderBy === 'prev_exec_time/new_exec_time' && (orderDirection === 'asc' ? '▲' : '▼')} Improvements
228 |
229 |
230 |
231 |
232 | {stats.map((stat, index) => {
233 | const isExpanded = expandedQueries[stat.query_id];
234 | const hasPerformanceData = stat.new_exec_time && stat.prev_exec_time;
235 | const improvement = hasPerformanceData ? stat.prev_exec_time / stat.new_exec_time : 0;
236 |
237 | return (
238 |
239 |
240 |
241 | toggleQueryExpansion(stat.query_id)}>
242 | {highlightSQL(stat.query)}
243 |
244 |
245 |
246 |
247 |
248 |
249 |
250 | Total
251 |
252 | {formatNumber(stat.total_time)} ms from {stat.num_instances} {stat.num_instances === 1 ? 'run' : 'runs'}
253 |
254 |
255 |
256 | Avg
257 | {formatNumber(stat.avg_exec_time)} ms
258 |
259 |
260 | Min
261 | {formatNumber(stat.min_exec_time)} ms
262 |
263 |
264 | Max
265 | {formatNumber(stat.max_exec_time)} ms
266 |
267 |
268 | Last
269 | {formatNumber(stat.last_exec_time)} ms
270 |
271 |
272 |
273 |
274 | handleRerun(stat.query_id.toString())}
277 | disabled={rerunning[stat.query_id]}
278 | style={{ marginTop: '8px', alignSelf: 'flex-start' }}
279 | >
280 | {rerunning[stat.query_id] ? Running... : (
281 | <>
282 | ↻ Run again with random params
283 | >
284 | )}
285 |
286 |
287 |
288 |
289 |
296 |
297 |
298 |
299 |
305 |
306 | );
307 | })}
308 |
309 |
310 | );
311 | }
--------------------------------------------------------------------------------
/client/components/SuggestionBox.tsx:
--------------------------------------------------------------------------------
1 | import { useState } from 'react';
2 | import { queryApi } from '../utils/HttpApi';
3 | import {
4 | ActionButton,
5 | LoadingIndicator,
6 | StatusTag,
7 | SuggestionContent,
8 | HighlightedSQL,
9 | StatsTable,
10 | StatsTableBody,
11 | StatsTableRow,
12 | StatsTableLabelCell,
13 | StatsTableValueCell,
14 | StatsTableImprovementCell,
15 | SuggestionTitleBar,
16 | SuggestionTitleGroup,
17 | SuggestionActionGroup,
18 | SuggestionContainer,
19 | DeleteSuggestionButton,
20 | PerformanceBadge,
21 | NumUnit,
22 | } from '../styles/Styled';
23 | import { formatNumber } from '../utils/formatNumber';
24 | import { highlightSQL } from '../utils/sqlHighlighter';
25 |
26 | interface SuggestionBoxProps {
27 | suggestion: any;
28 | queryId: string;
29 | suggestionIndex: number;
30 | statusText: string;
31 | onUpdate: (updatedStat: any) => void;
32 | onDelete: (suggestionIndex: number) => void;
33 | }
34 |
35 | export function SuggestionBox({
36 | suggestion,
37 | queryId,
38 | suggestionIndex,
39 | statusText,
40 | onUpdate,
41 | onDelete,
42 | }: SuggestionBoxProps) {
43 | const [isLoading, setIsLoading] = useState(false);
44 | const [isEditing, setIsEditing] = useState(false);
45 | const [editedIndexes, setEditedIndexes] = useState(suggestion.suggested_indexes || '');
46 | const [originalIndexes, setOriginalIndexes] = useState(suggestion.suggested_indexes || '');
47 | const [hasBeenEdited, setHasBeenEdited] = useState(false);
48 |
49 | const isReverted = !!suggestion.reverted;
50 | const isApplied = !!suggestion.applied && !isReverted;
51 | const isSuggested = !suggestion.applied && !isReverted;
52 | const status: 'reverted' | 'applied' | 'suggested' = isReverted ? 'reverted' : isApplied ? 'applied' : 'suggested';
53 |
54 | const startEdit = () => {
55 | const initialContent = suggestion.suggested_indexes || '';
56 | setEditedIndexes(initialContent);
57 | setOriginalIndexes(initialContent);
58 | setIsEditing(true);
59 | };
60 |
61 | const cancelEdit = () => {
62 | setIsEditing(false);
63 | setEditedIndexes(originalIndexes);
64 | };
65 |
66 | const saveEdit = async () => {
67 | setIsLoading(true);
68 |
69 | try {
70 | const requestBody: any = {
71 | query_id: queryId,
72 | suggested_indexes: editedIndexes
73 | };
74 |
75 | if (suggestion.suggestion_id) {
76 | requestBody.suggestion_id = suggestion.suggestion_id;
77 | }
78 |
79 | const data = await queryApi.saveEditedIndexes(queryId, editedIndexes, suggestion.suggestion_id);
80 | onUpdate(data);
81 | setHasBeenEdited(true);
82 | setIsEditing(false);
83 | } catch (error: any) {
84 | alert(error.message || 'Error saving edited indexes');
85 | } finally {
86 | setIsLoading(false);
87 | }
88 | };
89 |
90 | const applySuggestion = async () => {
91 | setIsLoading(true);
92 | try {
93 | const data = await queryApi.applySuggestions(queryId, suggestion.suggestion_id);
94 | onUpdate(data);
95 | } catch (error: any) {
96 | alert(error.message);
97 | } finally {
98 | setIsLoading(false);
99 | }
100 | };
101 |
102 | const revertSuggestion = async () => {
103 | setIsLoading(true);
104 | try {
105 | const data = await queryApi.revertSuggestions(queryId, suggestion.suggestion_id);
106 | onUpdate(data);
107 | } catch (error: any) {
108 | alert(error.message);
109 | } finally {
110 | setIsLoading(false);
111 | }
112 | };
113 |
114 | const deleteSuggestion = async () => {
115 | if (!confirm('Are you sure you want to delete this suggestion?')) {
116 | return;
117 | }
118 |
119 | setIsLoading(true);
120 |
121 | try {
122 | const data = await queryApi.deleteSuggestion(suggestion.suggestion_id);
123 | onDelete(suggestionIndex);
124 | onUpdate(data);
125 | } catch (error: any) {
126 | alert(error.message || 'Error deleting suggestion');
127 | } finally {
128 | setIsLoading(false);
129 | }
130 | };
131 |
132 | const renderActions = () => {
133 | return (
134 |
135 | {isSuggested && !isEditing && (
136 | <>
137 |
143 | ✎ Edit
144 |
145 |
150 | {isLoading ? (
151 | Applying...
152 | ) : (
153 | `⬇ Apply Index${suggestion.suggested_indexes && suggestion.suggested_indexes.trim().split(';').filter(line => line.trim()).length > 1 ? 'es' : ''}`
154 | )}
155 |
156 | >
157 | )}
158 |
159 | {isSuggested && isEditing && (
160 | <>
161 | {editedIndexes !== originalIndexes && (
162 |
168 | {isLoading ? (
169 | Saving...
170 | ) : (
171 | '💾 Save'
172 | )}
173 |
174 | )}
175 |
181 | {isLoading ? (
182 | Canceling...
183 | ) : (
184 | '✕ Cancel'
185 | )}
186 |
187 | >
188 | )}
189 |
190 | {isApplied && (
191 | <>
192 | Applied
193 |
198 | {isLoading ? (
199 | Reverting...
200 | ) : (
201 | '⬆ Revert'
202 | )}
203 |
204 | >
205 | )}
206 |
207 | {isReverted && !isEditing && (
208 | <>
209 |
215 | ✎ Edit
216 |
217 |
222 | {isLoading ? (
223 | Re-applying...
224 | ) : (
225 | `⬇ Re-apply${suggestion.suggested_indexes && suggestion.suggested_indexes.trim().split(';').filter(line => line.trim()).length > 1 ? ' Indexes' : ''}`
226 | )}
227 |
228 | >
229 | )}
230 |
231 | {isReverted && isEditing && (
232 | <>
233 | {editedIndexes !== originalIndexes && (
234 |
240 | {isLoading ? (
241 | Saving...
242 | ) : (
243 | '💾 Save'
244 | )}
245 |
246 | )}
247 |
253 | {isLoading ? (
254 | Canceling...
255 | ) : (
256 | '✕ Cancel'
257 | )}
258 |
259 | >
260 | )}
261 |
262 | );
263 | };
264 |
265 | const renderContent = () => {
266 | const hasPerf = suggestion.prev_exec_time !== null && suggestion.new_exec_time !== null &&
267 | suggestion.prev_exec_time !== undefined && suggestion.new_exec_time !== undefined;
268 | const improvementVal = hasPerf ? (suggestion.prev_exec_time / suggestion.new_exec_time) : 0;
269 |
270 | return (
271 |
272 | {isEditing ? (
273 |
334 | );
335 | };
336 |
337 | return (
338 |
339 | {suggestion.suggestion_id && (
340 |
344 | ✕
345 |
346 | )}
347 |
348 |
349 |
350 | {statusText}
351 |
352 | {hasBeenEdited && (
353 | (edited)
354 | )}
355 |
356 |
357 | {renderActions()}
358 |
359 |
360 | {renderContent()}
361 |
362 | );
363 | }
--------------------------------------------------------------------------------
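A minimal usage sketch (not part of the repository) showing how a parent list might render `SuggestionBox`, based only on the `SuggestionBoxProps` interface above. The `stat` shape and the `onStatUpdate` callback are illustrative assumptions.

```tsx
import { SuggestionBox } from './SuggestionBox';

// Hypothetical wrapper: `stat.suggestions` and `onStatUpdate` are assumed names,
// not identifiers taken from the repository.
export function SuggestionList({ stat, onStatUpdate }: { stat: any; onStatUpdate: (s: any) => void }) {
  return (
    <>
      {(stat.suggestions ?? []).map((suggestion: any, i: number) => (
        <SuggestionBox
          key={suggestion.suggestion_id ?? i}
          suggestion={suggestion}
          queryId={String(stat.query_id)}
          suggestionIndex={i}
          statusText={suggestion.reverted ? 'Reverted' : suggestion.applied ? 'Applied' : 'Suggested'}
          onUpdate={onStatUpdate}
          onDelete={(index) => console.log('suggestion removed at index', index)}
        />
      ))}
    </>
  );
}
```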
/make_all_executables.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | # Make executables for macOS (arm64 + x64), Linux (arm64 + x64) and Windows (arm64 + x64)
3 | # Requires: curl, tar, unzip, postject, codesign (macOS), Node.js >= v22 with SEA enabled.
4 | # Optional: set NODE_VERSION env var to override Node version.
5 |
6 | set -euo pipefail
7 |
8 | # Function to show usage
9 | show_usage() {
10 | echo "Usage: $0 [mac|win|linux|all]"
11 | echo ""
12 | echo "Arguments:"
13 | echo " mac Build only macOS executables (arm64 + x64)"
14 | echo " win Build only Windows executables (arm64 + x64)"
15 | echo " linux Build only Linux executables (arm64 + x64)"
16 | echo " all Build for all platforms (default)"
17 | echo ""
18 | echo "Note: macOS builds require notarization setup if MAC_NOTARIZE_PROFILE is set"
19 | exit 1
20 | }
21 |
22 | # Parse command line arguments
23 | PLATFORM="${1:-all}"
24 | case "$PLATFORM" in
25 | mac|win|linux|all)
26 | ;;
27 | -h|--help)
28 | show_usage
29 | ;;
30 | *)
31 | echo "Error: Invalid platform '$PLATFORM'"
32 | show_usage
33 | ;;
34 | esac
35 |
36 | # Load environment variables from .env if present so that AWS/Apple credentials & config are available
37 | if [[ -f ".env" ]]; then
38 | echo "Loading variables from .env"
39 | set -o allexport
40 | # shellcheck disable=SC1091
41 | source .env
42 | set +o allexport
43 | fi
44 |
45 | NODE_VERSION="${NODE_VERSION:-24.3.0}"
46 |
47 | # Define all possible targets
48 | ALL_TARGETS=(
49 | "darwin arm64"
50 | "darwin x64"
51 | "linux arm64"
52 | "linux x64"
53 | "win arm64"
54 | "win x64"
55 | )
56 |
57 | # Select targets based on platform argument
58 | case "$PLATFORM" in
59 | mac)
60 | TARGETS=(
61 | "darwin arm64"
62 | "darwin x64"
63 | )
64 | ;;
65 | win)
66 | TARGETS=(
67 | "win arm64"
68 | "win x64"
69 | )
70 | ;;
71 | linux)
72 | TARGETS=(
73 | "linux arm64"
74 | "linux x64"
75 | )
76 | ;;
77 | all)
78 | TARGETS=("${ALL_TARGETS[@]}")
79 | ;;
80 | esac
81 |
82 | echo "Building for platform(s): $PLATFORM"
83 | echo "Selected targets: ${TARGETS[*]}"
84 |
85 | # Check macOS notarization setup when building for mac
86 | if [[ "$PLATFORM" == "mac" || "$PLATFORM" == "all" ]]; then
87 | if [[ -z "${MAC_NOTARIZE_PROFILE:-}" ]]; then
88 | echo "Warning: MAC_NOTARIZE_PROFILE not set. macOS binaries will be built but not notarized."
89 | echo "To enable notarization, set up a keychain profile and export MAC_NOTARIZE_PROFILE."
90 | else
91 | echo "macOS notarization enabled with profile: $MAC_NOTARIZE_PROFILE"
92 | fi
93 | fi
94 |
95 | # Sentinel copied from existing make_executable.sh
96 | SENTINEL="NODE_SEA_FUSE_fce680ab2cc467b6e072b8b5df1996b2"
97 | APP_NAME="dbpill"
98 | APP_VERSION=$(node -p "require('./package.json').version")
99 | BUILD_DIR="build"
100 | CACHE_DIR="cache"
101 | ENTITLEMENTS="sea.entitlements"
102 |
103 | # Create necessary directories
104 | mkdir -p "$BUILD_DIR" "$CACHE_DIR"
105 |
106 | ###############################################################################
107 | # Step 1: Build the project and create the SEA payload (platform-agnostic).
108 | ###############################################################################
109 |
110 | echo "[1/4] Building project and SEA blob"
111 |
112 | npm run build
113 |
114 | npx esbuild run_executable.ts \
115 | --bundle --platform=node --format=cjs \
116 | --outfile=server.bundle.cjs
117 |
118 | node --experimental-sea-config sea-config.json # produces sea-prep.blob
119 |
120 | echo "SEA blob generated (sea-prep.blob)"
121 |
122 | ###############################################################################
123 | # Step 2: Iterate over targets and patch the correct Node runtime.
124 | ###############################################################################
125 |
126 | echo "[2/4] Building executables for all platforms"
127 |
128 | for tuple in "${TARGETS[@]}"; do
129 | read -r OS ARCH <<<"$tuple"
130 |   echo -e "\n=== Building for $OS-$ARCH ==="
131 |
132 | case "$OS" in
133 | win)
134 | EXT="zip"
135 | OUT_EXT=".exe"
136 | BIN_PATH="node.exe"
137 | README_EXT=".txt"
138 | ;;
139 | *)
140 | EXT="tar.gz"
141 | OUT_EXT=""
142 | BIN_PATH="bin/node"
143 | README_EXT=""
144 | ;;
145 | esac
146 |
147 | PKG="node-v${NODE_VERSION}-${OS}-${ARCH}.${EXT}"
148 | URL="https://nodejs.org/dist/v${NODE_VERSION}/${PKG}"
149 | CACHED_PKG="${CACHE_DIR}/${PKG}"
150 |
151 | WORKDIR="${BUILD_DIR}/${OS}-${ARCH}"
152 | rm -rf "$WORKDIR" && mkdir -p "$WORKDIR"
153 |
154 | # Download Node archive if not cached
155 | if [[ ! -f "$CACHED_PKG" ]]; then
156 | echo "Downloading $PKG to cache..."
157 | curl -L --progress-bar -o "$CACHED_PKG" "$URL"
158 | else
159 | echo "Using cached $PKG"
160 | fi
161 |
162 | # Extract archive
163 | case "$EXT" in
164 | zip)
165 | unzip -qo "$CACHED_PKG" -d "$WORKDIR" ;;
166 | tar.gz)
167 | tar -xzf "$CACHED_PKG" -C "$WORKDIR" ;;
168 | tar.xz)
169 | tar -xf "$CACHED_PKG" -C "$WORKDIR" ;;
170 | esac
171 |
172 | # Locate the node binary that was extracted and copy to top-level WORKDIR
173 | BIN_SRC=$(find "$WORKDIR" -type f -path "*/${BIN_PATH}" | head -n 1)
174 | if [[ -z "$BIN_SRC" ]]; then
175 | echo "Error: could not locate Node binary in $WORKDIR" >&2
176 | exit 1
177 | fi
178 | cp "$BIN_SRC" "$WORKDIR/${APP_NAME}${OUT_EXT}"
179 |
180 | # Clean up extracted Node folder (keep only our executable)
181 | find "$WORKDIR" -mindepth 1 -maxdepth 1 -type d -exec rm -rf {} +
182 |
183 | # Inject the SEA blob
184 | npx postject "$WORKDIR/${APP_NAME}${OUT_EXT}" NODE_SEA_BLOB sea-prep.blob \
185 | --sentinel-fuse "$SENTINEL" \
186 | --macho-segment-name NODE_SEA
187 |
188 | # Codesign macOS binaries so they can be executed locally
189 | if [[ "$OS" == "darwin" ]]; then
190 | codesign --remove-signature "$WORKDIR/${APP_NAME}${OUT_EXT}"
191 | if [[ -n "${MAC_CODESIGN_IDENTITY:-}" ]]; then
192 | echo "Signing with identity: $MAC_CODESIGN_IDENTITY (hardened runtime, JIT entitlement)"
193 | codesign --sign "$MAC_CODESIGN_IDENTITY" \
194 | --options runtime \
195 | --entitlements "$ENTITLEMENTS" \
196 | --timestamp "$WORKDIR/${APP_NAME}${OUT_EXT}"
197 | else
198 | echo "Signing with ad-hoc identity (hardened runtime, JIT entitlement)"
199 | codesign --sign - \
200 | --options runtime \
201 | --entitlements "$ENTITLEMENTS" \
202 | "$WORKDIR/${APP_NAME}${OUT_EXT}"
203 | fi
204 | fi
205 |
206 | echo "Built: $WORKDIR/${APP_NAME}${OUT_EXT}"
207 | done
208 |
209 | ###############################################################################
210 | # Step 3: Create README files and archives (zip for macOS and Windows, tar.gz for Linux)
211 | ###############################################################################
212 |
213 | echo "[3/4] Creating archives with README files"
214 |
215 | for tuple in "${TARGETS[@]}"; do
216 | read -r OS ARCH <<<"$tuple"
217 |
218 | case "$OS" in
219 | win)
220 | OUT_EXT=".exe"
221 | README_EXT=".txt"
222 | ;;
223 | *)
224 | OUT_EXT=""
225 | README_EXT=""
226 | ;;
227 | esac
228 |
229 | WORKDIR="${BUILD_DIR}/${OS}-${ARCH}"
230 | README_FILE="${WORKDIR}/README${README_EXT}"
231 |
232 | # Create README file
233 | cat > "$README_FILE" << EOF
234 | dbpill
235 | ======
236 |
237 | dbpill is a database optimization tool. It runs a PostgreSQL proxy that intercepts every query your app makes, provides detailed query analyses, and proposes AI-suggested optimizations that you can apply, measure, and revert in one click.
238 |
239 | Run dbpill from your terminal with:
240 |
241 | ./dbpill postgres://username:password@host:port/database
242 |
243 | This runs the proxy and the web interface; all you need to do is point your application's database connection at the proxy address shown in the command output. The web interface will be available at http://localhost:3000/, where you can view all queries and apply optimizations.
244 |
245 | If you need help, run:
246 | ./dbpill --help
247 |
248 | If it doesn't work and you'd like to send us your output as a bug report, run:
249 | ./dbpill --verbose
250 |
251 | If you'd like to export the query logs & detailed analyses, they are saved to a SQLite file named dbpill.sqlite.db in the same directory as the executable.
252 |
253 | For more information and documentation, visit:
254 | https://dbpill.com
255 |
256 | For help, email help@dbpill.com
257 |
258 | Platform: ${OS}-${ARCH}
259 | Version: ${APP_VERSION}
260 | Build Date: $(date)
261 | Author: Murat Ayfer (https://x.com/mayfer)
262 | EOF
263 |
264 | # Determine archive extension (.zip for macOS, required for notarization, and Windows; .tar.gz for Linux)
265 | if [[ "$OS" == "darwin" || "$OS" == "win" ]]; then
266 | ARCHIVE_EXT="zip"
267 | else
268 | ARCHIVE_EXT="tar.gz"
269 | fi
270 | ARCHIVE_NAME="${APP_NAME}-${APP_VERSION}-${OS}-${ARCH}.${ARCHIVE_EXT}"
271 | ARCHIVE_BASE="${APP_NAME}-${APP_VERSION}-${OS}-${ARCH}"
272 | ARCHIVE_DIR="${BUILD_DIR}/${ARCHIVE_BASE}"
273 | echo "Creating archive: $ARCHIVE_NAME"
274 |
275 | # Create a directory with the desired archive name and copy contents
276 | rm -rf "$ARCHIVE_DIR" && mkdir -p "$ARCHIVE_DIR"
277 | cp -R "$WORKDIR"/* "$ARCHIVE_DIR/"
278 |
279 | # Create archive from the properly named directory
280 | if [[ "$ARCHIVE_EXT" == "zip" ]]; then
281 | # Use ditto to preserve permissions and extended attributes (including notarization tickets)
282 | ditto -c -k --keepParent --sequesterRsrc --rsrc "$ARCHIVE_DIR" "${BUILD_DIR}/$ARCHIVE_NAME"
283 | else
284 | tar -czf "${BUILD_DIR}/$ARCHIVE_NAME" -C "$BUILD_DIR" "$ARCHIVE_BASE"
285 | fi
286 |
287 | # Clean up the temporary archive directory
288 | rm -rf "$ARCHIVE_DIR"
289 |
290 | echo "Archive created: $ARCHIVE_NAME"
291 |
292 | # Optionally notarize macOS archives (requires a configured notarytool keychain profile)
293 | if [[ "$OS" == "darwin" && -n "${MAC_NOTARIZE_PROFILE:-}" ]]; then
294 | echo "Submitting $ARCHIVE_NAME for notarization (profile: $MAC_NOTARIZE_PROFILE) ..."
295 | xcrun notarytool submit "${BUILD_DIR}/$ARCHIVE_NAME" --keychain-profile "$MAC_NOTARIZE_PROFILE" --wait
296 |
297 | # After successful notarization, staple the executable inside the workdir
298 | echo "Stapling notarization ticket to executable..."
299 | if xcrun stapler staple "$WORKDIR/${APP_NAME}${OUT_EXT}"; then
300 | echo "Successfully stapled ticket to executable"
301 | # Recreate the archive with the stapled executable
302 | echo "Recreating archive with stapled executable..."
303 | # Recreate the properly named directory and copy contents
304 | rm -rf "$ARCHIVE_DIR" && mkdir -p "$ARCHIVE_DIR"
305 | cp -R "$WORKDIR"/* "$ARCHIVE_DIR/"
306 | if [[ "$ARCHIVE_EXT" == "zip" ]]; then
307 | ditto -c -k --keepParent --sequesterRsrc --rsrc "$ARCHIVE_DIR" "${BUILD_DIR}/$ARCHIVE_NAME"
308 | else
309 | tar -czf "${BUILD_DIR}/$ARCHIVE_NAME" -C "$BUILD_DIR" "$ARCHIVE_BASE"
310 | fi
311 | # Clean up the temporary archive directory
312 | rm -rf "$ARCHIVE_DIR"
313 | else
314 | echo "Warning: Failed to staple ticket to executable. Archive notarization may still work."
315 | fi
316 |
317 | # 'stapler' cannot operate on .zip archives. It works with .app, .pkg, or .dmg bundles.
318 | if [[ "$ARCHIVE_EXT" != "zip" ]]; then
319 | echo "Stapling notarization ticket to archive"
320 | xcrun stapler staple "${BUILD_DIR}/$ARCHIVE_NAME"
321 | else
322 | echo "Note: .zip archives cannot be stapled, but executable inside is stapled."
323 | fi
324 | fi
325 |
326 | # Optionally upload the archive to S3 when S3_BUCKET is defined (requires AWS CLI)
327 | if [[ -n "${S3_BUCKET:-}" ]]; then
328 | echo "Uploading $ARCHIVE_NAME to s3://$S3_BUCKET/"
329 | aws s3 cp "${BUILD_DIR}/$ARCHIVE_NAME" "s3://${S3_BUCKET}/${ARCHIVE_NAME}" --acl public-read
330 | echo "✓ Upload complete. Download URL: https://${S3_BUCKET}.s3.amazonaws.com/${ARCHIVE_NAME}"
331 | fi
332 | done
333 |
334 | ###############################################################################
335 | # Step 4: Cleanup and summary
336 | ###############################################################################
337 |
338 | echo "[4/4] Cleanup and summary"
339 |
340 | # Clean up temporary files
341 | rm -f sea-prep.blob server.bundle.cjs
342 |
343 | echo -e "\nBuild complete for platform(s): $PLATFORM!"
344 | echo "Archives created:"
345 | for tuple in "${TARGETS[@]}"; do
346 | read -r OS ARCH <<<"$tuple"
347 | if [[ "$OS" == "darwin" || "$OS" == "win" ]]; then
348 | ARCHIVE_EXT="zip"
349 | else
350 | ARCHIVE_EXT="tar.gz"
351 | fi
352 | echo " ${APP_NAME}-${APP_VERSION}-${OS}-${ARCH}.${ARCHIVE_EXT}"
353 | done
354 |
355 | echo -e "\nExecutables are also available in ./${BUILD_DIR}/ for testing"
356 | echo "Downloaded Node.js archives are cached in ./${CACHE_DIR}/"
--------------------------------------------------------------------------------
/server/query_analyzer.ts:
--------------------------------------------------------------------------------
1 | import { Pool, PoolClient } from 'pg';
2 | import { QueryLogger } from './query_logger';
3 | import { format as formatQuery } from 'sql-formatter';
4 | import argv from './args';
5 |
6 | function debug(message: string, ...args: any[]) {
7 | if (argv.verbose) {
8 | console.log(message, ...args);
9 | }
10 | }
11 |
12 | interface AnalyzeParams {
13 | query: string;
14 | params?: any[];
15 | }
16 |
17 | interface QueryPlan {
18 | Plan: {
19 | 'Planning Time': number;
20 | 'Execution Time': number;
21 | };
22 | }
23 |
24 | export class QueryAnalyzer {
25 | private pool: Pool;
26 | private sessionId: string;
27 | public logger: QueryLogger;
28 | public host: string;
29 | public database: string;
30 | public port: number;
31 | // Cache for table sizes to avoid repeated queries
32 |   private tableSizeCache: Map<string, { table_size_bytes: number; estimated_rows: number }> = new Map();
33 |
34 | constructor(connectionString: string) {
35 | this.pool = new Pool({ connectionString });
36 | this.sessionId = Math.random().toString(36).substring(2, 8);
37 |
38 | // Parse connection details for later use (e.g. filtering logs by DB)
39 | try {
40 | const url = new URL(connectionString);
41 | this.host = url.hostname;
42 | // Remove leading slashes in pathname to obtain DB name
43 | this.database = url.pathname.replace(/^\/+/g, '');
44 | this.port = url.port ? parseInt(url.port, 10) : 5432;
45 | } catch (_) {
46 | // Fallbacks in case the connection string cannot be parsed
47 | this.host = 'localhost';
48 | this.database = '';
49 | this.port = 5432;
50 | }
51 |
52 | const logger = new QueryLogger('dbpill.sqlite.db');
53 | this.logger = logger;
54 | }
55 |
56 | private shouldSkipAnalysis(query: string): boolean {
57 | const trimmedQuery = query.trim().toUpperCase();
58 |
59 | // List of query types that can't be analyzed with EXPLAIN
60 | const skipPatterns = [
61 | 'SHOW',
62 | 'SET',
63 | 'RESET',
64 | 'START TRANSACTION',
65 | 'SAVEPOINT',
66 | 'RELEASE SAVEPOINT',
67 | 'ROLLBACK TO SAVEPOINT',
68 | 'PREPARE',
69 | 'EXECUTE',
70 | 'DEALLOCATE',
71 | 'LISTEN',
72 | 'NOTIFY',
73 | 'UNLISTEN',
74 | 'LOAD',
75 | 'DISCARD',
76 | 'CHECKPOINT',
77 | 'VACUUM',
78 | 'ANALYZE',
79 | 'REINDEX',
80 | 'CLUSTER',
81 | 'LOCK',
82 | 'GRANT',
83 | 'REVOKE',
84 | 'COPY',
85 | 'CREATE',
86 | 'DROP',
87 | 'ALTER',
88 | 'COMMENT',
89 | 'SECURITY',
90 | '\\', // psql meta-commands
91 | ];
92 |
93 | return skipPatterns.some(pattern => trimmedQuery.startsWith(pattern));
94 | }
95 |
96 |   async getTableStructure(tableName: string, schemaName?: string): Promise<string> {
97 | let output = '';
98 |
99 | let client: PoolClient | null = null;
100 | try {
101 | client = await this.pool.connect();
102 |
103 | if(!schemaName) {
104 | const currentSchemaQuery = `
105 | SELECT current_schema();
106 | `;
107 |
108 | const currentSchemaResult = await client.query(currentSchemaQuery);
109 | schemaName = currentSchemaResult.rows[0].current_schema;
110 | }
111 |
112 | try {
113 | // Table info
114 | const tableInfoQuery = `
115 | SELECT table_type
116 | FROM information_schema.tables
117 | WHERE table_schema = $1 AND table_name = $2;
118 | `;
119 | const tableInfoResult = await client.query(tableInfoQuery, [schemaName, tableName]);
120 | if (tableInfoResult.rows.length === 0) {
121 | return `Table '${schemaName}.${tableName}' not found.`;
122 | }
123 | output += `${tableInfoResult.rows[0].table_type}: ${schemaName}.${tableName}\n\n`;
124 |
125 | // Columns
126 | const columnQuery = `
127 | SELECT column_name, data_type, character_maximum_length, is_nullable, column_default
128 | FROM information_schema.columns
129 | WHERE table_schema = $1 AND table_name = $2
130 | ORDER BY ordinal_position;
131 | `;
132 | const columnResult = await client.query(columnQuery, [schemaName, tableName]);
133 | output += 'Columns:\n';
134 | columnResult.rows.forEach(row => {
135 | let columnInfo = `- ${row.column_name}: ${row.data_type}`;
136 | if (row.character_maximum_length) columnInfo += `(${row.character_maximum_length})`;
137 | columnInfo += ` ${row.is_nullable === 'YES' ? 'NULL' : 'NOT NULL'}`;
138 | if (row.column_default) columnInfo += ` DEFAULT ${row.column_default}`;
139 | output += columnInfo + '\n';
140 | });
141 | output += '\n';
142 |
143 | // Constraints
144 | const constraintQuery = `
145 | SELECT con.conname, con.contype, pg_get_constraintdef(con.oid) as definition
146 | FROM pg_constraint con
147 | INNER JOIN pg_class rel ON rel.oid = con.conrelid
148 | INNER JOIN pg_namespace nsp ON nsp.oid = rel.relnamespace
149 | WHERE nsp.nspname = $1 AND rel.relname = $2;
150 | `;
151 | const constraintResult = await client.query(constraintQuery, [schemaName, tableName]);
152 | if (constraintResult.rows.length > 0) {
153 | output += 'Constraints:\n';
154 | constraintResult.rows.forEach(row => {
155 | const constraintType = {
156 | 'p': 'PRIMARY KEY',
157 | 'f': 'FOREIGN KEY',
158 | 'u': 'UNIQUE',
159 | 'c': 'CHECK'
160 | }[row.contype] || 'OTHER';
161 | output += `- ${row.conname} (${constraintType}): ${row.definition}\n`;
162 | });
163 | output += '\n';
164 | }
165 |
166 | // Indexes
167 | const indexQuery = `
168 | SELECT indexname, indexdef
169 | FROM pg_indexes
170 | WHERE schemaname = $1 AND tablename = $2;
171 | `;
172 | const indexResult = await client.query(indexQuery, [schemaName, tableName]);
173 | if (indexResult.rows.length > 0) {
174 | output += 'Indexes:\n';
175 | indexResult.rows.forEach(row => {
176 | output += `- ${row.indexname}: ${row.indexdef}\n`;
177 | });
178 | output += '\n';
179 | }
180 |
181 | // Table stats
182 | const statsQuery = `
183 | SELECT pg_total_relation_size($1::regclass) AS total_size_bytes,
184 | pg_table_size($1::regclass) AS table_size_bytes,
185 | pg_indexes_size($1::regclass) AS index_size_bytes,
186 | pg_size_pretty(pg_total_relation_size($1::regclass)) AS total_size_pretty,
187 | pg_size_pretty(pg_table_size($1::regclass)) AS table_size_pretty,
188 | pg_size_pretty(pg_indexes_size($1::regclass)) AS index_size_pretty,
189 | pg_stat_get_live_tuples($1::regclass) AS live_tuples,
190 | pg_stat_get_dead_tuples($1::regclass) AS dead_tuples,
191 | (SELECT reltuples FROM pg_class WHERE oid = $1::regclass) AS estimated_rows
192 | `;
193 | const statsResult = await client.query(statsQuery, [`${schemaName}.${tableName}`]);
194 | if (statsResult.rows.length > 0) {
195 | const stats = statsResult.rows[0];
196 | output += 'Table Statistics:\n';
197 | output += `- Total Size: ${stats.total_size_pretty}\n`;
198 | output += `- Table Size: ${stats.table_size_pretty}\n`;
199 | output += `- Index Size: ${stats.index_size_pretty}\n`;
200 | output += `- Live Tuples: ${stats.live_tuples}\n`;
201 | output += `- Dead Tuples: ${stats.dead_tuples}\n`;
202 | output += `- Estimated Rows: ${stats.estimated_rows}\n`;
203 | output += `- Table Size (bytes): ${stats.table_size_bytes}\n`;
204 | }
205 |
206 | } catch (error) {
207 | output += `Error: ${error}\n`;
208 | }
209 |
210 | } catch (error) {
211 | debug('Error connecting to the database:', error);
212 | throw error;
213 | } finally {
214 | if (client) {
215 | client.release();
216 | }
217 | }
218 | return output;
219 | }
220 |
221 | /**
222 | * Return raw size information for a table (bytes + estimated rows).
223 | * Results are cached in-memory for the lifetime of the QueryAnalyzer instance
224 | * to avoid repeated calls for the same table name.
225 | */
226 | async getTableSize(tableName: string): Promise<{ table_size_bytes: number; estimated_rows: number }> {
227 | if (this.tableSizeCache.has(tableName)) {
228 | // Return cached value if present
229 | return this.tableSizeCache.get(tableName)!;
230 | }
231 |
232 | let client: PoolClient | null = null;
233 | try {
234 | client = await this.pool.connect();
235 |
236 | const sizeQuery = `
237 | SELECT pg_table_size(c.oid) AS table_size_bytes, c.reltuples AS estimated_rows
238 | FROM pg_class c
239 | WHERE c.relname = $1 AND c.relkind = 'r'
240 | LIMIT 1;
241 | `;
242 | const { rows } = await client.query(sizeQuery, [tableName]);
243 |
244 | if (rows.length === 0) {
245 | // Fallback: unknown table – cache zeroes to avoid repeated look-ups
246 | const fallback = { table_size_bytes: 0, estimated_rows: 0 };
247 | this.tableSizeCache.set(tableName, fallback);
248 | return fallback;
249 | }
250 |
251 | const info = {
252 | table_size_bytes: Number(rows[0].table_size_bytes),
253 | estimated_rows: Number(rows[0].estimated_rows),
254 | };
255 | this.tableSizeCache.set(tableName, info);
256 | return info;
257 | } finally {
258 | if (client) {
259 | client.release();
260 | }
261 | }
262 | }
263 |
264 |   async analyze({ query, params = [] }: AnalyzeParams): Promise<any> {
265 | if (this.shouldSkipAnalysis(query)) {
266 | return {
267 | sessionId: this.sessionId,
268 | query,
269 | params,
270 | queryPlan: null,
271 | planTime: 0,
272 | execTime: 0,
273 | tableSizes: {},
274 | };
275 | }
276 |
277 | let client: PoolClient | null = null;
278 |
279 | try {
280 | client = await this.pool.connect();
281 |
282 | const explainQuery = `EXPLAIN (ANALYZE, FORMAT JSON) ${query}`;
283 | const result = await client.query(explainQuery, params);
284 |
285 | const rows = (result as any)?.rows;
286 | if (Array.isArray(rows) && rows.length > 0) {
287 | const firstRow = rows[0];
288 | const planArray = firstRow?.["QUERY PLAN"];
289 |
290 | if (Array.isArray(planArray) && planArray.length > 0) {
291 | const queryPlan: QueryPlan = planArray[0];
292 | const planTime = queryPlan['Planning Time'];
293 | const execTime = queryPlan['Execution Time'];
294 |
295 | // Extract table names from the plan to collect size statistics
296 | function extractRelationNames(plan: any): string[] {
297 | const relationNames: string[] = [];
298 | function traverse(obj: any) {
299 | if (obj && typeof obj === 'object') {
300 | if ('Relation Name' in obj) {
301 | relationNames.push(obj['Relation Name']);
302 | }
303 | for (const key in obj) {
304 | if (Object.prototype.hasOwnProperty.call(obj, key)) {
305 | traverse(obj[key]);
306 | }
307 | }
308 | } else if (Array.isArray(obj)) {
309 | obj.forEach(traverse);
310 | }
311 | }
312 | traverse(plan);
313 | return [...new Set(relationNames)];
314 | }
315 |
316 |         let tableSizes: Record<string, { table_size_bytes: number; estimated_rows: number }> = {};
317 | try {
318 | const tables = extractRelationNames(queryPlan);
319 | const sizePromises = tables.map(async (t) => ({ name: t, info: await this.getTableSize(t) }));
320 | const sizes = await Promise.all(sizePromises);
321 | sizes.forEach(({ name, info }) => {
322 | tableSizes[name] = info;
323 | });
324 | } catch (_) {
325 | // Ignore errors in table size retrieval – analysis should still succeed
326 | }
327 |
328 | const sessionId = this.sessionId;
329 | debug(query);
330 | return { sessionId, query, params, queryPlan, planTime, execTime, tableSizes };
331 | }
332 | }
333 |
334 | // Fallback – return a minimal object so callers don't crash
335 | return {
336 | sessionId: this.sessionId,
337 | query,
338 | params,
339 | queryPlan: null,
340 | planTime: 0,
341 | execTime: 0,
342 | tableSizes: {},
343 | };
344 | } catch (error) {
345 | console.error(query);
346 | console.error(params);
347 | console.error('Error analyzing query:', error);
348 |       console.error('Did you connect to a database different from the one dbpill was pointed at? If so, the proxy may still work, but queries will fail to be analyzed.');
349 |
350 | // Still return a minimal object so downstream logic can continue
351 | return {
352 | sessionId: this.sessionId,
353 | query,
354 | params,
355 | queryPlan: null,
356 | planTime: 0,
357 | execTime: 0,
358 | tableSizes: {},
359 | };
360 | } finally {
361 | if (client) {
362 | client.release();
363 | }
364 | }
365 | }
366 |
367 | async saveAnalysis({ sessionId, query, params, queryPlan, planTime, execTime }: any) {
368 | query = formatQuery(query, { language: 'postgresql', denseOperators: true });
369 | await this.logger.addQueryStats({
370 | sessionId,
371 | query,
372 | params: JSON.stringify(params, null, 2),
373 | queryPlan: JSON.stringify(queryPlan, null, 2),
374 | planTime,
375 | execTime,
376 | host: this.host,
377 | database: this.database,
378 | port: this.port,
379 | });
380 | }
381 |
382 | async applyIndexes(indexes: string) {
383 | const client = await this.pool.connect();
384 | try {
385 | await client.query(`
386 | BEGIN;
387 | ${indexes}
388 | COMMIT;
389 | `);
390 | } catch (error) {
391 | console.error(indexes);
392 | console.error('Error applying indexes:', error);
393 | await client.query(`
394 | ROLLBACK;
395 | `);
396 | throw error;
397 | } finally {
398 | if (client) {
399 | client.release();
400 | }
401 | }
402 | }
403 |
404 |   async getAllAppliedIndexes(): Promise<any[]> {
405 | const client = await this.pool.connect();
406 | try {
407 | const indexes = await client.query(`
408 | SELECT
409 | i.relname AS index_name,
410 | t.relname AS table_name,
411 | a.attname AS column_name,
412 | ix.indisunique AS is_unique,
413 | ix.indisprimary AS is_primary,
414 | pg_catalog.pg_get_indexdef(ix.indexrelid) AS index_definition
415 | FROM
416 | pg_catalog.pg_class t
417 | JOIN pg_catalog.pg_index ix ON t.oid = ix.indrelid
418 | JOIN pg_catalog.pg_class i ON ix.indexrelid = i.oid
419 | JOIN pg_catalog.pg_attribute a ON a.attnum = ANY(ix.indkey) AND a.attrelid = t.oid
420 | WHERE
421 | t.relkind = 'r' -- only tables
422 | AND i.relname LIKE 'dbpill_%'
423 | ORDER BY
424 | t.relname, i.relname;
425 | `);
426 | return indexes.rows;
427 | } catch (error) {
428 | console.error('Error getting all applied indexes:', error);
429 | throw error;
430 | } finally {
431 | if (client) {
432 | client.release();
433 | }
434 | }
435 | }
436 |
437 |   async close(): Promise<void> {
438 | await this.pool.end();
439 | }
440 | }
441 |
442 | export default QueryAnalyzer;
--------------------------------------------------------------------------------
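A short usage sketch for the class above (the connection string, table, and query are placeholders; `example()` is not a function in the repository):

```ts
import QueryAnalyzer from './query_analyzer';

// Sketch: analyze a single query and persist the result through the built-in logger.
async function example() {
  const analyzer = new QueryAnalyzer('postgres://user:password@localhost:5432/mydb');
  try {
    const analysis = await analyzer.analyze({
      query: 'SELECT * FROM users WHERE email = $1',
      params: ['someone@example.com'],
    });
    console.log(`plan: ${analysis.planTime} ms, exec: ${analysis.execTime} ms`);
    await analyzer.saveAnalysis(analysis); // formats the SQL and writes to dbpill.sqlite.db
  } finally {
    await analyzer.close();
  }
}
```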
/client/components/Configs.tsx:
--------------------------------------------------------------------------------
1 | import React, { Component } from 'react';
2 | import styled from 'styled-components';
3 | import { AppContext } from '../context/AppContext';
4 | import { ActionButton, LoadingIndicator } from '../styles/Styled';
5 | import { adminApi } from '../utils/HttpApi';
6 |
7 | /* -------------------------------------------------------------------------- */
8 | /* Styles */
9 | /* -------------------------------------------------------------------------- */
10 |
11 | const Container = styled.div`
12 | max-width: 800px;
13 | margin: 0 auto;
14 | `;
15 |
16 | const Title = styled.h1`
17 | font-size: 24px;
18 | margin-bottom: 20px;
19 | color: #fff;
20 | `;
21 |
22 | const Form = styled.form`
23 | display: flex;
24 | flex-direction: column;
25 | gap: 20px;
26 | background-color: rgba(0, 0, 0, 0.3);
27 | padding: 30px;
28 | border-radius: 8px;
29 | border: 1px solid rgba(255, 255, 255, 0.1);
30 | `;
31 |
32 | const FormGroup = styled.div`
33 | display: flex;
34 | flex-direction: column;
35 | gap: 8px;
36 | `;
37 |
38 | const Label = styled.label`
39 | font-weight: 600;
40 | color: #fff;
41 | font-size: 14px;
42 | `;
43 |
44 | const Input = styled.input`
45 | padding: 12px;
46 | border: 1px solid rgba(255, 255, 255, 0.2);
47 | border-radius: 4px;
48 | background-color: rgba(255, 255, 255, 0.1);
49 | color: #fff;
50 | font-family: 'Inconsolata', monospace;
51 | font-size: 14px;
52 |
53 | &:focus {
54 | outline: none;
55 | border-color: color(display-p3 0.964 0.7613 0.3253);
56 | }
57 |
58 | &::placeholder {
59 | color: rgba(255, 255, 255, 0.5);
60 | }
61 | `;
62 |
63 | const Select = styled.select`
64 | padding: 12px;
65 | border: 1px solid rgba(255, 255, 255, 0.2);
66 | border-radius: 4px;
67 | background-color: rgba(255, 255, 255, 0.1);
68 | color: #fff;
69 | font-family: 'Inconsolata', monospace;
70 | font-size: 14px;
71 |
72 | &:focus {
73 | outline: none;
74 | border-color: color(display-p3 0.964 0.7613 0.3253);
75 | }
76 |
77 | option {
78 | background-color: #333;
79 | color: #fff;
80 | }
81 | `;
82 |
83 | const Button = styled.button`
84 | padding: 12px 24px;
85 | background-color: color(display-p3 0.964 0.7613 0.3253);
86 | color: #000;
87 | border: none;
88 | border-radius: 4px;
89 | font-weight: 600;
90 | cursor: pointer;
91 | font-family: 'Inconsolata', monospace;
92 | font-size: 14px;
93 | align-self: flex-start;
94 |
95 | &:hover {
96 | background-color: color(display-p3 0.9 0.7 0.3);
97 | }
98 |
99 | &:disabled {
100 | opacity: 0.5;
101 | cursor: not-allowed;
102 | }
103 | `;
104 |
105 | const Message = styled.div<{ type: 'success' | 'error' }>`
106 | padding: 12px;
107 | border-radius: 4px;
108 | background-color: ${props => props.type === 'success' ? 'rgba(0, 255, 0, 0.1)' : 'rgba(255, 0, 0, 0.1)'};
109 | border: 1px solid ${props => props.type === 'success' ? 'rgba(0, 255, 0, 0.3)' : 'rgba(255, 0, 0, 0.3)'};
110 | color: ${props => props.type === 'success' ? '#4CAF50' : '#F44336'};
111 | font-size: 14px;
112 | `;
113 |
114 | const Description = styled.p`
115 | color: rgba(255, 255, 255, 0.7);
116 | font-size: 14px;
117 | margin-bottom: 20px;
118 | line-height: 1.5;
119 | `;
120 |
121 | const HelpText = styled.span`
122 | color: rgba(255, 255, 255, 0.5);
123 | font-size: 12px;
124 | margin-top: 4px;
125 | `;
126 |
127 | /* -------------------------------------------------------------------------- */
128 |
129 | interface VendorApiKeys {
130 | anthropic?: string;
131 | openai?: string;
132 | xai?: string;
133 | google?: string;
134 | }
135 |
136 | const DEFAULT_MODELS = {
137 | anthropic: 'claude-sonnet-4-0',
138 | openai: 'o3',
139 | gemini: 'gemini-2.5-pro',
140 | grok: 'grok-3-beta'
141 | };
142 |
143 | interface ConfigsState {
144 | loading: boolean;
145 | resetting: boolean;
146 | message: { type: 'success' | 'error'; text: string } | null;
147 | messageTarget?: 'config' | 'apiKeys';
148 | endpointType: 'anthropic' | 'openai' | 'gemini' | 'grok' | 'custom';
149 | customUrl: string;
150 | formData: {
151 | llm_model: string;
152 | llm_api_key: string;
153 | };
154 | apiKeys: VendorApiKeys;
155 | }
156 |
157 | const InlineMessage = styled.span<{ type: 'success' | 'error' }>`
158 | color: ${props => (props.type === 'success' ? '#4CAF50' : '#F44336')};
159 | font-size: 14px;
160 | `;
161 |
162 | export class Configs extends Component<{}, ConfigsState> {
163 | static contextType = AppContext;
164 |   declare context: React.ContextType<typeof AppContext>;
165 |
166 | constructor(props: {}) {
167 | super(props);
168 |
169 | this.state = {
170 | loading: false,
171 | resetting: false,
172 | message: null,
173 | messageTarget: undefined,
174 | endpointType: 'anthropic',
175 | customUrl: '',
176 | formData: {
177 | llm_model: '',
178 | llm_api_key: ''
179 | },
180 | apiKeys: {
181 | anthropic: '',
182 | openai: '',
183 | xai: '',
184 | google: ''
185 | }
186 | };
187 | }
188 |
189 | componentDidMount() {
190 | this.initializeFromConfig();
191 | }
192 |
193 | componentDidUpdate(prevProps: {}, prevState: ConfigsState) {
194 | // Re-initialize if context becomes available
195 | if (this.context?.config && !prevState.formData.llm_model) {
196 | this.initializeFromConfig();
197 | }
198 | }
199 |
200 | initializeFromConfig = () => {
201 | const { config } = this.context || {};
202 | if (!config) return;
203 |
204 | const endpoint = config.llm_endpoint || 'anthropic';
205 | const endpointType =
206 | endpoint === 'anthropic' || endpoint === 'openai' || endpoint === 'gemini' || endpoint === 'grok'
207 | ? (endpoint as ConfigsState['endpointType'])
208 | : 'custom';
209 |
210 | const customUrl = endpointType === 'custom' ? endpoint : '';
211 |
212 | const defaultModel =
213 | config.llm_model ||
214 | (endpointType in DEFAULT_MODELS ? DEFAULT_MODELS[endpointType as keyof typeof DEFAULT_MODELS] : '');
215 |
216 | this.setState({
217 | endpointType,
218 | customUrl,
219 | formData: {
220 | llm_model: defaultModel,
221 | llm_api_key: config.llm_api_key || ''
222 | },
223 | apiKeys: {
224 | anthropic: config.apiKeys?.anthropic || '',
225 | openai: config.apiKeys?.openai || '',
226 | xai: config.apiKeys?.xai || '',
227 | google: config.apiKeys?.google || ''
228 | }
229 | });
230 | };
231 |
232 | /* ------------------------------ Handlers ------------------------------ */
233 |
234 |   handleEndpointChange = (e: React.ChangeEvent<HTMLSelectElement>) => {
235 | const newType = e.target.value as ConfigsState['endpointType'];
236 |
237 | this.setState(
238 | prev => ({
239 | endpointType: newType,
240 | customUrl: newType === 'custom' ? 'https://' : '',
241 | formData: {
242 | ...prev.formData,
243 | llm_model:
244 | newType !== 'custom' && newType in DEFAULT_MODELS
245 | ? DEFAULT_MODELS[newType as keyof typeof DEFAULT_MODELS]
246 | : prev.formData.llm_model
247 | }
248 | }),
249 | () => this.save('config')
250 | );
251 | };
252 |
253 |   handleInputChange = (e: React.ChangeEvent<HTMLInputElement>) => {
254 | const { name, value } = e.target;
255 | this.setState(prev => ({
256 | formData: {
257 | ...prev.formData,
258 | [name]: value
259 | }
260 | }));
261 | };
262 |
263 |   handleCustomUrlChange = (e: React.ChangeEvent<HTMLInputElement>) => {
264 | this.setState({ customUrl: e.target.value });
265 | };
266 |
267 | handleApiKeyChange = (vendor: keyof VendorApiKeys, value: string) => {
268 | this.setState(prev => ({
269 | apiKeys: {
270 | ...prev.apiKeys,
271 | [vendor]: value
272 | }
273 | }));
274 | };
275 |
276 | handleKeyDown = (target: 'config' | 'apiKeys') => (e: React.KeyboardEvent) => {
277 | if (e.key === 'Enter') {
278 | e.preventDefault();
279 | this.save(target);
280 | }
281 | };
282 |
283 | /* ------------------------------ Save Logic ---------------------------- */
284 |
285 | save = async (target: 'config' | 'apiKeys') => {
286 | if (this.state.loading) return;
287 |
288 | this.setState({ loading: true, message: null, messageTarget: target });
289 |
290 | const { updateConfig } = this.context;
291 | const { endpointType, customUrl, formData, apiKeys } = this.state;
292 |
293 | const submitData = {
294 | llm_endpoint: endpointType === 'custom' ? customUrl : endpointType,
295 | ...formData,
296 | apiKeys
297 | };
298 |
299 | try {
300 | await updateConfig(submitData);
301 | this.setState({ loading: false, message: { type: 'success', text: 'Saved!' }, messageTarget: target });
302 | setTimeout(() => this.setState({ message: null }), 2500);
303 | } catch (error) {
304 | console.error('Error updating config:', error);
305 | this.setState({ loading: false, message: { type: 'error', text: 'Save failed' }, messageTarget: target });
306 | }
307 | };
308 |
309 | /* ---------------------------- Reset Handler --------------------------- */
310 |
311 | handleReset = async () => {
312 | if (!confirm('Are you sure you want to clear all query logs? This action cannot be undone.')) return;
313 |
314 | this.setState({ resetting: true });
315 | try {
316 | await adminApi.resetQueryLogs();
317 | alert('Query logs have been cleared.');
318 | } catch (error: any) {
319 | console.error('Error resetting query logs:', error);
320 | alert(error.message || 'Failed to reset query logs');
321 | } finally {
322 | this.setState({ resetting: false });
323 | }
324 | };
325 |
326 | /* ------------------------------ Render ------------------------------- */
327 |
328 | render() {
329 | const { config } = this.context;
330 | const {
331 | loading,
332 | resetting,
333 | message,
334 | messageTarget,
335 | endpointType,
336 | customUrl,
337 | formData,
338 | apiKeys
339 | } = this.state;
340 |
341 | if (!config) {
342 | return (
343 |
344 | Loading Configuration...
345 |
346 | );
347 | }
348 |
349 | return (
350 |
351 | LLM Configuration
352 |
353 | Configure the Language Model settings for query optimization suggestions.
354 | These settings are stored locally and persist across restarts.
355 |
356 |
357 | {/* LLM Config Form */}
358 |
433 |
434 | {config.updated_at && (
435 |
436 | Last updated: {new Date(config.updated_at).toLocaleString()}
437 |
438 | )}
439 |
440 | {/* API Keys Form */}
441 | API Keys
442 |
443 | Configure API keys for each LLM provider. These keys are stored locally and will be
444 | used automatically when you select the corresponding provider above.
445 |
446 |
447 |
517 |
518 | {/* Maintenance */}
519 | Database Configuration
520 |
521 | You must configure the database connection string when launching the proxy:
522 | ./dbpill --db=postgres://user:password@host:port/database
523 |
524 |
525 | Maintenance
526 | Clear all captured query logs.
527 |
533 | {resetting ? Resetting... : 'Reset all ⌫'}
534 |
535 |
536 | );
537 | }
538 | }
--------------------------------------------------------------------------------
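For reference, a sketch of the config object shape that `Configs` appears to read from `AppContext`. The field names are inferred from the component above; this interface is illustrative and not an exported type in the repository.

```ts
// Inferred, illustrative shape only.
interface DbpillLlmConfig {
  llm_endpoint?: 'anthropic' | 'openai' | 'gemini' | 'grok' | string; // a custom URL is also accepted
  llm_model?: string;
  llm_api_key?: string;
  apiKeys?: {
    anthropic?: string;
    openai?: string;
    xai?: string;
    google?: string;
  };
  updated_at?: string | number;
}
```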
/client/styles/Styled.tsx:
--------------------------------------------------------------------------------
1 | import styled from 'styled-components';
2 |
3 | export const Table = styled.table`
4 | width: 100%;
5 | border-collapse: collapse;
6 | margin-top: 20px;
7 | background-color: rgba(45, 45, 45, 1);
8 | `;
9 |
10 | export const TableRow = styled.tr`
11 | background-color: rgba(45, 45, 45, 1);
12 | &:nth-child(even) {
13 | background-color: rgba(50, 50, 50, 1);
14 | }
15 | border-bottom: 1px solid rgba(255, 255, 255, 0.1);
16 | `;
17 |
18 | export const TableData = styled.td`
19 | padding: 12px;
20 | line-height: 1.2em;
21 | max-width: 20vw;
22 | vertical-align: top;
23 | overflow: hidden;
24 | text-overflow: ellipsis;
25 | text-align: left;
26 | border-right: 1px solid rgba(255, 255, 255, 0.1);
27 | color: rgba(255, 255, 255, 0.9);
28 | `;
29 |
30 | export const QueryStats = styled.div`
31 | display: flex;
32 | flex-direction: column;
33 | text-align: left;
34 | `;
35 |
36 | export const QueryStat = styled.div`
37 | padding: 0 2px 2px 2px;
38 | `;
39 |
40 | export const QueryText = styled.div<{ $expanded?: boolean }>`
41 | text-align: left;
42 | white-space: pre-wrap;
43 | word-wrap: break-word;
44 | word-break: break-word;
45 | overflow-wrap: break-word;
46 | padding: 12px;
47 | margin: 0;
48 | display: block;
49 | color: rgba(255, 255, 255, 0.7);
50 | font-size: 13px;
51 | line-height: 1.4;
52 | cursor: pointer;
53 |
54 | ${props => !props.$expanded && `
55 | max-height: 200px;
56 | overflow: hidden;
57 | position: relative;
58 |
59 | &:hover {
60 | border-color: rgba(255, 255, 255, 0.3);
61 | }
62 | `}
63 |
64 | ${props => props.$expanded && `
65 | max-height: none;
66 | `}
67 | `;
68 |
69 | export const QueryExpandHint = styled.div`
70 | font-size: 11px;
71 | color: rgba(255, 255, 255, 0.5);
72 | margin-top: 4px;
73 | font-style: italic;
74 | `;
75 |
76 | export const QuerySort = styled.span`
77 | user-select: none;
78 | `;
79 |
80 | export const QuerySortOption = styled.span<{ $active?: string }>`
81 | cursor: pointer;
82 | display: inline-block;
83 | margin: 0 5px;
84 | padding: 0 5px;
85 | line-height: 20px;
86 | user-select: none;
87 | border-bottom: 1px solid rgba(255, 255, 255, 0.3);
88 | color: rgba(255, 255, 255, 0.8);
89 |
90 | &:hover {
91 | box-shadow: 0 2px 0 0 rgba(255, 255, 255, 0.5);
92 | color: #fff;
93 | }
94 |
95 | ${props => props.$active && `
96 | box-shadow: 0 2px 0 0 #ffa;
97 | color: #ffa;
98 |
99 | &:hover {
100 | box-shadow: 0 2px 0 0 #ffa;
101 | color: #ffa;
102 | }
103 |
104 | `}
105 | `;
106 |
107 | export const RowIndex = styled.span`
108 | opacity: 0.2;
109 | font-size: 1.2em;
110 | `;
111 |
112 | export const ActionButton = styled.button<{ $variant?: 'primary' | 'secondary' | 'danger' | 'success' | 'ai-suggestion' }>`
113 | padding: 6px 10px;
114 | font-size: 13px;
115 | font-weight: 500;
116 | border: 1px solid rgba(255, 255, 255, 0.2);
117 | background: rgba(0, 0, 0, 0.1);
118 | color: rgba(255, 255, 255, 0.9);
119 | cursor: pointer;
120 | margin: 0;
121 | line-height: 1.2;
122 | min-width: 70px;
123 | transition: all 0.15s ease;
124 | border-radius: 0;
125 | font-family: "Inconsolata", monospace;
126 |
127 | ${props => props.$variant === 'primary' && `
128 | border-color: #6366f1;
129 | color: #6366f1;
130 |
131 | &:hover {
132 | background: rgba(99, 102, 241, 0.1);
133 | border-color: #8b5cf6;
134 | color: #8b5cf6;
135 | }
136 | `}
137 |
138 | ${props => props.$variant === 'ai-suggestion' && `
139 | border-color:rgb(215, 184, 255);
140 | background: rgba(215, 184, 255, 0.1);
141 | color:rgb(215, 184, 255);
142 | padding: 12px 20px;
143 | font-size: 16px;
144 | min-width: 140px;
145 | font-weight: 600;
146 | width: auto;
147 | flex-shrink: 0;
148 |
149 | &:hover {
150 | background: rgba(215, 184, 255, 0.1);
151 | border-color:rgb(255, 255, 255);
152 | color:rgb(255, 255, 255);
153 | }
154 | `}
155 |
156 | ${props => props.$variant === 'success' && `
157 | border-color:rgb(73, 202, 159);
158 | color: rgb(73, 202, 159);
159 |
160 | &:hover {
161 | background: rgba(16, 185, 129, 0.1);
162 | }
163 | `}
164 |
165 | ${props => props.$variant === 'danger' && `
166 | border-color: rgb(255, 150, 150);
167 | color:rgb(255, 150, 150);
168 |
169 | &:hover {
170 | background: rgba(239, 68, 68, 0.1);
171 | }
172 | `}
173 |
174 | ${props => (!props.$variant || props.$variant === 'secondary') && `
175 | border-color: rgba(255, 255, 255, 0.2);
176 | color: rgba(255, 255, 255, 0.8);
177 |
178 | &:hover {
179 | background: rgba(255, 255, 255, 0.05);
180 | border-color: rgba(255, 255, 255, 0.4);
181 | color: rgba(255, 255, 255, 1);
182 | }
183 | `}
184 |
185 | &:active {
186 | background: rgba(255, 255, 255, 0.1);
187 | }
188 |
189 | &:disabled {
190 | opacity: 0.4;
191 | cursor: not-allowed;
192 | border-color: rgba(255, 255, 255, 0.1);
193 | color: rgba(255, 255, 255, 0.4);
194 | }
195 | `;
196 |
197 | export const StatsCard = styled.div`
198 | background: rgba(30, 30, 30, 1);
199 | border-radius: 0;
200 | padding: 12px;
201 | margin-bottom: 8px;
202 | height: 100%;
203 | display: flex;
204 | flex-direction: column;
205 | justify-content: space-between;
206 | `;
207 |
208 | export const StatsGrid = styled.div`
209 | width: 200px;
210 | display: grid;
211 | grid-template-columns: 1fr;
212 | gap: 8px 16px;
213 | margin-bottom: 8px;
214 | `;
215 |
216 | export const StatItem = styled.div`
217 | display: flex;
218 | justify-content: space-between;
219 | align-items: center;
220 | padding: 3px 0;
221 |
222 | &:not(:last-child) {
223 | border-bottom: 1px solid rgba(255, 255, 255, 0.03);
224 | }
225 | `;
226 |
227 | export const StatLabel = styled.span`
228 | color: rgba(255, 255, 255, 0.6);
229 | `;
230 |
231 | export const StatValue = styled.span<{ $trend?: 'up' | 'down' | 'neutral' }>`
232 |
233 | ${props => props.$trend === 'up' && `
234 | color: #10B981;
235 | `}
236 |
237 | ${props => props.$trend === 'down' && `
238 | color: #EF4444;
239 | `}
240 |
241 | ${props => (!props.$trend || props.$trend === 'neutral') && `
242 | color: rgba(255, 255, 255, 0.9);
243 | `}
244 | `;
245 |
246 | export const PerformanceBadge = styled.div<{ $improvement: number }>`
247 | display: inline-block;
248 | line-height: 2em;
249 | padding: 0 10px;
250 | border-radius: 5px;
251 | white-space: nowrap;
252 |
253 | ${props => props.$improvement > 5.0 && `
254 | color:rgb(72, 255, 142);
255 | text-shadow: 0 0 5px rgba(72, 255, 142, 1);
256 | `}
257 |
258 | ${props => props.$improvement > 2.0 && `
259 | color:rgb(72, 255, 142);
260 | `}
261 |
262 | ${props => props.$improvement < 0.8 && `
263 | color: #EF4444;
264 | `}
265 |
266 | ${props => props.$improvement >= 0.8 && props.$improvement <= 2.0 && `
267 | color: #F59E0B;
268 | `}
269 | `;
270 |
271 | export const TableContainer = styled.div`
272 | display: flex;
273 | margin-top: 20px;
274 | flex-direction: column;
275 | gap: 15px;
276 | `;
277 |
278 | export const QueryCard = styled.div`
279 | display: flex;
280 | border-radius: 10px 10px 0 0;
281 | box-shadow: 0 3px 5px 0 rgba(0, 0, 0, 0.5);
282 | overflow: hidden;
283 | `;
284 |
285 | export const QuerySection = styled.div`
286 | flex: 1;
287 | padding: 16px;
288 | display: flex;
289 | flex-direction: column;
290 | min-height: 150px;
291 | `;
292 |
293 | export const QueryContentSection = styled(QuerySection)`
294 | padding-bottom: 0;
295 | background: rgba(45, 45, 45, 1);
296 | `;
297 |
298 | export const QueryStatsSection = styled(QuerySection)`
299 | background: rgba(40, 40, 40, 1);
300 | `;
301 |
302 | export const QueryActionsSection = styled(QuerySection)`
303 | background: rgba(45, 45, 45, 1);
304 | align-items: center;
305 | justify-content: center;
306 | `;
307 |
308 | export const StatusTag = styled.div<{ $status: 'suggested' | 'applied' | 'reverted' }>`
309 | display: inline-block;
310 |
311 | ${props => props.$status === 'suggested' && `
312 | color:rgb(152, 192, 255);
313 | `}
314 |
315 | ${props => props.$status === 'applied' && `
316 | color:rgb(110, 215, 180);
317 | `}
318 |
319 | ${props => props.$status === 'reverted' && `
320 | color:rgb(248, 113, 113);
321 | `}
322 | `;
323 |
324 | export const QueryIndex = styled.div`
325 | width: 20px;
326 | height: 32px;
327 | color: rgba(255, 255, 255, 0.4);
328 | display: flex;
329 | align-items: center;
330 | justify-content: center;
331 | font-size: 14px;
332 | font-weight: 600;
333 | `;
334 |
335 | export const ActionGroup = styled.div`
336 | display: flex;
337 | flex-wrap: wrap;
338 | gap: 4px;
339 | margin-top: 8px;
340 | `;
341 |
342 | export const SuggestionContent = styled.div<{ $status: 'suggested' | 'applied' | 'reverted' }>`
343 | background: ${props =>
344 | props.$status === 'applied' ? 'rgba(16, 185, 129, 0.2)' :
345 | props.$status === 'reverted' ? 'rgba(239, 68, 68, 0.2)' :
346 | 'rgba(59, 130, 246, 0.2)'
347 | };
348 | padding: 12px;
349 | margin-bottom: 12px;
350 | flex-grow: 1;
351 | width: 100%;
352 | box-sizing: border-box;
353 | border-bottom-left-radius: 10px;
354 | border-bottom-right-radius: 10px;
355 | `;
356 |
357 | export const HighlightedSQL = styled.div`
358 | font-size: 13px;
359 | line-height: 1.4;
360 | white-space: pre-wrap;
361 | word-wrap: break-word;
362 | word-break: break-word;
363 | overflow-wrap: break-word;
364 | `;
365 |
366 | export const Block = styled.div`
367 | padding: 10px;
368 | background-color: rgba(45, 45, 45, 1);
369 | color: rgba(255, 255, 255, 0.9);
370 |
371 | & h1, & h2, & h3, & h4 {
372 | color: #fff;
373 | }
374 |
375 | & pre {
376 | background-color: rgba(30, 30, 30, 1);
377 | color: rgba(255, 255, 255, 0.9);
378 | padding: 10px;
379 | }
380 | `;
381 |
382 | export const LoadingIndicator = styled.div`
383 | display: inline-block;
384 | animation: loading-indicator 1s infinite linear;
385 | @keyframes loading-indicator {
386 | 0% { opacity: 0; }
387 | 50% { opacity: 1; }
388 | 100% { opacity: 0; }
389 | }
390 | `;
391 |
392 | export const SuggestionsApplied = styled.div`
393 | color: rgba(100, 255, 100, 0.8);
394 | margin-bottom: 10px;
395 | `;
396 |
397 | export const GlobalStats = styled.div`
398 | color: rgba(255, 255, 255, 0.7);
399 | margin-bottom: 10px;
400 | text-align: right;
401 | `;
402 |
403 | export const StatTable = styled.table`
404 | width: 100%;
405 | border-collapse: collapse;
406 | margin-top: 20px;
407 | display: inline-block;
408 | background-color: rgba(35, 35, 35, 1);
409 | `;
410 |
411 | export const StatRow = styled.tr``;
412 |
413 | export const StatHeader = styled.th`
414 | text-align: left;
415 | color: rgba(255, 255, 255, 0.8);
416 | `;
417 |
418 | export const StatCell = styled.td`
419 | color: rgba(255, 255, 255, 0.9);
420 | `;
421 |
422 | export const StatsTable = styled.table`
423 | width: max-content;
424 | border-collapse: collapse;
425 | `;
426 |
427 | export const StatsTableBody = styled.tbody``;
428 |
429 | export const StatsTableRow = styled.tr``;
430 |
431 | export const StatsTableLabelCell = styled.td`
432 | padding: 3px 8px;
433 | color: rgba(255, 255, 255, 0.6);
434 | border-bottom: 1px solid rgba(255, 255, 255, 0.03);
435 | `;
436 |
437 | export const StatsTableValueCell = styled.td`
438 | padding: 3px 8px;
439 | border-bottom: 1px solid rgba(255, 255, 255, 0.03);
440 | `;
441 |
442 | export const StatsTableActionCell = styled.td`
443 | padding: 3px 8px;
444 | border-bottom: 1px solid rgba(255, 255, 255, 0.03);
445 | `;
446 |
447 | export const StatsTableHeaderCell = styled.td`
448 | padding: 6px 8px;
449 | border-bottom: 2px solid rgba(255, 255, 255, 0.1);
450 | font-weight: bold;
451 | color: rgba(255, 255, 255, 0.8);
452 | background: rgba(255, 255, 255, 0.02);
453 | `;
454 |
455 | export const StatsTableImprovementCell = styled.td`
456 | padding: 3px 8px;
457 | vertical-align: middle;
458 | `;
459 |
460 | export const SuggestionTitleBar = styled.div<{ $status: 'suggested' | 'applied' | 'reverted' }>`
461 | display: flex;
462 | align-items: center;
463 | justify-content: space-between;
464 | padding: 8px 12px;
465 | background-color: ${props =>
466 | props.$status === 'applied' ? 'rgba(16, 185, 129, 0.37)' :
467 | props.$status === 'reverted' ? 'rgba(239, 68, 68, 0.35)' :
468 | 'rgba(59, 131, 246, 0.35)'
469 | };
470 | border-bottom: 1px solid rgba(255, 255, 255, 0.1);
471 | border-top-left-radius: 10px;
472 | border-top-right-radius: 10px;
473 | width: 100%;
474 | box-sizing: border-box;
475 | `;
476 |
477 | export const SuggestionTitleGroup = styled.div`
478 | display: flex;
479 | align-items: center;
480 | gap: 8px;
481 |
482 | /* Remove bottom margin from StatusTag when used inline */
483 | ${StatusTag} {
484 | margin-bottom: 0;
485 | }
486 | `;
487 |
488 | export const SuggestionActionGroup = styled.div`
489 | display: flex;
490 | align-items: center;
491 | gap: 8px;
492 | margin-left: auto;
493 | `;
494 |
495 | export const DeleteSuggestionButton = styled.div`
496 | position: absolute;
497 | top: -10px;
498 | right: -10px;
499 | width: 24px;
500 | height: 24px;
501 | border-radius: 50%;
502 | border: none;
503 |   background-color: transparent;
504 | color: white;
505 | cursor: pointer;
506 | display: flex;
507 | align-items: center;
508 | justify-content: center;
509 | font-size: 12px;
510 | font-weight: bold;
511 | opacity: 0;
512 | transition: opacity 0.2s ease;
513 | z-index: 10;
514 |
515 | &:hover {
516 | background-color: #ff6666;
517 | }
518 |
519 | &:disabled {
520 | opacity: 0.5;
521 | cursor: not-allowed;
522 | background-color: #666;
523 | }
524 | `;
525 |
526 | export const SuggestionContainer = styled.div`
527 | display: flex;
528 | flex-direction: column;
529 | width: 100%;
530 | align-self: stretch;
531 | box-sizing: border-box;
532 | position: relative;
533 |
534 | &:hover ${DeleteSuggestionButton} {
535 | opacity: 1;
536 | }
537 | `;
538 |
539 |
540 | export const NumUnit = styled.span`
541 | color: rgba(255, 255, 255, 0.5);
542 | `;
543 |
544 | export const QueryDetailsBottomBar = styled.div`
545 | display: flex;
546 | width: 100%;
547 | background: rgba(35, 35, 35, 1);
548 | border-top: 1px solid rgba(255, 255, 255, 0.1);
549 | border-bottom-left-radius: 10px;
550 | border-bottom-right-radius: 10px;
551 | box-shadow: 0 3px 5px 0 rgba(0, 0, 0, 0.5);
552 | `;
553 |
554 | export const QueryDetailsBottomBarSection = styled.div`
555 | flex: 1;
556 | display: flex;
557 | align-items: center;
558 | padding: 0 10px;
559 | gap: 5px;
560 | font-size: 11px;
561 | user-select: none;
562 | `;
563 |
564 | export const QueryDetailsTabButton = styled.button<{ $active?: boolean }>`
565 | background: none;
566 | font-family: "Inconsolata", monospace;
567 | font-size: 14px;
568 | border: none;
569 | cursor: pointer;
570 | padding: 0 10px;
571 | color: ${props => (props.disabled ? 'rgba(255, 255, 255, 0.25)' : props.$active ? '#fff' : 'rgba(255, 255, 255, 0.6)')};
572 |
573 | line-height: 40px;
574 |
575 | ${props => props.$active && `
576 | background: #000;
577 | `}
578 |
579 | &:hover {
580 | color: ${props => (props.disabled ? 'rgba(255, 255, 255, 0.25)' : '#fff')};
581 | }
582 | &:disabled {
583 | cursor: not-allowed;
584 | opacity: 0.3;
585 | }
586 | `;
587 |
588 | export const QueryDetailsPanel = styled.div`
589 | background: #000;
590 | padding: 12px 20px;
591 | margin: 0 10px;
592 | border-bottom-left-radius: 10px;
593 | border-bottom-right-radius: 10px;
594 | font-size: 13px;
595 | line-height: 1.4;
596 | white-space: pre-wrap;
597 | `;
598 |
599 | export const ExpandArrow = styled.span`
600 | color: rgba(255, 255, 255, 0.3);
601 | `;
602 |
603 | // QueryDetailsBar specific components
604 | export const InstanceTypeContainer = styled.div`
605 | margin-bottom: 1rem;
606 | display: flex;
607 | gap: 0.5rem;
608 | align-items: center;
609 | `;
610 |
611 | export const InstanceTypeLabel = styled.span`
612 | font-weight: bold;
613 | `;
614 |
615 | export const InstanceTypeSelect = styled.select`
616 | padding: 4px 8px;
617 | border-radius: 4px;
618 | border: 1px solid #333;
619 | background-color: #1a1a1a;
620 | color: white;
621 | `;
622 |
623 | export const TableItemContainer = styled.div`
624 | margin-bottom: 1rem;
625 | `;
626 |
627 | export const TableDefinitionPre = styled.pre`
628 | white-space: pre-wrap;
629 | margin-top: 0.5rem;
630 | `;
631 |
632 |
633 |
634 | export const FullWidthStatsTable = styled(StatsTable)`
635 | width: 100%;
636 | `;
637 |
638 | export const ParameterCell = styled(StatsTableLabelCell)`
639 | max-width: 150px;
640 | white-space: nowrap;
641 | overflow: hidden;
642 | text-overflow: ellipsis;
643 | `;
644 |
645 | export const CompactActionButton = styled(ActionButton)`
646 | background-color: rgba(255, 255, 255, 0.1);
647 | padding: 4px 8px;
648 | min-width: 60px;
649 | `;
650 |
651 | export const ShowMoreContainer = styled.div`
652 | margin-top: 1rem;
653 | text-align: center;
654 | `;
655 |
656 | export const ShowMoreButton = styled(ActionButton)`
657 | padding: 8px 16px;
658 | `;
659 |
660 | export const PromptContainer = styled.div`
661 | position: relative;
662 | width: 100%;
663 | `;
664 |
665 | export const PromptTitle = styled.h2`
666 | margin: 0;
667 | `;
668 |
669 | export const EditedIndicator = styled.span`
670 | color: #ff9500;
671 | font-size: 0.8em;
672 | margin-left: 8px;
673 | `;
674 |
675 | export const PromptActionGroup = styled.div`
676 | position: absolute;
677 | top: 0;
678 | right: 0;
679 | display: flex;
680 | gap: 8px;
681 | `;
682 |
683 | export const PromptActionButton = styled(ActionButton)`
684 | padding: 4px 8px;
685 | `;
686 |
687 | export const PromptTextarea = styled.textarea`
688 | width: 100%;
689 | min-height: 300px;
690 | background-color: #1a1a1a;
691 | color: white;
692 | border: 1px solid #333;
693 | padding: 8px;
694 | border-radius: 4px;
695 | white-space: pre-wrap;
696 | font-family: "Inconsolata", monospace;
697 | `;
698 |
699 | export const ContentPre = styled.pre`
700 | white-space: pre-wrap;
701 | `;
--------------------------------------------------------------------------------
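
A minimal usage sketch for the suggestion components defined above (not a file from the repository): the import path, the `SuggestionCard` component, its props, and the handler names are assumptions for illustration only.

import React from 'react';
import {
  SuggestionContainer,
  SuggestionTitleBar,
  SuggestionTitleGroup,
  SuggestionActionGroup,
  DeleteSuggestionButton,
} from './client/styles/Styled';

type SuggestionCardProps = {
  // Mirrors the $status union on SuggestionTitleBar.
  status: 'suggested' | 'applied' | 'reverted';
  title: string;
  onDelete: () => void;
};

// Hypothetical component: the delete button lives inside SuggestionContainer,
// whose `&:hover ${DeleteSuggestionButton}` rule fades it in on hover.
export function SuggestionCard({ status, title, onDelete }: SuggestionCardProps) {
  return (
    <SuggestionContainer>
      <DeleteSuggestionButton onClick={onDelete}>×</DeleteSuggestionButton>
      <SuggestionTitleBar $status={status}>
        <SuggestionTitleGroup>{title}</SuggestionTitleGroup>
        <SuggestionActionGroup>{/* apply / revert controls */}</SuggestionActionGroup>
      </SuggestionTitleBar>
    </SuggestionContainer>
  );
}
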
/server/query_logger.ts:
--------------------------------------------------------------------------------
1 | import { DatabaseHelper } from './database_helper';
2 |
3 | export interface QueryInstance {
4 | instance_id: number;
5 | query_id: number;
6 | session_id: string;
7 | params: string;
8 | query_plan: string;
9 | plan_time: number;
10 | exec_time: number;
11 | timestamp: string;
12 | }
13 |
14 | export interface QueryGroup {
15 | query_id: number;
16 | host: string;
17 | database: string;
18 | port: number;
19 | query: string;
20 | num_instances: number;
21 | instances?: QueryInstance[];
22 | llm_response: string;
23 | suggested_indexes: string;
24 | applied_indexes: string;
25 | prev_exec_time: number;
26 | new_exec_time: number;
27 | min_exec_time: number;
28 | max_exec_time: number;
29 | avg_exec_time: number;
30 | total_time: number;
31 | last_exec_time: number;
32 | hidden?: boolean;
33 | }
34 |
35 | export class QueryLogger {
36 | private dbHelper: DatabaseHelper;
37 |
38 | constructor(private dbPath: string) {
39 | this.dbHelper = new DatabaseHelper(dbPath);
40 | this.initialize();
41 | }
42 |
43 | async initialize(): Promise<void> {
44 | await this.dbHelper.initialize();
45 |
46 | await this.dbHelper.exec(`
47 | CREATE TABLE IF NOT EXISTS queries (
48 | query_id INTEGER PRIMARY KEY AUTOINCREMENT,
49 | host TEXT,
50 | database TEXT,
51 | port INTEGER,
52 | query TEXT,
53 | llm_response TEXT,
54 | suggested_indexes TEXT,
55 | applied_indexes TEXT,
56 | prev_exec_time REAL,
57 | new_exec_time REAL,
58 | hidden BOOLEAN DEFAULT 0
59 | )
60 | `);
61 |
62 | // New table to store every suggestion separately so we can keep
63 | // a full history of prompts / responses and track whether a
64 | // suggestion has been applied or reverted as well as the before /
65 | // after performance numbers.
66 | await this.dbHelper.exec(`
67 | CREATE TABLE IF NOT EXISTS index_suggestions (
68 | suggestion_id INTEGER PRIMARY KEY AUTOINCREMENT,
69 | query_id INTEGER,
70 | prompt TEXT,
71 | llm_response TEXT,
72 | suggested_indexes TEXT,
73 | -- Whether the indexes generated in this suggestion are
74 | -- currently applied to the database.
75 | applied BOOLEAN DEFAULT 0,
76 | -- Whether an applied suggestion has subsequently been
77 | -- reverted. Once reverted we keep the row for history
78 | -- purposes but mark reverted = 1 so we can distinguish the
79 | -- state.
80 | reverted BOOLEAN DEFAULT 0,
81 | prev_exec_time REAL,
82 | new_exec_time REAL,
83 | created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
84 | FOREIGN KEY (query_id) REFERENCES queries(query_id)
85 | )
86 | `);
87 |
88 | await this.dbHelper.exec(`
89 | CREATE UNIQUE INDEX IF NOT EXISTS idx_queries_unique
90 | ON queries(host, database, port, query);
91 | `);
92 |
93 | await this.dbHelper.exec(`
94 | CREATE TABLE IF NOT EXISTS query_instances (
95 | instance_id INTEGER PRIMARY KEY AUTOINCREMENT,
96 | query_id INTEGER,
97 | session_id TEXT,
98 | params TEXT,
99 | query_plan TEXT,
100 | plan_time REAL,
101 | exec_time REAL,
102 | timestamp DATETIME DEFAULT CURRENT_TIMESTAMP,
103 | FOREIGN KEY (query_id) REFERENCES queries(query_id)
104 | )
105 | `);
106 |
107 | const num_rows = await this.dbHelper.get('SELECT COUNT(*) as count FROM queries');
108 | // console.log('Database initialized with', num_rows.count, 'rows');
109 | }
110 |
111 | async addQueryStats({
112 | sessionId,
113 | query,
114 | params,
115 | queryPlan,
116 | planTime,
117 | execTime,
118 | host,
119 | database,
120 | port
121 | }: {
122 | sessionId: string,
123 | query: string,
124 | params: string,
125 | queryPlan: string,
126 | planTime: number,
127 | execTime: number,
128 | host: string,
129 | database: string,
130 | port: number
131 | }): Promise<void> {
132 | // Insert or ignore the query grouped by host/database/port
133 | await this.dbHelper.run(`
134 | INSERT OR IGNORE INTO queries (query, host, database, port)
135 | VALUES (?, ?, ?, ?)
136 | `, [query, host, database, port]);
137 |
138 | // Get the query_id for this connection-specific query
139 | const { query_id } = await this.dbHelper.get('SELECT query_id FROM queries WHERE query = ? AND host = ? AND database = ? AND port = ?', [query, host, database, port]);
140 |
141 | // Insert the query instance
142 | await this.dbHelper.run(`
143 | INSERT INTO query_instances (query_id, session_id, params, query_plan, plan_time, exec_time)
144 | VALUES (?, ?, ?, ?, ?, ?)
145 | `, [query_id, sessionId, params, queryPlan, planTime, execTime]);
146 | }
147 |
148 | async getQueryGroups({orderBy, orderDirection, queryId, host, database, port}: { orderBy: string, orderDirection: string, queryId?: number, host?: string, database?: string, port?: number}): Promise<QueryGroup[]> {
149 | // Handle total_time sorting by calculating it in SQL
150 | const actualOrderBy = orderBy === 'total_time' ? '(avg_exec_time * num_instances)' : `qs.${orderBy}`;
151 |
152 | const filters: string[] = [];
153 | const params: any[] = [];
154 |
155 | if(host !== undefined) {
156 | filters.push('q.host = ?');
157 | params.push(host);
158 | }
159 | if(database !== undefined) {
160 | filters.push('q.database = ?');
161 | params.push(database);
162 | }
163 | if(port !== undefined) {
164 | filters.push('q.port = ?');
165 | params.push(port);
166 | }
167 |
168 | let filterSql = '';
169 | if(filters.length > 0) {
170 | filterSql = 'WHERE ' + filters.join(' AND ');
171 | }
172 |
173 | // Handle improvement ratio ordering by calculating it in SQL
174 | const improvementOrderBy = orderBy === 'prev_exec_time/new_exec_time'
175 | ? 'CASE WHEN qs.prev_exec_time IS NOT NULL AND qs.new_exec_time IS NOT NULL THEN (qs.prev_exec_time / qs.new_exec_time) ELSE NULL END'
176 | : actualOrderBy;
177 |
178 | let results: QueryGroup[] = await this.dbHelper.all(`
179 | WITH latest_suggestion AS (
180 | SELECT s1.* FROM index_suggestions s1
181 | INNER JOIN (
182 | SELECT query_id, MAX(suggestion_id) AS max_id
183 | FROM index_suggestions
184 | GROUP BY query_id
185 | ) s2 ON s1.query_id = s2.query_id AND s1.suggestion_id = s2.max_id
186 | ),
187 | query_stats AS (
188 | SELECT
189 | q.query_id,
190 | q.query,
191 | q.host,
192 | q.database,
193 | q.port,
194 | ls.llm_response,
195 | ls.suggested_indexes,
196 | ls.applied as applied_indexes,
197 | ls.prev_exec_time,
198 | ls.new_exec_time,
199 | COUNT(q.query_id) AS num_instances,
200 | MAX(qi.exec_time) AS max_exec_time,
201 | MIN(qi.exec_time) AS min_exec_time,
202 | AVG(qi.exec_time) AS avg_exec_time
203 | FROM
204 | queries q
205 | JOIN
206 | query_instances qi ON q.query_id = qi.query_id
207 | LEFT JOIN latest_suggestion ls ON q.query_id = ls.query_id
208 | ${filters.length > 0 ? filterSql : ''}
209 | GROUP BY
210 | q.query_id, q.query, q.host, q.database, q.port, ls.llm_response, ls.suggested_indexes, ls.applied, ls.prev_exec_time, ls.new_exec_time
211 | )
212 | SELECT
213 | qs.*,
214 | (qs.avg_exec_time * qs.num_instances) AS total_time,
215 | CASE WHEN qs.prev_exec_time IS NOT NULL AND qs.new_exec_time IS NOT NULL THEN (qs.prev_exec_time / qs.new_exec_time) ELSE NULL END AS improvement_ratio
216 | FROM query_stats qs
217 | ${queryId ? 'WHERE qs.query_id = ?' : ''}
218 | ORDER BY
219 | ${improvementOrderBy} ${orderDirection === 'desc' ? 'DESC NULLS LAST' : 'ASC NULLS LAST'}${orderBy === 'prev_exec_time/new_exec_time' ? ', (qs.avg_exec_time * qs.num_instances) DESC' : ''};
220 | `, [...params, ...(queryId ? [queryId] : [])]);
221 |
222 | const query_ids = results.map(result => result.query_id);
223 |
224 | // Attach full suggestion history for each query so the frontend can
225 | // show a list if it wants to. We also concatenate all AI responses
226 | // so the existing UI that displays a single text blob continues to
227 | // work unchanged.
228 | for (const res of results) {
229 | const suggestions = await this.getSuggestionsForQuery(res.query_id);
230 | // @ts-ignore – dynamic property so we don't have to change the
231 | // QueryGroup interface everywhere right now.
232 | res.suggestions = suggestions;
233 |
234 | if (suggestions.length > 0) {
235 | // Keep legacy llm_response and suggested_indexes for backward compatibility
236 | // but let the client format them however it wants using the suggestions array
237 | res.llm_response = suggestions.map((s: any) => s.llm_response || '').join('\n\n');
238 | res.suggested_indexes = suggestions.map((s: any) => s.suggested_indexes || '').join('\n\n');
239 | const latest = suggestions[0]; // because ORDER BY DESC now, latest is at the beginning
240 | res.applied_indexes = latest.applied ? latest.suggested_indexes : null;
241 | res.prev_exec_time = latest.prev_exec_time;
242 | res.new_exec_time = latest.new_exec_time;
243 | } else {
244 | // ensure properties exist for consistency
245 | res.llm_response = null;
246 | res.suggested_indexes = null;
247 | res.applied_indexes = null;
248 | }
249 | }
250 | const query = `
251 | SELECT qi.*
252 | FROM query_instances qi
253 | JOIN (
254 | SELECT query_id, MAX(timestamp) as max_timestamp
255 | FROM query_instances
256 | WHERE query_id IN (${query_ids.map(id => `?`).join(',')})
257 | GROUP BY query_id
258 | ) latest ON qi.query_id = latest.query_id AND qi.timestamp = latest.max_timestamp
259 | ORDER BY qi.query_id;
260 |
261 | `;
262 |
263 | const last_instances = await this.dbHelper.all(query, query_ids);
264 |
265 | for(let i = 0; i < results.length; i++) {
266 | const result = results[i];
267 | const last_instance = last_instances.find(row => row.query_id == result.query_id);
268 | if(last_instance) {
269 | results[i].last_exec_time = last_instance.exec_time;
270 | }
271 | }
272 |
273 | return results;
274 | }
275 |
276 | async getQueryGroup(queryId: number): Promise<QueryGroup | null> {
277 | const results = await this.getQueryGroups({orderBy: 'max_exec_time', orderDirection: 'desc', queryId});
278 |
279 | if(results.length == 0) {
280 | return null;
281 | }
282 |
283 | const result = results[0];
284 |
285 | const instances = await this.dbHelper.all(`
286 | SELECT * FROM query_instances WHERE query_id = ? ORDER BY timestamp DESC
287 | `, [queryId]);
288 |
289 | result.instances = instances;
290 | return result;
291 | }
292 |
293 | async getQueryInstances(queryId: number): Promise<QueryInstance[]> {
294 | return this.dbHelper.all(`
295 | SELECT * FROM query_instances WHERE query_id = ? ORDER BY exec_time DESC
296 | `, [queryId]);
297 | }
298 |
299 | async getSlowestQueryInstance(queryId: number): Promise<QueryInstance> {
300 | return this.dbHelper.get(`
301 | SELECT * FROM query_instances WHERE query_id = ? ORDER BY exec_time DESC LIMIT 1
302 | `, [queryId]);
303 | }
304 |
305 | async getFastestQueryInstance(queryId: number): Promise<QueryInstance> {
306 | return this.dbHelper.get(`
307 | SELECT * FROM query_instances WHERE query_id = ? ORDER BY exec_time ASC LIMIT 1
308 | `, [queryId]);
309 | }
310 |
311 | async getLatestQueryInstance(queryId: number): Promise<QueryInstance> {
312 | return this.dbHelper.get(`
313 | SELECT * FROM query_instances WHERE query_id = ? ORDER BY timestamp DESC LIMIT 1
314 | `, [queryId]);
315 | }
316 |
317 | async getQueryStats(queryId: number): Promise<any> {
318 | return this.dbHelper.get(`
319 | SELECT q.query_id, q.query, qi.*, q.llm_response, q.suggested_indexes, q.applied_indexes
320 | FROM queries q
321 | JOIN query_instances qi ON q.query_id = qi.query_id
322 | WHERE q.query_id = ?
323 | ORDER BY qi.timestamp DESC
324 | LIMIT 1
325 | `, [queryId]);
326 | }
327 |
328 | async getQueryStatsOrderBy(orderBy: string, direction: string = 'DESC'): Promise<any[]> {
329 | if (direction.toUpperCase() !== 'ASC' && direction.toUpperCase() !== 'DESC') {
330 | throw new Error('Invalid direction. Must be either ASC or DESC.');
331 | }
332 | return this.dbHelper.all(`
333 | SELECT q.query_id, q.query, qi.*, q.llm_response, q.suggested_indexes, q.applied_indexes
334 | FROM queries q
335 | JOIN query_instances qi ON q.query_id = qi.query_id
336 | ORDER BY qi.${orderBy} ${direction}
337 | `);
338 | }
339 |
340 | async getAllQueryStats(): Promise<any[]> {
341 | return this.dbHelper.all(`
342 | SELECT q.query_id, q.query, qi.*, q.llm_response, q.suggested_indexes, q.applied_indexes
343 | FROM queries q
344 | JOIN query_instances qi ON q.query_id = qi.query_id
345 | ORDER BY qi.timestamp DESC
346 | `);
347 | }
348 |
349 |
350 | async updateQueryStats(queryId: number, updates: Partial<QueryGroup>): Promise<void> {
351 | await this.dbHelper.run(`
352 | UPDATE queries SET ${Object.keys(updates).map(key => `${key} = ?`).join(', ')} WHERE query_id = ?
353 | `, [...Object.values(updates), queryId]);
354 | }
355 |
356 | async addSuggestion({ query_id, prompt, llm_response, suggested_indexes }: { query_id: number; prompt?: string; llm_response: string; suggested_indexes: string }) {
357 | // Store the suggestion in the dedicated table so we have a full history.
358 | await this.dbHelper.run(
359 | `INSERT INTO index_suggestions (query_id, prompt, llm_response, suggested_indexes)
360 | VALUES (?, ?, ?, ?)`,
361 | [query_id, prompt ?? null, llm_response, suggested_indexes]
362 | );
363 |
364 | // For backwards-compatibility with older parts of the code base (and to
365 | // avoid a huge refactor touching many files at once) we still update the
366 | // latest information on the parent row in the `queries` table. This lets
367 | // existing UI that expects these columns to continue working while we
368 | // migrate progressively to the new data model.
369 | await this.dbHelper.run(
370 | `UPDATE queries SET llm_response = ?, suggested_indexes = ? WHERE query_id = ?`,
371 | [llm_response, suggested_indexes, query_id]
372 | );
373 | }
374 |
375 | async resetQueryData(): Promise<void> {
376 | // Delete child tables first to satisfy foreign key constraints
377 | await this.dbHelper.exec('DELETE FROM query_instances');
378 | // Also delete all saved suggestions that reference queries
379 | await this.dbHelper.exec('DELETE FROM index_suggestions');
380 | // Finally, delete queries
381 | await this.dbHelper.exec('DELETE FROM queries');
382 | }
383 |
384 | /* ------------------------------------------------------------------ */
385 | /* Suggestion-level helpers */
386 | /* ------------------------------------------------------------------ */
387 |
388 | async getSuggestionsForQuery(queryId: number): Promise<any[]> {
389 | return this.dbHelper.all(
390 | `SELECT * FROM index_suggestions WHERE query_id = ? ORDER BY created_at DESC`,
391 | [queryId]
392 | );
393 | }
394 |
395 | async getLatestSuggestion(queryId: number): Promise<any> {
396 | return this.dbHelper.get(
397 | `SELECT * FROM index_suggestions WHERE query_id = ? ORDER BY suggestion_id DESC LIMIT 1`,
398 | [queryId]
399 | );
400 | }
401 |
402 | async getLatestUnappliedSuggestion(queryId: number): Promise<any> {
403 | return this.dbHelper.get(
404 | `SELECT * FROM index_suggestions WHERE query_id = ? AND applied = 0 ORDER BY suggestion_id DESC LIMIT 1`,
405 | [queryId]
406 | );
407 | }
408 |
409 | async getLatestAppliedSuggestion(queryId: number): Promise<any> {
410 | return this.dbHelper.get(
411 | `SELECT * FROM index_suggestions WHERE query_id = ? AND applied = 1 ORDER BY suggestion_id DESC LIMIT 1`,
412 | [queryId]
413 | );
414 | }
415 |
416 | async updateSuggestion(suggestionId: number, updates: Record<string, any>): Promise<void> {
417 | const keys = Object.keys(updates);
418 | if (keys.length === 0) return;
419 |
420 | const sql = `UPDATE index_suggestions SET ${keys.map(k => `${k} = ?`).join(', ')} WHERE suggestion_id = ?`;
421 | await this.dbHelper.run(sql, [...Object.values(updates), suggestionId]);
422 | }
423 |
424 | async markSuggestionApplied(suggestionId: number, { prev_exec_time, new_exec_time }: { prev_exec_time: number, new_exec_time: number }): Promise<void> {
425 | await this.updateSuggestion(suggestionId, {
426 | applied: 1,
427 | reverted: 0,
428 | prev_exec_time,
429 | new_exec_time
430 | });
431 | }
432 |
433 | async markSuggestionReverted(suggestionId: number): Promise<void> {
434 | await this.updateSuggestion(suggestionId, {
435 | applied: 0,
436 | reverted: 1
437 | });
438 | }
439 |
440 | async close(): Promise<void> {
441 | await this.dbHelper.close();
442 | }
443 |
444 | // Backward compatibility methods - delegate to dbHelper
445 | async get(sql: string, params?: any[]): Promise<any> {
446 | return this.dbHelper.get(sql, params);
447 | }
448 |
449 | async run(sql: string, params?: any[]): Promise<any> {
450 | return this.dbHelper.run(sql, params);
451 | }
452 |
453 | async all(sql: string, params?: any[]): Promise<any[]> {
454 | return this.dbHelper.all(sql, params);
455 | }
456 |
457 | async exec(sql: string): Promise<void> {
458 | return this.dbHelper.exec(sql);
459 | }
460 | }
--------------------------------------------------------------------------------
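
A minimal usage sketch for the QueryLogger class above (not a file from the repository): the database path and the sample query values are assumptions for illustration only.

import { QueryLogger } from './server/query_logger';

async function example(): Promise<void> {
  // The constructor kicks off initialize(); awaiting it explicitly is safe
  // because the schema statements all use CREATE ... IF NOT EXISTS.
  const logger = new QueryLogger('./dbpill.sqlite'); // hypothetical path
  await logger.initialize();

  // Record one execution, grouped by (host, database, port, query).
  await logger.addQueryStats({
    sessionId: 'session-1',
    query: 'SELECT * FROM users WHERE email = $1',
    params: JSON.stringify(['a@example.com']),
    queryPlan: '(plan text)',
    planTime: 0.4,
    execTime: 12.7,
    host: 'localhost',
    database: 'app',
    port: 5432,
  });

  // total_time is computed in SQL as avg_exec_time * num_instances.
  const groups = await logger.getQueryGroups({ orderBy: 'total_time', orderDirection: 'desc' });
  console.log(groups[0]?.query, groups[0]?.total_time);

  await logger.close();
}
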