├── .prettierrc
├── docs
│   ├── .npmrc
│   ├── .gitignore
│   ├── pnpm-workspace.yaml
│   ├── package.json
│   ├── 1.guide
│   │   ├── 3.http-server.md
│   │   ├── 4.custom-connector.md
│   │   └── 1.index.md
│   ├── .docs
│   │   └── public
│   │       └── icon.svg
│   ├── 2.connectors
│   │   ├── turso.md
│   │   ├── neon.md
│   │   ├── 1.index.md
│   │   ├── mysql.md
│   │   ├── vercel.md
│   │   ├── postgresql.md
│   │   ├── bun.md
│   │   ├── planetscale.md
│   │   ├── sqlite.md
│   │   ├── libsql.md
│   │   ├── pglite.md
│   │   └── cloudflare.md
│   ├── 3.integrations
│   │   ├── kysely.md
│   │   ├── prisma.md
│   │   ├── 1.index.md
│   │   └── drizzle.md
│   └── .config
│       └── docs.yaml
├── .npmrc
├── pnpm-workspace.yaml
├── renovate.json
├── .github
│   ├── codecov.yml
│   └── workflows
│       ├── ci.yml
│       └── autofix.yml
├── .prettierignore
├── test
│   ├── connectors
│   │   ├── cloudflare
│   │   │   ├── wrangler-d1.toml
│   │   │   ├── wrangler-mysql.toml
│   │   │   ├── wrangler-pg.toml
│   │   │   ├── cloudflare-d1.test.ts
│   │   │   ├── cloudflare-hyperdrive-postgresql.test.ts
│   │   │   └── cloudflare-hyperdrive-mysql.test.ts
│   │   ├── sqlite3.test.ts
│   │   ├── node-sqlite.test.ts
│   │   ├── postgresql.test.ts
│   │   ├── better-sqlite3.test.ts
│   │   ├── mysql2.test.ts
│   │   ├── planetscale.test.ts
│   │   ├── pglite.test.ts
│   │   ├── libsql.test.ts
│   │   ├── bun-test.ts
│   │   └── _tests.ts
│   ├── template.test.ts
│   └── integrations
│       └── drizzle.test.ts
├── .gitignore
├── vitest.config.ts
├── .editorconfig
├── examples
│   └── drizzle
│       ├── package.json
│       └── index.ts
├── src
│   ├── index.ts
│   ├── connectors
│   │   ├── _internal
│   │   │   ├── cloudflare.ts
│   │   │   └── statement.ts
│   │   ├── libsql
│   │   │   ├── node.ts
│   │   │   ├── http.ts
│   │   │   ├── web.ts
│   │   │   └── core.ts
│   │   ├── cloudflare-d1.ts
│   │   ├── bun-sqlite.ts
│   │   ├── better-sqlite3.ts
│   │   ├── planetscale.ts
│   │   ├── mysql2.ts
│   │   ├── postgresql.ts
│   │   ├── node-sqlite.ts
│   │   ├── pglite.ts
│   │   ├── cloudflare-hyperdrive-postgresql.ts
│   │   ├── cloudflare-hyperdrive-mysql.ts
│   │   └── sqlite3.ts
│   ├── template.ts
│   ├── integrations
│   │   └── drizzle
│   │       ├── index.ts
│   │       ├── _utils.ts
│   │       └── _session.ts
│   ├── database.ts
│   ├── _connectors.ts
│   └── types.ts
├── eslint.config.mjs
├── .env.example
├── docker-compose.yaml
├── tsconfig.json
├── LICENSE
├── README.md
├── package.json
├── scripts
│   └── gen-connectors.ts
└── CHANGELOG.md
/.prettierrc:
--------------------------------------------------------------------------------
1 | {}
2 |
--------------------------------------------------------------------------------
/docs/.npmrc:
--------------------------------------------------------------------------------
1 | shamefully-hoist=true
2 |
--------------------------------------------------------------------------------
/.npmrc:
--------------------------------------------------------------------------------
1 | ignore-workspace-root-check=true
2 |
--------------------------------------------------------------------------------
/pnpm-workspace.yaml:
--------------------------------------------------------------------------------
1 | packages:
2 | - "examples/**"
3 |
--------------------------------------------------------------------------------
/docs/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | .nuxt
3 | .data
4 | .output
5 | dist
6 |
--------------------------------------------------------------------------------
/renovate.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": [
3 | "github>unjs/renovate-config"
4 | ]
5 | }
6 |
--------------------------------------------------------------------------------
/.github/codecov.yml:
--------------------------------------------------------------------------------
1 | coverage:
2 | status:
3 | project:
4 | default:
5 | threshold: 5%
6 |
--------------------------------------------------------------------------------
/.prettierignore:
--------------------------------------------------------------------------------
1 | /connectors
2 | /integrations
3 | /src/_connectors.ts
4 | /dist
5 | /node_modules
6 | /.output
7 | pnpm-lock.yaml
8 |
--------------------------------------------------------------------------------
/test/connectors/cloudflare/wrangler-d1.toml:
--------------------------------------------------------------------------------
1 | [[d1_databases]]
2 | binding = "test"
3 | database_name = "test"
4 | database_id = "test"
5 |
6 |
--------------------------------------------------------------------------------
/test/connectors/cloudflare/wrangler-mysql.toml:
--------------------------------------------------------------------------------
1 | [[hyperdrive]]
2 | binding = "MYSQL"
3 | id = "mysql"
4 | # localConnectionString = "mysql://test:test@localhost:3306/db0"
5 |
--------------------------------------------------------------------------------
/test/connectors/cloudflare/wrangler-pg.toml:
--------------------------------------------------------------------------------
1 | [[hyperdrive]]
2 | binding = "POSTGRESQL"
3 | id = "postgresql"
4 | # localConnectionString = "postgres://test:test@localhost:5432/db0"
5 |
--------------------------------------------------------------------------------
/docs/pnpm-workspace.yaml:
--------------------------------------------------------------------------------
1 | packages: []
2 | ignoredBuiltDependencies:
3 | - "@parcel/watcher"
4 | - "@tailwindcss/oxide"
5 | - esbuild
6 | - vue-demi
7 | onlyBuiltDependencies:
8 | - better-sqlite3
9 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .vscode
2 | node_modules
3 | *.log
4 | .DS_Store
5 | coverage
6 | dist
7 | tmp
8 | /test.*
9 | __*
10 | .data
11 | .tmp
12 | .env
13 | .wrangler
14 |
15 | /connectors
16 | /integrations
17 |
18 | tsconfig.tsbuildinfo
19 |
--------------------------------------------------------------------------------
/docs/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "private": true,
3 | "type": "module",
4 | "scripts": {
5 | "build": "undocs build",
6 | "dev": "undocs dev"
7 | },
8 | "devDependencies": {
9 | "undocs": "^0.4.10"
10 | },
11 | "packageManager": "pnpm@10.19.0"
12 | }
13 |
--------------------------------------------------------------------------------
/vitest.config.ts:
--------------------------------------------------------------------------------
1 | import { defineConfig } from "vitest/config";
2 |
3 | export default defineConfig({
4 | test: {
5 | setupFiles: ["dotenv/config"],
6 | coverage: {
7 | reporter: ["text", "clover", "json"],
8 | include: ["src/**/*.ts"],
9 | },
10 | },
11 | });
12 |
--------------------------------------------------------------------------------
/.editorconfig:
--------------------------------------------------------------------------------
1 | root = true
2 |
3 | [*]
4 | end_of_line = lf
5 | insert_final_newline = true
6 | trim_trailing_whitespace = true
7 | charset = utf-8
8 |
9 | [*.js]
10 | indent_style = space
11 | indent_size = 2
12 |
13 | [{package.json,*.yml,*.cjson}]
14 | indent_style = space
15 | indent_size = 2
16 |
--------------------------------------------------------------------------------
/docs/1.guide/3.http-server.md:
--------------------------------------------------------------------------------
1 | ---
2 | icon: material-symbols:http
3 | ---
4 |
5 | # HTTP Server
6 |
7 | > Expose SQL databases over (secure) HTTP as a RESTful API for edge runtimes!
8 |
9 | > [!NOTE]
10 | > 🚀 This feature is planned! Follow up via [unjs/db0#6](https://github.com/unjs/db0/issues/6)
11 |
--------------------------------------------------------------------------------
/examples/drizzle/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "db0-with-drizzle",
3 | "private": true,
4 | "scripts": {
5 | "start": "jiti ./index.ts"
6 | },
7 | "devDependencies": {
8 | "drizzle-kit": "^0.20.14",
9 | "drizzle-orm": "^0.29.4",
10 | "jiti": "^1.21.0",
11 | "db0": "latest"
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/docs/.docs/public/icon.svg:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/docs/2.connectors/turso.md:
--------------------------------------------------------------------------------
1 | ---
2 | icon: simple-icons:turso
3 | ---
4 |
5 | # Turso
6 |
7 | > Connect DB0 to Turso
8 |
9 | :read-more{to="https://turso.tech"}
10 |
11 | ::read-more{to="https://github.com/unjs/db0/issues/32"}
12 | This connector is planned to be supported. Follow up via [unjs/db0#32](https://github.com/unjs/db0/issues/32).
13 | ::
14 |
--------------------------------------------------------------------------------
/test/connectors/sqlite3.test.ts:
--------------------------------------------------------------------------------
1 | import { describe } from "vitest";
2 | import connector from "../../src/connectors/sqlite3";
3 | import { testConnector } from "./_tests";
4 |
5 | describe("connectors: sqlite3", () => {
6 | testConnector({
7 | dialect: "sqlite",
8 | connector: connector({
9 | name: ":memory:",
10 | }),
11 | });
12 | });
13 |
--------------------------------------------------------------------------------
/docs/2.connectors/neon.md:
--------------------------------------------------------------------------------
1 | ---
2 | icon: cbi:neon
3 | ---
4 |
5 | # Neon
6 |
7 | > Connect DB0 to Neon Serverless Postgres.
8 |
9 | :read-more{to="https://neon.tech/"}
10 |
11 | ::read-more{to="https://github.com/unjs/db0/issues/32"}
12 | This connector is planned to be supported. Follow up via [unjs/db0#32](https://github.com/unjs/db0/issues/32).
13 | ::
14 |
15 |
--------------------------------------------------------------------------------
/docs/3.integrations/kysely.md:
--------------------------------------------------------------------------------
1 | ---
2 | icon: mynaui:letter-k
3 | ---
4 |
5 | # Kysely
6 |
7 | > Integrate DB0 with Kysely ORM
8 |
9 | :read-more{to="https://kysely.dev"}
10 |
11 |
12 | ::read-more{to="https://github.com/unjs/db0/issues/50"}
13 | An example for this integration is planned. Follow up via [unjs/db0#50](https://github.com/unjs/db0/issues/50).
14 | ::
15 |
--------------------------------------------------------------------------------
/docs/3.integrations/prisma.md:
--------------------------------------------------------------------------------
1 | ---
2 | icon: simple-icons:prisma
3 | ---
4 |
5 | # Prisma
6 |
7 | > Integrate DB0 with Prisma ORM
8 |
9 | :read-more{to="https://www.prisma.io"}
10 |
11 |
12 | ::read-more{to="https://github.com/unjs/db0/issues/50"}
13 | An example for this integration is planned. Follow up via [unjs/db0#50](https://github.com/unjs/db0/issues/50).
14 | ::
15 |
--------------------------------------------------------------------------------
/src/index.ts:
--------------------------------------------------------------------------------
1 | export { createDatabase } from "./database.ts";
2 |
3 | export { connectors } from "./_connectors.ts";
4 |
5 | export type {
6 | Connector,
7 | Database,
8 | ExecResult,
9 | Primitive,
10 | SQLDialect,
11 | Statement,
12 | PreparedStatement,
13 | } from "./types.ts";
14 |
15 | export type { ConnectorName, ConnectorOptions } from "./_connectors.ts";
16 |
--------------------------------------------------------------------------------
/test/connectors/node-sqlite.test.ts:
--------------------------------------------------------------------------------
1 | import { describe } from "vitest";
2 | import connector from "../../src/connectors/node-sqlite";
3 | import { testConnector } from "./_tests";
4 |
5 | describe("connectors: node-sqlite (native)", () => {
6 | testConnector({
7 | dialect: "sqlite",
8 | connector: connector({
9 | name: ":memory:",
10 | }),
11 | });
12 | });
13 |
--------------------------------------------------------------------------------
/eslint.config.mjs:
--------------------------------------------------------------------------------
1 | import unjs from "eslint-config-unjs";
2 |
3 | // https://github.com/unjs/eslint-config
4 | export default unjs({
5 | ignores: ["integrations/**", "connectors/**", "**/.docs"],
6 | rules: {
7 | "unicorn/expiring-todo-comments": 0,
8 | "@typescript-eslint/no-non-null-assertion": 0,
9 | "unicorn/no-null": 0,
10 | "@typescript-eslint/no-unused-vars": 0,
11 | },
12 | });
13 |
--------------------------------------------------------------------------------
/docs/3.integrations/1.index.md:
--------------------------------------------------------------------------------
1 | ---
2 | icon: carbon:property-relationship
3 | ---
4 |
5 | # Integrations
6 |
7 | > You can integrate a DB0 instance with the ORM or framework of your choice.
8 |
9 | Currently supported integrations:
10 |
11 | - [Drizzle](/integrations/drizzle)
12 |
13 | ::read-more{to="https://github.com/unjs/db0/issues/50"}
14 | See [unjs/db0#50](https://github.com/unjs/db0/issues/50) for the list of upcoming integrations.
15 | ::
16 |
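17 | As a quick sketch of what an integration looks like with Drizzle (this assumes the `db0/integrations/drizzle` subpath export; see the Drizzle page for details):
18 |
19 | ```ts
20 | import { createDatabase } from "db0";
21 | import sqlite from "db0/connectors/better-sqlite3";
22 | import { drizzle } from "db0/integrations/drizzle";
23 |
24 | // Any db0 database instance can be handed to the integration layer
25 | const db0 = createDatabase(sqlite({}));
26 | const db = drizzle(db0);
27 | ```
28 |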
--------------------------------------------------------------------------------
/test/connectors/postgresql.test.ts:
--------------------------------------------------------------------------------
1 | import { describe } from "vitest";
2 | import connector from "../../src/connectors/postgresql";
3 | import { testConnector } from "./_tests";
4 |
5 | describe.runIf(process.env.POSTGRESQL_URL)(
6 | "connectors: postgresql.test",
7 | () => {
8 | testConnector({
9 | dialect: "postgresql",
10 | connector: connector({
11 | url: process.env.POSTGRESQL_URL!,
12 | }),
13 | });
14 | },
15 | );
16 |
--------------------------------------------------------------------------------
/test/connectors/better-sqlite3.test.ts:
--------------------------------------------------------------------------------
1 | import { fileURLToPath } from "node:url";
2 | import { rmSync } from "node:fs";
3 | import { describe } from "vitest";
4 | import connector from "../../src/connectors/better-sqlite3";
5 | import { testConnector } from "./_tests";
6 |
7 | describe("connectors: better-sqlite3", () => {
8 | testConnector({
9 | dialect: "sqlite",
10 | connector: connector({
11 | name: ":memory:",
12 | }),
13 | });
14 | });
15 |
--------------------------------------------------------------------------------
/test/connectors/mysql2.test.ts:
--------------------------------------------------------------------------------
1 | import { describe } from "vitest";
2 | import connector from "../../src/connectors/mysql2";
3 | import { testConnector } from "./_tests";
4 |
5 | describe.runIf(process.env.MYSQL_URL)("connectors: mysql2.test", () => {
6 | testConnector({
7 | dialect: "mysql",
8 | connector: connector({
9 | host: "localhost",
10 | user: "test",
11 | password: "test",
12 | database: "db0",
13 | }),
14 | });
15 | });
16 |
--------------------------------------------------------------------------------
/.env.example:
--------------------------------------------------------------------------------
1 | POSTGRESQL_URL=postgresql://test:test@localhost:5432/db0
2 | MYSQL_URL=mysql://test:test@localhost:3306/db0
3 |
4 | # PlanetScale
5 | PLANETSCALE_HOST=aws.connect.psdb.cloud
6 | PLANETSCALE_USERNAME=username
7 | PLANETSCALE_PASSWORD=password
8 |
9 | # Cloudflare Hyperdrive
10 | WRANGLER_HYPERDRIVE_LOCAL_CONNECTION_STRING_POSTGRESQL=postgresql://test:test@localhost:5432/db0
11 | WRANGLER_HYPERDRIVE_LOCAL_CONNECTION_STRING_MYSQL=mysql://test:test@localhost:3306/db0
12 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: ci
2 |
3 | on: { push: {}, pull_request: {} }
4 |
5 | jobs:
6 | tests:
7 | runs-on: ubuntu-latest
8 | steps:
9 | - uses: actions/checkout@v6
10 | - run: npm i -g --force corepack && corepack enable
11 | - uses: actions/setup-node@v6
12 | with: { node-version: lts/*, cache: pnpm }
13 | - run: pnpm install
14 | - run: pnpm lint
15 | - run: pnpm build
16 | - run: pnpm test:types
17 | - run: pnpm vitest
18 |
--------------------------------------------------------------------------------
/docker-compose.yaml:
--------------------------------------------------------------------------------
1 | services:
2 | pg:
3 | # https://hub.docker.com/_/postgres
4 | image: postgres:alpine
5 | network_mode: "host"
6 | environment:
7 | POSTGRES_USER: test
8 | POSTGRES_PASSWORD: test
9 | POSTGRES_DB: db0
10 | mysql:
11 | # https://hub.docker.com/_/mysql
12 | image: mysql
13 | network_mode: "host"
14 | environment:
15 | MYSQL_ROOT_PASSWORD: test
16 | MYSQL_DATABASE: db0
17 | MYSQL_USER: test
18 | MYSQL_PASSWORD: test
19 |
--------------------------------------------------------------------------------
/.github/workflows/autofix.yml:
--------------------------------------------------------------------------------
1 | name: autofix.ci
2 | on: { push: {}, pull_request: {} }
3 | permissions: { contents: read }
4 | jobs:
5 | autofix:
6 | runs-on: ubuntu-latest
7 | steps:
8 | - uses: actions/checkout@v6
9 | - run: npm i -fg corepack && corepack enable
10 | - uses: actions/setup-node@v6
11 | with: { node-version: lts/*, cache: pnpm }
12 | - run: pnpm install
13 | - run: pnpm lint:fix
14 | - uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27
15 | with: { commit-message: "chore: apply automated updates" }
16 |
--------------------------------------------------------------------------------
/src/connectors/_internal/cloudflare.ts:
--------------------------------------------------------------------------------
1 | import type { Hyperdrive } from "@cloudflare/workers-types";
2 |
3 | function getCloudflareEnv() {
4 | return (
5 | (globalThis as any).__env__ ||
6 | import("cloudflare:workers" as any).then((mod) => mod.env)
7 | );
8 | }
9 |
10 | export async function getHyperdrive(bindingName: string): Promise<Hyperdrive> {
11 | const env = await getCloudflareEnv();
12 | const binding: Hyperdrive = env[bindingName];
13 | if (!binding) {
14 | throw new Error(`[db0] [hyperdrive] binding \`${bindingName}\` not found`);
15 | }
16 | return binding;
17 | }
18 |
--------------------------------------------------------------------------------
/docs/1.guide/4.custom-connector.md:
--------------------------------------------------------------------------------
1 | ---
2 | icon: material-symbols-light:dashboard-customize
3 | ---
4 |
5 | # Custom Connectors
6 |
7 | > If there is no built-in connector for your SQL database yet, you can create a custom one yourself.
8 |
9 | ::read-more{to="https://github.com/unjs/db0/tree/main/src/connectors"}
10 | Explore [built-in connectors](https://github.com/unjs/db0/tree/main/src/connectors) to learn how to implement a custom connector.
11 | ::
12 |
13 | > [!NOTE]
14 | > [Request](https://github.com/unjs/db0/issues/new?assignees=&labels=connector&projects=&template=feature-request.yml) a new connector.
15 |
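16 | As a rough sketch (illustrative only — `myDriver` below is a hypothetical database client, and the exact type shapes live in db0's `types.ts`), a connector is an object exposing `name`, `dialect`, `getInstance`, `exec`, and `prepare` (plus an optional `dispose`), where `prepare` returns a statement with `all`, `run`, `get`, and `bind`:
17 |
18 | ```ts
19 | import type { Connector, Primitive } from "db0";
20 |
21 | // Hypothetical driver standing in for the client you are wrapping
22 | declare const myDriver: {
23 |   query: (sql: string, params?: Primitive[]) => Promise<any[]>;
24 |   close: () => void;
25 | };
26 |
27 | export default function myConnector(): Connector {
28 |   const prepare = (sql: string, bound: Primitive[] = []) => ({
29 |     // `bind` returns a new statement with the parameters captured
30 |     bind: (...params: Primitive[]) => prepare(sql, params),
31 |     all: (...params: Primitive[]) =>
32 |       myDriver.query(sql, params.length > 0 ? params : bound),
33 |     run: async (...params: Primitive[]) => {
34 |       await myDriver.query(sql, params.length > 0 ? params : bound);
35 |       return { success: true };
36 |     },
37 |     get: async (...params: Primitive[]) =>
38 |       (await myDriver.query(sql, params.length > 0 ? params : bound))[0],
39 |   });
40 |
41 |   return {
42 |     name: "my-connector",
43 |     dialect: "sqlite",
44 |     getInstance: () => myDriver,
45 |     exec: (sql) => myDriver.query(sql),
46 |     prepare,
47 |     dispose: () => myDriver.close(),
48 |   };
49 | }
50 | ```
51 |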
--------------------------------------------------------------------------------
/test/connectors/planetscale.test.ts:
--------------------------------------------------------------------------------
1 | import { describe } from "vitest";
2 | import connector from "../../src/connectors/planetscale";
3 | import { testConnector } from "./_tests";
4 |
5 | describe.runIf(
6 | process.env.PLANETSCALE_HOST &&
7 | process.env.PLANETSCALE_USERNAME &&
8 | process.env.PLANETSCALE_PASSWORD,
9 | )("connectors: planetscale.test", () => {
10 | testConnector({
11 | dialect: "mysql",
12 | connector: connector({
13 | host: process.env.PLANETSCALE_HOST!,
14 | username: process.env.PLANETSCALE_USERNAME!,
15 | password: process.env.PLANETSCALE_PASSWORD!,
16 | }),
17 | });
18 | });
19 |
--------------------------------------------------------------------------------
/docs/2.connectors/1.index.md:
--------------------------------------------------------------------------------
1 | ---
2 | icon: gravity-ui:plug-connection
3 | ---
4 |
5 | # Connectors
6 |
7 | > You can use the DB0 API with the connector of your choice.
8 |
9 | Currently supported connectors:
10 |
11 | - [Bun](/connectors/bun)
12 | - [Cloudflare D1](/connectors/cloudflare)
13 | - [LibSQL](/connectors/libsql)
14 | - [PlanetScale](/connectors/planetscale)
15 | - [PostgreSQL](/connectors/postgresql)
16 | - [MySQL](/connectors/mysql)
17 | - [SQLite](/connectors/sqlite)
18 |
19 | ::read-more{to="https://github.com/unjs/db0/issues/32"}
20 | See [unjs/db0#32](https://github.com/unjs/db0/issues/32) for the list of upcoming connectors.
21 | ::
22 |
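23 | Whichever connector you pick, the query API stays the same — only the import and the connector options change. A minimal sketch (connection values are placeholders):
24 |
25 | ```ts
26 | import { createDatabase } from "db0";
27 | import sqlite from "db0/connectors/better-sqlite3";
28 | // import postgresql from "db0/connectors/postgresql";
29 |
30 | const db = createDatabase(sqlite({}));
31 | // const db = createDatabase(postgresql({ url: "postgresql://user:password@localhost:5432/db0" }));
32 |
33 | const { rows } = await db.sql`SELECT 1 AS ok`;
34 | ```
35 |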
--------------------------------------------------------------------------------
/src/connectors/libsql/node.ts:
--------------------------------------------------------------------------------
1 | import type { Config, Client } from "@libsql/client";
2 | import type { Connector, Primitive } from "db0";
3 | import { createClient } from "@libsql/client";
4 | import libSqlCore from "./core.ts";
5 |
6 | export type ConnectorOptions = Config;
7 |
8 | export default function libSqlConnector(
9 | opts: ConnectorOptions,
10 | ): Connector {
11 | let _client: Client | undefined;
12 | const getClient = () => {
13 | if (!_client) {
14 | _client = createClient(opts);
15 | }
16 | return _client;
17 | };
18 | return libSqlCore({
19 | name: "libsql-node",
20 | getClient,
21 | });
22 | }
23 |
--------------------------------------------------------------------------------
/src/connectors/libsql/http.ts:
--------------------------------------------------------------------------------
1 | import type { Config, Client } from "@libsql/client";
2 | import type { Connector, Primitive } from "db0";
3 | import { createClient } from "@libsql/client/http";
4 | import libSqlCore from "./core.ts";
5 |
6 | export type ConnectorOptions = Config;
7 |
8 | export default function libSqlConnector(
9 | opts: ConnectorOptions,
10 | ): Connector {
11 | let _client: Client | undefined;
12 | const getClient = () => {
13 | if (!_client) {
14 | _client = createClient(opts);
15 | }
16 | return _client;
17 | };
18 | return libSqlCore({
19 | name: "libsql-http",
20 | getClient,
21 | });
22 | }
23 |
--------------------------------------------------------------------------------
/src/connectors/libsql/web.ts:
--------------------------------------------------------------------------------
1 | import type { Config, Client } from "@libsql/client";
2 | import type { Connector, Primitive } from "db0";
3 | import { createClient } from "@libsql/client/http";
4 | import libSqlCore from "./core.ts";
5 |
6 | export type ConnectorOptions = Config;
7 |
8 | export default function libSqlConnector(
9 | opts: ConnectorOptions,
10 | ): Connector {
11 | let _client: Client | undefined;
12 | const getClient = () => {
13 | if (!_client) {
14 | _client = createClient(opts);
15 | }
16 | return _client;
17 | };
18 | return libSqlCore({
19 | name: "libsql-web",
20 | getClient,
21 | });
22 | }
23 |
--------------------------------------------------------------------------------
/docs/2.connectors/mysql.md:
--------------------------------------------------------------------------------
1 | ---
2 | icon: simple-icons:mysql
3 | ---
4 |
5 | # MySQL
6 |
7 | > Connect DB0 to a MySQL database using mysql2
8 |
9 | ## Usage
10 |
11 | For this connector, you need to install [`mysql2`](https://www.npmjs.com/package/mysql2) dependency:
12 |
13 | :pm-install{name="mysql2"}
14 |
15 | Use `mysql2` connector:
16 |
17 | ```js
18 | import { createDatabase } from "db0";
19 | import mysql from "db0/connectors/mysql2";
20 |
21 | const db = createDatabase(
22 | mysql({
23 | /* options */
24 | }),
25 | );
26 | ```
27 |
28 | ## Options
29 |
30 | :read-more{to="https://github.com/sidorares/node-mysql2/blob/master/typings/mysql/lib/Connection.d.ts#L82-L329"}
31 |
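32 | For example, with explicit connection options (placeholder values; any `mysql2` connection option should work here):
33 |
34 | ```ts
35 | import { createDatabase } from "db0";
36 | import mysql from "db0/connectors/mysql2";
37 |
38 | const db = createDatabase(
39 |   mysql({
40 |     host: "localhost",
41 |     port: 3306,
42 |     user: "test",
43 |     password: "test",
44 |     database: "db0",
45 |   }),
46 | );
47 | ```
48 |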
--------------------------------------------------------------------------------
/test/connectors/pglite.test.ts:
--------------------------------------------------------------------------------
1 | import { fileURLToPath } from "node:url";
2 | import { rm, mkdir } from "node:fs/promises";
3 | import { dirname, resolve } from "node:path";
4 | import { describe } from "vitest";
5 | import PGlite from "../../src/connectors/pglite";
6 | import { testConnector } from "./_tests";
7 |
8 | describe("connectors: pglite", async () => {
9 | const dataDir = fileURLToPath(new URL(".tmp/pglite", import.meta.url));
10 | await rm(dataDir, { recursive: true }).catch(() => {
11 | /* */
12 | });
13 | await mkdir(dirname(dataDir), { recursive: true });
14 | testConnector({
15 | dialect: "postgresql",
16 | connector: PGlite({ dataDir }),
17 | });
18 | });
19 |
--------------------------------------------------------------------------------
/docs/2.connectors/vercel.md:
--------------------------------------------------------------------------------
1 | ---
2 | icon: radix-icons:vercel-logo
3 | ---
4 |
5 | # Vercel
6 |
7 | > Connect DB0 to Vercel Postgres
8 |
9 | :read-more{to="https://vercel.com/docs/storage/vercel-postgres"}
10 |
11 | ::read-more{to="https://github.com/unjs/db0/issues/32"}
12 | A dedicated `vercel` connector is planned to be supported. Follow up via [unjs/db0#32](https://github.com/unjs/db0/issues/32).
13 | ::
14 |
15 | ## Usage
16 |
17 | Use [`postgresql`](/connectors/postgresql) connector:
18 |
19 | ```js
20 | import { createDatabase } from "db0";
21 | import postgresql from "db0/connectors/postgresql";
22 |
23 | const db = createDatabase(
24 |   postgresql({
25 | /* options */
26 | }),
27 | );
28 | ```
29 |
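30 | Vercel Postgres typically exposes its connection string through an environment variable (for example `POSTGRES_URL` — check your project settings), which can be passed as the connector's `url` option. A minimal sketch:
31 |
32 | ```ts
33 | import { createDatabase } from "db0";
34 | import postgresql from "db0/connectors/postgresql";
35 |
36 | const db = createDatabase(
37 |   postgresql({
38 |     url: process.env.POSTGRES_URL!,
39 |   }),
40 | );
41 | ```
42 |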
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "target": "ESNext",
4 | "module": "NodeNext",
5 | "moduleResolution": "NodeNext",
6 | "resolveJsonModule": true,
7 | "esModuleInterop": false,
8 | "allowSyntheticDefaultImports": true,
9 | "skipLibCheck": true,
10 | "strict": true,
11 | "verbatimModuleSyntax": true,
12 | "isolatedModules": true,
13 | "composite": true,
14 | "allowImportingTsExtensions": true,
15 | "isolatedDeclarations": true,
16 | "forceConsistentCasingInFileNames": true,
17 | "noImplicitOverride": true,
18 | "noEmit": true,
19 | "paths": {
20 | "db0/connectors/*": ["./src/connectors/*"]
21 | }
22 | },
23 | "include": ["src"]
24 | }
25 |
--------------------------------------------------------------------------------
/test/connectors/libsql.test.ts:
--------------------------------------------------------------------------------
1 | import { fileURLToPath } from "node:url";
2 | import { existsSync, unlinkSync, mkdirSync } from "node:fs";
3 | import { dirname, resolve } from "node:path";
4 | import { describe } from "vitest";
5 | import libSql from "../../src/connectors/libsql/node";
6 | import { testConnector } from "./_tests";
7 |
8 | describe("connectors: libsql", () => {
9 | const dbPath = resolve(
10 | dirname(fileURLToPath(import.meta.url)),
11 | ".tmp/libsql/.data/local.db",
12 | );
13 | if (existsSync(dbPath)) {
14 | unlinkSync(dbPath);
15 | }
16 | mkdirSync(dirname(dbPath), { recursive: true });
17 | testConnector({
18 | dialect: "libsql",
19 | connector: libSql({
20 | url: `file:${dbPath}`,
21 | }),
22 | });
23 | });
24 |
--------------------------------------------------------------------------------
/docs/.config/docs.yaml:
--------------------------------------------------------------------------------
1 | # yaml-language-server: $schema=https://unpkg.com/undocs/schema/config.json
2 |
3 | name: "db0"
4 | shortDescription: "tiny sql connector"
5 | description: "Connect and query any compatible SQL database and integrate with your favorite tools."
6 | github: "unjs/db0"
7 | url: "https://db0.unjs.io"
8 | socials:
9 | twitter: "https://twitter.com/unjsio"
10 | bluesky: "https://bsky.app/profile/unjs.io"
11 | sponsors:
12 | api: https://sponsors.pi0.io/sponsors.json
13 | automd: true
14 | themeColor: "orange"
15 | landing:
16 | contributors: true
17 | # heroLinks:
18 | # stackblitz:
19 | # icon: "i-heroicons-play"
20 | # to: "https://stackblitz.com/github/unjs/packageName/tree/main/playground"
21 | # features:
22 | # - title:
23 | # description:
24 |
--------------------------------------------------------------------------------
/test/connectors/bun-test.ts:
--------------------------------------------------------------------------------
1 | import { describe, expect, test } from "bun:test";
2 |
3 | import connector from "../../src/connectors/bun-sqlite";
4 | import { createDatabase } from "../../src";
5 |
6 | test("connectors: bun", async () => {
7 | const db = createDatabase(connector({ name: ":memory:" }));
8 |
9 | const userId = "1001";
10 |
11 | await db.sql`DROP TABLE IF EXISTS users`;
12 | await db.sql`CREATE TABLE users ("id" TEXT PRIMARY KEY, "firstName" TEXT, "lastName" TEXT, "email" TEXT)`;
13 |
14 | await db.sql`INSERT INTO users VALUES (${userId}, 'John', 'Doe', '')`;
15 |
16 | const { rows } = await db.sql`SELECT * FROM users WHERE id = ${userId}`;
17 | expect(rows).toMatchObject([
18 | { id: userId, firstName: "John", lastName: "Doe", email: "" },
19 | ]);
20 | });
21 |
--------------------------------------------------------------------------------
/test/template.test.ts:
--------------------------------------------------------------------------------
1 | import { describe, it, expect } from "vitest";
2 | import { sqlTemplate } from "../src/template";
3 |
4 | describe("SQL Template", () => {
5 | const tests = [
6 | {
7 | sql: sqlTemplate`SELECT * FROM {${"users"}} WHERE age > ${25} AND type = ${"test"}`,
8 | query: "SELECT * FROM users WHERE age > ? AND type = ?",
9 | values: [25, "test"],
10 | },
11 | {
12 | sql: sqlTemplate`INSERT INTO {${"users"}} ({${"name"}}, {${"age"}}) VALUES (${25}, ${"test"})`,
13 | query: "INSERT INTO users (name, age) VALUES (?, ?)",
14 | values: [25, "test"],
15 | },
16 | ];
17 |
18 | for (const test of tests) {
19 | const testName = `${test.query} (${test.values.join(", ")}))`;
20 | it(testName, () => {
21 | expect(test.sql).toEqual([test.query, test.values]);
22 | });
23 | }
24 | });
25 |
--------------------------------------------------------------------------------
/docs/2.connectors/postgresql.md:
--------------------------------------------------------------------------------
1 | ---
2 | icon: simple-icons:postgresql
3 | ---
4 |
5 | # PostgreSQL
6 |
7 | > Connect DB0 to PostgreSQL
8 |
9 | :read-more{to="https://www.postgresql.org"}
10 |
11 | ## Usage
12 |
13 | For this connector, you need to install [`pg`](https://www.npmjs.com/package/pg) dependency:
14 |
15 | :pm-install{name="pg @types/pg"}
16 |
17 | Use `postgresql` connector:
18 |
19 | ```js
20 | import { createDatabase } from "db0";
21 | import postgresql from "db0/connectors/postgresql";
22 |
23 | const db = createDatabase(
24 | postgresql({
25 |     /* options */
26 | }),
27 | );
28 | ```
29 |
30 | ## Options
31 |
32 | ### `url`
33 |
34 | Connection URL string.
35 |
36 | Alternatively, you can add connection configuration.
37 |
38 | :read-more{title="node-postgres client options" to="https://node-postgres.com/apis/client#new-client"}
39 |
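40 | For example, using a connection URL (placeholder credentials; alternatively pass `pg` client options directly):
41 |
42 | ```ts
43 | import { createDatabase } from "db0";
44 | import postgresql from "db0/connectors/postgresql";
45 |
46 | const db = createDatabase(
47 |   postgresql({
48 |     url: "postgresql://user:password@localhost:5432/db0",
49 |   }),
50 | );
51 | ```
52 |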
--------------------------------------------------------------------------------
/test/connectors/cloudflare/cloudflare-d1.test.ts:
--------------------------------------------------------------------------------
1 | import { getPlatformProxy, type PlatformProxy } from "wrangler";
2 | import { afterAll, beforeAll, describe } from "vitest";
3 | import cloudflareD1 from "../../../src/connectors/cloudflare-d1";
4 | import { testConnector } from "../_tests";
5 | import { fileURLToPath } from "node:url";
6 |
7 | describe("connectors: cloudflare-d1", () => {
8 | let platformProxy: PlatformProxy;
9 |
10 | beforeAll(async () => {
11 | platformProxy = await getPlatformProxy({
12 | configPath: fileURLToPath(new URL("wrangler-d1.toml", import.meta.url)),
13 | });
14 | (globalThis as any).__env__ = platformProxy.env;
15 | });
16 |
17 | afterAll(async () => {
18 | await platformProxy?.dispose();
19 | (globalThis as any).__env__ = undefined;
20 | });
21 |
22 | testConnector({
23 | dialect: "sqlite",
24 | connector: cloudflareD1({
25 | bindingName: "test",
26 | }),
27 | });
28 | });
29 |
--------------------------------------------------------------------------------
/docs/2.connectors/bun.md:
--------------------------------------------------------------------------------
1 | ---
2 | icon: simple-icons:bun
3 | ---
4 |
5 | # Bun SQLite
6 |
7 | > Connect DB0 to Bun SQLite
8 |
9 | :read-more{to="https://bun.sh/docs/api/sqlite"}
10 |
11 | > [!NOTE]
12 | > This connector requires the Bun runtime. Use `bun --bun ...` to make sure of it.
13 |
14 | ## Usage
15 |
16 | Use `bun-sqlite` connector:
17 |
18 | ```js
19 | import { createDatabase } from "db0";
20 | import bunSqlite from "db0/connectors/bun-sqlite";
21 |
22 | const db = createDatabase(bunSqlite({}));
23 | ```
24 |
25 | ## Options
26 |
27 | ### `name`
28 |
29 | Database (file) name. Default is `:memory:`.
30 |
31 | ### `cwd`
32 |
33 | Working directory used to create the database file. Defaults to the current working directory of the project. (Ignored if `path` is an absolute path or if `name` is `:memory:` or empty.)
34 |
35 | ### `path`
36 |
37 | Relative (to `cwd`) or absolute path to the SQLite file. By default it is stored in `{cwd}/.data/{name}.bun.sqlite` (or `.data/db.bun.sqlite` if no `name` is set).
38 |
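39 | For example (based on the defaults above, the file-backed database is expected to end up at `{cwd}/.data/app.bun.sqlite`):
40 |
41 | ```ts
42 | import { createDatabase } from "db0";
43 | import bunSqlite from "db0/connectors/bun-sqlite";
44 |
45 | // In-memory database
46 | const memoryDb = createDatabase(bunSqlite({ name: ":memory:" }));
47 |
48 | // File-backed database stored under `.data/`
49 | const fileDb = createDatabase(bunSqlite({ name: "app" }));
50 | ```
51 |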
--------------------------------------------------------------------------------
/examples/drizzle/index.ts:
--------------------------------------------------------------------------------
1 | import { sqliteTable, text, numeric } from "drizzle-orm/sqlite-core";
2 |
3 | import { createDatabase } from "../../src";
4 | import { drizzle } from "../../src/integrations/drizzle"
5 |
6 | import sqlite from "../../src/connectors/better-sqlite3";
7 |
8 | export const users = sqliteTable("users", {
9 | id: numeric("id"),
10 | name: text("full_name"),
11 | });
12 |
13 | const schema = { users };
14 |
15 | async function main() {
16 | const db0 = createDatabase(sqlite({}));
17 | const db = drizzle(db0, { schema });
18 |
19 | await db0.sql`create table if not exists users (
20 | id integer primary key autoincrement,
21 | full_name text
22 | )`;
23 |
24 | await db0.sql`insert into users (full_name) values ('John Doe')`;
25 |
26 | const res = await db.select().from(users).all();
27 | console.log({ res });
28 | }
29 |
30 | // eslint-disable-next-line unicorn/prefer-top-level-await
31 | main().catch((error) => {
32 | console.error(error);
33 | // eslint-disable-next-line unicorn/no-process-exit
34 | process.exit(1);
35 | });
36 |
--------------------------------------------------------------------------------
/src/template.ts:
--------------------------------------------------------------------------------
1 | import type { Primitive } from "./types.ts";
2 |
3 | export function sqlTemplate(
4 | strings: TemplateStringsArray,
5 | ...values: Primitive[]
6 | ): [string, Primitive[]] {
7 | if (!isTemplateStringsArray(strings) || !Array.isArray(values)) {
8 | throw new Error("[db0] invalid template invocation");
9 | }
10 |
11 | const staticIndexes: number[] = [];
12 |
13 | let result = strings[0] || "";
14 | for (let i = 1; i < strings.length; i++) {
15 | if (result.endsWith("{") && strings[i].startsWith("}")) {
16 | result = result.slice(0, -1) + values[i - 1] + strings[i].slice(1);
17 | staticIndexes.push(i - 1);
18 | continue;
19 | }
20 | result += `?${strings[i] ?? ""}`;
21 | }
22 |
23 | const dynamicValues = values.filter((_, i) => !staticIndexes.includes(i));
24 |
25 | return [result.trim(), dynamicValues];
26 | }
27 |
28 | function isTemplateStringsArray(
29 | strings: unknown,
30 | ): strings is TemplateStringsArray {
31 | return (
32 | Array.isArray(strings) && "raw" in strings && Array.isArray(strings.raw)
33 | );
34 | }
35 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) Pooya Parsa
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/test/connectors/cloudflare/cloudflare-hyperdrive-postgresql.test.ts:
--------------------------------------------------------------------------------
1 | import { getPlatformProxy, type PlatformProxy } from "wrangler";
2 |
3 | import { afterAll, beforeAll, describe } from "vitest";
4 | import cloudflareHyperdrivePostgresql from "../../../src/connectors/cloudflare-hyperdrive-postgresql";
5 | import { testConnector } from "../_tests";
6 | import { fileURLToPath } from "node:url";
7 |
8 | describe.runIf(process.env.POSTGRESQL_URL)(
9 | "connectors: cloudflare-hyperdrive-postgresql",
10 | () => {
11 | let platformProxy: PlatformProxy;
12 |
13 | beforeAll(async () => {
14 | process.env.WRANGLER_HYPERDRIVE_LOCAL_CONNECTION_STRING_POSTGRESQL =
15 | process.env.POSTGRESQL_URL;
16 | platformProxy = await getPlatformProxy({
17 | configPath: fileURLToPath(new URL("wrangler-pg.toml", import.meta.url)),
18 | });
19 | (globalThis as any).__env__ = platformProxy.env;
20 | });
21 |
22 | afterAll(async () => {
23 | await platformProxy?.dispose();
24 | });
25 |
26 | testConnector({
27 | dialect: "postgresql",
28 | connector: cloudflareHyperdrivePostgresql({
29 | bindingName: "POSTGRESQL",
30 | }),
31 | });
32 | },
33 | );
34 |
--------------------------------------------------------------------------------
/test/connectors/cloudflare/cloudflare-hyperdrive-mysql.test.ts:
--------------------------------------------------------------------------------
1 | import { getPlatformProxy, type PlatformProxy } from "wrangler";
2 | import { afterAll, beforeAll, describe } from "vitest";
3 | import cloudflareHyperdriveMysql from "../../../src/connectors/cloudflare-hyperdrive-mysql";
4 | import { testConnector } from "../_tests";
5 | import { fileURLToPath } from "node:url";
6 |
7 | describe.runIf(process.env.MYSQL_URL)(
8 | "connectors: cloudflare-hyperdrive-mysql",
9 | () => {
10 | let platformProxy: PlatformProxy;
11 |
12 | beforeAll(async () => {
13 | process.env.WRANGLER_HYPERDRIVE_LOCAL_CONNECTION_STRING_MYSQL =
14 | process.env.MYSQL_URL;
15 | platformProxy = await getPlatformProxy({
16 | configPath: fileURLToPath(
17 | new URL("wrangler-mysql.toml", import.meta.url),
18 | ),
19 | });
20 | (globalThis as any).__env__ = platformProxy.env;
21 | });
22 |
23 | afterAll(async () => {
24 | await platformProxy?.dispose();
25 | (globalThis as any).__env__ = undefined;
26 | });
27 |
28 | testConnector({
29 | dialect: "mysql",
30 | connector: cloudflareHyperdriveMysql({
31 | bindingName: "MYSQL",
32 | }),
33 | });
34 | },
35 | );
36 |
--------------------------------------------------------------------------------
/docs/2.connectors/planetscale.md:
--------------------------------------------------------------------------------
1 | ---
2 | icon: simple-icons:planetscale
3 | ---
4 |
5 | # PlanetScale
6 |
7 | > Connect DB0 to PlanetScale
8 |
9 | :read-more{to="https://planetscale.com"}
10 |
11 | ## Usage
12 |
13 | For this connector, you need to install [`@planetscale/database`](https://www.npmjs.com/package/@planetscale/database) dependency:
14 |
15 | :pm-install{name="@planetscale/database"}
16 |
17 | Use `planetscale` connector:
18 |
19 | ```js
20 | import { createDatabase } from "db0";
21 | import planetscale from "db0/connectors/planetscale";
22 |
23 | const db = createDatabase(
24 | planetscale({
25 | host: "aws.connect.psdb.cloud",
26 | username: "username",
27 | password: "password",
28 | }),
29 | );
30 | ```
31 |
32 | ## Options
33 |
34 | ### `host`
35 |
36 | Planetscale host.
37 |
38 | ### `username`
39 |
40 | Planetscale username.
41 |
42 | ### `password`
43 |
44 | Planetscale password.
45 |
46 | ### `url`
47 |
48 | Connection URL string.
49 | The `host`, `username` and `password` are extracted from the URL.
50 |
51 | :read-more{title="Create a database password" to="https://planetscale.com/docs/tutorials/planetscale-serverless-driver"}
52 |
53 | :read-more{title="@planetscale/database client options" to="https://github.com/planetscale/database-js"}
54 |
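55 | For example, using a single connection URL instead of separate credentials (placeholder values):
56 |
57 | ```ts
58 | import { createDatabase } from "db0";
59 | import planetscale from "db0/connectors/planetscale";
60 |
61 | const db = createDatabase(
62 |   planetscale({
63 |     url: "mysql://username:password@aws.connect.psdb.cloud/my-database",
64 |   }),
65 | );
66 | ```
67 |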
--------------------------------------------------------------------------------
/src/connectors/_internal/statement.ts:
--------------------------------------------------------------------------------
1 | import type { Primitive, Statement, PreparedStatement } from "db0";
2 |
3 | export abstract class BoundableStatement<T = unknown> implements Statement {
4 | _statement: T;
5 |
6 | constructor(rawStmt: T) {
7 | this._statement = rawStmt;
8 | }
9 |
10 | bind(...params: Primitive[]): PreparedStatement {
11 | return new BoundStatement(this, params);
12 | }
13 |
14 |   abstract all(...params: Primitive[]): Promise<unknown[]>;
15 |
16 | abstract run(...params: Primitive[]): Promise<{ success: boolean }>;
17 |
18 |   abstract get(...params: Primitive[]): Promise<unknown>;
19 | }
20 |
21 | class BoundStatement<S extends Statement> implements PreparedStatement {
22 | #statement: S;
23 | #params: Primitive[];
24 |
25 | constructor(statement: S, params: Primitive[]) {
26 | this.#statement = statement;
27 | this.#params = params;
28 | }
29 |
30 |   bind(...params: Primitive[]): BoundStatement<S> {
31 | return new BoundStatement(this.#statement, params);
32 | }
33 |
34 |   all(): Promise<unknown[]> {
35 | return this.#statement.all(...this.#params);
36 | }
37 |
38 | run(): Promise<{ success: boolean }> {
39 | return this.#statement.run(...this.#params);
40 | }
41 |
42 |   get(): Promise<unknown> {
43 | return this.#statement.get(...this.#params);
44 | }
45 | }
46 |
--------------------------------------------------------------------------------
/src/connectors/cloudflare-d1.ts:
--------------------------------------------------------------------------------
1 | import type {
2 | D1Database,
3 | D1PreparedStatement as RawStatement,
4 | } from "@cloudflare/workers-types";
5 | import type { Connector, Primitive } from "db0";
6 | import { BoundableStatement } from "./_internal/statement.ts";
7 |
8 | export interface ConnectorOptions {
9 | bindingName?: string;
10 | }
11 |
12 | export default function cloudflareD1Connector(
13 | options: ConnectorOptions,
14 | ): Connector {
15 | const getDB = () => {
16 | // TODO: Remove legacy __cf_env__ support in next major version
17 | const binding: D1Database =
18 | ((globalThis as any).__env__ as any)?.[options.bindingName!] ||
19 | ((globalThis as any).__cf_env__ as any)?.[options.bindingName!];
20 | if (!binding) {
21 | throw new Error(
22 | `[db0] [d1] binding \`${options.bindingName}\` not found`,
23 | );
24 | }
25 | return binding;
26 | };
27 |
28 | return {
29 | name: "cloudflare-d1",
30 | dialect: "sqlite",
31 | getInstance: () => getDB(),
32 | exec: (sql) => getDB().exec(sql),
33 | prepare: (sql) => new StatementWrapper(getDB().prepare(sql)),
34 | };
35 | }
36 |
37 | class StatementWrapper extends BoundableStatement<RawStatement> {
38 | async all(...params: Primitive[]) {
39 | const res = await this._statement.bind(...params).all();
40 | return res.results;
41 | }
42 |
43 | async run(...params: Primitive[]) {
44 | const res = await this._statement.bind(...params).run();
45 | return res;
46 | }
47 |
48 | async get(...params: Primitive[]) {
49 | const res = await this._statement.bind(...params).first();
50 | return res;
51 | }
52 | }
53 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # DB0
2 |
3 |
4 |
5 | [](https://npmjs.com/package/db0)
6 | [](https://npm.chart.dev/db0)
7 |
8 |
9 |
10 | > [!IMPORTANT]
11 | > DB0 development is in the early stages. Track progress via [GitHub issues](https://github.com/unjs/db0/issues).
12 |
13 | DB0 is a lightweight SQL connector:
14 |
15 | ✅ Works with several SQL [connectors](https://db0.unjs.io/connectors).
16 |
17 | ✅ Can be [integrated](https://db0.unjs.io/integrations) with ORMs and embedded into frameworks.
18 |
19 | ✅ Provides a simple but elegant query API out of the box.
20 |
21 | 👉 Read [📚 Documentation](https://db0.unjs.io)
22 |
23 | ## Contribution
24 |
25 |
26 | Local development
27 |
28 | - Clone this repository
29 | - Install the latest LTS version of [Node.js](https://nodejs.org/en/)
30 | - Enable [Corepack](https://github.com/nodejs/corepack) using `corepack enable`
31 | - Install dependencies using `pnpm install`
32 | - Run tests using `pnpm dev` or `pnpm test`
33 |
34 |
35 |
36 |
37 |
38 | ## License
39 |
40 |
41 |
42 | Published under the [MIT](https://github.com/unjs/db0/blob/main/LICENSE) license.
43 | Made by [@pi0](https://github.com/pi0) and [community](https://github.com/unjs/db0/graphs/contributors) 💛
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 | ---
54 |
55 | _🤖 auto updated with [automd](https://automd.unjs.io)_
56 |
57 |
58 |
--------------------------------------------------------------------------------
/src/connectors/bun-sqlite.ts:
--------------------------------------------------------------------------------
1 | import { resolve, dirname } from "node:path";
2 | import { mkdirSync } from "node:fs";
3 | import { Database, Statement as RawStatement } from "bun:sqlite";
4 | import type { Connector, Primitive } from "db0";
5 | import { BoundableStatement } from "./_internal/statement.ts";
6 |
7 | export interface ConnectorOptions {
8 | cwd?: string;
9 | path?: string;
10 | name?: string;
11 | }
12 |
13 | export default function bunSqliteConnector(
14 | opts: ConnectorOptions,
15 | ): Connector {
16 | let _db: Database;
17 | const getDB = () => {
18 | if (_db) {
19 | return _db;
20 | }
21 | if (opts.name === ":memory:") {
22 | _db = new Database(":memory:");
23 | } else {
24 | const filePath = resolve(
25 | opts.cwd || ".",
26 | opts.path || `.data/${opts.name || "db"}.bun.sqlite`,
27 | );
28 | mkdirSync(dirname(filePath), { recursive: true });
29 | _db = new Database(filePath);
30 | }
31 | return _db;
32 | };
33 |
34 | return {
35 | name: "sqlite",
36 | dialect: "sqlite",
37 | getInstance: () => getDB(),
38 | exec: (sql) => getDB().exec(sql),
39 | prepare: (sql) => new StatementWrapper(getDB().prepare(sql)),
40 | dispose: () => {
41 | _db?.close?.();
42 | _db = undefined as any;
43 | },
44 | };
45 | }
46 |
47 | class StatementWrapper extends BoundableStatement<RawStatement> {
48 | all(...params: Primitive[]) {
49 | return Promise.resolve(this._statement.all(...params));
50 | }
51 |
52 | run(...params: Primitive[]) {
53 | const res = this._statement.run(...params);
54 | return Promise.resolve({ success: true, ...res });
55 | }
56 |
57 | get(...params: Primitive[]) {
58 | return Promise.resolve(this._statement.get(...params));
59 | }
60 | }
61 |
--------------------------------------------------------------------------------
/src/connectors/libsql/core.ts:
--------------------------------------------------------------------------------
1 | import type { Client, InStatement, ResultSet } from "@libsql/client";
2 | import type { Connector, Primitive } from "db0";
3 | import { BoundableStatement } from "../_internal/statement.ts";
4 |
5 | export type ConnectorOptions = {
6 | getClient: () => Client;
7 | name?: string;
8 | };
9 |
10 | type InternalQuery = (sql: InStatement) => Promise<ResultSet>;
11 |
12 | export default function libSqlCoreConnector(
13 | opts: ConnectorOptions,
14 | ): Connector {
15 | const query: InternalQuery = (sql) => opts.getClient().execute(sql);
16 |
17 | return {
18 | name: opts.name || "libsql-core",
19 | dialect: "libsql",
20 | getInstance: async () => opts.getClient(),
21 | exec: (sql) => query(sql),
22 | prepare: (sql) => new StatementWrapper(sql, query),
23 | dispose: () => {
24 | opts.getClient()?.close?.();
25 | },
26 | };
27 | }
28 |
29 | class StatementWrapper extends BoundableStatement {
30 | #query: InternalQuery;
31 | #sql: string;
32 |
33 | constructor(sql: string, query: InternalQuery) {
34 | super();
35 | this.#sql = sql;
36 | this.#query = query;
37 | }
38 |
39 | async all(...params: Primitive[]) {
40 | const res = await this.#query({
41 | sql: this.#sql,
42 |       args: params as Exclude<Primitive, undefined>[],
43 | });
44 | return res.rows;
45 | }
46 |
47 | async run(...params: Primitive[]) {
48 | const res = await this.#query({
49 | sql: this.#sql,
50 |       args: params as Exclude<Primitive, undefined>[],
51 | });
52 | return {
53 | ...res,
54 | };
55 | }
56 |
57 | async get(...params: Primitive[]) {
58 | const res = await this.#query({
59 | sql: this.#sql,
60 |       args: params as Exclude<Primitive, undefined>[],
61 | });
62 | return res.rows[0];
63 | }
64 | }
65 |
--------------------------------------------------------------------------------
/src/connectors/better-sqlite3.ts:
--------------------------------------------------------------------------------
1 | import { resolve, dirname } from "node:path";
2 | import { mkdirSync } from "node:fs";
3 | import Database from "better-sqlite3";
4 | import type { Connector, Primitive } from "db0";
5 | import type { Statement as RawStatement } from "better-sqlite3";
6 | import { BoundableStatement } from "./_internal/statement.ts";
7 |
8 | export interface ConnectorOptions {
9 | cwd?: string;
10 | path?: string;
11 | name?: string;
12 | }
13 |
14 | export default function sqliteConnector(
15 | opts: ConnectorOptions,
16 | ): Connector {
17 | let _db: Database.Database;
18 | const getDB = () => {
19 | if (_db) {
20 | return _db;
21 | }
22 | if (opts.name === ":memory:") {
23 | _db = new Database(":memory:");
24 | return _db;
25 | }
26 | const filePath = resolve(
27 | opts.cwd || ".",
28 | opts.path || `.data/${opts.name || "db"}.sqlite3`,
29 | );
30 | mkdirSync(dirname(filePath), { recursive: true });
31 | _db = new Database(filePath);
32 | return _db;
33 | };
34 |
35 | return {
36 | name: "sqlite",
37 | dialect: "sqlite",
38 | getInstance: () => getDB(),
39 | exec: (sql) => getDB().exec(sql),
40 | prepare: (sql) => new StatementWrapper(() => getDB().prepare(sql)),
41 | dispose: () => {
42 | _db?.close?.();
43 | _db = undefined as any;
44 | },
45 | };
46 | }
47 |
48 | class StatementWrapper extends BoundableStatement<() => RawStatement> {
49 | async all(...params: Primitive[]) {
50 | return this._statement().all(...params);
51 | }
52 |
53 | async run(...params: Primitive[]) {
54 | const res = this._statement().run(...params);
55 | return { success: res.changes > 0, ...res };
56 | }
57 |
58 | async get(...params: Primitive[]) {
59 | return this._statement().get(...params);
60 | }
61 | }
62 |
--------------------------------------------------------------------------------
/src/connectors/planetscale.ts:
--------------------------------------------------------------------------------
1 | import { Client, type ExecutedQuery, type Config } from "@planetscale/database";
2 |
3 | import type { Connector, Primitive } from "db0";
4 |
5 | import { BoundableStatement } from "./_internal/statement.ts";
6 |
7 | export type ConnectorOptions = Config;
8 |
9 | type InternalQuery = (
10 | sql: string,
11 | params?: unknown[],
12 | ) => Promise;
13 |
14 | export default function planetscaleConnector(
15 | opts: ConnectorOptions,
16 | ): Connector {
17 | let _client: undefined | Client;
18 | function getClient() {
19 | if (_client) {
20 | return _client;
21 | }
22 | const client = new Client(opts);
23 | _client = client;
24 | return client;
25 | }
26 |
27 | // Discussion on how @planetscale/database client works:
28 | // https://github.com/drizzle-team/drizzle-orm/issues/1743#issuecomment-1879479647
29 | const query: InternalQuery = (sql, params) =>
30 | getClient().execute(sql, params);
31 |
32 | return {
33 | name: "planetscale",
34 | dialect: "mysql",
35 | getInstance: () => getClient(),
36 | exec: (sql) => query(sql),
37 | prepare: (sql) => new StatementWrapper(sql, query),
38 | dispose: () => {
39 | _client = undefined;
40 | },
41 | };
42 | }
43 |
44 | class StatementWrapper extends BoundableStatement {
45 | #query: InternalQuery;
46 | #sql: string;
47 |
48 | constructor(sql: string, query: InternalQuery) {
49 | super();
50 | this.#sql = sql;
51 | this.#query = query;
52 | }
53 |
54 | async all(...params: Primitive[]) {
55 | const res = await this.#query(this.#sql, params);
56 | return res.rows;
57 | }
58 |
59 | async run(...params: Primitive[]) {
60 | const res = await this.#query(this.#sql, params);
61 | return {
62 | success: true,
63 | ...res,
64 | };
65 | }
66 |
67 | async get(...params: Primitive[]) {
68 | const res = await this.#query(this.#sql, params);
69 | return res.rows[0];
70 | }
71 | }
72 |
--------------------------------------------------------------------------------
/src/connectors/mysql2.ts:
--------------------------------------------------------------------------------
1 | import mysql from "mysql2/promise";
2 | import type { Connector, Primitive } from "db0";
3 | import { BoundableStatement } from "./_internal/statement.ts";
4 |
5 | export type ConnectorOptions = mysql.ConnectionOptions;
6 |
7 | type InternalQuery = (
8 | sql: string,
9 | params?: unknown[],
10 | ) => Promise;
11 |
12 | export default function mysqlConnector(
13 | opts: ConnectorOptions,
14 | ): Connector {
15 | let _connection: mysql.Connection | undefined;
16 | const getConnection = async () => {
17 | if (_connection) {
18 | return _connection;
19 | }
20 |
21 | _connection = await mysql.createConnection({
22 | ...opts,
23 | });
24 |
25 | return _connection;
26 | };
27 |
28 | const query: InternalQuery = (sql, params) =>
29 | getConnection()
30 | .then((c) => c.query(sql, params))
31 | .then((res) => res[0]);
32 |
33 | return {
34 | name: "mysql",
35 | dialect: "mysql",
36 | getInstance: () => getConnection(),
37 | exec: (sql) => query(sql),
38 | prepare: (sql) => new StatementWrapper(sql, query),
39 | dispose: async () => {
40 | await _connection?.end?.();
41 | _connection = undefined;
42 | },
43 | };
44 | }
45 |
46 | class StatementWrapper extends BoundableStatement {
47 | #query: InternalQuery;
48 | #sql: string;
49 |
50 | constructor(sql: string, query: InternalQuery) {
51 | super();
52 | this.#sql = sql;
53 | this.#query = query;
54 | }
55 |
56 | async all(...params: Primitive[]) {
57 | const res = (await this.#query(this.#sql, params)) as mysql.RowDataPacket[];
58 | return res;
59 | }
60 |
61 | async run(...params: Primitive[]) {
62 | const res = (await this.#query(this.#sql, params)) as mysql.RowDataPacket[];
63 | return {
64 | success: true,
65 | ...res,
66 | };
67 | }
68 |
69 | async get(...params: Primitive[]) {
70 | const res = (await this.#query(this.#sql, params)) as mysql.RowDataPacket[];
71 | return res[0];
72 | }
73 | }
74 |
--------------------------------------------------------------------------------
/docs/1.guide/1.index.md:
--------------------------------------------------------------------------------
1 | ---
2 | icon: ph:book-open-duotone
3 | ---
4 |
5 | # Getting Started
6 |
7 | > DB0 provides an easy way to connect to and query SQL database providers.
8 |
9 | > [!IMPORTANT]
10 | > DB0 development is in the early stages. Follow up progress via [GitHub issues](https://github.com/unjs/db0/issues).
11 |
12 | DB0 is a lightweight SQL connector:
13 |
14 | ✅ Works with several SQL [connectors](/connectors).
15 |
16 | ✅ Can be [integrated](/integrations) with ORMs and embedded into frameworks.
17 |
18 | ✅ Provides a simple but elegant query API out of the box.
19 |
20 | ## Quick start
21 |
22 | Install [`db0`](https://npmjs.com/package/db0) npm package:
23 |
24 | :pm-install{name="db0"}
25 |
26 | ```ts
27 | import { createDatabase } from "db0";
28 | import sqlite from "db0/connectors/better-sqlite3";
29 |
30 | // Initiate database with SQLite connector
31 | const db = createDatabase(sqlite({}));
32 |
33 | // Alternative:
34 | // `using` automatically closes the database connection
35 | // once the `db` variable goes out of scope (for example, the function execution ends)
36 | // await using db = createDatabase(sqlite({}));
37 |
38 | // Create users table
39 | await db.sql`CREATE TABLE IF NOT EXISTS users ("id" TEXT PRIMARY KEY, "firstName" TEXT, "lastName" TEXT, "email" TEXT)`;
40 |
41 | // Add a new user
42 | const userId = "1001";
43 | await db.sql`INSERT INTO users VALUES (${userId}, 'John', 'Doe', '')`;
44 |
45 | // Query for users
46 | const { rows } = await db.sql`SELECT * FROM users WHERE id = ${userId}`;
47 | console.log(rows);
48 |
49 | // Using static parameters
50 | const tableName = "users";
51 | const { rows: usersRows } =
52 |   await db.sql`SELECT * FROM {${tableName}} WHERE id = ${userId}`;
53 | console.log(usersRows);
54 | ```
55 |
56 | > [!IMPORTANT] > **Static Parameters** are a way to use string literals in places where prepared statements are not supported, for example table names. **DO NOT USE** static parameters with values from untrusted sources such as a request body. **STATIC PARAMETERS ARE NOT SANITIZED.**
57 |
58 | ## Next steps
59 |
60 | :read-more{to="/connectors"}
61 |
62 | :read-more{to="/integrations"}
63 |
--------------------------------------------------------------------------------
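A small sketch of the `exec()` and `prepare()` methods that `createDatabase` exposes alongside the tagged-template `sql` used in the quick start above; the table and values are illustrative only.

```ts
import { createDatabase } from "db0";
import sqlite from "db0/connectors/better-sqlite3";

const db = createDatabase(sqlite({ name: ":memory:" }));

// exec() runs a raw statement without parameters
await db.exec(
  `CREATE TABLE IF NOT EXISTS todos ("id" TEXT PRIMARY KEY, "title" TEXT)`,
);

// prepare() returns a reusable statement with run()/get()/all()
const insert = db.prepare(`INSERT INTO todos VALUES (?, ?)`);
await insert.run("1", "write docs");
await insert.run("2", "ship release");

console.log(await db.prepare(`SELECT * FROM todos WHERE id = ?`).get("1"));
console.log(await db.prepare(`SELECT * FROM todos`).all());

await db.dispose();
```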
/src/integrations/drizzle/index.ts:
--------------------------------------------------------------------------------
1 | import type { Database } from "db0";
2 | import { DB0Session, type DB0SessionOptions } from "./_session.ts";
3 |
4 | import { DefaultLogger } from "drizzle-orm/logger";
5 |
6 | import {
7 | BaseSQLiteDatabase,
8 | SQLiteAsyncDialect,
9 | } from "drizzle-orm/sqlite-core";
10 |
11 | import {
12 | type DrizzleConfig as DrizzleBaseConfig,
13 | type RelationalSchemaConfig,
14 | type TablesRelationalConfig,
15 | createTableRelationsHelpers,
16 | extractTablesRelationalConfig,
17 | } from "drizzle-orm";
18 |
19 | export type DrizzleDatabase<
20 |   TSchema extends Record<string, unknown> = Record<string, never>,
21 | > = BaseSQLiteDatabase<"async", any, TSchema>;
22 |
23 | export type DrizzleConfig<
24 |   TSchema extends Record<string, unknown> = Record<string, never>,
25 | > = DrizzleBaseConfig<TSchema>;
26 |
27 | export function drizzle<
28 |   TSchema extends Record<string, unknown> = Record<string, never>,
29 | >(db: Database, config?: DrizzleConfig<TSchema>): DrizzleDatabase<TSchema> {
30 | const dialect = new SQLiteAsyncDialect({ casing: config?.casing });
31 |
32 | let logger: DB0SessionOptions["logger"];
33 | if (config?.logger === true) {
34 | logger = new DefaultLogger();
35 | } else if (config?.logger !== false && config?.logger !== undefined) {
36 | logger = config.logger;
37 | }
38 |
39 | // Transform user schema to RelationalSchemaConfig
40 | // Reference: https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/d1/driver.ts
41 |   let schema: RelationalSchemaConfig<TablesRelationalConfig> | undefined;
42 | if (config?.schema) {
43 | const tablesConfig = extractTablesRelationalConfig(
44 | config.schema,
45 | createTableRelationsHelpers,
46 | );
47 | schema = {
48 | fullSchema: config.schema,
49 | schema: tablesConfig.tables,
50 | tableNamesMap: tablesConfig.tableNamesMap,
51 | };
52 | }
53 |
54 | const session = new DB0Session(db, dialect, schema, {
55 | logger,
56 | });
57 |
58 | return new BaseSQLiteDatabase(
59 | "async",
60 | dialect,
61 | // @ts-expect-error TODO
62 |
63 | session,
64 | schema,
65 | ) as DrizzleDatabase;
66 | }
67 |
--------------------------------------------------------------------------------
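A sketch of how the `drizzle()` factory above is typically wired to a db0 database; the table definition is illustrative, and `logger: true` exercises the `DefaultLogger` branch.

```ts
import { createDatabase } from "db0";
import sqlite from "db0/connectors/better-sqlite3";
import { drizzle } from "db0/integrations/drizzle";
import { sqliteTable, integer, text } from "drizzle-orm/sqlite-core";

// Illustrative table definition
const users = sqliteTable("users", {
  id: integer("id").primaryKey({ autoIncrement: true }),
  name: text("name"),
});

const db = createDatabase(sqlite({ name: ":memory:" }));
await db.sql`CREATE TABLE IF NOT EXISTS users (id integer primary key autoincrement, name text)`;

// `schema` feeds the RelationalSchemaConfig transformation above;
// `logger: true` switches on drizzle's DefaultLogger
const orm = drizzle(db, { schema: { users }, logger: true });

const inserted = await orm.insert(users).values({ name: "Ada" }).returning();
console.log(inserted);
console.log(await orm.select().from(users).all());
```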
/src/connectors/postgresql.ts:
--------------------------------------------------------------------------------
1 | import pg from "pg";
2 |
3 | import type { Connector, Primitive } from "db0";
4 |
5 | import { BoundableStatement } from "./_internal/statement.ts";
6 |
7 | export type ConnectorOptions = { url: string } | pg.ClientConfig;
8 |
9 | type InternalQuery = (
10 | sql: string,
11 | params?: Primitive[],
12 | ) => Promise<pg.QueryResult>;
13 |
14 | export default function postgresqlConnector(
15 | opts: ConnectorOptions,
16 | ): Connector {
17 |   let _client: undefined | pg.Client | Promise<pg.Client>;
18 | function getClient() {
19 | if (_client) {
20 | return _client;
21 | }
22 | const client = new pg.Client("url" in opts ? opts.url : opts);
23 | _client = client.connect().then(() => {
24 | _client = client;
25 | return _client;
26 | });
27 | return _client;
28 | }
29 |
30 | const query: InternalQuery = async (sql, params) => {
31 | const client = await getClient();
32 | return client.query(normalizeParams(sql), params);
33 | };
34 |
35 | return {
36 | name: "postgresql",
37 | dialect: "postgresql",
38 | getInstance: () => getClient(),
39 | exec: (sql) => query(sql),
40 | prepare: (sql) => new StatementWrapper(sql, query),
41 | dispose: async () => {
42 | await (await _client)?.end?.();
43 | _client = undefined;
44 | },
45 | };
46 | }
47 |
48 | // https://www.postgresql.org/docs/9.3/sql-prepare.html
49 | function normalizeParams(sql: string) {
50 | let i = 0;
51 | return sql.replace(/\?/g, () => `$${++i}`);
52 | }
53 |
54 | class StatementWrapper extends BoundableStatement {
55 | #query: InternalQuery;
56 | #sql: string;
57 |
58 | constructor(sql: string, query: InternalQuery) {
59 | super();
60 | this.#sql = sql;
61 | this.#query = query;
62 | }
63 |
64 | async all(...params: Primitive[]) {
65 | const res = await this.#query(this.#sql, params);
66 | return res.rows;
67 | }
68 |
69 | async run(...params: Primitive[]) {
70 | const res = await this.#query(this.#sql, params);
71 | return {
72 | success: true,
73 | ...res,
74 | };
75 | }
76 |
77 | async get(...params: Primitive[]) {
78 | const res = await this.#query(this.#sql, params);
79 | return res.rows[0];
80 | }
81 | }
82 |
--------------------------------------------------------------------------------
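A sketch of what `normalizeParams()` above does to the `?` placeholders used by db0 statements, followed by typical connector usage; the connection URL is a placeholder.

```ts
import { createDatabase } from "db0";
import postgresql from "db0/connectors/postgresql";

// Re-implementation of normalizeParams() for illustration only:
// each `?` becomes a numbered PostgreSQL parameter.
const normalize = (sql: string) => {
  let i = 0;
  return sql.replace(/\?/g, () => `$${++i}`);
};
console.log(normalize("SELECT * FROM users WHERE id = ? AND email = ?"));
// -> SELECT * FROM users WHERE id = $1 AND email = $2

// Placeholder connection URL
const db = createDatabase(
  postgresql({ url: "postgresql://postgres:postgres@localhost:5432/postgres" }),
);

const stmt = db.prepare("SELECT * FROM pg_tables WHERE schemaname = ?");
console.log(await stmt.all("public"));

await db.dispose();
```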
/src/connectors/node-sqlite.ts:
--------------------------------------------------------------------------------
1 | import { resolve, dirname } from "node:path";
2 | import { mkdirSync } from "node:fs";
3 | import type { Connector, Primitive } from "db0";
4 | import type { DatabaseSync, StatementSync } from "node:sqlite";
5 | import { BoundableStatement } from "./_internal/statement.ts";
6 |
7 | export interface ConnectorOptions {
8 | cwd?: string;
9 | path?: string;
10 | name?: string;
11 | }
12 |
13 | export default function nodeSqlite3Connector(
14 | opts: ConnectorOptions,
15 | ): Connector {
16 | let _db: DatabaseSync | undefined;
17 |
18 | const getDB = () => {
19 | if (_db) {
20 | return _db;
21 | }
22 | const nodeSqlite = globalThis.process?.getBuiltinModule?.("node:sqlite");
23 | if (!nodeSqlite) {
24 | throw new Error(
25 | "`node:sqlite` module is not available. Please ensure you are running in Node.js >= 22.5 or Deno >= 2.2.",
26 | );
27 | }
28 | if (opts.name === ":memory:") {
29 | _db = new nodeSqlite.DatabaseSync(":memory:");
30 | return _db;
31 | }
32 | const filePath = resolve(
33 | opts.cwd || ".",
34 | opts.path || `.data/${opts.name || "db"}.sqlite`,
35 | );
36 | mkdirSync(dirname(filePath), { recursive: true });
37 | _db = new nodeSqlite.DatabaseSync(filePath);
38 | return _db;
39 | };
40 |
41 | return {
42 | name: "node-sqlite",
43 | dialect: "sqlite",
44 | getInstance: () => getDB(),
45 | exec(sql: string) {
46 | getDB().exec(sql);
47 | return { success: true };
48 | },
49 | prepare: (sql) => new StatementWrapper(() => getDB().prepare(sql)),
50 | dispose: () => {
51 | _db?.close?.();
52 | _db = undefined;
53 | },
54 | };
55 | }
56 |
57 | class StatementWrapper extends BoundableStatement<() => StatementSync> {
58 | async all(...params: Primitive[]) {
59 | const raws = this._statement().all(
60 |       ...(params as Exclude<Primitive, boolean>[]),
61 | );
62 | return raws;
63 | }
64 | async run(...params: Primitive[]) {
65 | const res = this._statement().run(
66 |       ...(params as Exclude<Primitive, boolean>[]),
67 | );
68 | return { success: true, ...res };
69 | }
70 | async get(...params: Primitive[]) {
71 | const raw = this._statement().get(
72 |       ...(params as Exclude<Primitive, boolean>[]),
73 | );
74 | return raw;
75 | }
76 | }
77 |
--------------------------------------------------------------------------------
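A usage sketch for the `node-sqlite` connector above, showing how the `name`/`cwd`/`path` options map to the file resolution in `getDB()`; the paths are illustrative.

```ts
import { createDatabase } from "db0";
import nodeSqlite from "db0/connectors/node-sqlite";

// In-memory database: no file is created
const memDb = createDatabase(nodeSqlite({ name: ":memory:" }));

// On first use this resolves to `./.data/app.sqlite` and creates
// the `.data` directory if it is missing
const fileDb = createDatabase(nodeSqlite({ cwd: ".", name: "app" }));

// An explicit `path` takes precedence over `name`
const customDb = createDatabase(nodeSqlite({ path: "./var/db.sqlite" }));

await memDb.sql`CREATE TABLE t ("id" TEXT PRIMARY KEY)`;
await memDb.sql`INSERT INTO t VALUES ('1')`;
console.log(await memDb.sql`SELECT * FROM t`);

await Promise.all([memDb.dispose(), fileDb.dispose(), customDb.dispose()]);
```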
/src/connectors/pglite.ts:
--------------------------------------------------------------------------------
1 | import type {
2 | PGliteOptions,
3 | PGliteInterfaceExtensions,
4 | Results as PGLiteQueryResults,
5 | } from "@electric-sql/pglite";
6 | import { PGlite } from "@electric-sql/pglite";
7 | import type { Connector, Primitive } from "db0";
8 | import { BoundableStatement } from "./_internal/statement.ts";
9 |
10 | export type ConnectorOptions = PGliteOptions;
11 |
12 | type InternalQuery = (
13 | sql: string,
14 | params?: Primitive[],
15 | ) => Promise<PGLiteQueryResults<unknown>>;
16 |
17 | export default function pgliteConnector<TOptions extends PGliteOptions = PGliteOptions>(
18 |   opts?: TOptions,
19 | ): Connector<PGlite & PGliteInterfaceExtensions<TOptions["extensions"]>> {
20 |   type PGLiteInstance = PGlite &
21 |     PGliteInterfaceExtensions<TOptions["extensions"]>;
22 |
23 |   let _client: undefined | PGLiteInstance | Promise<PGLiteInstance>;
24 |
25 | function getClient() {
26 | return (_client ||= PGlite.create(opts).then((res) => (_client = res)));
27 | }
28 |
29 | const query: InternalQuery = async (sql, params) => {
30 | const client = await getClient();
31 | const normalizedSql = normalizeParams(sql);
32 | const result = await client.query(normalizedSql, params);
33 | return result;
34 | };
35 |
36 |   return <Connector<PGLiteInstance>>{
37 | name: "pglite",
38 | dialect: "postgresql",
39 | getInstance: () => getClient(),
40 | exec: (sql) => query(sql),
41 | prepare: (sql) => new StatementWrapper(sql, query),
42 | dispose: async () => {
43 | await (await _client)?.close?.();
44 | _client = undefined;
45 | },
46 | };
47 | }
48 |
49 | // https://www.postgresql.org/docs/9.3/sql-prepare.html
50 | function normalizeParams(sql: string) {
51 | let i = 0;
52 | return sql.replace(/\?/g, () => `$${++i}`);
53 | }
54 |
55 | class StatementWrapper extends BoundableStatement {
56 | #query: InternalQuery;
57 | #sql: string;
58 |
59 | constructor(sql: string, query: InternalQuery) {
60 | super();
61 | this.#sql = sql;
62 | this.#query = query;
63 | }
64 |
65 | async all(...params: Primitive[]) {
66 | const result = await this.#query(this.#sql, params);
67 | return result.rows;
68 | }
69 |
70 | async run(...params: Primitive[]) {
71 | const result = await this.#query(this.#sql, params);
72 | return {
73 | success: true,
74 | ...result,
75 | };
76 | }
77 |
78 | async get(...params: Primitive[]) {
79 | const result = await this.#query(this.#sql, params);
80 | return result.rows[0];
81 | }
82 | }
83 |
--------------------------------------------------------------------------------
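A usage sketch for the `pglite` connector above; `dataDir` is optional and `memory://` keeps the database ephemeral.

```ts
import { createDatabase } from "db0";
import pglite from "db0/connectors/pglite";

const db = createDatabase(pglite({ dataDir: "memory://" }));

await db.sql`CREATE TABLE IF NOT EXISTS notes ("id" SERIAL PRIMARY KEY, "body" TEXT)`;
await db.sql`INSERT INTO notes ("body") VALUES (${"hello pglite"})`;

// `?` placeholders are rewritten to $1, $2, ... by normalizeParams() above
const rows = await db
  .prepare(`SELECT * FROM notes WHERE "body" = ?`)
  .all("hello pglite");
console.log(rows);

await db.dispose();
```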
/docs/2.connectors/sqlite.md:
--------------------------------------------------------------------------------
1 | ---
2 | icon: simple-icons:sqlite
3 | ---
4 |
5 | # SQLite
6 |
7 | > Connect DB0 to a local SQLite database with Node.js and Deno
8 |
9 | You have 3 options for using SQLite:
10 |
11 | - [`node-sqlite`](#node-sqlite) (recommended)
12 | - [`better-sqlite3`](#better-sqlite3)
13 | - [`sqlite3`](#sqlite3)
14 |
15 | ## `node-sqlite`
16 |
17 | This driver uses native [`node:sqlite`](https://nodejs.org/api/sqlite.html) supported in Node.js >= 22.5 (experimental) and Deno >= [2.2](https://deno.com/blog/v2.2) and requires **no dependencies**!
18 |
19 | :read-more{to="https://nodejs.org/api/sqlite.html" title="Node.js docs"}
20 |
21 | :read-more{to="https://docs.deno.com/api/node/sqlite/" title="Deno docs"}
22 |
23 | ```js
24 | import { createDatabase } from "db0";
25 | import sqlite from "db0/connectors/node-sqlite";
26 |
27 | const db = createDatabase(
28 | sqlite({
29 | name: ":memory:",
30 | }),
31 | );
32 | ```
33 |
34 | ## `better-sqlite3`
35 |
36 | :read-more{to="https://github.com/WiseLibs/better-sqlite3" title="better-sqlite3"}
37 |
38 | For this connector, you need to install [`better-sqlite3`](https://www.npmjs.com/package/better-sqlite3) dependency:
39 |
40 | :pm-install{name="better-sqlite3"}
41 |
42 | Use `better-sqlite3` connector:
43 |
44 | ```js
45 | import { createDatabase } from "db0";
46 | import sqlite from "db0/connectors/better-sqlite3";
47 |
48 | const db = createDatabase(
49 | sqlite({
50 | name: ":memory:",
51 | }),
52 | );
53 | ```
54 |
55 | ### Options
56 |
57 | #### `cwd`
58 |
59 | Working directory in which to create the database file. Defaults to the project's current working directory. (Ignored if `path` is an absolute path.)
60 |
61 | #### `name`
62 |
63 | Database (file) name. Default is `db`.
64 |
65 | > [!NOTE]
66 | > You can use `:memory:` as name for in-memory storage.
67 |
68 | #### `path`
69 |
70 | Relative (to `cwd`) or absolute path to the SQLite file. By default it is stored at `{cwd}/.data/{name}.sqlite3`, i.e. `.data/db.sqlite3`.
71 |
72 | ## `sqlite3`
73 |
74 | :read-more{to="https://github.com/TryGhost/node-sqlite3" title="sqlite3"}
75 |
76 | For this connector, you need to install [`sqlite3`](https://www.npmjs.com/package/sqlite3) dependency:
77 |
78 | :pm-install{name="sqlite3"}
79 |
80 | Use `sqlite3` connector:
81 |
82 | ```js
83 | import { createDatabase } from "db0";
84 | import sqlite from "db0/connectors/sqlite3";
85 |
86 | const db = createDatabase(
87 | sqlite({
88 | name: ":memory:",
89 | }),
90 | );
91 | ```
92 |
93 | ### Options
94 |
95 | (same as [better-sqlite3](#better-sqlite3))
96 |
--------------------------------------------------------------------------------
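A sketch combining the `cwd`, `name` and `path` options documented in the SQLite page above; file locations are illustrative.

```ts
import { createDatabase } from "db0";
import sqlite from "db0/connectors/better-sqlite3";

// Stored at `./server/.data/app.sqlite3`
const appDb = createDatabase(sqlite({ cwd: "./server", name: "app" }));

// A `path` (relative to `cwd`, or absolute) overrides the default location
const scratchDb = createDatabase(sqlite({ path: "/tmp/scratch.sqlite3" }));

// `:memory:` keeps everything in memory
const memDb = createDatabase(sqlite({ name: ":memory:" }));

await memDb.sql`SELECT 1`;
await Promise.all([appDb.dispose(), scratchDb.dispose(), memDb.dispose()]);
```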
/docs/2.connectors/libsql.md:
--------------------------------------------------------------------------------
1 | ---
2 | icon: clarity:node-line
3 | ---
4 |
5 | # LibSQL
6 |
7 | > Connect DB0 to LibSQL
8 |
9 | :read-more{to="https://libsql.org"}
10 |
11 | ## Usage
12 |
13 | For this connector, you need to install [`@libsql/client`](https://www.npmjs.com/package/@libsql/client) dependency:
14 |
15 | :pm-install{name="@libsql/client"}
16 |
17 | Choose one of the `libsql`, `libsql/http` or `libsql/web` connectors depending on your usage requirements:
18 |
19 | ::code-group
20 |
21 | ```ts [libsql.node.js]
22 | import { createDatabase } from "db0";
23 | import libSql from "db0/connectors/libsql";
24 |
25 | const db = createDatabase(libSql({ url: `file:local.db` }));
26 | ```
27 |
28 | ```ts [libsql.http.js]
29 | import { createDatabase } from "db0";
30 | import libSql from "db0/connectors/libsql/http";
31 |
32 | const db = createDatabase(libSql({}));
33 | ```
34 |
35 | ```ts [libsql.web.js]
36 | import { createDatabase } from "db0";
37 | import libSql from "db0/connectors/libsql/web";
38 |
39 | const db = createDatabase(libSql({}));
40 | ```
41 |
42 | ::
43 |
44 | ## Options
45 |
46 | ### `url`
47 |
48 | Type: `string`
49 |
50 | The database URL. The client supports `libsql:`, `http:`/`https:`, `ws:`/`wss:` and `file:` URLs. For more information, please refer to the project README: [link](https://github.com/libsql/libsql-client-ts#supported-urls)
51 |
52 | ---
53 |
54 | ### `authToken`
55 |
56 | Type: `string` (optional)
57 |
58 | Authentication token for the database.
59 |
60 | ---
61 |
62 | ### `tls`
63 |
64 | Type: `boolean` (optional)
65 |
66 | Enables or disables TLS for `libsql:` URLs. By default, `libsql:` URLs use TLS. You can set this option to `false` to disable TLS.
67 |
68 | ---
69 |
70 | ### `intMode`
71 |
72 | Type: `IntMode` (optional)
73 |
74 | How to convert SQLite integers to JavaScript values:
75 |
76 | - `"number"` (default): returns SQLite integers as JavaScript `number`-s (double precision floats). `number` cannot precisely represent integers larger than 2^53-1 in absolute value, so attempting to read larger integers will throw a `RangeError`.
77 | - `"bigint"`: returns SQLite integers as JavaScript `bigint`-s (arbitrary precision integers). Bigints can precisely represent all SQLite integers.
78 | - `"string"`: returns SQLite integers as strings.
79 |
80 | ## References
81 |
82 | - [LibSQL Website](https://libsql.org/)
83 | - [LibSQL GitHub Repository](https://github.com/libsql/libsql)
84 | - [LibSQL Client API Reference](https://libsql.org/libsql-client-ts/index.html)
85 | - [LibSQL Client GitHub Repository](https://github.com/libsql/libsql-client-ts)
86 |
--------------------------------------------------------------------------------
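A sketch combining the libSQL options documented above; the URL and token are placeholders for a remote libSQL (e.g. Turso) database.

```ts
import { createDatabase } from "db0";
import libSql from "db0/connectors/libsql";

const db = createDatabase(
  libSql({
    url: "libsql://your-database.example.com", // placeholder URL
    authToken: process.env.LIBSQL_AUTH_TOKEN, // placeholder token source
    intMode: "bigint", // read INTEGER columns as bigint to avoid precision loss
  }),
);

const { rows } = await db.sql`SELECT 1 AS one`;
console.log(rows);
```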
/docs/3.integrations/drizzle.md:
--------------------------------------------------------------------------------
1 | ---
2 | icon: simple-icons:drizzle
3 | ---
4 |
5 | # Drizzle
6 |
7 | > Integrate DB0 with Drizzle ORM
8 |
9 | :read-more{to="https://orm.drizzle.team"}
10 |
11 | ## Example
12 |
13 | Define your database schema using Drizzle's schema system:
14 |
15 | ```ts [schema.ts]
16 | import { sqliteTable, text, integer } from "drizzle-orm/sqlite-core";
17 |
18 | export const users = sqliteTable('users', {
19 | id: integer('id').primaryKey({ autoIncrement: true }),
20 | fullName: text('full_name').notNull(),
21 | email: text('email').notNull().unique(),
22 | createdAt: integer('created_at', { mode: 'timestamp' }).$defaultFn(() => new Date()),
23 | });
24 |
25 | export type User = typeof users.$inferSelect;
26 | export type NewUser = typeof users.$inferInsert;
27 | ```
28 |
29 | Initialize your database with Drizzle integration:
30 |
31 | ```ts [database.ts]
32 | import { createDatabase } from "db0";
33 | import sqlite from "db0/connectors/better-sqlite3";
34 | import { drizzle } from "db0/integrations/drizzle";
35 | import * as schema from "./schema";
36 |
37 | // Initialize DB instance with SQLite connector
38 | const db0 = createDatabase(sqlite({ name: 'database.sqlite' }));
39 |
40 | // Create Drizzle instance with schema
41 | export const db = drizzle(db0, { schema });
42 | ```
43 |
44 | Use Drizzle's migration system to create tables:
45 |
46 | ```bash
47 | # Generate migration from schema
48 | npx drizzle-kit generate
49 |
50 | # Apply migrations
51 | npx drizzle-kit migrate
52 |
53 | # Or push schema directly (development)
54 | npx drizzle-kit push
55 | ```
56 |
57 | Then use fully typed queries:
58 |
59 | ```ts [queries.ts]
60 | import { db } from "./database"; import { users, type NewUser } from "./schema"; import { eq } from "drizzle-orm";
61 |
62 | // Insert a new user with type safety
63 | const newUser: NewUser = {
64 | fullName: 'John Doe',
65 | email: 'john@example.com',
66 | createdAt: new Date()
67 | };
68 |
69 | const insertedUser = await db.insert(users).values(newUser).returning().get()
70 |
71 | // Query users with full type safety
72 | const allUsers = await db.select().from(users);
73 |
74 | // Query with conditions
75 | const johnDoe = await db.select()
76 | .from(users)
77 | .where(eq(users.email, 'john@example.com'));
78 | ```
79 |
80 | ## Configuration
81 |
82 | Create a `drizzle.config.ts` file for migration management:
83 |
84 | ```ts [drizzle.config.ts]
85 | import { defineConfig } from 'drizzle-kit';
86 |
87 | export default defineConfig({
88 | schema: './schema.ts',
89 | out: './migrations',
90 | dialect: 'sqlite',
91 | dbCredentials: {
92 | url: './database.sqlite'
93 | }
94 | });
95 | ```
--------------------------------------------------------------------------------
/src/connectors/cloudflare-hyperdrive-postgresql.ts:
--------------------------------------------------------------------------------
1 | import pg from "pg";
2 |
3 | import type { Connector, Primitive } from "db0";
4 |
5 | import { BoundableStatement } from "./_internal/statement.ts";
6 | import { getHyperdrive } from "./_internal/cloudflare.ts";
7 |
8 | type OmitPgConfig = Omit<
9 | pg.ClientConfig,
10 | "user" | "database" | "password" | "port" | "host" | "connectionString"
11 | >;
12 | export type ConnectorOptions = {
13 | bindingName: string;
14 | } & OmitPgConfig;
15 |
16 | type InternalQuery = (
17 | sql: string,
18 | params?: Primitive[],
19 | ) => Promise<pg.QueryResult>;
20 |
21 | export default function cloudflareHyperdrivePostgresqlConnector(
22 | opts: ConnectorOptions,
23 | ): Connector {
24 |   let _client: undefined | pg.Client | Promise<pg.Client>;
25 | async function getClient() {
26 | if (_client) {
27 | return _client;
28 | }
29 | const hyperdrive = await getHyperdrive(opts.bindingName);
30 | const client = new pg.Client({
31 | ...opts,
32 | connectionString: hyperdrive.connectionString,
33 | });
34 | _client = client.connect().then(() => {
35 | _client = client;
36 | return _client;
37 | });
38 | return _client;
39 | }
40 |
41 | const query: InternalQuery = async (sql, params) => {
42 | const client = await getClient();
43 | return client.query(normalizeParams(sql), params);
44 | };
45 |
46 | return {
47 | name: "cloudflare-hyperdrive-postgresql",
48 | dialect: "postgresql",
49 | getInstance: () => getClient(),
50 | exec: (sql) => query(sql),
51 | prepare: (sql) => new StatementWrapper(sql, query),
52 | dispose: async () => {
53 | await (await _client)?.end?.();
54 | _client = undefined;
55 | },
56 | };
57 | }
58 |
59 | // https://www.postgresql.org/docs/9.3/sql-prepare.html
60 | function normalizeParams(sql: string) {
61 | let i = 0;
62 | return sql.replace(/\?/g, () => `$${++i}`);
63 | }
64 |
65 | class StatementWrapper extends BoundableStatement {
66 | #query: InternalQuery;
67 | #sql: string;
68 |
69 | constructor(sql: string, query: InternalQuery) {
70 | super();
71 | this.#sql = sql;
72 | this.#query = query;
73 | }
74 |
75 | async all(...params: Primitive[]) {
76 | const res = await this.#query(this.#sql, params);
77 | return res.rows;
78 | }
79 |
80 | async run(...params: Primitive[]) {
81 | const res = await this.#query(this.#sql, params);
82 | return {
83 | success: true,
84 | ...res,
85 | };
86 | }
87 |
88 | async get(...params: Primitive[]) {
89 | const res = await this.#query(this.#sql, params);
90 | return res.rows[0];
91 | }
92 | }
93 |
--------------------------------------------------------------------------------
/src/integrations/drizzle/_utils.ts:
--------------------------------------------------------------------------------
1 | import {
2 | type AnyColumn,
3 | type DriverValueDecoder,
4 | type SelectedFieldsOrdered,
5 | getTableName,
6 | is,
7 | Column,
8 | SQL,
9 | } from "drizzle-orm";
10 |
11 | // Source: https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/utils.ts#L14
12 |
13 | /** @internal */
14 | export function mapResultRow<TResult>(
15 |   columns: SelectedFieldsOrdered<AnyColumn>,
16 |   row: unknown[],
17 |   joinsNotNullableMap: Record<string, boolean> | undefined,
18 | ): TResult {
19 |   // Key -> nested object key, value -> table name if all fields in the nested object are from the same table, false otherwise
20 |   const nullifyMap: Record<string, string | false> = {};
21 |
22 |   // eslint-disable-next-line unicorn/no-array-reduce
23 |   const result = columns.reduce<Record<string, any>>(
24 |     (result, { path, field }, columnIndex) => {
25 |       let decoder: DriverValueDecoder<unknown, unknown>;
26 | if (is(field, Column)) {
27 | decoder = field;
28 | } else if (is(field, SQL)) {
29 | decoder = "decoder" in field && (field.decoder as any);
30 | } else {
31 | decoder = "decoder" in field.sql && (field.sql.decoder as any);
32 | }
33 | let node = result;
34 | for (const [pathChunkIndex, pathChunk] of path.entries()) {
35 | if (pathChunkIndex < path.length - 1) {
36 | if (!(pathChunk in node)) {
37 | node[pathChunk] = {};
38 | }
39 | node = node[pathChunk];
40 | } else {
41 | const rawValue = row[columnIndex]!;
42 | const value = (node[pathChunk] =
43 | rawValue === null ? null : decoder.mapFromDriverValue(rawValue));
44 |
45 | if (joinsNotNullableMap && is(field, Column) && path.length === 2) {
46 | const objectName = path[0]!;
47 | if (!(objectName in nullifyMap)) {
48 | nullifyMap[objectName] =
49 | value === null ? getTableName(field.table) : false;
50 | } else if (
51 | typeof nullifyMap[objectName] === "string" &&
52 | nullifyMap[objectName] !== getTableName(field.table)
53 | ) {
54 | nullifyMap[objectName] = false;
55 | }
56 | }
57 | }
58 | }
59 | return result;
60 | },
61 | {},
62 | );
63 |
64 | // Nullify all nested objects from nullifyMap that are nullable
65 | if (joinsNotNullableMap && Object.keys(nullifyMap).length > 0) {
66 | for (const [objectName, tableName] of Object.entries(nullifyMap)) {
67 | if (typeof tableName === "string" && !joinsNotNullableMap[tableName]) {
68 | result[objectName] = null;
69 | }
70 | }
71 | }
72 |
73 | return result as TResult;
74 | }
75 |
--------------------------------------------------------------------------------
/test/connectors/_tests.ts:
--------------------------------------------------------------------------------
1 | import { beforeAll, expect, it } from "vitest";
2 | import {
3 | Connector,
4 | Database,
5 | createDatabase,
6 | type SQLDialect,
7 | } from "../../src";
8 |
9 | export function testConnector<TConnector extends Connector>(opts: {
10 | connector: TConnector;
11 | dialect: SQLDialect;
12 | }) {
13 | let db: Database;
14 | beforeAll(() => {
15 | db = createDatabase(opts.connector);
16 | });
17 |
18 | const userId = "1001";
19 | const userSnapshot = `
20 | [
21 | {
22 | "email": "",
23 | "firstName": "John",
24 | "id": "1001",
25 | "lastName": "Doe",
26 | },
27 | ]
28 | `;
29 |
30 | it("instance matches", async () => {
31 | const instance = await db.getInstance();
32 | expect(instance).toBeDefined();
33 | expect(instance).toBe(await opts.connector.getInstance());
34 | });
35 |
36 | it("dialect matches", () => {
37 | expect(db.dialect).toBe(opts.dialect);
38 | });
39 |
40 | it("drop and create table", async () => {
41 | await db.sql`DROP TABLE IF EXISTS users`;
42 | switch (opts.dialect) {
43 | case "mysql": {
44 | await db.sql`CREATE TABLE users (\`id\` VARCHAR(4) PRIMARY KEY, \`firstName\` TEXT, \`lastName\` TEXT, \`email\` TEXT)`;
45 | break;
46 | }
47 | default: {
48 | await db.sql`CREATE TABLE users ("id" TEXT PRIMARY KEY, "firstName" TEXT, "lastName" TEXT, "email" TEXT)`;
49 | break;
50 | }
51 | }
52 | });
53 |
54 | it("insert", async () => {
55 | switch (opts.dialect) {
56 | case "mysql": {
57 | await db.sql`INSERT INTO users VALUES (${userId}, 'John', 'Doe', '')`;
58 | break;
59 | }
60 | default: {
61 | const { rows } =
62 | await db.sql`INSERT INTO users VALUES (${userId}, 'John', 'Doe', '') RETURNING *`;
63 | expect(rows).toMatchInlineSnapshot(userSnapshot);
64 | break;
65 | }
66 | }
67 | });
68 |
69 | it("select", async () => {
70 | const { rows } = await db.sql`SELECT * FROM users WHERE id = ${userId}`;
71 | expect(rows).toMatchInlineSnapshot(userSnapshot);
72 | });
73 |
74 | it("deferred prepare errors", async () => {
75 | await expect(
76 | db.prepare("SELECT * FROM non_existing_table").all(),
77 | ).rejects.toThrowError("non_existing_table");
78 | });
79 |
80 | it("dispose", async () => {
81 | await db.dispose();
82 | expect(db.disposed).toBe(true);
83 |
84 | let err;
85 | try {
86 | await db.getInstance();
87 | } catch (error) {
88 | err = error;
89 | }
90 | expect(err).toBeInstanceOf(Error);
91 | expect((err as Error).message).toBe(
92 | "This database instance has been disposed and cannot be used.",
93 | );
94 | });
95 | }
96 |
--------------------------------------------------------------------------------
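A sketch of how an individual connector test can consume the shared `testConnector` suite above, mirroring the pattern of the files under `test/connectors/`; the connector choice here is illustrative.

```ts
import { describe } from "vitest";
import { testConnector } from "./_tests";
import nodeSqlite from "../../src/connectors/node-sqlite";

describe("connectors: node-sqlite", () => {
  testConnector({
    connector: nodeSqlite({ name: ":memory:" }),
    dialect: "sqlite",
  });
});
```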
/docs/2.connectors/pglite.md:
--------------------------------------------------------------------------------
1 | ---
2 | icon: simple-icons:postgresql
3 | ---
4 |
5 | # PGlite
6 |
7 | > Connect DB0 to Postgres using PGlite
8 |
9 | :read-more{to="https://pglite.dev"}
10 |
11 | ## Usage
12 |
13 | For this connector, you need to install [`@electric-sql/pglite`](https://www.npmjs.com/package/@electric-sql/pglite) dependency:
14 |
15 | :pm-i{name="@electric-sql/pglite"}
16 |
17 | Use `pglite` connector:
18 |
19 | ```js
20 | import { createDatabase } from "db0";
21 | import pglite from "db0/connectors/pglite";
22 |
23 | const db = createDatabase(
24 | pglite({
25 | /* options */
26 | }),
27 | );
28 | ```
29 |
30 |
31 |
32 | ## Options
33 |
34 | ### `dataDir`
35 |
36 | Path to the directory for storing the Postgres database. You can provide a URI scheme for various storage backends:
37 |
38 | - **`file://` or unprefixed**: File system storage, available in Node and Bun.
39 | - **`idb://`**: IndexedDB storage, available in the browser.
40 | - **`memory://`**: In-memory ephemeral storage, available on all platforms.
41 |
42 | ### `options`
43 |
44 | #### `dataDir`
45 |
46 | The directory in which to store the Postgres database when not provided as the first argument.
47 |
48 | **Type:** `string`
49 |
50 | #### `debug`
51 |
52 | Postgres debug level. Logs are sent to the console.
53 |
54 | **Type:** `1 | 2 | 3 | 4 | 5`
55 |
56 | #### `relaxedDurability`
57 |
58 | Under relaxed durability mode, PGlite will not wait for flushes to storage to complete after each query before returning results. This is particularly useful when using the IndexedDB file system.
59 |
60 | **Type:** `boolean`
61 |
62 | #### `fs`
63 |
64 | An alternative to providing a `dataDir` with a filesystem prefix. Initialize a `Filesystem` yourself and provide it here.
65 |
66 | **Type:** `Filesystem`
67 |
68 | #### `loadDataDir`
69 |
70 | A tarball of a PGlite datadir to load when the database starts. This should be a tarball produced from the related `.dumpDataDir()` method.
71 |
72 | **Type:** `Blob | File`
73 |
74 | #### `extensions`
75 |
76 | An object containing the extensions you wish to load.
77 |
78 | **Type:** `{ [namespace: string]: Extension }`
79 |
80 | #### `username`
81 |
82 | The username of the user to connect to the database as. Permissions will be applied in the context of this user.
83 |
84 | **Type:** `string`
85 |
86 | #### `database`
87 |
88 | The database from the Postgres cluster within the `dataDir` to connect to.
89 |
90 | **Type:** `string`
91 |
92 | #### `initialMemory`
93 |
94 | The initial amount of memory in bytes to allocate for the PGlite instance. PGlite will grow the memory automatically, but if you have a particularly large database, you can set this higher to prevent the pause during memory growth.
95 |
96 | **Type:** `number`
97 |
--------------------------------------------------------------------------------
/src/database.ts:
--------------------------------------------------------------------------------
1 | import { sqlTemplate } from "./template.ts";
2 | import type { Connector, Database, SQLDialect } from "./types.ts";
3 | import type { Primitive } from "./types.ts";
4 |
5 | const SQL_SELECT_RE = /^select/i;
6 | const SQL_RETURNING_RE = /[\s]returning[\s]/i;
7 | const DIALECTS_WITH_RET: Set<SQLDialect> = new Set(["postgresql", "sqlite"]);
8 |
9 | const DISPOSED_ERR =
10 | "This database instance has been disposed and cannot be used.";
11 |
12 | /**
13 | * Creates and returns a database interface using the specified connector.
14 | * This interface allows you to execute raw SQL queries, prepare SQL statements,
15 | * and execute SQL queries with parameters using tagged template literals.
16 | *
17 | * @param {Connector} connector - The database connector used to execute and prepare SQL statements. See {@link Connector}.
18 | * @returns {Database} The database interface that allows SQL operations. See {@link Database}.
19 | */
20 | export function createDatabase<TConnector extends Connector = Connector>(
21 |   connector: TConnector,
22 | ): Database<TConnector> {
23 | let _disposed = false;
24 | const checkDisposed = () => {
25 | if (_disposed) {
26 | const err = new Error(DISPOSED_ERR);
27 | Error.captureStackTrace?.(err, checkDisposed);
28 | throw err;
29 | }
30 | };
31 |
32 |   return <Database<TConnector>>{
33 | get dialect() {
34 | return connector.dialect;
35 | },
36 |
37 | get disposed() {
38 | return _disposed;
39 | },
40 |
41 | getInstance() {
42 | checkDisposed();
43 | return connector.getInstance();
44 | },
45 |
46 | exec: (sql: string) => {
47 | checkDisposed();
48 | return Promise.resolve(connector.exec(sql));
49 | },
50 |
51 | prepare: (sql: string) => {
52 | checkDisposed();
53 | return connector.prepare(sql);
54 | },
55 |
56 | sql: async (strings: TemplateStringsArray, ...values: Primitive[]) => {
57 | checkDisposed();
58 | const [sql, params] = sqlTemplate(strings, ...values);
59 | if (
60 | SQL_SELECT_RE.test(sql) /* select */ ||
61 | // prettier-ignore
62 | (DIALECTS_WITH_RET.has(connector.dialect) && SQL_RETURNING_RE.test(sql)) /* returning */
63 | ) {
64 | const rows = await connector.prepare(sql).all(...params);
65 | return {
66 | rows,
67 | success: true,
68 | };
69 | } else {
70 | const res = await connector.prepare(sql).run(...params);
71 | return res;
72 | }
73 | },
74 |
75 | dispose: () => {
76 | if (_disposed) {
77 | return Promise.resolve();
78 | }
79 | _disposed = true;
80 | try {
81 | return Promise.resolve(connector.dispose?.());
82 | } catch (error) {
83 | return Promise.reject(error);
84 | }
85 | },
86 |
87 | [Symbol.asyncDispose]() {
88 | return this.dispose();
89 | },
90 | };
91 | }
92 |
--------------------------------------------------------------------------------
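A sketch of what the branching in `sql` above means in practice: `SELECT` (and, for the `postgresql`/`sqlite` dialects, `RETURNING`) queries go through `.all()` and yield `rows`, while everything else goes through `.run()`. It also shows the `Symbol.asyncDispose` hook via `await using`. Table and values are illustrative.

```ts
import { createDatabase } from "db0";
import sqlite from "db0/connectors/node-sqlite";

{
  // `await using` calls dispose() automatically when the block exits
  await using db = createDatabase(sqlite({ name: ":memory:" }));

  // Non-SELECT without RETURNING -> prepared statement .run()
  await db.sql`CREATE TABLE items ("id" TEXT PRIMARY KEY)`;

  // RETURNING on the sqlite dialect -> .all(), so `rows` is populated
  const inserted = await db.sql`INSERT INTO items VALUES ('1') RETURNING *`;

  // SELECT -> .all()
  const selected = await db.sql`SELECT * FROM items`;

  console.log(inserted.rows, selected.rows);
}
// Any further call on `db` here would throw the disposed-instance error
```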
/src/connectors/cloudflare-hyperdrive-mysql.ts:
--------------------------------------------------------------------------------
1 | import mysql from "mysql2/promise";
2 | import type { Connector, Primitive } from "db0";
3 | import { BoundableStatement } from "./_internal/statement.ts";
4 | import { getHyperdrive } from "./_internal/cloudflare.ts";
5 |
6 | type OmitMysqlConfig = Omit<
7 | mysql.ConnectionOptions,
8 | | "user"
9 | | "database"
10 | | "password"
11 | | "password1"
12 | | "password2"
13 | | "password3"
14 | | "port"
15 | | "host"
16 | | "uri"
17 | | "localAddress"
18 | | "socketPath"
19 | | "insecureAuth"
20 | | "passwordSha1"
21 | | "disableEval"
22 | >;
23 |
24 | export type ConnectorOptions = {
25 | bindingName: string;
26 | } & OmitMysqlConfig;
27 |
28 | type InternalQuery = (
29 | sql: string,
30 | params?: unknown[],
31 | ) => Promise<mysql.QueryResult>;
32 |
33 | export default function cloudflareHyperdriveMysqlConnector(
34 | opts: ConnectorOptions,
35 | ): Connector {
36 | let _connection: mysql.Connection | undefined;
37 |
38 | const getConnection = async () => {
39 | if (_connection) {
40 | return _connection;
41 | }
42 |
43 | const hyperdrive = await getHyperdrive(opts.bindingName);
44 | _connection = await mysql.createConnection({
45 | ...opts,
46 | host: hyperdrive.host,
47 | user: hyperdrive.user,
48 | password: hyperdrive.password,
49 | database: hyperdrive.database,
50 | port: hyperdrive.port,
51 | // The following line is needed for mysql2 compatibility with Workers
52 | // mysql2 uses eval() to optimize result parsing for rows with > 100 columns
53 | // Configure mysql2 to use static parsing instead of eval() parsing with disableEval
54 | disableEval: true,
55 | });
56 |
57 | return _connection;
58 | };
59 |
60 | const query: InternalQuery = (sql, params) =>
61 | getConnection()
62 | .then((c) => c.query(sql, params))
63 | .then((res) => res[0]);
64 |
65 | return {
66 | name: "cloudflare-hyperdrive-mysql",
67 | dialect: "mysql",
68 | getInstance: () => getConnection(),
69 | exec: (sql) => query(sql),
70 | prepare: (sql) => new StatementWrapper(sql, query),
71 | dispose: async () => {
72 | await _connection?.end?.();
73 | _connection = undefined;
74 | },
75 | };
76 | }
77 |
78 | class StatementWrapper extends BoundableStatement {
79 | #query: InternalQuery;
80 | #sql: string;
81 |
82 | constructor(sql: string, query: InternalQuery) {
83 | super();
84 | this.#sql = sql;
85 | this.#query = query;
86 | }
87 |
88 | async all(...params: Primitive[]) {
89 | const res = (await this.#query(this.#sql, params)) as mysql.RowDataPacket[];
90 | return res;
91 | }
92 |
93 | async run(...params: Primitive[]) {
94 | const res = (await this.#query(this.#sql, params)) as mysql.RowDataPacket[];
95 | return {
96 | success: true,
97 | ...res,
98 | };
99 | }
100 |
101 | async get(...params: Primitive[]) {
102 | const res = (await this.#query(this.#sql, params)) as mysql.RowDataPacket[];
103 | return res[0];
104 | }
105 | }
106 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "db0",
3 | "version": "0.3.4",
4 | "description": "Lightweight SQL Connector",
5 | "repository": "unjs/db0",
6 | "license": "MIT",
7 | "sideEffects": false,
8 | "type": "module",
9 | "exports": {
10 | ".": {
11 | "types": "./dist/index.d.mts",
12 | "default": "./dist/index.mjs"
13 | },
14 | "./connectors/*": {
15 | "types": "./dist/connectors/*.d.ts",
16 | "default": "./dist/connectors/*.mjs"
17 | },
18 | "./integrations/*": {
19 | "types": "./dist/integrations/*/index.d.ts",
20 | "default": "./dist/integrations/*/index.mjs"
21 | },
22 | "./connectors/libsql/*": {
23 | "types": "./dist/connectors/libsql/*.d.ts",
24 | "default": "./dist/connectors/libsql/*.mjs"
25 | }
26 | },
27 | "types": "./dist/index.d.mts",
28 | "files": [
29 | "dist"
30 | ],
31 | "scripts": {
32 | "build": "pnpm gen-connectors && obuild",
33 | "gen-connectors": "jiti scripts/gen-connectors.ts",
34 | "db0": "pnpm jiti src/cli",
35 | "dev": "vitest",
36 | "lint": "eslint . && prettier -c src test",
37 | "lint:fix": "eslint . --fix && prettier -w src test",
38 | "prepack": "pnpm build",
39 | "release": "pnpm test && changelogen --release --push && pnpm publish",
40 | "test": "pnpm lint && pnpm test:types && vitest run --coverage && pnpm test:bun",
41 | "test:bun": "bun test ./test/connectors/bun-test.ts",
42 | "test:types": "tsc --noEmit"
43 | },
44 | "devDependencies": {
45 | "@cloudflare/workers-types": "^4.20251120.0",
46 | "@electric-sql/pglite": "^0.3.14",
47 | "@libsql/client": "^0.15.15",
48 | "@planetscale/database": "^1.19.0",
49 | "@types/better-sqlite3": "^7.6.13",
50 | "@types/bun": "^1.3.2",
51 | "@types/pg": "^8.15.6",
52 | "@vitest/coverage-v8": "^4.0.12",
53 | "automd": "^0.4.2",
54 | "better-sqlite3": "^12.4.1",
55 | "changelogen": "^0.6.2",
56 | "db0": "link:.",
57 | "dotenv": "^17.2.3",
58 | "drizzle-orm": "^0.44.7",
59 | "eslint": "^9.39.1",
60 | "eslint-config-unjs": "^0.5.0",
61 | "jiti": "^2.6.1",
62 | "mlly": "^1.8.0",
63 | "mysql2": "^3.15.3",
64 | "obuild": "^0.4.2",
65 | "pg": "^8.16.3",
66 | "prettier": "^3.6.2",
67 | "scule": "^1.3.0",
68 | "typescript": "^5.9.3",
69 | "vitest": "^4.0.12",
70 | "wrangler": "^4.49.1"
71 | },
72 | "peerDependencies": {
73 | "@electric-sql/pglite": "*",
74 | "@libsql/client": "*",
75 | "better-sqlite3": "*",
76 | "drizzle-orm": "*",
77 | "mysql2": "*",
78 | "sqlite3": "*"
79 | },
80 | "peerDependenciesMeta": {
81 | "@libsql/client": {
82 | "optional": true
83 | },
84 | "better-sqlite3": {
85 | "optional": true
86 | },
87 | "drizzle-orm": {
88 | "optional": true
89 | },
90 | "mysql2": {
91 | "optional": true
92 | },
93 | "@electric-sql/pglite": {
94 | "optional": true
95 | },
96 | "sqlite3": {
97 | "optional": true
98 | }
99 | },
100 | "packageManager": "pnpm@10.23.0",
101 | "pnpm": {
102 | "onlyBuiltDependencies": [
103 | "better-sqlite3",
104 | "esbuild",
105 | "sqlite3"
106 | ],
107 | "ignoredBuiltDependencies": [
108 | "@parcel/watcher",
109 | "es5-ext"
110 | ]
111 | }
112 | }
113 |
--------------------------------------------------------------------------------
/scripts/gen-connectors.ts:
--------------------------------------------------------------------------------
1 | import { readFile, readdir, writeFile } from "node:fs/promises";
2 | import { join } from "node:path";
3 | import { fileURLToPath } from "node:url";
4 | import { findTypeExports } from "mlly";
5 | import { camelCase, upperFirst } from "scule";
6 |
7 | const connectorsDir = fileURLToPath(
8 | new URL("../src/connectors", import.meta.url),
9 | );
10 |
11 | const connectorsMetaFile = fileURLToPath(
12 | new URL("../src/_connectors.ts", import.meta.url),
13 | );
14 |
15 | const aliases = {
16 | "node-sqlite": ["sqlite"],
17 | "bun-sqlite": ["bun"],
18 | "libsql-node": ["libsql"],
19 | } as const;
20 |
21 | async function getConnectorFiles(dir: string): Promise<string[]> {
22 | const files: string[] = [];
23 | const entries = await readdir(dir, { withFileTypes: true });
24 |
25 | for (const entry of entries) {
26 | if (entry.name.startsWith("_")) {
27 | continue;
28 | }
29 | if (entry.isDirectory()) {
30 | files.push(...(await getConnectorFiles(join(dir, entry.name))));
31 | } else if (entry.isFile()) {
32 | files.push(join(dir, entry.name));
33 | }
34 | }
35 |
36 | return files;
37 | }
38 |
39 | const connectorFiles = await getConnectorFiles(connectorsDir);
40 | const connectorEntries = connectorFiles.map((file) =>
41 | file.replace(connectorsDir + "/", ""),
42 | );
43 |
44 | const connectors: {
45 | name: string;
46 | safeName: string;
47 | names: string[];
48 | subpath: string;
49 | optionsTExport?: string;
50 | optionsTName?: string;
51 | }[] = [];
52 |
53 | for (const entry of connectorEntries) {
54 | const pathName = entry.replace(/\.ts$/, "");
55 | const name = pathName.replace(/\/|\\/g, "-");
56 | const subpath = `db0/connectors/${pathName}`;
57 | const fullPath = join(connectorsDir, `${pathName}.ts`);
58 |
59 | const contents = await readFile(fullPath, "utf8");
60 | const optionsTExport = findTypeExports(contents).find((type) =>
61 | type.name?.endsWith("Options"),
62 | )?.name;
63 |
64 | const safeName = camelCase(name).replace(/db/i, "DB").replace(/sql/i, "SQL");
65 |
66 | const alternativeNames: string[] = aliases[name] || [];
67 |
68 | const names = [...new Set([name, ...alternativeNames])];
69 |
70 | const optionsTName = upperFirst(safeName) + "Options";
71 |
72 | connectors.push({
73 | name,
74 | safeName,
75 | names,
76 | subpath,
77 | optionsTExport,
78 | optionsTName,
79 | });
80 | }
81 |
82 | connectors.sort((a, b) => a.name.localeCompare(b.name));
83 |
84 | const genCode = /* ts */ `// Auto-generated using scripts/gen-connectors.
85 | // Do not manually edit!
86 | ${connectors
87 | .filter((d) => d.optionsTExport)
88 | .map(
89 | (d) =>
90 | /* ts */ `import type { ${d.optionsTExport} as ${d.optionsTName} } from "${d.subpath}";`,
91 | )
92 | .join("\n")}
93 |
94 | export type ConnectorName = ${connectors.flatMap((d) => d.names.map((name) => `"${name}"`)).join(" | ")};
95 |
96 | export type ConnectorOptions = {
97 | ${connectors
98 | .filter((d) => d.optionsTExport)
99 | .flatMap((d) =>
100 | d.names.map(
101 | (name, i) =>
102 | `${i === 0 ? "" : `/** alias of ${d.name} */\n `}"${name}": ${d.optionsTName};`,
103 | ),
104 | )
105 | .join("\n ")}
106 | };
107 |
108 | export const connectors: Record<ConnectorName, string> = Object.freeze({
109 | ${connectors.flatMap((d) => d.names.map((name, i) => `${i === 0 ? "" : `/** alias of ${d.name} */\n `}"${name}": "${d.subpath}"`)).join(",\n ")},
110 | } as const);
111 | `;
112 |
113 | await writeFile(connectorsMetaFile, genCode, "utf8");
114 | console.log("Generated connectors metadata file to", connectorsMetaFile);
115 |
--------------------------------------------------------------------------------
/src/_connectors.ts:
--------------------------------------------------------------------------------
1 | // Auto-generated using scripts/gen-connectors.
2 | // Do not manually edit!
3 | import type { ConnectorOptions as BetterSQLite3Options } from "db0/connectors/better-sqlite3";
4 | import type { ConnectorOptions as BunSQLiteOptions } from "db0/connectors/bun-sqlite";
5 | import type { ConnectorOptions as CloudflareD1Options } from "db0/connectors/cloudflare-d1";
6 | import type { ConnectorOptions as CloudflareHyperdriveMySQLOptions } from "db0/connectors/cloudflare-hyperdrive-mysql";
7 | import type { ConnectorOptions as CloudflareHyperdrivePostgreSQLOptions } from "db0/connectors/cloudflare-hyperdrive-postgresql";
8 | import type { ConnectorOptions as LibSQLCoreOptions } from "db0/connectors/libsql/core";
9 | import type { ConnectorOptions as LibSQLHttpOptions } from "db0/connectors/libsql/http";
10 | import type { ConnectorOptions as LibSQLNodeOptions } from "db0/connectors/libsql/node";
11 | import type { ConnectorOptions as LibSQLWebOptions } from "db0/connectors/libsql/web";
12 | import type { ConnectorOptions as MySQL2Options } from "db0/connectors/mysql2";
13 | import type { ConnectorOptions as NodeSQLiteOptions } from "db0/connectors/node-sqlite";
14 | import type { ConnectorOptions as PgliteOptions } from "db0/connectors/pglite";
15 | import type { ConnectorOptions as PlanetscaleOptions } from "db0/connectors/planetscale";
16 | import type { ConnectorOptions as PostgreSQLOptions } from "db0/connectors/postgresql";
17 | import type { ConnectorOptions as SQLite3Options } from "db0/connectors/sqlite3";
18 |
19 | export type ConnectorName = "better-sqlite3" | "bun-sqlite" | "bun" | "cloudflare-d1" | "cloudflare-hyperdrive-mysql" | "cloudflare-hyperdrive-postgresql" | "libsql-core" | "libsql-http" | "libsql-node" | "libsql" | "libsql-web" | "mysql2" | "node-sqlite" | "sqlite" | "pglite" | "planetscale" | "postgresql" | "sqlite3";
20 |
21 | export type ConnectorOptions = {
22 | "better-sqlite3": BetterSQLite3Options;
23 | "bun-sqlite": BunSQLiteOptions;
24 | /** alias of bun-sqlite */
25 | "bun": BunSQLiteOptions;
26 | "cloudflare-d1": CloudflareD1Options;
27 | "cloudflare-hyperdrive-mysql": CloudflareHyperdriveMySQLOptions;
28 | "cloudflare-hyperdrive-postgresql": CloudflareHyperdrivePostgreSQLOptions;
29 | "libsql-core": LibSQLCoreOptions;
30 | "libsql-http": LibSQLHttpOptions;
31 | "libsql-node": LibSQLNodeOptions;
32 | /** alias of libsql-node */
33 | "libsql": LibSQLNodeOptions;
34 | "libsql-web": LibSQLWebOptions;
35 | "mysql2": MySQL2Options;
36 | "node-sqlite": NodeSQLiteOptions;
37 | /** alias of node-sqlite */
38 | "sqlite": NodeSQLiteOptions;
39 | "pglite": PgliteOptions;
40 | "planetscale": PlanetscaleOptions;
41 | "postgresql": PostgreSQLOptions;
42 | "sqlite3": SQLite3Options;
43 | };
44 |
45 | export const connectors: Record<ConnectorName, string> = Object.freeze({
46 | "better-sqlite3": "db0/connectors/better-sqlite3",
47 | "bun-sqlite": "db0/connectors/bun-sqlite",
48 | /** alias of bun-sqlite */
49 | "bun": "db0/connectors/bun-sqlite",
50 | "cloudflare-d1": "db0/connectors/cloudflare-d1",
51 | "cloudflare-hyperdrive-mysql": "db0/connectors/cloudflare-hyperdrive-mysql",
52 | "cloudflare-hyperdrive-postgresql": "db0/connectors/cloudflare-hyperdrive-postgresql",
53 | "libsql-core": "db0/connectors/libsql/core",
54 | "libsql-http": "db0/connectors/libsql/http",
55 | "libsql-node": "db0/connectors/libsql/node",
56 | /** alias of libsql-node */
57 | "libsql": "db0/connectors/libsql/node",
58 | "libsql-web": "db0/connectors/libsql/web",
59 | "mysql2": "db0/connectors/mysql2",
60 | "node-sqlite": "db0/connectors/node-sqlite",
61 | /** alias of node-sqlite */
62 | "sqlite": "db0/connectors/node-sqlite",
63 | "pglite": "db0/connectors/pglite",
64 | "planetscale": "db0/connectors/planetscale",
65 | "postgresql": "db0/connectors/postgresql",
66 | "sqlite3": "db0/connectors/sqlite3",
67 | } as const);
68 |
--------------------------------------------------------------------------------
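One possible consumer of the generated map above, sketched for illustration: resolving a connector factory lazily by name. The relative import and the `loadConnector` helper are hypothetical, not part of db0's public API.

```ts
import { connectors, type ConnectorName } from "./_connectors";

// Hypothetical helper: resolve and import a connector by its (alias) name
async function loadConnector(name: ConnectorName) {
  const subpath = connectors[name]; // e.g. "db0/connectors/node-sqlite"
  const mod = await import(subpath);
  return mod.default;
}

const sqliteConnector = await loadConnector("sqlite");
console.log(typeof sqliteConnector); // "function"
```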
/src/connectors/sqlite3.ts:
--------------------------------------------------------------------------------
1 | import { resolve, dirname } from "node:path";
2 | import { mkdirSync } from "node:fs";
3 | import sqlite3 from "sqlite3";
4 |
5 | import type { Connector, Primitive } from "db0";
6 | import { BoundableStatement } from "./_internal/statement.ts";
7 |
8 | export interface ConnectorOptions {
9 | cwd?: string;
10 | path?: string;
11 | name?: string;
12 | }
13 |
14 | export default function nodeSqlite3Connector(
15 | opts: ConnectorOptions,
16 | ): Connector {
17 | let _db: sqlite3.Database;
18 |   const _activeStatements = new Set<StatementWrapper>();
19 |
20 | const getDB = () => {
21 | if (_db) {
22 | return _db;
23 | }
24 | if (opts.name === ":memory:") {
25 | _db = new sqlite3.Database(":memory:");
26 | return _db;
27 | }
28 | const filePath = resolve(
29 | opts.cwd || ".",
30 | opts.path || `.data/${opts.name || "db"}.sqlite3`,
31 | );
32 | mkdirSync(dirname(filePath), { recursive: true });
33 | _db = new sqlite3.Database(filePath);
34 | return _db;
35 | };
36 |
37 | const query = (sql: string) =>
38 | new Promise((resolve, reject) => {
39 | getDB().exec(sql, (err: Error | null) => {
40 | if (err) {
41 | return reject(err);
42 | }
43 | resolve({ success: true });
44 | });
45 | });
46 |
47 | return {
48 | name: "sqlite3",
49 | dialect: "sqlite",
50 | getInstance: () => getDB(),
51 | exec: (sql: string) => query(sql),
52 | prepare: (sql) => {
53 | const stmt = new StatementWrapper(sql, getDB());
54 | _activeStatements.add(stmt);
55 | return stmt;
56 | },
57 | dispose: async () => {
58 | await Promise.all(
59 | [..._activeStatements].map((s) =>
60 | s.finalize().catch((error) => {
61 | console.warn("[db0] [sqlite3] failed to finalize statement", error);
62 | }),
63 | ),
64 | );
65 | _activeStatements.clear();
66 |     await new Promise<void>((resolve, reject) =>
67 | _db?.close?.((error) => (error ? reject(error) : resolve())),
68 | );
69 | _db = undefined as any;
70 | },
71 | };
72 | }
73 |
74 | class StatementWrapper extends BoundableStatement<sqlite3.Statement> {
75 | #onError?: (err: Error | null) => void; // #162
76 |
77 | constructor(sql: string, db: sqlite3.Database) {
78 | super(
79 | db.prepare(sql, (err) => {
80 | if (err && this.#onError) {
81 | return this.#onError(err);
82 | }
83 | }),
84 | );
85 | }
86 | async all(...params: Primitive[]) {
87 | const rows = await new Promise((resolve, reject) => {
88 | this.#onError = reject;
89 | this._statement.all(...params, (err: Error | null, rows: unknown[]) =>
90 | err ? reject(err) : resolve(rows),
91 | );
92 | });
93 | return rows;
94 | }
95 | async run(...params: Primitive[]) {
96 |     await new Promise<void>((resolve, reject) => {
97 | this.#onError = reject;
98 | this._statement.run(...params, (err: Error | null) =>
99 | err ? reject(err) : resolve(),
100 | );
101 | });
102 | return { success: true };
103 | }
104 | async get(...params: Primitive[]) {
105 | const row = await new Promise((resolve, reject) => {
106 | this.#onError = reject;
107 | this._statement.get(...params, (err: Error | null, row: unknown) =>
108 | err ? reject(err) : resolve(row),
109 | );
110 | });
111 | return row;
112 | }
113 |
114 | finalize() {
115 | try {
116 | // TODO: Can we await on finalize cb?
117 | this._statement.finalize();
118 | return Promise.resolve();
119 | } catch (error) {
120 | return Promise.reject(error);
121 | }
122 | }
123 | }
124 |
--------------------------------------------------------------------------------
/docs/2.connectors/cloudflare.md:
--------------------------------------------------------------------------------
1 | ---
2 | icon: devicon-plain:cloudflareworkers
3 | ---
4 |
5 | # Cloudflare
6 |
7 | > Connect DB0 to Cloudflare D1 or PostgreSQL/MySQL using Cloudflare Hyperdrive
8 |
9 |
10 | ## Cloudflare D1
11 |
12 | :read-more{to="https://developers.cloudflare.com/d1"}
13 |
14 | > [!NOTE]
15 | > This connector works within Cloudflare Workers with D1 enabled.
16 |
17 | ### Usage
18 |
19 | Use `cloudflare-d1` connector:
20 |
21 | ```js
22 | import { createDatabase } from "db0";
23 | import cloudflareD1 from "db0/connectors/cloudflare-d1";
24 |
25 | const db = createDatabase(
26 | cloudflareD1({
27 | bindingName: "DB",
28 | }),
29 | );
30 | ```
31 |
32 | > [!NOTE]
33 | > In order for the driver to work, `globalThis.__env__.DB` value should be set.
34 | >
35 | > If you are using [Nitro](https://nitro.unjs.io/) you don't need to do any extra steps.
36 |
37 | ### Options
38 |
39 | #### `bindingName`
40 |
41 | Assigned binding name.
42 |
43 | ---
44 |
45 | ## Hyperdrive PostgreSQL
46 |
47 | :read-more{to="https://developers.cloudflare.com/hyperdrive"}
48 |
49 | > [!NOTE]
50 | > This connector works within Cloudflare Workers with Hyperdrive enabled.
51 |
52 | ### Usage
53 |
54 | For this connector, you need to install [`pg`](https://www.npmjs.com/package/pg) dependency:
55 |
56 | :pm-install{name="pg @types/pg"}
57 |
58 | Use `cloudflare-hyperdrive-postgresql` connector:
59 |
60 | ```js
61 | import { createDatabase } from "db0";
62 | import cloudflareHyperdrivePostgresql from "db0/connectors/cloudflare-hyperdrive-postgresql";
63 |
64 | const db = createDatabase(
65 | cloudflareHyperdrivePostgresql({
66 | bindingName: "POSTGRESQL",
67 | }),
68 | );
69 | ```
70 |
71 | ### Options
72 |
73 | #### `bindingName`
74 |
75 | Assigned binding name for your Hyperdrive instance.
76 |
77 | #### Additional Options
78 |
79 | You can also pass PostgreSQL client configuration options (except for `user`, `database`, `password`, `port`, `host`, and `connectionString` which are managed by Hyperdrive):
80 |
81 | ```js
82 | const db = createDatabase(
83 | cloudflareHyperdrivePostgresql({
84 | bindingName: "HYPERDRIVE",
85 | // Additional PostgreSQL options
86 | statement_timeout: 5000,
87 | query_timeout: 10000,
88 | }),
89 | );
90 | ```
91 |
92 | :read-more{title="node-postgres documentation" to="https://node-postgres.com/apis/client#new-client"}
93 |
94 | ---
95 |
96 | ## Hyperdrive MySQL
97 |
98 | :read-more{to="https://developers.cloudflare.com/hyperdrive"}
99 |
100 | > [!NOTE]
101 | > This connector works within Cloudflare Workers with Hyperdrive enabled.
102 |
103 | ### Usage
104 |
105 | For this connector, you need to install [`mysql2`](https://www.npmjs.com/package/mysql2) dependency:
106 |
107 | :pm-install{name="mysql2"}
108 |
109 | Use `cloudflare-hyperdrive-mysql` connector:
110 |
111 | ```js
112 | import { createDatabase } from "db0";
113 | import cloudflareHyperdriveMysql from "db0/connectors/cloudflare-hyperdrive-mysql";
114 |
115 | const db = createDatabase(
116 | cloudflareHyperdriveMysql({
117 | bindingName: "MYSQL",
118 | }),
119 | );
120 | ```
121 |
122 | ### Options
123 |
124 | #### `bindingName`
125 |
126 | Assigned binding name for your Hyperdrive instance.
127 |
128 | #### Additional Options
129 |
130 | You can also pass MySQL client configuration options (except for connection/authentication options, which are managed by Hyperdrive, and `disableEval`, which the connector always enables because `eval()` is not available in Cloudflare Workers):
131 |
132 | ```js
133 | const db = createDatabase(
134 | cloudflareHyperdriveMysql({
135 | bindingName: "HYPERDRIVE",
136 | // Additional MySQL options
137 | connectTimeout: 10000,
138 | queryTimeout: 5000,
139 | }),
140 | );
141 | ```
142 |
143 | :read-more{to="https://github.com/sidorares/node-mysql2/blob/master/typings/mysql/lib/Connection.d.ts#L82-L329"}
144 |
--------------------------------------------------------------------------------
/src/integrations/drizzle/_session.ts:
--------------------------------------------------------------------------------
1 | import {
2 | type Logger,
3 | type RelationalSchemaConfig,
4 | type Query,
5 | type TablesRelationalConfig,
6 | entityKind,
7 | NoopLogger,
8 | } from "drizzle-orm";
9 |
10 | import {
11 | SQLiteAsyncDialect,
12 | SQLiteSession,
13 | SQLitePreparedQuery,
14 | } from "drizzle-orm/sqlite-core";
15 |
16 | import type {
17 | PreparedQueryConfig,
18 | SelectedFieldsOrdered,
19 | SQLiteExecuteMethod,
20 | SQLiteTransactionConfig,
21 | } from "drizzle-orm/sqlite-core";
22 |
23 | import type { Database, Statement } from "db0";
24 |
25 | // Used as reference: https://github.com/drizzle-team/drizzle-orm/blob/main/drizzle-orm/src/d1/session.ts
26 |
27 | export interface DB0SessionOptions {
28 | logger?: Logger;
29 | }
30 |
31 | export class DB0Session<
32 |   TFullSchema extends Record<string, unknown>,
33 | TSchema extends TablesRelationalConfig,
34 | > extends SQLiteSession<"async", unknown, TFullSchema, TSchema> {
35 | dialect!: SQLiteAsyncDialect;
36 |
37 | private logger: Logger;
38 |
39 | constructor(
40 | private db: Database,
41 | dialect: SQLiteAsyncDialect,
42 |     private schema: RelationalSchemaConfig<TSchema> | undefined,
43 | private options: DB0SessionOptions = {},
44 | ) {
45 | super(dialect);
46 | this.logger = options.logger ?? new NoopLogger();
47 | }
48 |
49 | // @ts-expect-error TODO
50 | prepareQuery(
51 | query: Query,
52 | fields: SelectedFieldsOrdered | undefined,
53 | executeMethod: SQLiteExecuteMethod,
54 | customResultMapper?: (rows: unknown[][]) => unknown,
55 | ): DB0PreparedQuery {
56 | const stmt = this.db.prepare(query.sql);
57 | return new DB0PreparedQuery(
58 | stmt,
59 | query,
60 | this.logger,
61 | fields,
62 | executeMethod,
63 | customResultMapper,
64 | );
65 | }
66 |
67 | // TODO: Implement batch
68 |
69 | // TODO: Implement transaction
70 | override transaction(
71 | transaction: (tx: any) => T | Promise,
72 | config?: SQLiteTransactionConfig,
73 | ): Promise {
74 | throw new Error("transaction is not implemented!");
75 | // const tx = new D1Transaction('async', this.dialect, this, this.schema);
76 | // await this.run(sql.raw(`begin${config?.behavior ? ' ' + config.behavior : ''}`));
77 | // try {
78 | // const result = await transaction(tx);
79 | // await this.run(sql`commit`);
80 | // return result;
81 | // } catch (err) {
82 | // await this.run(sql`rollback`);
83 | // throw err;
84 | // }
85 | }
86 | }
87 |
88 | export class DB0PreparedQuery<
89 | T extends PreparedQueryConfig = PreparedQueryConfig,
90 | > extends SQLitePreparedQuery<{
91 | type: "async";
92 |   run: Awaited<ReturnType<Statement["run"]>>;
93 | all: T["all"];
94 | get: T["get"];
95 | values: T["values"];
96 | execute: T["execute"];
97 | }> {
98 | constructor(
99 | private stmt: Statement,
100 | query: Query,
101 | private logger: Logger,
102 | fields: SelectedFieldsOrdered | undefined,
103 | executeMethod: SQLiteExecuteMethod,
104 | customResultMapper?: (rows: unknown[][]) => unknown,
105 | ) {
106 | super("async", executeMethod, query);
107 | }
108 |
109 | run(): Promise<{ success: boolean }> {
110 | return this.stmt.run(...(this.query.params as any[]));
111 | }
112 |
113 | all(): Promise {
114 | return this.stmt.all(...(this.query.params as any[]));
115 | }
116 |
117 | get(): Promise<T["get"]> {
118 | return this.stmt.get(...(this.query.params as any[]));
119 | }
120 |
121 | values(): Promise<T["values"]> {
122 | return Promise.reject(new Error("values is not implemented!"));
123 | }
124 | }
125 |
126 | // Object.defineProperty(DB0PreparedQuery, entityKind, {
127 | // value: "DB0PreparedQuery",
128 | // enumerable: true,
129 | // configurable: true,
130 | // });
131 |
--------------------------------------------------------------------------------
/test/integrations/drizzle.test.ts:
--------------------------------------------------------------------------------
1 | import { afterAll, beforeAll, describe, expect, it } from "vitest";
2 |
3 | import { Database, createDatabase } from "../../src";
4 | import { type DrizzleDatabase, drizzle } from "../../src/integrations/drizzle";
5 |
6 | import * as dSqlite from "drizzle-orm/sqlite-core";
7 | import sqliteConnector from "../../src/connectors/better-sqlite3";
8 |
9 | import * as dPg from "drizzle-orm/pg-core";
10 | import pgConnector from "../../src/connectors/postgresql";
11 |
12 | describe("integrations: drizzle: better-sqlite3", () => {
13 | const users = dSqlite.sqliteTable("users", {
14 | id: dSqlite.numeric("id"),
15 | name: dSqlite.text("name"),
16 | });
17 |
18 | let drizzleDb: DrizzleDatabase;
19 | let db: Database;
20 |
21 | beforeAll(async () => {
22 | db = createDatabase(sqliteConnector({}));
23 | drizzleDb = drizzle(db);
24 | await db.sql`DROP TABLE IF EXISTS users`;
25 | await db.sql`create table if not exists users (
26 | id integer primary key autoincrement,
27 | name text
28 | )`;
29 | });
30 |
31 | it("insert", async () => {
32 | const res = await drizzleDb
33 | .insert(users)
34 | .values({
35 | name: "John Doe",
36 | })
37 | .returning();
38 |
39 | expect(res.length).toBe(1);
40 | expect(res[0].name).toBe("John Doe");
41 | });
42 |
43 | it("select", async () => {
44 | const res = await drizzleDb.select().from(users).all();
45 |
46 | expect(res.length).toBe(1);
47 | expect(res[0].name).toBe("John Doe");
48 | });
49 |
50 | it("accepts boolean logger config", () => {
51 | expect(() => drizzle(db, { logger: true })).not.toThrow();
52 | });
53 |
54 | afterAll(async () => {
55 | await db.sql`DROP TABLE IF EXISTS users`;
56 | });
57 | });
58 |
59 | describe("integrations: drizzle: with schema parameter", () => {
60 | const users = dSqlite.sqliteTable("users_schema", {
61 | id: dSqlite.numeric("id"),
62 | name: dSqlite.text("name"),
63 | email: dSqlite.text("email"),
64 | });
65 |
66 | const schema = { users };
67 |
68 | let drizzleDb: DrizzleDatabase;
69 | let db: Database;
70 |
71 | beforeAll(async () => {
72 | db = createDatabase(sqliteConnector({}));
73 | drizzleDb = drizzle(db, { schema });
74 | await db.sql`DROP TABLE IF EXISTS users_schema`;
75 | await db.sql`create table if not exists users_schema (
76 | id integer primary key autoincrement,
77 | name text,
78 | email text
79 | )`;
80 | });
81 |
82 | it("insert with schema", async () => {
83 | const res = await drizzleDb
84 | .insert(users)
85 | .values({
86 | name: "Jane Doe",
87 | email: "jane@example.com",
88 | })
89 | .returning();
90 |
91 | expect(res.length).toBe(1);
92 | expect(res[0].name).toBe("Jane Doe");
93 | expect(res[0].email).toBe("jane@example.com");
94 | });
95 |
96 | it("select with schema", async () => {
97 | const res = await drizzleDb.select().from(users).all();
98 |
99 | expect(res.length).toBe(1);
100 | expect(res[0].name).toBe("Jane Doe");
101 | expect(res[0].email).toBe("jane@example.com");
102 | });
103 |
104 | afterAll(async () => {
105 | await db.sql`DROP TABLE IF EXISTS users_schema`;
106 | });
107 | });
108 |
109 | describe.runIf(process.env.POSTGRESQL_URL)(
110 | "integrations: drizzle: postgres",
111 | () => {
112 | const users = dPg.pgTable("users", {
113 | id: dPg.numeric("id"),
114 | name: dPg.text("name"),
115 | });
116 |
117 | let drizzleDb: DrizzleDatabase;
118 | let db: Database<ReturnType<typeof pgConnector>>;
119 |
120 | beforeAll(async () => {
121 | db = createDatabase(
122 | pgConnector({
123 | url: process.env.POSTGRESQL_URL as string,
124 | }),
125 | );
126 |
127 | drizzleDb = drizzle(db);
128 | await db.sql`DROP TABLE IF EXISTS users`;
129 | await db.sql`CREATE TABLE users ("id" INTEGER PRIMARY KEY, "name" TEXT)`;
130 | });
131 |
132 | it("insert", async () => {
133 | const res = await drizzleDb
134 | .insert(users)
135 | .values({
136 | id: "1",
137 | name: "John Doe",
138 | })
139 | .returning();
140 |
141 | expect(res.length).toBe(1);
142 | expect(res[0].name).toBe("John Doe");
143 | });
144 |
145 | it("select", async () => {
146 | const res = await drizzleDb.select().from(users).all();
147 |
148 | expect(res.length).toBe(1);
149 | expect(res[0].name).toBe("John Doe");
150 | });
151 |
152 | afterAll(async () => {
153 | await db.sql`DROP TABLE IF EXISTS users`;
154 | });
155 | },
156 | );
157 |
--------------------------------------------------------------------------------
/src/types.ts:
--------------------------------------------------------------------------------
1 | /**
2 | * Represents primitive types that can be used in SQL operations.
3 | */
4 | export type Primitive = string | number | boolean | undefined | null;
5 |
6 | export type SQLDialect = "mysql" | "postgresql" | "sqlite" | "libsql";
7 |
8 | export type Statement = {
9 | /**
10 | * Binds parameters to the statement.
11 | * @param {...Primitive[]} params - Parameters to bind to the SQL statement.
12 | * @returns {PreparedStatement} The instance of the statement with bound parameters.
13 | */
14 | bind(...params: Primitive[]): PreparedStatement;
15 |
16 | /**
17 | * Executes the statement and returns all resulting rows as an array.
18 | * @param {...Primitive[]} params - Parameters to bind to the SQL statement.
19 | * @returns {Promise<unknown[]>} A promise that resolves to an array of rows.
20 | */
21 | all(...params: Primitive[]): Promise<unknown[]>;
22 |
23 | /**
24 | * Executes the statement as an action (e.g. insert, update, delete).
25 | * @param {...Primitive[]} params - Parameters to bind to the SQL statement.
26 | * @returns {Promise<{ success: boolean }>} A promise that resolves to the success state of the action.
27 | */
28 | run(...params: Primitive[]): Promise<{ success: boolean }>;
29 |
30 | /**
31 | * Executes the statement and returns a single row.
32 | * @param {...Primitive[]} params - Parameters to bind to the SQL statement.
33 | * @returns {Promise<unknown>} A promise that resolves to the first row in the result set.
34 | */
35 | get(...params: Primitive[]): Promise<unknown>;
36 | };
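   | // Illustrative usage sketch (assumes a `db` Database instance; not taken from
   | // this file): a Statement obtained from `db.prepare()` can bind parameters at
   | // execution time or ahead of time via `bind()`.
   | //
   | //   const stmt = db.prepare("SELECT * FROM users WHERE id = ?");
   | //   const row = await stmt.get(1);          // bind at execution time
   | //   const same = await stmt.bind(1).get();  // or pre-bind, then execute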
37 |
38 | export type PreparedStatement = {
39 | /**
40 | * Binds parameters to the statement.
41 | * @param {...Primitive[]} params - Parameters to bind to the SQL statement.
42 | * @returns {PreparedStatement} The instance of the statement with bound parameters.
43 | */
44 | bind(...params: Primitive[]): PreparedStatement;
45 |
46 | /**
47 | * Executes the statement and returns all resulting rows as an array.
48 | * @returns {Promise<unknown[]>} A promise that resolves to an array of rows.
49 | */
50 | all(): Promise<unknown[]>;
51 |
52 | /**
53 | * Executes the statement as an action (e.g. insert, update, delete).
54 | * @returns {Promise<{ success: boolean }>} A promise that resolves to the success state of the action.
55 | */
56 | run(): Promise<{ success: boolean }>;
57 |
58 | /**
59 | * Executes the statement and returns a single row.
60 | * @returns {Promise<unknown>} A promise that resolves to the first row in the result set.
61 | */
62 | get(): Promise<unknown>;
63 | };
64 |
65 | /**
66 | * Represents the result of a database execution.
67 | */
68 | export type ExecResult = unknown;
69 |
70 | /**
71 | * Defines a database connector for executing SQL queries and preparing statements.
72 | */
73 | export type Connector<TInstance = unknown> = {
74 | /**
75 | * The name of the connector.
76 | */
77 | name: string;
78 |
79 | /**
80 | * The SQL dialect used by the connector.
81 | */
82 | dialect: SQLDialect;
83 |
84 | /**
85 | * The client instance used internally.
86 | */
87 | getInstance: () => TInstance | Promise<TInstance>;
88 |
89 | /**
90 | * Executes an SQL query directly and returns the result.
91 | * @param {string} sql - The SQL string to execute.
92 | * @returns {ExecResult | Promise<ExecResult>} The result of the execution.
93 | */
94 | exec: (sql: string) => ExecResult | Promise<ExecResult>;
95 |
96 | /**
97 | * Prepares an SQL statement for execution.
98 | * @param {string} sql - The SQL string to prepare.
99 | * @returns {Statement} The prepared SQL statement.
100 | */
101 | prepare: (sql: string) => Statement;
102 |
103 | /**
104 | * Closes the database connection and cleans up resources.
105 | * @returns {void | Promise<void>} A promise that resolves when the connection is closed.
106 | */
107 | dispose?: () => void | Promise<void>;
108 | };
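   | // Minimal connector sketch (illustrative; `MyDriver`, `driver`, and
   | // `wrapAsStatement` are hypothetical placeholders, while real implementations
   | // live under `src/connectors/` and delegate to an actual driver):
   | //
   | //   const myConnector: Connector<MyDriver> = {
   | //     name: "my-driver",
   | //     dialect: "sqlite",
   | //     getInstance: () => driver,
   | //     exec: (sql) => driver.exec(sql),
   | //     prepare: (sql) => wrapAsStatement(driver.prepare(sql)),
   | //   };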
109 |
110 | /**
111 | * Represents default SQL results, including any error messages, row changes and rows returned.
112 | */
113 | type DefaultSQLResult = {
114 | lastInsertRowid?: number;
115 | changes?: number;
116 | error?: string;
117 | rows?: { id?: string | number; [key: string]: unknown }[];
118 | success?: boolean;
119 | };
120 |
121 | export interface Database<TConnector extends Connector = Connector>
122 | extends AsyncDisposable {
123 | readonly dialect: SQLDialect;
124 |
125 | /**
126 | * Indicates whether the database instance has been disposed/closed.
127 | * @returns {boolean} True if the database has been disposed, false otherwise.
128 | */
129 | readonly disposed: boolean;
130 |
131 | /**
132 | * The client instance used internally.
133 | * @returns {Promise} A promise that resolves with the client instance.
134 | */
135 | getInstance: () => Promise<Awaited<ReturnType<TConnector["getInstance"]>>>;
136 |
137 | /**
138 | * Executes a raw SQL string.
139 | * @param {string} sql - The SQL string to execute.
140 | * @returns {Promise<ExecResult>} A promise that resolves with the execution result.
141 | */
142 | exec: (sql: string) => Promise<ExecResult>;
143 |
144 | /**
145 | * Prepares an SQL statement from a raw SQL string.
146 | * @param {string} sql - The SQL string to prepare.
147 | * @returns {Statement} The prepared SQL statement.
148 | */
149 | prepare: (sql: string) => Statement;
150 |
151 | /**
152 | * Executes SQL queries using tagged template literals.
153 | * @template T The expected type of query result.
154 | * @param {TemplateStringsArray} strings - The segments of the SQL string.
155 | * @param {...Primitive[]} values - The values to interpolate into the SQL string.
156 | * @returns {Promise<T>} A promise that resolves with the typed result of the query.
157 | */
158 | sql: <T = DefaultSQLResult>(
159 | strings: TemplateStringsArray,
160 | ...values: Primitive[]
161 | ) => Promise<T>;
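   | // Illustrative usage (assumes a `db` Database instance): interpolated values
   | // are passed as bound parameters rather than concatenated into the SQL string.
   | //
   | //   const { rows } = await db.sql`SELECT * FROM users WHERE id = ${userId}`;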
162 |
163 | /**
164 | * Closes the database connection and cleans up resources.
165 | * @returns {Promise<void>} A promise that resolves when the connection is closed.
166 | */
167 | dispose: () => Promise<void>;
168 |
169 | /**
170 | * AsyncDisposable implementation for `using` syntax support.
171 | * @returns {Promise<void>} A promise that resolves when the connection is disposed.
172 | */
173 | [Symbol.asyncDispose]: () => Promise<void>;
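   | // Illustrative usage of explicit resource management (TypeScript 5.2+ `await
   | // using`; assumes `createDatabase()` from this package and an existing `connector`):
   | //
   | //   await using db = createDatabase(connector);
   | //   // db.dispose() runs automatically when the enclosing block exits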
174 | }
175 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 |
4 | ## v0.3.4
5 |
6 | [compare changes](https://github.com/unjs/db0/compare/v0.3.3...v0.3.4)
7 |
8 | ### 📦 Build
9 |
10 | - Migrate to obuild ([8586320](https://github.com/unjs/db0/commit/8586320))
11 |
12 | ### ❤️ Contributors
13 |
14 | - Pooya Parsa ([@pi0](https://github.com/pi0))
15 |
16 | ## v0.3.3
17 |
18 | [compare changes](https://github.com/unjs/db0/compare/v0.3.2...v0.3.3)
19 |
20 | ### 🚀 Enhancements
21 |
22 | - Support `dispose` and `using createDatabase()` ([#178](https://github.com/unjs/db0/pull/178))
23 | - Cloudflare hyperdrive ([#164](https://github.com/unjs/db0/pull/164))
24 |
25 | ### 💅 Refactors
26 |
27 | - Strict types ([#179](https://github.com/unjs/db0/pull/179))
28 |
29 | ### 📖 Documentation
30 |
31 | - Improve drizzle integration example with drizzle-kit usage ([#170](https://github.com/unjs/db0/pull/170))
32 |
33 | ### 🏡 Chore
34 |
35 | - Update undocs ([d912de1](https://github.com/unjs/db0/commit/d912de1))
36 | - Update docs ([d4a3cd5](https://github.com/unjs/db0/commit/d4a3cd5))
37 | - Update undocs ([5d398ef](https://github.com/unjs/db0/commit/5d398ef))
38 | - Update deps ([29ca18b](https://github.com/unjs/db0/commit/29ca18b))
39 | - Update deps ([83f1425](https://github.com/unjs/db0/commit/83f1425))
40 | - Lint ([9784e4d](https://github.com/unjs/db0/commit/9784e4d))
41 | - Gitignore tsconfig.tsbuildinfo ([c68b294](https://github.com/unjs/db0/commit/c68b294))
42 |
43 | ### ❤️ Contributors
44 |
45 | - Pooya Parsa ([@pi0](https://github.com/pi0))
46 | - Fayaz Ahmed
47 | - Rihan Arfan ([@RihanArfan](https://github.com/RihanArfan))
48 |
49 | ## v0.3.2
50 |
51 | [compare changes](https://github.com/unjs/db0/compare/v0.3.1...v0.3.2)
52 |
53 | ### 🩹 Fixes
54 |
55 | - **sqlite:** Defer prepare errors ([#162](https://github.com/unjs/db0/pull/162))
56 |
57 | ### 📖 Documentation
58 |
59 | - Fix link to node-sqlite3 ([#159](https://github.com/unjs/db0/pull/159))
60 |
61 | ### 🏡 Chore
62 |
63 | - Apply automated updates ([91b9863](https://github.com/unjs/db0/commit/91b9863))
64 | - Update deps ([ad995db](https://github.com/unjs/db0/commit/ad995db))
65 |
66 | ### ✅ Tests
67 |
68 | - Only include src for coverage report ([#161](https://github.com/unjs/db0/pull/161))
69 |
70 | ### ❤️ Contributors
71 |
72 | - Farnabaz
73 | - Kanon ([@ysknsid25](https://github.com/ysknsid25))
74 | - Pooya Parsa ([@pi0](https://github.com/pi0))
75 | - Tsotne Nazarashvili
76 |
77 | ## v0.3.1
78 |
79 | [compare changes](https://github.com/unjs/db0/compare/v0.3.0...v0.3.1)
80 |
81 | ### 💅 Refactors
82 |
83 | - Alias `sqlite` to `node-sqlite` ([55df331](https://github.com/unjs/db0/commit/55df331))
84 |
85 | ### 📦 Build
86 |
87 | - Remove deprecated for aliases ([0011d57](https://github.com/unjs/db0/commit/0011d57))
88 |
89 | ### ❤️ Contributors
90 |
91 | - Pooya Parsa ([@pi0](https://github.com/pi0))
92 |
93 | ## v0.3.0
94 |
95 | [compare changes](https://github.com/unjs/db0/compare/v0.2.4...v0.3.0)
96 |
97 | ### 🚀 Enhancements
98 |
99 | - ⚠️ Prepared statements ([#157](https://github.com/unjs/db0/pull/157))
100 | - `node-sqlite` driver with native `node:sqlite` ([#155](https://github.com/unjs/db0/pull/155))
101 |
102 | ### 🩹 Fixes
103 |
104 | - **cloudflare-d1:** Correctly return results array ([#156](https://github.com/unjs/db0/pull/156))
105 | - Fix type exports ([0d5a151](https://github.com/unjs/db0/commit/0d5a151))
106 | - Correct `.getInstance()` type ([6a7dc5a](https://github.com/unjs/db0/commit/6a7dc5a))
107 |
108 | ### 💅 Refactors
109 |
110 | - ⚠️ Rename `node-sqlite3` to `sqlite3` ([eb8c06d](https://github.com/unjs/db0/commit/eb8c06d))
111 |
112 | ### 📖 Documentation
113 |
114 | - Add `node-sqlite` ([4da62b4](https://github.com/unjs/db0/commit/4da62b4))
115 |
116 | ### 📦 Build
117 |
118 | - ⚠️ Esm-only dist ([7d7bdec](https://github.com/unjs/db0/commit/7d7bdec))
119 |
120 | ### 🏡 Chore
121 |
122 | - Update deps ([a442671](https://github.com/unjs/db0/commit/a442671))
123 | - Update deps ([77bee33](https://github.com/unjs/db0/commit/77bee33))
124 | - Fix db0 link ([18b2bbc](https://github.com/unjs/db0/commit/18b2bbc))
125 |
126 | #### ⚠️ Breaking Changes
127 |
128 | - ⚠️ Prepared statements ([#157](https://github.com/unjs/db0/pull/157))
129 | - ⚠️ Rename `node-sqlite3` to `sqlite3` ([eb8c06d](https://github.com/unjs/db0/commit/eb8c06d))
130 | - ⚠️ Esm-only dist ([7d7bdec](https://github.com/unjs/db0/commit/7d7bdec))
131 |
132 | ### ❤️ Contributors
133 |
134 | - Pooya Parsa ([@pi0](https://github.com/pi0))
135 |
136 | ## v0.2.4
137 |
138 | [compare changes](https://github.com/unjs/db0/compare/v0.2.3...v0.2.4)
139 |
140 | ### 🩹 Fixes
141 |
142 | - **bun-sqlite:** Remove in-memory fallback when name is not provided ([#153](https://github.com/unjs/db0/pull/153))
143 |
144 | ### 🏡 Chore
145 |
146 | - Update deps ([cd49cad](https://github.com/unjs/db0/commit/cd49cad))
147 | - Update ci ([b7a6ec9](https://github.com/unjs/db0/commit/b7a6ec9))
148 | - Remove extra `@types/sqlite3` dev dependency ([a4df52b](https://github.com/unjs/db0/commit/a4df52b))
149 | - Add pnpm stuff ([5c2d3cd](https://github.com/unjs/db0/commit/5c2d3cd))
150 |
151 | ### ❤️ Contributors
152 |
153 | - Pooya Parsa ([@pi0](http://github.com/pi0))
154 | - Artem Melnyk ([@MellKam](http://github.com/MellKam))
155 |
156 | ## v0.2.3
157 |
158 | [compare changes](https://github.com/unjs/db0/compare/v0.2.2...v0.2.3)
159 |
160 | ### 📦 Build
161 |
162 | - Export `connectors` and connector ([d411f91](https://github.com/unjs/db0/commit/d411f91))
163 |
164 | ### ❤️ Contributors
165 |
166 | - Pooya Parsa ([@pi0](http://github.com/pi0))
167 |
168 | ## v0.2.2
169 |
170 | [compare changes](https://github.com/unjs/db0/compare/v0.2.1...v0.2.2)
171 |
172 | ### 🚀 Enhancements
173 |
174 | - Export database instances ([#132](https://github.com/unjs/db0/pull/132))
175 | - Add `node-sqlite3` connector ([#147](https://github.com/unjs/db0/pull/147))
176 | - Support `RETURNING` clause ([#139](https://github.com/unjs/db0/pull/139))
177 | - **pglite:** Use async init ([#130](https://github.com/unjs/db0/pull/130))
178 |
179 | ### 🩹 Fixes
180 |
181 | - Add missing `mysql` to `connectors` map ([#127](https://github.com/unjs/db0/pull/127))
182 | - Always add `{ success: true }` to select results ([#118](https://github.com/unjs/db0/pull/118))
183 |
184 | ### 📦 Build
185 |
186 | - Generate drivers meta ([#136](https://github.com/unjs/db0/pull/136))
187 | - Field export condition for `libsql` + `web` ([97630f3](https://github.com/unjs/db0/commit/97630f3))
188 |
189 | ### 🏡 Chore
190 |
191 | - Fix typos ([#122](https://github.com/unjs/db0/pull/122))
192 | - Update deps ([00e7454](https://github.com/unjs/db0/commit/00e7454))
193 | - Update deps ([37d2d3c](https://github.com/unjs/db0/commit/37d2d3c))
194 | - Update deps ([d0b5ca8](https://github.com/unjs/db0/commit/d0b5ca8))
195 |
196 | ### ❤️ Contributors
197 |
198 | - Sandro Circi ([@sandros94](http://github.com/sandros94))
199 | - Pooya Parsa ([@pi0](http://github.com/pi0))
200 | - Jonathan Ginn ([@ginnwork](http://github.com/ginnwork))
201 | - Aman Desai ([@amandesai01](http://github.com/amandesai01))
202 | - Farnabaz
203 | - Hotdogc1017 ([@hotdogc1017](http://github.com/hotdogc1017))
204 | - Nick-w-nick ([@nick-w-nick](http://github.com/nick-w-nick))
205 |
206 | ## v0.2.1
207 |
208 | [compare changes](https://github.com/unjs/db0/compare/v0.2.0...v0.2.1)
209 |
210 | ### 🩹 Fixes
211 |
212 | - **sqlite:** Support `:memory:` ([1a40c4f](https://github.com/unjs/db0/commit/1a40c4f))
213 |
214 | ### ❤️ Contributors
215 |
216 | - Pooya Parsa ([@pi0](http://github.com/pi0))
217 |
218 | ## v0.2.0
219 |
220 | [compare changes](https://github.com/unjs/db0/compare/v0.1.4...v0.2.0)
221 |
222 | ### 🚀 Enhancements
223 |
224 | - Add `dialect` and planetscale connector ([#59](https://github.com/unjs/db0/pull/59))
225 | - Mysql connector ([#86](https://github.com/unjs/db0/pull/86))
226 | - Support pglite ([#110](https://github.com/unjs/db0/pull/110))
227 |
228 | ### 🩹 Fixes
229 |
230 | - **postgresql:** Use default export ([#63](https://github.com/unjs/db0/pull/63))
231 | - **pkg:** ⚠️ Correct `/integrations/drizzle` subpath export ([#106](https://github.com/unjs/db0/pull/106))
232 |
233 | ### 💅 Refactors
234 |
235 | - Relax peer dependencies ([a80b62c](https://github.com/unjs/db0/commit/a80b62c))
236 | - **postgresql:** Correct function name ([#114](https://github.com/unjs/db0/pull/114))
237 | - Explicit exports ([f5c30bf](https://github.com/unjs/db0/commit/f5c30bf))
238 |
239 | ### 📖 Documentation
240 |
241 | - **vercel:** Fix the connector name ([#74](https://github.com/unjs/db0/pull/74))
242 | - Fix typo ([#108](https://github.com/unjs/db0/pull/108))
243 | - **sqlite:** Fix pm install component ([#85](https://github.com/unjs/db0/pull/85))
244 | - **guide:** Add information about static parameters ([#80](https://github.com/unjs/db0/pull/80))
245 | - Added jsdocs to exported functions and types ([#89](https://github.com/unjs/db0/pull/89))
246 | - Remove non existent `sql` import ([9e5550f](https://github.com/unjs/db0/commit/9e5550f))
247 |
248 | ### 🏡 Chore
249 |
250 | - Update dependencies ([c7aad11](https://github.com/unjs/db0/commit/c7aad11))
251 | - Update eslint config ([ee1dcbc](https://github.com/unjs/db0/commit/ee1dcbc))
252 | - Apply automated updates ([086317e](https://github.com/unjs/db0/commit/086317e))
253 | - **docs:** Update undocs ([122c83a](https://github.com/unjs/db0/commit/122c83a))
254 | - Update deps ([7e7a4ef](https://github.com/unjs/db0/commit/7e7a4ef))
255 | - Apply automated updates ([5b1bdbd](https://github.com/unjs/db0/commit/5b1bdbd))
256 |
257 | ### ✅ Tests
258 |
259 | - **integrations:** Drizzle integration test ([#79](https://github.com/unjs/db0/pull/79))
260 |
261 | #### ⚠️ Breaking Changes
262 |
263 | - **pkg:** ⚠️ Correct `/integrations/drizzle` subpath export ([#106](https://github.com/unjs/db0/pull/106))
264 |
265 | ### ❤️ Contributors
266 |
267 | - Pooya Parsa ([@pi0](http://github.com/pi0))
268 | - Adrien Zaganelli
269 | - Arash Ari Sheyda ([@arashsheyda](http://github.com/arashsheyda))
270 | - Gerben Mulder
271 | - Aman Desai ([@amandesai01](http://github.com/amandesai01))
272 | - Max ([@onmax](http://github.com/onmax))
273 | - Kh ([@hareland](http://github.com/hareland))
274 | - @beer ([@iiio2](http://github.com/iiio2))
275 | - Rishi Raj Jain
276 |
277 | ## v0.1.4
278 |
279 | [compare changes](https://github.com/unjs/db0/compare/v0.1.3...v0.1.4)
280 |
281 | ### 🩹 Fixes
282 |
283 | - **d1:** Support `__env__` for accessing binding ([2ef9d57](https://github.com/unjs/db0/commit/2ef9d57))
284 |
285 | ### 💅 Refactors
286 |
287 | - **d1:** Throw a better error if binding not found ([#60](https://github.com/unjs/db0/pull/60))
288 |
289 | ### 📖 Documentation
290 |
291 | - Fix typos ([#56](https://github.com/unjs/db0/pull/56))
292 |
293 | ### 🏡 Chore
294 |
295 | - Apply automated updates ([5760665](https://github.com/unjs/db0/commit/5760665))
296 |
297 | ### ❤️ Contributors
298 |
299 | - Pooya Parsa ([@pi0](http://github.com/pi0))
300 | - Sébastien Chopin
301 | - Neil Richter ([@noook](http://github.com/noook))
302 |
303 | ## v0.1.3
304 |
305 | [compare changes](https://github.com/unjs/db0/compare/v0.1.2...v0.1.3)
306 |
307 | ### 🚀 Enhancements
308 |
309 | - Add bun sqlite support ([d6de297](https://github.com/unjs/db0/commit/d6de297))
310 |
311 | ### 📖 Documentation
312 |
313 | - **readme:** Fix links ([#52](https://github.com/unjs/db0/pull/52))
314 |
315 | ### 🏡 Chore
316 |
317 | - Update docs ([598e90c](https://github.com/unjs/db0/commit/598e90c))
318 | - Update docs ([5eda18e](https://github.com/unjs/db0/commit/5eda18e))
319 | - Update readme ([#53](https://github.com/unjs/db0/pull/53))
320 | - Update autofix ci ([ecf97f1](https://github.com/unjs/db0/commit/ecf97f1))
321 |
322 | ### ❤️ Contributors
323 |
324 | - Pooya Parsa ([@pi0](http://github.com/pi0))
325 | - Shoshana Connack
326 |
327 | ## v0.1.2
328 |
329 | [compare changes](https://github.com/unjs/db0/compare/v0.1.1...v0.1.2)
330 |
331 | ### 🚀 Enhancements
332 |
333 | - Add libsql support ([#25](https://github.com/unjs/db0/pull/25))
334 | - Support multiple libsql exports ([#31](https://github.com/unjs/db0/pull/31))
335 | - Support static placeholders with `sql` template ([378fe62](https://github.com/unjs/db0/commit/378fe62))
336 |
337 | ### 💅 Refactors
338 |
339 | - Use `createDatabase` ([84c52d8](https://github.com/unjs/db0/commit/84c52d8))
340 | - Update drizzle integration ([74c909e](https://github.com/unjs/db0/commit/74c909e))
341 |
342 | ### 📖 Documentation
343 |
344 | - Initialize new docs ([c897405](https://github.com/unjs/db0/commit/c897405))
345 |
346 | ### 🏡 Chore
347 |
348 | - Add autofix ci ([32a43e3](https://github.com/unjs/db0/commit/32a43e3))
349 | - Update dependencies ([e3f1828](https://github.com/unjs/db0/commit/e3f1828))
350 | - Format code ([282c286](https://github.com/unjs/db0/commit/282c286))
351 | - Update deps ([6fe166d](https://github.com/unjs/db0/commit/6fe166d))
352 | - Update repo ([269efde](https://github.com/unjs/db0/commit/269efde))
353 | - Update landing ([5fcdb67](https://github.com/unjs/db0/commit/5fcdb67))
354 | - Update landing ([c17fa09](https://github.com/unjs/db0/commit/c17fa09))
355 | - Add npmrc ([859cc05](https://github.com/unjs/db0/commit/859cc05))
356 | - Update readme with automd ([303f138](https://github.com/unjs/db0/commit/303f138))
357 | - Update docs ([ea29f15](https://github.com/unjs/db0/commit/ea29f15))
358 |
359 | ### 🤖 CI
360 |
361 | - Use conventional commit for autofix action ([#34](https://github.com/unjs/db0/pull/34))
362 |
363 | ### ❤️ Contributors
364 |
365 | - Pooya Parsa ([@pi0](http://github.com/pi0))
366 | - Daniel Roe ([@danielroe](http://github.com/danielroe))
367 | - Heb ([@Hebilicious](http://github.com/Hebilicious))
368 |
369 | ## v0.1.1
370 |
371 |
372 | ### 🚀 Enhancements
373 |
374 | - Add postgresql connector ([51823eb](https://github.com/unjs/db0/commit/51823eb))
375 | - Support string templates for query ([feea30f](https://github.com/unjs/db0/commit/feea30f))
376 | - Drizzle integration support ([#17](https://github.com/unjs/db0/pull/17))
377 |
378 | ### 💅 Refactors
379 |
380 | - Merge `db.query` into `db.sql` ([eef2417](https://github.com/unjs/db0/commit/eef2417))
381 |
382 | ### 📖 Documentation
383 |
384 | - Add wip notice ([e7a551c](https://github.com/unjs/db0/commit/e7a551c))
385 |
386 | ### 🏡 Chore
387 |
388 | - Update deps ([0d47eea](https://github.com/unjs/db0/commit/0d47eea))
389 | - Fix eslintrc ([6c5a07d](https://github.com/unjs/db0/commit/6c5a07d))
390 | - Initiate docs ([16922ac](https://github.com/unjs/db0/commit/16922ac))
391 | - Add vercel.json for docs ([3a45877](https://github.com/unjs/db0/commit/3a45877))
392 | - Rename to `db0` ([61188b4](https://github.com/unjs/db0/commit/61188b4))
393 | - Prepare for initial release ([459c055](https://github.com/unjs/db0/commit/459c055))
394 | - Update dependencies ([52da7c2](https://github.com/unjs/db0/commit/52da7c2))
395 |
396 | ### ❤️ Contributors
397 |
398 | - Pooya Parsa ([@pi0](http://github.com/pi0))
399 |
400 |
--------------------------------------------------------------------------------