├── .github
├── ISSUE_TEMPLATE
│ ├── bug_report.md
│ └── feature_request.md
├── pull_request_template.md
└── workflows
│ └── test.yml
├── .gitignore
├── .prettierignore
├── .prettierrc.json
├── CODE_OF_CONDUCT.md
├── LICENSE.txt
├── README.md
├── contributing.md
├── copy-handlebars.js
├── jest.config.js
├── jest.setup.js
├── package-lock.json
├── package.json
├── playground
├── .gitignore
├── index.js
├── package-lock.json
└── package.json
├── src
├── client.ts
├── connections
│ ├── bigquery.ts
│ ├── index.ts
│ ├── mongodb.ts
│ ├── motherduck.ts
│ ├── mysql.ts
│ ├── postgre
│ │ ├── base.ts
│ │ └── postgresql.ts
│ ├── snowflake
│ │ └── snowflake.ts
│ ├── sql-base.ts
│ └── sqlite
│ │ ├── base.ts
│ │ ├── cloudflare.ts
│ │ ├── starbase.ts
│ │ └── turso.ts
├── generators
│ ├── generate-models.backup.txt
│ ├── index-template.handlebars
│ └── model-template.handlebars
├── index.ts
├── models
│ ├── database.ts
│ ├── decorators.ts
│ └── index.backup.txt
├── playground.ts
├── query-builder
│ ├── dialects
│ │ ├── bigquery.ts
│ │ ├── default.ts
│ │ ├── duckdb.ts
│ │ ├── mysql.ts
│ │ ├── postgres.ts
│ │ └── sqlite-dialect.ts
│ └── index.ts
├── query-params.ts
├── query.ts
└── utils
│ ├── placeholder.ts
│ └── transformer.ts
├── tests
├── connections
│ ├── connection.test.ts
│ ├── create-test-connection.ts
│ └── postgres.test.ts
└── units
│ ├── placeholder.test.ts
│ └── query-builder
│ └── postgre.test.ts
└── tsconfig.json
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Describe the bug**
11 | A clear and concise description of what the bug is.
12 |
13 | **To Reproduce**
14 | Steps to reproduce the behavior:
15 | 1. Go to '...'
16 | 2. Click on '....'
17 | 3. Scroll down to '....'
18 | 4. See error
19 |
20 | **Expected behavior**
21 | A clear and concise description of what you expected to happen.
22 |
23 | **Screenshots**
24 | If applicable, add screenshots to help explain your problem.
25 |
26 | **Desktop (please complete the following information):**
27 | - OS: [e.g. iOS]
28 | - Browser [e.g. chrome, safari]
29 | - Version [e.g. 22]
30 |
31 | **Smartphone (please complete the following information):**
32 | - Device: [e.g. iPhone6]
33 | - OS: [e.g. iOS8.1]
34 | - Browser [e.g. stock browser, safari]
35 | - Version [e.g. 22]
36 |
37 | **Additional context**
38 | Add any other context about the problem here.
39 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Is your feature request related to a problem? Please describe.**
11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
12 |
13 | **Describe the solution you'd like**
14 | A clear and concise description of what you want to happen.
15 |
16 | **Describe alternatives you've considered**
17 | A clear and concise description of any alternative solutions or features you've considered.
18 |
19 | **Additional context**
20 | Add any other context or screenshots about the feature request here.
21 |
--------------------------------------------------------------------------------
/.github/pull_request_template.md:
--------------------------------------------------------------------------------
1 | ## Purpose
2 |
3 |
4 |
5 | ## Tasks
6 |
7 |
8 | - [ ]
9 |
10 | ## Verify
11 |
12 |
13 | -
14 |
15 | ## Before
16 |
17 |
18 |
19 |
20 | ## After
21 |
22 |
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
1 | name: CI
2 |
3 | on: push
4 |
5 | jobs:
6 | build:
7 | name: 'Unit Test and Build'
8 | runs-on: ubuntu-latest
9 |
10 | steps:
11 | - uses: actions/checkout@v4
12 |
13 | - name: Install modules
14 | run: npm install
15 |
16 | - name: Build
17 | run: npm run build
18 |
19 | # - name: Run sync-database-models command
20 | # id: sync
21 | # run: npm exec sync-database-models
22 |
23 | - name: Run tests
24 | run: npm test
25 |
26 | test_postgre:
27 | name: 'Postgres Connection'
28 | runs-on: ubuntu-latest
29 | needs: build
30 |
31 | services:
32 | postgres:
33 | image: postgres
34 | env:
35 | POSTGRES_PASSWORD: 123456
36 | options: >-
37 | --health-cmd pg_isready
38 | --health-interval 10s
39 | --health-timeout 5s
40 | --health-retries 5
41 | ports:
42 | - 5432:5432
43 |
44 | steps:
45 | - uses: actions/checkout@v4
46 |
47 | - name: Install modules
48 | run: npm install
49 |
50 | - name: Run tests
51 | env:
52 | CONNECTION_TYPE: postgres
53 | POSTGRES_HOST: localhost
54 | POSTGRES_DB: postgres
55 | POSTGRES_USER: postgres
56 | POSTGRES_PASSWORD: 123456
57 | POSTGRES_PORT: 5432
58 | POSTGRES_DEFAULT_SCHEMA: public
59 | run: npm run test:connection
60 |
61 | test_mysql:
62 | name: 'MySQL Connection'
63 | runs-on: ubuntu-latest
64 | needs: build
65 |
66 | strategy:
67 | matrix:
68 | version: ['5.7', '8.0']
69 |
70 | services:
71 | mysql:
72 | image: mysql:${{ matrix.version }}
73 | env:
74 | MYSQL_DATABASE: testdb
75 | MYSQL_ROOT_PASSWORD: 123456
76 | ports:
77 | - 3306:3306
78 | options: --health-cmd="mysqladmin ping" --health-interval=10s --health-timeout=5s --health-retries=3
79 |
80 | steps:
81 | - uses: actions/checkout@v4
82 |
83 | - name: Install modules
84 | run: npm install
85 |
86 | - name: Run tests
87 | env:
88 | CONNECTION_TYPE: mysql
89 | MYSQL_HOST: localhost
90 | MYSQL_DB: testdb
91 | MYSQL_USER: root
92 | MYSQL_PASSWORD: 123456
93 | MYSQL_PORT: 3306
94 | MYSQL_DEFAULT_SCHEMA: testdb
95 | run: npm run test:connection
96 |
97 | test_bigquery:
98 | name: 'BigQuery Connection'
99 | runs-on: ubuntu-latest
100 | needs: build
101 |
102 | steps:
103 | - uses: actions/checkout@v4
104 |
105 | - name: Install modules
106 | run: npm install
107 |
108 | - name: Run tests
109 | env:
110 | CONNECTION_TYPE: bigquery
111 | BIGQUERY_PROJECT_ID: ${{ secrets.BIGQUERY_PROJECT_ID }}
112 | BIGQUERY_CLIENT_EMAIL: ${{ secrets.BIGQUERY_CLIENT_EMAIL }}
113 | BIGQUERY_PRIVATE_KEY: ${{ secrets.BIGQUERY_PRIVATE_KEY }}
114 | BIGQUERY_DEFAULT_SCHEMA: ${{ secrets.BIGQUERY_DEFAULT_SCHEMA }}
115 | run: npm run test:connection
116 |
117 | test_turso:
118 | name: 'Turso Connection'
119 | runs-on: ubuntu-latest
120 | needs: build
121 |
122 | steps:
123 | - uses: actions/checkout@v4
124 |
125 | - name: Install modules
126 | run: npm install
127 |
128 | - name: Run tests
129 | env:
130 | CONNECTION_TYPE: turso
131 | run: npm run test:connection
132 |
133 | test_cloudflare:
134 | name: 'Cloudflare D1 Connection'
135 | runs-on: ubuntu-latest
136 | needs: build
137 |
138 | steps:
139 | - uses: actions/checkout@v4
140 |
141 | - name: Install modules
142 | run: npm install
143 |
144 | - name: Run tests
145 | env:
146 | CONNECTION_TYPE: cloudflare
147 | CLOUDFLARE_API_KEY: ${{ secrets.CLOUDFLARE_API_KEY }}
148 | CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
149 | CLOUDFLARE_DATABASE_ID: ${{ secrets.CLOUDFLARE_DATABASE_ID }}
150 | run: npm run test:connection
151 |
152 | test_starbase:
153 | name: 'StarbaseDB Connection'
154 | runs-on: ubuntu-latest
155 | needs: build
156 |
157 | steps:
158 | - uses: actions/checkout@v4
159 |
160 | - name: Install modules
161 | run: npm install
162 |
163 | - name: Run tests
164 | env:
165 | CONNECTION_TYPE: starbase
166 | STARBASEDB_URL: ${{ secrets.STARBASEDB_URL }}
167 | STARBASEDB_TOKEN: ${{ secrets.STARBASEDB_TOKEN }}
168 | run: npm run test:connection
169 |
170 | test_mongodb:
171 | name: 'MongoDB Connection'
172 | runs-on: ubuntu-latest
173 | needs: build
174 |
175 | services:
176 | mongodb:
177 | image: mongo
178 | ports:
179 | - 27017:27017
180 |
181 | steps:
182 | - uses: actions/checkout@v4
183 |
184 | - name: Install modules
185 | run: npm install
186 |
187 | - name: Run tests
188 | env:
189 | CONNECTION_TYPE: mongodb
190 | MONGODB_URI: mongodb://localhost:27017
191 | MONGODB_DB_NAME: testing
192 | run: npm run test:connection
193 |
194 | test_motherduck:
195 | name: 'Motherduck Connection'
196 | runs-on: ubuntu-latest
197 | needs: build
198 |
199 | steps:
200 | - uses: actions/checkout@v4
201 |
202 | - name: Install modules
203 | run: npm install
204 |
205 | - name: Run tests
206 | env:
207 | CONNECTION_TYPE: motherduck
208 | MOTHERDUCK_PATH: ${{ secrets.MOTHERDUCK_PATH }}
209 | MOTHERDUCK_TOKEN: ${{ secrets.MOTHERDUCK_TOKEN }}
210 | run: npm run test:connection
211 |
212 | test_snowflake:
213 | name: 'Snowflake Connection'
214 | runs-on: ubuntu-latest
215 | needs: build
216 |
217 | steps:
218 | - uses: actions/checkout@v4
219 |
220 | - name: Install modules
221 | run: npm install
222 |
223 | - name: Run tests
224 | env:
225 | CONNECTION_TYPE: snowflake
226 | SNOWFLAKE_ACCOUNT_ID: ${{ secrets.SNOWFLAKE_ACCOUNT_ID }}
227 | SNOWFLAKE_USERNAME: ${{ secrets.SNOWFLAKE_USERNAME }}
228 | SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }}
229 | SNOWFLAKE_WAREHOUSE: ${{ secrets.SNOWFLAKE_WAREHOUSE }}
230 | SNOWFLAKE_DATABASE: ${{ secrets.SNOWFLAKE_DATABASE }}
231 | run: npm run test:connection
232 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 | lerna-debug.log*
8 | .pnpm-debug.log*
9 |
10 | # Diagnostic reports (https://nodejs.org/api/report.html)
11 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
12 |
13 | # Runtime data
14 | pids
15 | *.pid
16 | *.seed
17 | *.pid.lock
18 |
19 | # Directory for instrumented libs generated by jscoverage/JSCover
20 | lib-cov
21 |
22 | # Coverage directory used by tools like istanbul
23 | coverage
24 | *.lcov
25 |
26 | # nyc test coverage
27 | .nyc_output
28 |
29 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
30 | .grunt
31 |
32 | # Bower dependency directory (https://bower.io/)
33 | bower_components
34 |
35 | # node-waf configuration
36 | .lock-wscript
37 |
38 | # Compiled binary addons (https://nodejs.org/api/addons.html)
39 | build/Release
40 |
41 | # Dependency directories
42 | node_modules/
43 | jspm_packages/
44 |
45 | # Snowpack dependency directory (https://snowpack.dev/)
46 | web_modules/
47 |
48 | # TypeScript cache
49 | *.tsbuildinfo
50 |
51 | # Optional npm cache directory
52 | .npm
53 | .DS_STORE
54 |
55 | # Optional eslint cache
56 | .eslintcache
57 |
58 | # Optional stylelint cache
59 | .stylelintcache
60 |
61 | # Microbundle cache
62 | .rpt2_cache/
63 | .rts2_cache_cjs/
64 | .rts2_cache_es/
65 | .rts2_cache_umd/
66 |
67 | # Optional REPL history
68 | .node_repl_history
69 |
70 | # Output of 'npm pack'
71 | *.tgz
72 |
73 | # Yarn Integrity file
74 | .yarn-integrity
75 |
76 | # dotenv environment variable files
77 | .env
78 | .env.development.local
79 | .env.test.local
80 | .env.production.local
81 | .env.local
82 |
83 | # parcel-bundler cache (https://parceljs.org/)
84 | .cache
85 | .parcel-cache
86 |
87 | # Next.js build output
88 | .next
89 | out
90 |
91 | # Nuxt.js build / generate output
92 | .nuxt
93 | dist
94 |
95 | # Gatsby files
96 | .cache/
97 | # Comment in the public line in if your project uses Gatsby and not Next.js
98 | # https://nextjs.org/blog/next-9-1#public-directory-support
99 | # public
100 |
101 | # vuepress build output
102 | .vuepress/dist
103 |
104 | # vuepress v2.x temp and cache directory
105 | .temp
106 | .cache
107 |
108 | # Docusaurus cache and generated files
109 | .docusaurus
110 |
111 | # Serverless directories
112 | .serverless/
113 |
114 | # FuseBox cache
115 | .fusebox/
116 |
117 | # DynamoDB Local files
118 | .dynamodb/
119 |
120 | # TernJS port file
121 | .tern-port
122 |
123 | # Stores VSCode versions used for testing VSCode extensions
124 | .vscode-test
125 |
126 | # yarn v2
127 | .yarn/cache
128 | .yarn/unplugged
129 | .yarn/build-state.yml
130 | .yarn/install-state.gz
131 | .pnp.*
132 |
--------------------------------------------------------------------------------
/.prettierignore:
--------------------------------------------------------------------------------
1 | **/*.md
2 | **/*.txt
3 | /playground/*
4 |
5 | # dependencies
6 | pnpm-lock.yaml
7 | /node_modules
8 |
9 | # output
10 | /dist/
11 |
12 | # debug
13 | npm-debug.log*
14 | yarn-debug.log*
15 | yarn-error.log*
16 | .pnpm-debug.log*
17 |
18 | # handlebars
19 | **/*.sh
20 | **/*.handlebars
21 |
--------------------------------------------------------------------------------
/.prettierrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "trailingComma": "es5",
3 | "tabWidth": 4,
4 | "semi": true,
5 | "singleQuote": true
6 | }
7 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | We as members, contributors, and leaders pledge to make participation in our
6 | community a harassment-free experience for everyone, regardless of age, body
7 | size, visible or invisible disability, ethnicity, sex characteristics, gender
8 | identity and expression, level of experience, education, socio-economic status,
9 | nationality, personal appearance, race, religion, or sexual identity
10 | and orientation.
11 |
12 | We pledge to act and interact in ways that contribute to an open, welcoming,
13 | diverse, inclusive, and healthy community.
14 |
15 | ## Our Standards
16 |
17 | Examples of behavior that contributes to a positive environment for our
18 | community include:
19 |
20 | * Demonstrating empathy and kindness toward other people
21 | * Being respectful of differing opinions, viewpoints, and experiences
22 | * Giving and gracefully accepting constructive feedback
23 | * Accepting responsibility and apologizing to those affected by our mistakes,
24 | and learning from the experience
25 | * Focusing on what is best not just for us as individuals, but for the
26 | overall community
27 |
28 | Examples of unacceptable behavior include:
29 |
30 | * The use of sexualized language or imagery, and sexual attention or
31 | advances of any kind
32 | * Trolling, insulting or derogatory comments, and personal or political attacks
33 | * Public or private harassment
34 | * Publishing others' private information, such as a physical or email
35 | address, without their explicit permission
36 | * Other conduct which could reasonably be considered inappropriate in a
37 | professional setting
38 |
39 | ## Enforcement Responsibilities
40 |
41 | Community leaders are responsible for clarifying and enforcing our standards of
42 | acceptable behavior and will take appropriate and fair corrective action in
43 | response to any behavior that they deem inappropriate, threatening, offensive,
44 | or harmful.
45 |
46 | Community leaders have the right and responsibility to remove, edit, or reject
47 | comments, commits, code, wiki edits, issues, and other contributions that are
48 | not aligned to this Code of Conduct, and will communicate reasons for moderation
49 | decisions when appropriate.
50 |
51 | ## Scope
52 |
53 | This Code of Conduct applies within all community spaces, and also applies when
54 | an individual is officially representing the community in public spaces.
55 | Examples of representing our community include using an official e-mail address,
56 | posting via an official social media account, or acting as an appointed
57 | representative at an online or offline event.
58 |
59 | ## Enforcement
60 |
61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
62 | reported to the community leaders responsible for enforcement at
63 | support@outerbase.com.
64 | All complaints will be reviewed and investigated promptly and fairly.
65 |
66 | All community leaders are obligated to respect the privacy and security of the
67 | reporter of any incident.
68 |
69 | ## Enforcement Guidelines
70 |
71 | Community leaders will follow these Community Impact Guidelines in determining
72 | the consequences for any action they deem in violation of this Code of Conduct:
73 |
74 | ### 1. Correction
75 |
76 | **Community Impact**: Use of inappropriate language or other behavior deemed
77 | unprofessional or unwelcome in the community.
78 |
79 | **Consequence**: A private, written warning from community leaders, providing
80 | clarity around the nature of the violation and an explanation of why the
81 | behavior was inappropriate. A public apology may be requested.
82 |
83 | ### 2. Warning
84 |
85 | **Community Impact**: A violation through a single incident or series
86 | of actions.
87 |
88 | **Consequence**: A warning with consequences for continued behavior. No
89 | interaction with the people involved, including unsolicited interaction with
90 | those enforcing the Code of Conduct, for a specified period of time. This
91 | includes avoiding interactions in community spaces as well as external channels
92 | like social media. Violating these terms may lead to a temporary or
93 | permanent ban.
94 |
95 | ### 3. Temporary Ban
96 |
97 | **Community Impact**: A serious violation of community standards, including
98 | sustained inappropriate behavior.
99 |
100 | **Consequence**: A temporary ban from any sort of interaction or public
101 | communication with the community for a specified period of time. No public or
102 | private interaction with the people involved, including unsolicited interaction
103 | with those enforcing the Code of Conduct, is allowed during this period.
104 | Violating these terms may lead to a permanent ban.
105 |
106 | ### 4. Permanent Ban
107 |
108 | **Community Impact**: Demonstrating a pattern of violation of community
109 | standards, including sustained inappropriate behavior, harassment of an
110 | individual, or aggression toward or disparagement of classes of individuals.
111 |
112 | **Consequence**: A permanent ban from any sort of public interaction within
113 | the community.
114 |
115 | ## Attribution
116 |
117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage],
118 | version 2.0, available at
119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
120 |
121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct
122 | enforcement ladder](https://github.com/mozilla/diversity).
123 |
124 | [homepage]: https://www.contributor-covenant.org
125 |
126 | For answers to common questions about this code of conduct, see the FAQ at
127 | https://www.contributor-covenant.org/faq. Translations are available at
128 | https://www.contributor-covenant.org/translations.
129 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | The MIT License
2 |
3 | Copyright (c) 2022-2024 OUTERBASE, INC.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
21 |
22 | ## What is Outerbase SDK?
23 |
24 | Outerbase SDK is a way to interact with your database in a SQL-like manner. This library contains the following primary features:
25 |
26 | - [**Query Builder**](#chaining-query-operations): Execute queries on your database easily.
27 | - [**Saved Queries**](#run-saved-outerbase-queries): Run any saved queries from Outerbase in one line.
28 | - [**Database Model Generator**](#generate-models-from-your-database): Create Typescript models from your database schema.
29 |
30 | ## Usage
31 |
32 | ### Install with a package manager
33 |
34 | **npm**
35 | ```
36 | npm i @outerbase/sdk
37 | ```
38 |
39 | **pnpm**
40 | ```
41 | pnpm add @outerbase/sdk
42 | ```
43 |
44 | ### Initialize a connection to your database
45 |
46 | This library currently supports connecting to Outerbase connections, which supports **Postgres**, **MySQL**, **SQLite**, **SQL Server**, **Clickhouse** and more with direct integrations with platforms such as [DigitalOcean](https://digitalocean.com), [Neon](https://neon.tech), and [Turso](https://turso.tech).
47 |
48 | First we start by creating a connection object which is intended to be extensible where contributors can create a variety of connection types to other databases or additional third party tools to interact with. In this example we use the included `OuterbaseConnection` class.
49 |
50 | With a connection object instantiated we can create a new database instance to interact with that connection interface.
51 |
52 | ```ts
53 | import { Outerbase, OuterbaseConnection } from '@outerbase/sdk'
54 |
55 | // ...
56 |
57 | const connection: OuterbaseConnection = new OuterbaseConnection('INSERT_API_TOKEN');
58 | const db = Outerbase(connection);
59 | ```
60 |
61 | #### How to create an Outerbase Connection token
62 |
63 | When using the `OuterbaseConnection` class, you are required to provide an API token from Outerbase.
64 |
65 | 1. Create an account on [Outerbase](https://app.outerbase.com/)
66 | 2. Attach the database you want to use
67 | 3. Open your Base and click on _Base Settings_ on the left menu
68 | 4. Select the _General_ section
69 | 5. Underneath the _API Token_ section you will see a button to "Generate API Key". Click that and copy your API token to use it in declaring a new `OuterbaseConnection` object.
70 |
71 | ### Chaining query operations
72 |
73 | Instead of writing SQL directly in your code you can chain commands together that create simple and complex SQL queries for you.
74 |
75 | After you construct the series of SQL-like operations you intend to execute, you should end it by calling the `.query()` function call which will send the request to the database for execution.
76 |
77 | #### Select data from database
78 | ```ts
79 | let { data, error } = await db
80 | .selectFrom([
81 | {
82 | schema: 'public', // <- Optional
83 | table: 'person',
84 | columns: ['first_name', 'last_name', 'position', 'avatar'],
85 | },
86 | { table: 'users', columns: ['email'] },
87 | ])
88 | .leftJoin('users', equalsColumn('person.user_id', 'users.id'))
89 | .where(isNot('first_name', null))
90 | .where(equals('last_name', 'Doe'))
91 | .where(equalsNumber('avatar', 0))
92 | .limit(10)
93 | .offset(0)
94 | .orderBy(descending('first_name'))
95 | .asClass(Person)
96 | .query()
97 | ```
98 |
99 | #### Insert data into a table
100 | ```ts
101 | let { data } = await db
102 | .insert({ first_name: 'John', last_name: 'Doe', position: 'Developer', avatar: 0 })
103 | .into('person')
104 | .returning(['id'])
105 | .query();
106 | ```
107 |
108 | #### Update data in a table
109 | ```ts
110 | let { data } = await db
111 | .update({ first_name: 'Johnny' })
112 | .into('person')
113 | .where(equals('last_name', 'Doe'))
114 | .query();
115 | ```
116 |
117 | #### Delete data from a table
118 | ```ts
119 | let { data } = await db
120 | .deleteFrom('person')
121 | .where(equals('id', '1234'))
122 | .query();
123 | ```
124 |
125 | > IMPORTANT! To prevent your code from performing actions you do not want to happen, such as deleting data, make sure the database user role you provide in Outerbase has restricted scopes.
126 |
127 | ### Executing raw SQL queries
128 |
129 | Executing raw SQL queries against your database is possible by passing a valid SQL statement into a database instance created by the library.
130 |
131 | ```ts
132 | let { data, error } = await db.queryRaw('SELECT * FROM person');
133 | ```
134 |
135 | You can optionally pass in an array of parameters for sanitizing your SQL inputs.
136 |
137 | ```ts
138 | let { data, error } = await db.queryRaw('SELECT * FROM person WHERE id=:id', { id: "123" });
139 | ```
140 |
141 | ### Run saved Outerbase queries
142 |
143 | When you save queries to your Outerbase bases you can then directly execute those queries from this library. This enables you to make modifications to your query without having to alter and redeploy your codebase, and instead just make the modifications via Outerbase directly for convenience.
144 |
145 | ```ts
146 | let { data, error } = await connection.runSavedQuery(
147 | 'ea72da5f-5f7a-4bab-9f72-ffffffffffff'
148 | )
149 | ```
150 |
151 | Note that this is an exported function directly from the `OuterbaseConnection` class.
152 |
153 | ### Map results to class models
154 |
155 | As you construct a SQL statement to be run you can also pass in a class type you would like the output to attempt to map to by using `.asClass(ClassName)`. In the below example we pass in `Person` as the class type and the query builder will know to respond either as a single `Person` object or a `Person[]` array based on the contents of the response.
156 |
157 | ```ts
158 | let { data, error } = await db
159 | .asClass(Person)
160 | .queryRaw('SELECT * FROM person');
161 | ```
162 |
163 | If your response cannot map to that class type based on property mismatch, you may not see any data being returned in your model.
164 |
165 | ### Generate models from your database
166 |
167 | > NOTE: This feature is still in early development.
168 |
169 | If your database is connected to Outerbase, then you can add a command to your `package.json` file in your project that can be executed to sync and download your database tables as Typescript models. These models are usable in your project and in many cases should map directly to the responses provided by the query builder library.
170 |
171 | To get started first add the following to your `package.json` file:
172 |
173 | ##### package.json
174 | ```ts
175 | "scripts": {
176 | "sync-models": "sync-database-models PATH=./folder/path/to/add/models API_KEY=outerbase_api_key"
177 | }
178 | ```
179 |
180 | Based on your `API_KEY` value the command will know how to fetch your database schema from Outerbase. It will convert your schema into various Typescript models and save each file to the path you provide. To run this command and generate the files you can execute the command as it is written above by typing:
181 |
182 | ```
183 | npm run sync-models
184 | ```
185 |
186 | The output produces a series of files, one per database table, each containing a Typescript class that your queries can map their results to and that you can access programmatically with ease. A sample output looks like the following where each property maps to a column in your database.
187 |
188 | ```ts
189 | export interface PersonType {
190 | firstName: string;
191 | lastName: string;
192 | position?: string;
193 | avatar?: number;
194 | }
195 |
196 | export class Person implements PersonType {
197 | firstName: string;
198 | lastName: string;
199 | position?: string;
200 | avatar?: number;
201 |
202 | constructor(data: any) {
203 | this.firstName = data.first_name;
204 | this.lastName = data.last_name;
205 | this.position = data.position;
206 | this.avatar = data.avatar;
207 | }
208 | }
209 | ```
210 |
211 | ## Contributing
212 |
213 | If you want to add contributions to this repository, please follow the instructions [here](contributing.md).
214 |
215 | ## Support
216 |
217 | For support join our community on [Discord](https://discord.gg/4M6AXzGG84). For enterprise solutions contact us at [support@outerbase.com](mailto:support@outerbase.com)
218 |
219 | ## License
220 |
221 | This project is licensed under the MIT license. See the [LICENSE](./LICENSE.txt) file for more info.
222 |
223 | ## Our Contributors
224 |
225 |
--------------------------------------------------------------------------------
/contributing.md:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | Welcome! We love receiving contributions from our community, so thanks for stopping by! There are many ways to contribute, including submitting bug reports, improving documentation, submitting feature requests, reviewing new submissions, or contributing code that can be incorporated into the project.
4 |
5 | **Table of Contents:**
6 |
7 | 1. [Code of Conduct](#code-of-conduct)
8 | 2. [Questions](#questions)
9 | 3. [Feature Requests](#feature-requests)
10 | 4. [Reporting Bugs](#reporting-bugs)
11 |
12 |
13 | ## Code of Conduct
14 |
15 | To quote the famous William and Theodore:
16 |
17 | "Be excellent to each other."
18 |
19 |
20 | ## Questions
21 |
22 | If you have any questions feel free to utilize our [Discord Community](https://discord.gg/4M6AXzGG84) or the [GitHub Discussions](https://github.com/Outerbase/sdk/discussions) board.
23 |
24 |
25 | ## Feature Requests
26 |
27 | Please create a new GitHub issue for any major changes and enhancements that you wish to make. Please provide the feature you would like to see, why you need it, and how it will work. Discuss your ideas transparently and get community feedback before proceeding.
28 |
29 | Major Changes that you wish to contribute to the project should be discussed first in a GitHub issue that clearly outlines the changes and benefits of the feature.
30 |
31 | Small Changes can directly be crafted and submitted to the GitHub Repository as a Pull Request. See the section about Pull Request Submission Guidelines, and refer to the core development documentation for detailed information.
32 |
33 |
34 | ## Reporting Bugs
35 |
36 | **If you find a security vulnerability, do NOT open an issue. Email security@outerbase.com instead.**
37 |
38 | Before you submit your issue, please [search the issue archive](https://github.com/Outerbase/sdk/issues?q=is%3Aissue+is%3Aclosed) - maybe your question or issue has already been identified or addressed.
39 |
40 | If you find a bug in the source code, you can help us by [submitting an issue to our GitHub issue tracker](https://github.com/Outerbase/sdk/issues). Even better, you can submit a Pull Request with a fix!
41 |
42 | _More contribution guidelines around code contribution and PR guidelines coming soon._
--------------------------------------------------------------------------------
/copy-handlebars.js:
--------------------------------------------------------------------------------
const fs = require('fs');
const path = require('path');

// Locations of the Handlebars templates: they live next to the TypeScript
// sources but must also ship inside the compiled `dist` output.
const srcDir = path.join(__dirname, 'src', 'generators');
const destDir = path.join(__dirname, 'dist', 'generators');

// Make sure dist/generators exists before copying into it.
if (!fs.existsSync(destDir)) {
    fs.mkdirSync(destDir, { recursive: true });
}

// Copy each template verbatim from src to dist.
const filesToCopy = ['model-template.handlebars', 'index-template.handlebars'];

for (const file of filesToCopy) {
    fs.copyFileSync(path.join(srcDir, file), path.join(destDir, file));
}

console.log('Files copied successfully.');
24 |
--------------------------------------------------------------------------------
/jest.config.js:
--------------------------------------------------------------------------------
1 | /** @type {import('ts-jest').JestConfigWithTsJest} */
2 |
3 | const { pathsToModuleNameMapper } = require('ts-jest');
4 | const { compilerOptions } = require('./tsconfig');
5 |
6 | pathsToModuleNameMapper(compilerOptions.paths, { prefix: '' });
7 |
8 | module.exports = {
9 | preset: 'ts-jest',
10 | testEnvironment: 'node',
11 | moduleNameMapper: pathsToModuleNameMapper(compilerOptions.paths, {
12 | prefix: '/',
13 | }),
14 | moduleDirectories: ['node_modules', 'src'],
15 | modulePaths: [''],
16 | setupFiles: ['/jest.setup.js'],
17 | coveragePathIgnorePatterns: ['/src/connections/.*$'],
18 | coverageThreshold: {
19 | global: {
20 | branches: 4,
21 | functions: 6,
22 | lines: 21,
23 | statements: 20,
24 | },
25 | },
26 | };
27 |
--------------------------------------------------------------------------------
/jest.setup.js:
--------------------------------------------------------------------------------
// Jest bootstrap: load environment variables from .env first, then make the
// tsconfig "paths" aliases resolvable at runtime for the test process.
require('dotenv').config();

const { register } = require('tsconfig-paths');
const { compilerOptions } = require('./tsconfig');

register({
    baseUrl: './',
    paths: compilerOptions.paths,
});
9 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@outerbase/sdk",
3 | "version": "2.0.0-rc.5",
4 | "description": "",
5 | "main": "dist/index.js",
6 | "module": "dist/index.js",
7 | "bin": {
8 | "sync-database-models": "./dist/generators/generate-models.js"
9 | },
10 | "files": [
11 | "dist/*"
12 | ],
13 | "scripts": {
14 | "compile": "tsc --project ./tsconfig.json",
15 | "build": "npm run compile && node copy-handlebars.js",
16 | "publish-npm-module": "npm publish --access public",
17 | "prepack": "npm run build",
18 | "prepare": "husky install",
19 | "test": "jest --verbose --testPathPattern=unit",
20 | "test:connection": "jest --verbose --testPathPattern=connection --runInBand --forceExit",
21 | "test:watch": "jest --watch",
22 | "test:coverage": "jest --coverage --testPathPattern=unit"
23 | },
24 | "lint-staged": {
25 | "**/*.{ts,tsx,js,json,css,scss,md}": [
26 | "prettier --write"
27 | ]
28 | },
29 | "keywords": [
30 | "query-builder",
31 | "query",
32 | "builder"
33 | ],
34 | "author": "Outerbase",
35 | "license": "MIT",
36 | "dependencies": {
37 | "@outerbase/sdk-transform": "^1.0.3",
38 | "handlebars": "^4.7.8"
39 | },
40 | "devDependencies": {
41 | "@google-cloud/bigquery": "^7.9.0",
42 | "@jest/globals": "^29.7.0",
43 | "@libsql/client": "^0.14.0",
44 | "@neondatabase/serverless": "^0.9.3",
45 | "@types/jest": "^29.5.13",
46 | "@types/node": "^20.12.12",
47 | "@types/ws": "^8.5.10",
48 | "dotenv": "^16.4.5",
49 | "duckdb": "^1.1.1",
50 | "husky": "^9.0.11",
51 | "jest": "^29.7.0",
52 | "lint-staged": "^15.2.4",
53 | "mongodb": "^6.9.0",
54 | "mysql2": "^3.11.3",
55 | "pg": "^8.13.0",
56 | "prettier": "^3.2.5",
57 | "snowflake-sdk": "^1.15.0",
58 | "ts-jest": "^29.1.3",
59 | "ts-node": "^10.9.2",
60 | "tsconfig-paths": "^4.2.0",
61 | "typescript": "^5.4.5",
62 | "ws": "^8.17.1"
63 | }
64 | }
65 |
--------------------------------------------------------------------------------
/playground/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 | lerna-debug.log*
8 | .pnpm-debug.log*
9 |
10 | # Diagnostic reports (https://nodejs.org/api/report.html)
11 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
12 |
13 | # Runtime data
14 | pids
15 | *.pid
16 | *.seed
17 | *.pid.lock
18 |
19 | # Directory for instrumented libs generated by jscoverage/JSCover
20 | lib-cov
21 |
22 | # Coverage directory used by tools like istanbul
23 | coverage
24 | *.lcov
25 |
26 | # nyc test coverage
27 | .nyc_output
28 |
29 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
30 | .grunt
31 |
32 | # Bower dependency directory (https://bower.io/)
33 | bower_components
34 |
35 | # node-waf configuration
36 | .lock-wscript
37 |
38 | # Compiled binary addons (https://nodejs.org/api/addons.html)
39 | build/Release
40 |
41 | # Dependency directories
42 | node_modules/
43 | jspm_packages/
44 |
45 | # Snowpack dependency directory (https://snowpack.dev/)
46 | web_modules/
47 |
48 | # TypeScript cache
49 | *.tsbuildinfo
50 |
51 | # Optional npm cache directory
52 | .npm
53 | .DS_STORE
54 |
55 | # Optional eslint cache
56 | .eslintcache
57 |
58 | # Optional stylelint cache
59 | .stylelintcache
60 |
61 | # Microbundle cache
62 | .rpt2_cache/
63 | .rts2_cache_cjs/
64 | .rts2_cache_es/
65 | .rts2_cache_umd/
66 |
67 | # Optional REPL history
68 | .node_repl_history
69 |
70 | # Output of 'npm pack'
71 | *.tgz
72 |
73 | # Yarn Integrity file
74 | .yarn-integrity
75 |
76 | # dotenv environment variable files
77 | .env
78 | .env.development.local
79 | .env.test.local
80 | .env.production.local
81 | .env.local
82 |
83 | # parcel-bundler cache (https://parceljs.org/)
84 | .cache
85 | .parcel-cache
86 |
87 | # Next.js build output
88 | .next
89 | out
90 |
91 | # Nuxt.js build / generate output
92 | .nuxt
93 | dist
94 |
95 | # Gatsby files
96 | .cache/
97 | # Comment in the public line if your project uses Gatsby and not Next.js
98 | # https://nextjs.org/blog/next-9-1#public-directory-support
99 | # public
100 |
101 | # vuepress build output
102 | .vuepress/dist
103 |
104 | # vuepress v2.x temp and cache directory
105 | .temp
106 | .cache
107 |
108 | # Docusaurus cache and generated files
109 | .docusaurus
110 |
111 | # Serverless directories
112 | .serverless/
113 |
114 | # FuseBox cache
115 | .fusebox/
116 |
117 | # DynamoDB Local files
118 | .dynamodb/
119 |
120 | # TernJS port file
121 | .tern-port
122 |
123 | # Stores VSCode versions used for testing VSCode extensions
124 | .vscode-test
125 |
126 | # yarn v2
127 | .yarn/cache
128 | .yarn/unplugged
129 | .yarn/build-state.yml
130 | .yarn/install-state.gz
131 | .pnp.*
132 |
--------------------------------------------------------------------------------
/playground/index.js:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/outerbase/sdk/bc0eb994dd78ee08c9738aa397454140fc4fa324/playground/index.js
--------------------------------------------------------------------------------
/playground/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "playground",
3 | "version": "1.0.0",
4 | "description": "",
5 | "main": "index.js",
6 | "type": "module",
7 | "scripts": {
8 | "start": "node index.js",
9 | "test": "echo \"Error: no test specified\" && exit 1"
10 | },
11 | "keywords": [],
12 | "author": "",
13 | "license": "ISC",
14 | "dependencies": {
15 | "express": "^4.19.2"
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/src/client.ts:
--------------------------------------------------------------------------------
1 | import { QueryResult } from './connections';
2 | import { Query } from './query';
3 | import { QueryType } from './query-params';
4 | import { AbstractDialect } from './query-builder';
5 | import { TableColumnDefinition } from './models/database';
6 | import { SqlConnection } from './connections/sql-base';
7 |
// Every statement shape the query builder can produce; the active value is
// stored on QueryBuilderInternal.action and drives dialect dispatch.
export enum QueryBuilderAction {
    SELECT = 'select',
    INSERT = 'insert',
    UPDATE = 'update',
    DELETE = 'delete',

    // Table operations
    CREATE_TABLE = 'createTable',
    UPDATE_TABLE = 'updateTable',
    DELETE_TABLE = 'deleteTable',
    TRUNCATE_TABLE = 'truncateTable',
    RENAME_TABLE = 'renameTable',
    ALTER_TABLE = 'alterTable',
    ADD_COLUMNS = 'addColumns',
    DROP_COLUMNS = 'dropColumns',
    RENAME_COLUMNS = 'renameColumns',
    UPDATE_COLUMNS = 'updateColumns',
}
26 |
27 | export type QueryPart = [string, unknown[]];
28 | export interface WhereCondition {
29 | joinType?: undefined;
30 | column: string;
31 | value: unknown;
32 | operator: string;
33 | }
34 |
35 | export interface WhereClaues {
36 | joinType: 'OR' | 'AND';
37 | conditions: (WhereCondition | WhereClaues)[];
38 | }
39 |
40 | export type WhereGenerator = () => WhereClaues;
41 |
42 | export interface OrderByClause {
43 | columnName: string;
44 | direction: 'ASC' | 'DESC';
45 | }
46 |
47 | export interface QueryBuilderInternal {
48 | action: QueryBuilderAction;
49 |
50 | schema?: string;
51 | table?: string;
52 | selectColumns: string[];
53 | whereClauses: WhereClaues;
54 | data?: Record;
55 | limit?: number;
56 | offset?: number;
57 | orderBy: OrderByClause[];
58 |
59 | // This is temporary just to make SDK work with Outerbase
60 | countingAllColumnName?: string;
61 |
62 | // ------------------------------------------
63 | // This is use for alter or create schema
64 | // ------------------------------------------
65 | dropColumn?: string;
66 | columns: {
67 | name: string;
68 | definition?: TableColumnDefinition; // For alter or create column
69 | newName?: string; // For rename column
70 | }[];
71 | newTableName?: string; // For rename table
72 | }
73 |
74 | function buildWhereClause(args: unknown[]): WhereCondition | WhereClaues {
75 | if (args.length === 0) throw new Error('No arguments provided');
76 |
77 | if (typeof args[0] === 'string') {
78 | // This should be columnName, operator, and value arguments
79 | const columnName = args[0];
80 | const operator = args[1];
81 | const value = args[2];
82 |
83 | if (!operator) throw new Error("Operator can't be empty");
84 | if (typeof operator !== 'string')
85 | throw new Error('Operator must be a string');
86 | if (value === undefined) throw new Error("Value can't be empty");
87 |
88 | const whereCondition = {
89 | column: columnName,
90 | operator,
91 | value,
92 | };
93 |
94 | return whereCondition;
95 | } else if (typeof args[0] === 'object') {
96 | // This should be Record arguments
97 | const conditions = args[0] as Record;
98 |
99 | const whereClause: WhereClaues = {
100 | joinType: 'AND',
101 | conditions: [],
102 | };
103 |
104 | for (const key in conditions) {
105 | if (conditions[key] === null) {
106 | whereClause.conditions.push({
107 | column: key,
108 | value: conditions[key],
109 | operator: 'IS',
110 | });
111 | } else {
112 | whereClause.conditions.push({
113 | column: key,
114 | value: conditions[key],
115 | operator: '=',
116 | });
117 | }
118 | }
119 |
120 | if (whereClause.conditions.length === 1) {
121 | return whereClause.conditions[0];
122 | }
123 | return whereClause;
124 | } else if (typeof args[0] === 'function') {
125 | // This should be a callback function
126 | const callback = args[0] as WhereGenerator;
127 | const whereClause = callback();
128 | return whereClause;
129 | }
130 |
131 | throw new Error('Invalid arguments');
132 | }
133 |
134 | function whereImplementation(state: QueryBuilderInternal, args: unknown[]) {
135 | const whereClause = buildWhereClause(args);
136 |
137 | // If the join type is the same, we can merge it together
138 | if (
139 | whereClause.joinType &&
140 | state.whereClauses.joinType === whereClause.joinType
141 | ) {
142 | state.whereClauses.conditions = [
143 | ...state.whereClauses.conditions,
144 | ...whereClause.conditions,
145 | ];
146 | } else {
147 | state.whereClauses.conditions.push(whereClause);
148 | }
149 | }
150 |
151 | abstract class IQueryBuilder {
152 | abstract state: QueryBuilderInternal;
153 |
154 | protected connection: SqlConnection;
155 |
156 | constructor(connection: SqlConnection) {
157 | this.connection = connection;
158 | }
159 |
160 | toQuery(): Query {
161 | return buildQueryString(
162 | this.state,
163 | QueryType.named,
164 | this.connection.dialect
165 | );
166 | }
167 |
168 | query(): Promise {
169 | const query = this.toQuery();
170 | return this.connection.query(query);
171 | }
172 | }
173 |
174 | function createBlankState(action: QueryBuilderAction): QueryBuilderInternal {
175 | return {
176 | action,
177 | whereClauses: { joinType: 'AND', conditions: [] },
178 | selectColumns: [],
179 | orderBy: [],
180 | columns: [],
181 | };
182 | }
183 |
184 | class QueryBuilderSelect extends IQueryBuilder {
185 | state: QueryBuilderInternal = createBlankState(QueryBuilderAction.SELECT);
186 |
187 | constructor(connection: SqlConnection, columnNames: string[]) {
188 | super(connection);
189 | this.state.selectColumns = columnNames;
190 | }
191 |
192 | from(tableName: string) {
193 | this.state.table = tableName;
194 | return this;
195 | }
196 |
197 | select(...columName: string[]) {
198 | this.state.selectColumns = [...this.state.selectColumns, ...columName];
199 | return this;
200 | }
201 |
202 | count(columnName: string) {
203 | this.state.countingAllColumnName = columnName;
204 | return this;
205 | }
206 |
207 | where(conditions: Record): QueryBuilderSelect;
208 | where(
209 | columName: string,
210 | operator: string,
211 | value: unknown
212 | ): QueryBuilderSelect;
213 | where(callback: WhereGenerator): QueryBuilderSelect;
214 | where(...args: unknown[]) {
215 | whereImplementation(this.state, args);
216 | return this;
217 | }
218 |
219 | offset(offset: number) {
220 | this.state.offset = offset;
221 | return this;
222 | }
223 |
224 | limit(limit: number) {
225 | this.state.limit = limit;
226 | return this;
227 | }
228 |
229 | orderBy(columnName: string, direction: 'ASC' | 'DESC' = 'ASC') {
230 | this.state.orderBy.push({ columnName, direction });
231 | return this;
232 | }
233 | }
234 |
235 | class QueryBuilderInsert extends IQueryBuilder {
236 | state: QueryBuilderInternal = createBlankState(QueryBuilderAction.INSERT);
237 |
238 | constructor(connection: SqlConnection, data: Record) {
239 | super(connection);
240 | this.state.data = data;
241 | }
242 |
243 | into(tableName: string) {
244 | this.state.table = tableName;
245 | return this;
246 | }
247 | }
248 |
249 | class QueryBuilderUpdate extends IQueryBuilder {
250 | state: QueryBuilderInternal = createBlankState(QueryBuilderAction.UPDATE);
251 |
252 | constructor(connection: SqlConnection, data: Record) {
253 | super(connection);
254 | this.state.data = data;
255 | }
256 |
257 | into(tableName: string) {
258 | this.state.table = tableName;
259 | return this;
260 | }
261 |
262 | where(conditions: Record): QueryBuilderUpdate;
263 | where(
264 | columName: string,
265 | operator: string,
266 | value: unknown
267 | ): QueryBuilderUpdate;
268 | where(callback: WhereGenerator): QueryBuilderUpdate;
269 | where(...args: unknown[]) {
270 | whereImplementation(this.state, args);
271 | return this;
272 | }
273 | }
274 |
275 | class QueryBuilderDelete extends IQueryBuilder {
276 | state: QueryBuilderInternal = createBlankState(QueryBuilderAction.DELETE);
277 |
278 | constructor(connection: SqlConnection) {
279 | super(connection);
280 | }
281 |
282 | from(tableName: string) {
283 | this.state.table = tableName;
284 | return this;
285 | }
286 |
287 | where(conditions: Record): QueryBuilderDelete;
288 | where(
289 | columName: string,
290 | operator: string,
291 | value: unknown
292 | ): QueryBuilderDelete;
293 | where(callback: WhereGenerator): QueryBuilderDelete;
294 | where(...args: unknown[]) {
295 | whereImplementation(this.state, args);
296 | return this;
297 | }
298 | }
299 |
300 | class QueryBuilderCreateTable extends IQueryBuilder {
301 | state: QueryBuilderInternal = createBlankState(
302 | QueryBuilderAction.CREATE_TABLE
303 | );
304 |
305 | constructor(connection: SqlConnection, tableName: string) {
306 | super(connection);
307 | this.state.table = tableName;
308 | }
309 |
310 | column(name: string, definition: TableColumnDefinition) {
311 | this.state.columns.push({ name, definition });
312 | return this;
313 | }
314 | }
315 |
// Builder for a DROP TABLE statement; the target table is fixed at
// construction time and there is nothing further to configure.
class QueryBuilderDropTable extends IQueryBuilder {
    state: QueryBuilderInternal = createBlankState(
        QueryBuilderAction.DELETE_TABLE
    );

    constructor(connection: SqlConnection, tableName: string) {
        super(connection);
        this.state.table = tableName;
    }
}
326 |
327 | class QueryBuilderAlterTable extends IQueryBuilder {
328 | state: QueryBuilderInternal = createBlankState(
329 | QueryBuilderAction.ALTER_TABLE
330 | );
331 |
332 | constructor(connection: SqlConnection, tableName: string) {
333 | super(connection);
334 | this.state.table = tableName;
335 | }
336 |
337 | alterColumn(columnName: string, definition: TableColumnDefinition) {
338 | this.state.action = QueryBuilderAction.UPDATE_COLUMNS;
339 | this.state.columns.push({ name: columnName, definition });
340 | return this;
341 | }
342 |
343 | addColumn(name: string, definition: TableColumnDefinition) {
344 | this.state.action = QueryBuilderAction.ADD_COLUMNS;
345 | this.state.columns.push({ name, definition });
346 | return this;
347 | }
348 |
349 | dropColumn(name: string) {
350 | this.state.action = QueryBuilderAction.DROP_COLUMNS;
351 | this.state.dropColumn = name;
352 | return this;
353 | }
354 |
355 | renameTable(newTableName: string) {
356 | this.state.action = QueryBuilderAction.RENAME_TABLE;
357 | this.state.newTableName = newTableName;
358 | return this;
359 | }
360 |
361 | renameColumn(columnName: string, newColumnName: string) {
362 | this.state.action = QueryBuilderAction.RENAME_COLUMNS;
363 | this.state.columns = [
364 | {
365 | name: columnName,
366 | newName: newColumnName,
367 | },
368 | ];
369 | return this;
370 | }
371 | }
372 |
373 | class QueryBuilder {
374 | connection: SqlConnection;
375 |
376 | constructor(connection: SqlConnection) {
377 | this.connection = connection;
378 | }
379 |
380 | select(...columnName: string[]) {
381 | return new QueryBuilderSelect(this.connection, columnName);
382 | }
383 |
384 | insert(data: Record) {
385 | return new QueryBuilderInsert(this.connection, data);
386 | }
387 |
388 | update(data: Record) {
389 | return new QueryBuilderUpdate(this.connection, data);
390 | }
391 |
392 | delete() {
393 | return new QueryBuilderDelete(this.connection);
394 | }
395 |
396 | createTable(tableName: string) {
397 | return new QueryBuilderCreateTable(this.connection, tableName);
398 | }
399 |
400 | dropTable(tableName: string) {
401 | return new QueryBuilderDropTable(this.connection, tableName);
402 | }
403 |
404 | alterTable(tableName: string) {
405 | return new QueryBuilderAlterTable(this.connection, tableName);
406 | }
407 |
408 | or(
409 | ...args: (WhereClaues | WhereCondition | WhereGenerator)[]
410 | ): WhereGenerator {
411 | return () => ({
412 | joinType: 'OR',
413 | conditions: args.map((arg) => {
414 | if (typeof arg === 'function') {
415 | return arg();
416 | }
417 | return arg;
418 | }),
419 | });
420 | }
421 |
422 | and(
423 | ...args: (WhereClaues | WhereCondition | WhereGenerator)[]
424 | ): WhereGenerator {
425 | return () => ({
426 | joinType: 'AND',
427 | conditions: args.map((arg) => {
428 | if (typeof arg === 'function') {
429 | return arg();
430 | }
431 | return arg;
432 | }),
433 | });
434 | }
435 |
436 | where(conditions: Record): WhereClaues | WhereCondition;
437 | where(columName: string, operator: string, value: unknown): WhereCondition;
438 | where(...args: unknown[]): WhereClaues | WhereCondition {
439 | return buildWhereClause(args);
440 | }
441 | }
442 |
443 | export function Outerbase(connection: SqlConnection) {
444 | return new QueryBuilder(connection);
445 | }
446 |
447 | function buildQueryString(
448 | queryBuilder: QueryBuilderInternal,
449 | queryType: QueryType,
450 | dialect: AbstractDialect
451 | ): Query {
452 | switch (queryBuilder.action) {
453 | case QueryBuilderAction.SELECT:
454 | return dialect.select(queryBuilder);
455 | case QueryBuilderAction.INSERT:
456 | return dialect.insert(queryBuilder);
457 | case QueryBuilderAction.UPDATE:
458 | return dialect.update(queryBuilder);
459 | case QueryBuilderAction.DELETE:
460 | return dialect.delete(queryBuilder);
461 | case QueryBuilderAction.CREATE_TABLE:
462 | return dialect.createTable(queryBuilder);
463 | case QueryBuilderAction.DELETE_TABLE:
464 | return dialect.dropTable(queryBuilder);
465 | case QueryBuilderAction.RENAME_TABLE:
466 | return dialect.renameTable(queryBuilder);
467 | case QueryBuilderAction.ADD_COLUMNS:
468 | return dialect.addColumn(queryBuilder);
469 | case QueryBuilderAction.UPDATE_COLUMNS:
470 | return dialect.alterColumn(queryBuilder);
471 | case QueryBuilderAction.RENAME_COLUMNS:
472 | return dialect.renameColumn(queryBuilder);
473 | case QueryBuilderAction.DROP_COLUMNS:
474 | return dialect.dropColumn(queryBuilder);
475 | default:
476 | throw new Error('Invalid action');
477 | }
478 | }
479 |
--------------------------------------------------------------------------------
/src/connections/bigquery.ts:
--------------------------------------------------------------------------------
1 | import { QueryType } from '../query-params';
2 | import { Query } from '../query';
3 | import { ConnectionSelectOptions, QueryResult } from './index';
4 | import { Database, Schema, Table, TableColumn } from '../models/database';
5 | import { BigQueryDialect } from '../query-builder/dialects/bigquery';
6 | import { BigQuery } from '@google-cloud/bigquery';
7 | import {
8 | createErrorResult,
9 | transformObjectBasedResultFirstRow,
10 | } from './../utils/transformer';
11 | import { SqlConnection } from './sql-base';
12 |
// BigQuery type names whose values are coerced to JavaScript numbers during
// auto-casting (see BigQueryConnection.transformTypedValue below).
const NUMERIC_TYPE = [
    'INT64',
    'FLOAT64',
    'INTEGER',
    'FLOAT',
    'NUMERIC',
    'BIGNUMERIC',
];
21 | export class BigQueryConnection extends SqlConnection {
22 | bigQuery: BigQuery;
23 | dialect = new BigQueryDialect();
24 | cacheFields: Record> = {};
25 |
26 | constructor(bigQuery: any) {
27 | super();
28 | this.bigQuery = bigQuery;
29 | }
30 |
31 | async connect(): Promise {
32 | return Promise.resolve();
33 | }
34 |
35 | async disconnect(): Promise {
36 | return Promise.resolve();
37 | }
38 |
39 | createTable(
40 | schemaName: string | undefined,
41 | tableName: string,
42 | columns: TableColumn[]
43 | ): Promise {
44 | // BigQuery does not support PRIMARY KEY. We can remove if here
45 | const tempColumns = structuredClone(columns);
46 | for (const column of tempColumns) {
47 | delete column.definition.references;
48 | }
49 |
50 | return super.createTable(schemaName, tableName, tempColumns);
51 | }
52 |
53 | async getFields(
54 | schemaName: string,
55 | tableName: string
56 | ): Promise> {
57 | if (this.cacheFields[schemaName]) return this.cacheFields[schemaName];
58 |
59 | if (!schemaName)
60 | throw new Error('Schema name is required for BigQuery');
61 |
62 | const [metadata] = await this.bigQuery
63 | .dataset(schemaName)
64 | .table(tableName)
65 | .getMetadata();
66 |
67 | const fields: { name: string; type: string }[] = metadata.schema.fields;
68 | const fieldsType: Record = fields.reduce(
69 | (acc, field) => {
70 | acc[field.name] = field.type;
71 | return acc;
72 | },
73 | {} as Record
74 | );
75 |
76 | this.cacheFields[schemaName] = fieldsType;
77 | return fieldsType;
78 | }
79 |
80 | transformTypedValue(type: string, value: unknown) {
81 | if (value === null) return value;
82 |
83 | if (NUMERIC_TYPE.includes(type)) {
84 | return Number(value);
85 | }
86 |
87 | return value;
88 | }
89 |
90 | async autoCastingType(
91 | schemaName: string | undefined,
92 | tableName: string,
93 | data: Record
94 | ): Promise> {
95 | const tmp = structuredClone(data);
96 |
97 | if (!schemaName)
98 | throw new Error('Schema name is required for BigQuery');
99 |
100 | const fieldsType: Record = await this.getFields(
101 | schemaName,
102 | tableName
103 | );
104 |
105 | for (const key in tmp) {
106 | const type = fieldsType[key];
107 | if (!type) continue;
108 | tmp[key] = this.transformTypedValue(type, tmp[key]);
109 | }
110 |
111 | return tmp;
112 | }
113 |
114 | async insert(
115 | schemaName: string | undefined,
116 | tableName: string,
117 | data: Record
118 | ): Promise {
119 | return super.insert(
120 | schemaName,
121 | tableName,
122 | await this.autoCastingType(schemaName, tableName, data)
123 | );
124 | }
125 |
126 | async insertMany(
127 | schemaName: string | undefined,
128 | tableName: string,
129 | data: Record[]
130 | ): Promise {
131 | const newData: Record[] = [];
132 |
133 | for (const item of data) {
134 | newData.push(
135 | await this.autoCastingType(schemaName, tableName, item)
136 | );
137 | }
138 |
139 | return super.insertMany(schemaName, tableName, newData);
140 | }
141 |
142 | async update(
143 | schemaName: string | undefined,
144 | tableName: string,
145 | data: Record,
146 | where: Record
147 | ): Promise {
148 | return super.update(
149 | schemaName,
150 | tableName,
151 | await this.autoCastingType(schemaName, tableName, data),
152 | await this.autoCastingType(schemaName, tableName, where)
153 | );
154 | }
155 |
156 | async delete(
157 | schemaName: string,
158 | tableName: string,
159 | where: Record
160 | ): Promise {
161 | return super.delete(
162 | schemaName,
163 | tableName,
164 | await this.autoCastingType(schemaName, tableName, where)
165 | );
166 | }
167 |
168 | async select(
169 | schemaName: string,
170 | tableName: string,
171 | options: ConnectionSelectOptions
172 | ): Promise {
173 | // Auto casting the where
174 | let where = options.where;
175 |
176 | if (where && where.length > 0) {
177 | const fields = await this.getFields(schemaName, tableName);
178 | where = where.map((t) => {
179 | const type = fields[t.name];
180 | if (!type) return t;
181 |
182 | return {
183 | ...t,
184 | value: this.transformTypedValue(type, t.value),
185 | };
186 | });
187 | }
188 |
189 | return super.select(schemaName, tableName, {
190 | ...options,
191 | where,
192 | });
193 | }
194 |
195 | /**
196 | * Triggers a query action on the current Connection object.
197 | *
198 | * The parameters object is sent along with the query to be used in the
199 | * query. By default if the query has parameters the SQL statement will
200 | * produce a string with `:property` values that the parameters object
201 | * keys should map to, and will be replaced by.
202 | *
203 | * @param query - The SQL query to be executed.
204 | * @param parameters - An object containing the parameters to be used in the query.
205 | * @returns Promise<{ data: any, error: Error | null }>
206 | */
207 | async internalQuery>(
208 | query: Query
209 | ): Promise> {
210 | try {
211 | const options = {
212 | query: query.query,
213 | params: query.parameters,
214 | useLegacySql: false,
215 | };
216 |
217 | const [rows] = await this.bigQuery.query(options);
218 | return transformObjectBasedResultFirstRow(rows) as QueryResult;
219 | } catch (error) {
220 | if (error instanceof Error) {
221 | return createErrorResult(error.message) as QueryResult;
222 | }
223 |
224 | return createErrorResult('Unexpected Error') as QueryResult;
225 | }
226 | }
227 |
228 | public async fetchDatabaseSchema(): Promise {
229 | const [datasetList] = await this.bigQuery.getDatasets();
230 |
231 | // Construct the query to get all the table in one go
232 | const sql = datasetList
233 | .map((dataset) => {
234 | const schemaPath = `${this.bigQuery.projectId}.${dataset.id}`;
235 |
236 | return `(
237 | SELECT
238 | a.table_schema,
239 | a.table_name,
240 | a.column_name,
241 | a.data_type,
242 | b.constraint_schema,
243 | b.constraint_name,
244 | c.constraint_type
245 | FROM \`${schemaPath}.INFORMATION_SCHEMA.COLUMNS\` AS a LEFT JOIN \`${schemaPath}.INFORMATION_SCHEMA.KEY_COLUMN_USAGE\` AS b ON (
246 | a.table_schema = b.table_schema AND
247 | a.table_name = b.table_name AND
248 | a.column_name = b.column_name
249 | ) LEFT JOIN \`${schemaPath}.INFORMATION_SCHEMA.TABLE_CONSTRAINTS\` AS c ON (
250 | b.constraint_schema = c.constraint_schema AND
251 | b.constraint_name = c.constraint_name
252 | )
253 | )`;
254 | })
255 | .join(' UNION ALL ');
256 |
257 | const { data } = await this.query<{
258 | table_schema: string;
259 | table_name: string;
260 | column_name: string;
261 | data_type: string;
262 | constraint_schema: string;
263 | constraint_name: string;
264 | constraint_type: null | 'PRIMARY KEY' | 'FOREIGN KEY';
265 | }>({ query: sql });
266 |
267 | // Group the database schema by table
268 | const database: Database = datasetList.reduce(
269 | (acc, dataset) => {
270 | acc[dataset.id ?? ''] = {};
271 | return acc;
272 | },
273 | {} as Record
274 | );
275 |
276 | // Group the table by database
277 | data.forEach((row) => {
278 | const schema = database[row.table_schema];
279 | if (!schema) {
280 | return;
281 | }
282 |
283 | const table = schema[row.table_name] ?? {
284 | name: row.table_name,
285 | columns: [],
286 | indexes: [],
287 | constraints: [],
288 | };
289 |
290 | if (!schema[row.table_name]) {
291 | schema[row.table_name] = table;
292 | }
293 |
294 | // Add the column to the table
295 | table.columns.push({
296 | name: row.column_name,
297 | definition: {
298 | type: row.data_type,
299 | primaryKey: row.constraint_type === 'PRIMARY KEY',
300 | },
301 | });
302 |
303 | // Add the constraint to the table
304 | if (row.constraint_name && row.constraint_type === 'PRIMARY KEY') {
305 | let constraint = table.constraints.find(
306 | (c) => c.name === row.constraint_name
307 | );
308 |
309 | if (!constraint) {
310 | constraint = {
311 | name: row.constraint_name,
312 | schema: row.constraint_schema,
313 | tableName: row.table_name,
314 | type: row.constraint_type,
315 | columns: [],
316 | };
317 |
318 | table.constraints.push(constraint);
319 | }
320 |
321 | constraint.columns.push({
322 | columnName: row.column_name,
323 | });
324 | }
325 | });
326 |
327 | return database;
328 | }
329 | }
330 |
--------------------------------------------------------------------------------
/src/connections/index.ts:
--------------------------------------------------------------------------------
1 | import { ColumnHeader, ResultSet } from '@outerbase/sdk-transform';
2 | import {
3 | Database,
4 | TableColumn,
5 | TableColumnDefinition,
6 | } from '../models/database';
7 |
8 | export interface QueryResult>
9 | extends Omit {
10 | data: T[];
11 | count?: number;
12 | error: Error | null;
13 | query: string;
14 | }
15 | export interface ConnectionSelectOptions {
16 | where?: { name: string; value: unknown; operator: string }[];
17 | orderBy?: (string | [string, 'ASC' | 'DESC'])[];
18 | offset?: number;
19 | limit?: number;
20 | includeCounting?: boolean;
21 | }
22 | export abstract class Connection {
23 | // Handles logic for securely connecting and properly disposing of the connection.
24 | abstract connect(): Promise;
25 | abstract disconnect(): Promise;
26 |
27 | // Retrieve metadata about the database, useful for introspection.
28 | abstract fetchDatabaseSchema(): Promise;
29 | abstract raw(
30 | query: string,
31 | params?: Record | unknown[]
32 | ): Promise;
33 | abstract testConnection(): Promise<{ error?: string }>;
34 |
35 | // Connection common operations that will be used by Outerbase
36 | abstract insert(
37 | schemaName: string | undefined,
38 | tableName: string,
39 | data: Record
40 | ): Promise;
41 |
42 | abstract insertMany(
43 | schemaName: string | undefined,
44 | tableName: string,
45 | data: Record[]
46 | ): Promise;
47 |
48 | abstract update(
49 | schemaName: string | undefined,
50 | tableName: string,
51 | data: Record,
52 | where: Record
53 | ): Promise;
54 |
55 | abstract delete(
56 | schemaName: string,
57 | tableName: string,
58 | where: Record
59 | ): Promise;
60 |
61 | abstract select(
62 | schemaName: string,
63 | tableName: string,
64 | options: ConnectionSelectOptions
65 | ): Promise;
66 |
67 | // Changing schema operations
68 | abstract dropTable(
69 | schemaName: string | undefined,
70 | tableName: string
71 | ): Promise;
72 |
73 | abstract createTable(
74 | schemaName: string | undefined,
75 | tableName: string,
76 | columns: Partial[]
77 | ): Promise;
78 |
79 | abstract renameColumn(
80 | schemaName: string | undefined,
81 | tableName: string,
82 | columnName: string,
83 | newColumnName: string
84 | ): Promise;
85 |
86 | abstract renameTable(
87 | schemaName: string | undefined,
88 | tableName: string,
89 | newTableName: string
90 | ): Promise;
91 |
92 | abstract alterColumn(
93 | schemaName: string | undefined,
94 | tableName: string,
95 | columnName: string,
96 | defintion: TableColumnDefinition
97 | ): Promise;
98 |
99 | abstract addColumn(
100 | schemaName: string | undefined,
101 | tableName: string,
102 | columnName: string,
103 | defintion: TableColumnDefinition
104 | ): Promise;
105 |
106 | abstract dropColumn(
107 | schemaName: string | undefined,
108 | tableName: string,
109 | columnName: string
110 | ): Promise;
111 | }
112 |
--------------------------------------------------------------------------------
/src/connections/motherduck.ts:
--------------------------------------------------------------------------------
1 | import duckDB from 'duckdb';
2 |
3 | import { Query } from '../query';
4 | import { QueryResult } from './index';
5 | import { PostgreBaseConnection } from './postgre/base';
6 | import {
7 | createErrorResult,
8 | transformObjectBasedResultFirstRow,
9 | } from './../utils/transformer';
10 |
11 | export class DuckDBConnection extends PostgreBaseConnection {
12 | client: duckDB.Database;
13 | connection: duckDB.Connection;
14 |
15 | constructor(client: any) {
16 | super();
17 | this.client = client;
18 | this.connection = client.connect();
19 | }
20 |
21 | /**
22 | * Performs a connect action on the current Connection object.
23 | *
24 | * @param details - Unused in the Motherduck scenario.
25 | * @returns Promise
26 | */
27 | async connect(): Promise {
28 | if (this.connection) {
29 | return this.client.connect();
30 | }
31 | }
32 |
33 | /**
34 | * Performs a disconnect action on the current Connection object.
35 | *
36 | * @returns Promise
37 | */
38 | async disconnect(): Promise {
39 | return this.client.close();
40 | }
41 |
42 | /**
43 | * Triggers a query action on the current Connection object.
44 | *
45 | * The parameters object is sent along with the query to be used in the
46 | * query. By default if the query has parameters the SQL statement will
47 | * produce a string with `?::[DataType]` values that the parameters object
48 | * keys should map to, and will be replaced by.
49 | *
50 | * @param query - The SQL query to be executed.
51 | * @param parameters - An object containing the parameters to be used in the query.
52 | * @returns Promise<{ data: any, error: Error | null }>
53 | */
54 | async internalQuery>(
55 | query: Query
56 | ): Promise> {
57 | const connection = this.connection;
58 | if (!connection) throw new Error('No DuckDB connection was found.');
59 |
60 | const { res, err } = await this.runQuery(
61 | query.query,
62 | query.parameters ?? []
63 | );
64 |
65 | if (err) {
66 | return createErrorResult(err.message) as QueryResult;
67 | }
68 |
69 | return transformObjectBasedResultFirstRow(res) as QueryResult;
70 | }
71 |
72 | protected runQuery(
73 | query: string,
74 | values: unknown[]
75 | ): Promise<{ res: duckDB.TableData; err: duckDB.DuckDbError | null }> {
76 | return new Promise((resolve) => {
77 | const stmt = this.connection.prepare(query);
78 | stmt.all(...values, (err, res) => {
79 | resolve({ res, err });
80 | });
81 | });
82 | }
83 | }
84 |
--------------------------------------------------------------------------------
/src/connections/mysql.ts:
--------------------------------------------------------------------------------
1 | import {
2 | FieldPacket,
3 | QueryError,
4 | type Connection,
5 | type QueryResult as MySQLQueryResult,
6 | } from 'mysql2';
7 | import { SqlConnection } from './sql-base';
8 | import { Query } from '../query';
9 | import { QueryType } from '../query-params';
10 | import { Constraint, Database, Table, TableColumn } from './../models/database';
11 | import { MySQLDialect } from './../query-builder/dialects/mysql';
12 | import {
13 | createErrorResult,
14 | transformFromSdkTransform,
15 | } from './../utils/transformer';
16 | import { QueryResult } from '.';
17 | import { ColumnDataType } from '../query-builder';
18 | import { transformMySQLResult } from '@outerbase/sdk-transform';
19 |
20 | interface MySQLSchemaResult {
21 | SCHEMA_NAME: string;
22 | }
23 | interface MySQLTableResult {
24 | TABLE_SCHEMA: string;
25 | TABLE_NAME: string;
26 | }
27 |
28 | interface MySQLColumnResult {
29 | TABLE_SCHEMA: string;
30 | TABLE_NAME: string;
31 | COLUMN_NAME: string;
32 | COLUMN_TYPE: string;
33 | ORDINAL_POSITION: number;
34 | IS_NULLABLE: string;
35 | COLUMN_DEFAULT: string | null;
36 | COLUMN_KEY: string;
37 | EXTRA: string;
38 | }
39 |
40 | interface MySQLConstraintResult {
41 | TABLE_SCHEMA: string;
42 | TABLE_NAME: string;
43 | CONSTRAINT_NAME: string;
44 | CONSTRAINT_TYPE: string;
45 | }
46 |
47 | export interface MySQLConstraintColumnResult {
48 | TABLE_SCHEMA: string;
49 | TABLE_NAME: string;
50 | COLUMN_NAME: string;
51 | CONSTRAINT_NAME: string;
52 | REFERENCED_TABLE_SCHEMA: string;
53 | REFERENCED_TABLE_NAME: string;
54 | REFERENCED_COLUMN_NAME: string;
55 | }
56 |
57 | // We expose this function for other drivers with similar structure
58 | // For example: PostgreSQL has almost the same structure with MySQL
59 | export function buildMySQLDatabaseSchmea({
60 | schemaList,
61 | tableList,
62 | columnList,
63 | constraintsList,
64 | constraintColumnsList,
65 | }: {
66 | schemaList: MySQLSchemaResult[];
67 | tableList: MySQLTableResult[];
68 | columnList: MySQLColumnResult[];
69 | constraintsList: MySQLConstraintResult[];
70 | constraintColumnsList: MySQLConstraintColumnResult[];
71 | }): Database {
72 | // Create dictionary of schema
73 | const result = schemaList.reduce((db, schema) => {
74 | db[schema.SCHEMA_NAME] = {};
75 | return db;
76 | }, {});
77 |
78 | // Table lookup by schema and table name
79 | const tableLookup: Record = {};
80 |
81 | // Group tables by schema and also build table lookup
82 | for (const table of tableList) {
83 | if (!result[table.TABLE_SCHEMA]) {
84 | result[table.TABLE_SCHEMA] = {};
85 | }
86 |
87 | if (!result[table.TABLE_SCHEMA][table.TABLE_NAME]) {
88 | const tableObject = {
89 | name: table.TABLE_NAME,
90 | columns: [],
91 | indexes: [],
92 | constraints: [],
93 | };
94 |
95 | tableLookup[table.TABLE_SCHEMA + '.' + table.TABLE_NAME] =
96 | tableObject;
97 | result[table.TABLE_SCHEMA][table.TABLE_NAME] = tableObject;
98 | }
99 | }
100 |
101 | // Column lookup by schema, table and column name
102 | const columnLookup: Record = {};
103 |
104 | // Group the columns by table and also build column lookup
105 | for (const column of columnList) {
106 | const table =
107 | tableLookup[column.TABLE_SCHEMA + '.' + column.TABLE_NAME];
108 |
109 | if (!table) continue;
110 |
111 | const columnObject: TableColumn = {
112 | name: column.COLUMN_NAME,
113 | position: column.ORDINAL_POSITION,
114 | definition: {
115 | type: column.COLUMN_TYPE,
116 | nullable: column.IS_NULLABLE === 'YES',
117 | default: column.COLUMN_DEFAULT,
118 | primaryKey: column.COLUMN_KEY === 'PRI',
119 | unique: column.EXTRA === 'UNI',
120 | },
121 | };
122 |
123 | columnLookup[
124 | column.TABLE_SCHEMA +
125 | '.' +
126 | column.TABLE_NAME +
127 | '.' +
128 | column.COLUMN_NAME
129 | ] = columnObject;
130 |
131 | table.columns.push(columnObject);
132 | }
133 |
134 | // Constraint lookup by schema and constraint name
135 | const constraintLookup: Record = {};
136 |
137 | // Group constraints by table and also build constraint lookup
138 | for (const constraint of constraintsList) {
139 | const table =
140 | tableLookup[constraint.TABLE_SCHEMA + '.' + constraint.TABLE_NAME];
141 |
142 | if (!table) continue;
143 |
144 | const constraintObject = {
145 | name: constraint.CONSTRAINT_NAME,
146 | schema: constraint.TABLE_SCHEMA,
147 | tableName: constraint.TABLE_NAME,
148 | type: constraint.CONSTRAINT_TYPE,
149 | columns: [],
150 | } as Constraint;
151 |
152 | constraintLookup[
153 | constraint.TABLE_SCHEMA +
154 | '.' +
155 | constraint.TABLE_NAME +
156 | '.' +
157 | constraint.CONSTRAINT_NAME
158 | ] = constraintObject;
159 |
160 | table.constraints.push(constraintObject);
161 | }
162 |
163 | // Group constraint columns by constraint
164 | for (const constraintColumn of constraintColumnsList) {
165 | const constraint =
166 | constraintLookup[
167 | constraintColumn.TABLE_SCHEMA +
168 | '.' +
169 | constraintColumn.TABLE_NAME +
170 | '.' +
171 | constraintColumn.CONSTRAINT_NAME
172 | ];
173 |
174 | if (!constraint) continue;
175 |
176 | const currentColumn =
177 | columnLookup[
178 | constraintColumn.TABLE_SCHEMA +
179 | '.' +
180 | constraintColumn.TABLE_NAME +
181 | '.' +
182 | constraintColumn.COLUMN_NAME
183 | ];
184 | if (currentColumn && constraintColumn.REFERENCED_COLUMN_NAME) {
185 | currentColumn.definition.references = {
186 | table: constraintColumn.REFERENCED_TABLE_NAME,
187 | column: [constraintColumn.REFERENCED_COLUMN_NAME],
188 | };
189 |
190 | constraint.referenceSchema =
191 | constraintColumn.REFERENCED_TABLE_SCHEMA;
192 | constraint.referenceTableName =
193 | constraintColumn.REFERENCED_TABLE_NAME;
194 | }
195 |
196 | constraint.columns.push({
197 | columnName: constraintColumn.COLUMN_NAME,
198 | referenceColumnName: constraintColumn.REFERENCED_COLUMN_NAME,
199 | });
200 | }
201 |
202 | return result;
203 | }
204 |
205 | export class MySQLConnection extends SqlConnection {
206 | protected conn: Connection;
207 | public dialect = new MySQLDialect();
208 | queryType = QueryType.positional;
209 |
210 | constructor(conn: any) {
211 | super();
212 | this.conn = conn;
213 | }
214 |
215 | mapDataType(dataType: string): string {
216 | if (dataType === ColumnDataType.STRING) return 'TEXT';
217 | if (dataType === ColumnDataType.NUMBER) return 'INT';
218 | if (dataType === ColumnDataType.ARRAY) return 'JSON';
219 | if (dataType === ColumnDataType.UUID) return 'TEXT';
220 | return super.mapDataType(dataType);
221 | }
222 |
223 | async internalQuery>(
224 | query: Query
225 | ): Promise> {
226 | try {
227 | const { fields, rows, error } = await new Promise<{
228 | rows: MySQLQueryResult;
229 | error: QueryError | null;
230 | fields: FieldPacket[];
231 | }>((r) =>
232 | this.conn.query(
233 | {
234 | sql: query.query,
235 | rowsAsArray: true,
236 | },
237 | query.parameters,
238 | (error, result, fields) => {
239 | if (Array.isArray(result)) {
240 | r({
241 | rows: (result as MySQLQueryResult) ?? [],
242 | fields: fields,
243 | error,
244 | });
245 | }
246 | return r({ rows: [], error, fields: [] });
247 | }
248 | )
249 | );
250 |
251 | if (error) {
252 | return createErrorResult(error.message);
253 | } else {
254 | return transformFromSdkTransform(
255 | transformMySQLResult([rows, fields])
256 | );
257 | }
258 | } catch {
259 | return createErrorResult('Unknown error') as QueryResult;
260 | }
261 | }
262 |
263 | async fetchDatabaseSchema(): Promise {
264 | // Get all the schema list
265 | const { data: schemaList } = await this.query({
266 | query: "SELECT SCHEMA_NAME FROM information_schema.SCHEMATA WHERE SCHEMA_NAME NOT IN ('mysql', 'information_schema', 'performance_schema', 'sys')",
267 | });
268 |
269 | const { data: tableList } = await this.query({
270 | query: "SELECT TABLE_SCHEMA, TABLE_NAME, TABLE_TYPE FROM information_schema.tables WHERE TABLE_SCHEMA NOT IN ('mysql', 'information_schema', 'performance_schema', 'sys')",
271 | });
272 |
273 | const { data: columnList } = await this.query({
274 | query: "SELECT TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, COLUMN_TYPE, ORDINAL_POSITION, IS_NULLABLE, COLUMN_DEFAULT, COLUMN_KEY, EXTRA FROM information_schema.columns WHERE TABLE_SCHEMA NOT IN ('mysql', 'information_schema', 'performance_schema', 'sys')",
275 | });
276 |
277 | const { data: constraintsList } =
278 | await this.query({
279 | query: `SELECT TABLE_SCHEMA, TABLE_NAME, CONSTRAINT_NAME, CONSTRAINT_TYPE FROM information_schema.table_constraints WHERE TABLE_SCHEMA NOT IN ('mysql', 'information_schema', 'performance_schema', 'sys') AND CONSTRAINT_TYPE IN ('PRIMARY KEY', 'UNIQUE', 'FOREIGN KEY')`,
280 | });
281 |
282 | const { data: constraintColumnsList } =
283 | await this.query({
284 | query: `SELECT CONSTRAINT_NAME, TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, REFERENCED_TABLE_SCHEMA, REFERENCED_TABLE_NAME, REFERENCED_COLUMN_NAME FROM information_schema.key_column_usage WHERE TABLE_SCHEMA NOT IN ('mysql', 'information_schema', 'performance_schema', 'sys')`,
285 | });
286 |
287 | return buildMySQLDatabaseSchmea({
288 | schemaList,
289 | tableList,
290 | columnList,
291 | constraintsList,
292 | constraintColumnsList,
293 | });
294 | }
295 |
296 | async renameTable(
297 | schemaName: string | undefined,
298 | tableName: string,
299 | newTableName: string
300 | ): Promise {
301 | // If we don't put the schema name when rename the table,
302 | // it might change the schema of the table after rename.
303 | return super.renameTable(
304 | schemaName,
305 | tableName,
306 | schemaName ? `${schemaName}.${newTableName}` : newTableName
307 | );
308 | }
309 |
310 | async renameColumn(
311 | schemaName: string | undefined,
312 | tableName: string,
313 | columnName: string,
314 | newColumnName: string
315 | ): Promise {
316 | // Check the MySQL version
317 | const { data: version } = await this.query<{ version: string }>({
318 | query: 'SELECT VERSION() AS version',
319 | });
320 |
321 | const fullTableName = this.dialect.escapeId(
322 | schemaName ? `${schemaName}.${tableName}` : tableName
323 | );
324 |
325 | const versionNumber = parseInt(version[0].version.split('.')[0]);
326 | if (versionNumber < 8) {
327 | // We cannot rename column in MySQL version less than 8 using RENAME COLUMN
328 | // We need to get the CREATE SCRIPT of the table
329 | const { data: createTableResponse } = await this.query<{
330 | 'Create Table': string;
331 | }>({
332 | query: `SHOW CREATE TABLE ${fullTableName}`,
333 | });
334 |
335 | // Cannot rename column if the table does not exist
336 | if (createTableResponse.length === 0)
337 | return createErrorResult('Table does not exist');
338 |
339 | // Get the line of the column
340 | const createTable = createTableResponse[0]['Create Table'];
341 | const lists = createTable.split('\n');
342 | const columnLine = lists.find((line) =>
343 | line
344 | .trim()
345 | .toLowerCase()
346 | .startsWith(this.dialect.escapeId(columnName).toLowerCase())
347 | );
348 |
349 | if (!columnLine) return createErrorResult('Column does not exist');
350 |
351 | const [columnNamePart, ...columnDefinitions] = columnLine
352 | .trim()
353 | .replace(/,$/, '')
354 | .split(' ');
355 |
356 | const query = `ALTER TABLE ${fullTableName} CHANGE COLUMN ${columnNamePart} ${this.dialect.escapeId(newColumnName)} ${columnDefinitions.join(' ')}`;
357 | return await this.query({ query });
358 | }
359 |
360 | return super.renameColumn(
361 | schemaName,
362 | tableName,
363 | columnName,
364 | newColumnName
365 | );
366 | }
367 |
368 | async connect(): Promise {}
369 | async disconnect(): Promise {
370 | this.conn.destroy();
371 | }
372 | }
373 |
--------------------------------------------------------------------------------
/src/connections/postgre/base.ts:
--------------------------------------------------------------------------------
1 | import { AbstractDialect, ColumnDataType } from './../../query-builder';
2 | import { PostgresDialect } from './../../query-builder/dialects/postgres';
3 | import { Database } from './../../models/database';
4 | import { buildMySQLDatabaseSchmea } from './../mysql';
5 | import { SqlConnection } from '../sql-base';
6 |
7 | export abstract class PostgreBaseConnection extends SqlConnection {
8 | dialect: AbstractDialect = new PostgresDialect();
9 |
10 | mapDataType(dataType: string): string {
11 | if (dataType === ColumnDataType.ID) return 'SERIAL';
12 | if (dataType === ColumnDataType.STRING) return 'TEXT';
13 | if (dataType === ColumnDataType.NUMBER) return 'INTEGER';
14 | return super.mapDataType(dataType);
15 | }
16 |
17 | async fetchDatabaseSchema(): Promise {
18 | // Get the list of schema first
19 | const { data: schemaList } = await this.query<{ schema_name: string }>({
20 | query: `SELECT schema_name FROM information_schema.schemata WHERE schema_name NOT IN ('information_schema', 'pg_catalog', 'pg_toast');`,
21 | });
22 |
23 | // Get the list of all tables
24 | const { data: tableList } = await this.query<{
25 | table_name: string;
26 | table_schema: string;
27 | }>({
28 | query: `SELECT table_name, table_schema FROM information_schema.tables WHERE table_schema NOT IN ('information_schema', 'pg_catalog', 'pg_toast');`,
29 | });
30 |
31 | // Get the list of all columns
32 | const { data: columnList } = await this.query<{
33 | table_schema: string;
34 | table_name: string;
35 | column_name: string;
36 | data_type: string;
37 | is_nullable: string;
38 | column_default: string;
39 | ordinal_position: number;
40 | }>({
41 | query: `SELECT * FROM information_schema.columns WHERE table_schema NOT IN ('information_schema', 'pg_catalog', 'pg_toast');`,
42 | });
43 |
44 | // Get the list of all constraints
45 | const { data: constraintList } = await this.query<{
46 | constraint_schema: string;
47 | constraint_name: string;
48 | table_name: string;
49 | table_schema: string;
50 | constraint_type: string;
51 | }>({
52 | query: `SELECT * FROM information_schema.table_constraints WHERE constraint_schema NOT IN ('information_schema', 'pg_catalog', 'pg_toast') AND constraint_type IN ('FOREIGN KEY', 'PRIMARY KEY', 'UNIQUE');`,
53 | });
54 |
55 | // Get the list of foreign key relation
56 | const { data: constraintColumnsList } = await this.query<{
57 | constraint_name: string;
58 | table_schema: string;
59 | table_name: string;
60 | column_name: string;
61 | reference_table_name: string;
62 | reference_column_name: string;
63 | reference_table_schema: string;
64 | }>({
65 | query: `SELECT
66 | kcu.constraint_name,
67 | kcu.table_schema,
68 | kcu.table_name,
69 | kcu.column_name,
70 | ccu.table_schema AS reference_table_schema,
71 | ccu.table_name AS reference_table_name,
72 | ccu.column_name AS reference_column_name
73 | FROM
74 | information_schema.table_constraints AS tc
75 | LEFT JOIN information_schema.key_column_usage AS kcu
76 | ON (
77 | tc.table_schema = kcu.table_schema AND
78 | tc.table_name = kcu.table_name AND
79 | tc.constraint_name = kcu.constraint_name
80 | )
81 | LEFT JOIN information_schema.constraint_column_usage AS ccu
82 | ON (
83 | ccu.table_schema = kcu.table_schema AND
84 | ccu.constraint_name = kcu.constraint_name AND
85 | tc.constraint_type = 'FOREIGN KEY'
86 | )
87 | WHERE
88 | kcu.constraint_schema NOT IN ('information_schema', 'pg_catalog', 'pg_toast')`,
89 | });
90 |
91 | // Postgres structure is similar to MySQL, so we can reuse the MySQL schema builder
92 | // by just mapping the column names
93 | return buildMySQLDatabaseSchmea({
94 | schemaList: schemaList.map((schema) => ({
95 | SCHEMA_NAME: schema.schema_name,
96 | })),
97 | tableList: tableList.map((table) => ({
98 | TABLE_NAME: table.table_name,
99 | TABLE_SCHEMA: table.table_schema,
100 | })),
101 | columnList: columnList.map((column) => ({
102 | TABLE_NAME: column.table_name,
103 | COLUMN_NAME: column.column_name,
104 | COLUMN_TYPE: column.data_type,
105 | IS_NULLABLE: column.is_nullable,
106 | COLUMN_DEFAULT: column.column_default,
107 | COLUMN_KEY: '',
108 | EXTRA: '',
109 | ORDINAL_POSITION: column.ordinal_position,
110 | TABLE_SCHEMA: column.table_schema,
111 | })),
112 | constraintsList: constraintList.map((constraint) => ({
113 | CONSTRAINT_NAME: constraint.constraint_name,
114 | CONSTRAINT_TYPE: constraint.constraint_type,
115 | TABLE_NAME: constraint.table_name,
116 | TABLE_SCHEMA: constraint.table_schema,
117 | })),
118 | constraintColumnsList: constraintColumnsList.map((constraint) => ({
119 | TABLE_NAME: constraint.table_name,
120 | TABLE_SCHEMA: constraint.table_schema,
121 | COLUMN_NAME: constraint.column_name,
122 | CONSTRAINT_NAME: constraint.constraint_name,
123 | REFERENCED_TABLE_NAME: constraint.reference_table_name,
124 | REFERENCED_COLUMN_NAME: constraint.reference_column_name,
125 | REFERENCED_TABLE_SCHEMA: constraint.reference_table_schema,
126 | })),
127 | });
128 | }
129 | }
130 |
--------------------------------------------------------------------------------
/src/connections/postgre/postgresql.ts:
--------------------------------------------------------------------------------
1 | import { Client } from 'pg';
2 | import { QueryResult } from '..';
3 | import { Query } from '../../query';
4 | import { AbstractDialect } from './../../query-builder';
5 | import { PostgresDialect } from './../../query-builder/dialects/postgres';
6 | import {
7 | createErrorResult,
8 | transformFromSdkTransform,
9 | } from './../../utils/transformer';
10 | import { PostgreBaseConnection } from './base';
11 | import { setPgParser, transformPgResult } from '@outerbase/sdk-transform';
12 |
13 | export class PostgreSQLConnection extends PostgreBaseConnection {
14 | client: Client;
15 | dialect: AbstractDialect = new PostgresDialect();
16 | protected numberedPlaceholder = true;
17 |
18 | constructor(pgClient: any) {
19 | super();
20 | this.client = pgClient;
21 | setPgParser(this.client);
22 | }
23 |
24 | async connect() {
25 | await this.client.connect();
26 | }
27 |
28 | async disconnect() {
29 | await this.client.end();
30 | }
31 |
32 | async internalQuery>(
33 | query: Query
34 | ): Promise> {
35 | try {
36 | const result = await this.client.query({
37 | text: query.query,
38 | rowMode: 'array',
39 | values: query.parameters as unknown[],
40 | });
41 |
42 | return transformFromSdkTransform(transformPgResult(result));
43 | } catch (e) {
44 | if (e instanceof Error) {
45 | return createErrorResult(e.message);
46 | }
47 | return createErrorResult('Unknown error');
48 | }
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/src/connections/snowflake/snowflake.ts:
--------------------------------------------------------------------------------
1 | import snowflake from 'snowflake-sdk';
2 | import { Query } from '../../query';
3 | import { QueryResult } from '..';
4 | import {
5 | createErrorResult,
6 | transformFromSdkTransform,
7 | } from '../../utils/transformer';
8 | import { Database, TableColumn } from '../../models/database';
9 | import { PostgreBaseConnection } from './../postgre/base';
10 | import {
11 | buildMySQLDatabaseSchmea,
12 | MySQLConstraintColumnResult,
13 | } from '../mysql';
14 |
15 | import { transformArrayBasedResult } from '@outerbase/sdk-transform';
16 |
17 | export class SnowflakeConnection extends PostgreBaseConnection {
18 | protected db: snowflake.Connection;
19 |
20 | constructor(db: any) {
21 | super();
22 | this.db = db;
23 | }
24 |
25 | async connect(): Promise {
26 | await new Promise((resolve, reject) => {
27 | this.db.connectAsync((err, conn) => {
28 | if (err) reject(err.message);
29 | else resolve(conn);
30 | });
31 | });
32 | }
33 |
34 | async disconnect(): Promise {
35 | await new Promise((resolve) => this.db.destroy(resolve));
36 | }
37 |
38 | async testConnection(): Promise<{ error?: string }> {
39 | try {
40 | await this.connect();
41 | const { data } = await this.query({
42 | query: 'SELECT CURRENT_DATABASE() AS DBNAME;',
43 | });
44 |
45 | await this.disconnect();
46 | if (!data[0].DBNAME) return { error: 'Database does not exist' };
47 |
48 | return {};
49 | } catch (e) {
50 | if (e instanceof Error) return { error: e.message };
51 | return { error: 'Unknown error' };
52 | }
53 | }
54 |
55 | async fetchDatabaseSchema(): Promise {
56 | // Get the list of schema first
57 | const { data: schemaList } = await this.query<{ SCHEMA_NAME: string }>({
58 | query: `SELECT SCHEMA_NAME FROM information_schema.schemata WHERE schema_name NOT IN ('INFORMATION_SCHEMA');`,
59 | });
60 |
61 | // Get the list of all tables
62 | const { data: tableList } = await this.query<{
63 | TABLE_NAME: string;
64 | TABLE_SCHEMA: string;
65 | }>({
66 | query: `SELECT TABLE_NAME, TABLE_SCHEMA FROM information_schema.tables WHERE table_schema NOT IN ('INFORMATION_SCHEMA');`,
67 | });
68 |
69 | // Get the list of all columns
70 | const { data: columnList } = await this.query<{
71 | TABLE_SCHEMA: string;
72 | TABLE_NAME: string;
73 | COLUMN_NAME: string;
74 | DATA_TYPE: string;
75 | IS_NULLABLE: string;
76 | COLUMN_DEFAULT: string;
77 | ORDINAL_POSITION: number;
78 | }>({
79 | query: `SELECT * FROM information_schema.columns WHERE table_schema NOT IN ('INFORMATION_SCHEMA');`,
80 | });
81 |
82 | // Get the list of all constraints
83 | const { data: constraintsList } = await this.query<{
84 | CONSTRAINT_SCHEMA: string;
85 | CONSTRAINT_NAME: string;
86 | TABLE_NAME: string;
87 | TABLE_SCHEMA: string;
88 | CONSTRAINT_TYPE: string;
89 | }>({
90 | query: `SELECT * FROM information_schema.table_constraints WHERE CONSTRAINT_SCHEMA NOT IN ('INFORMATION_SCHEMA') AND CONSTRAINT_TYPE IN ('FOREIGN KEY', 'PRIMARY KEY', 'UNIQUE');`,
91 | });
92 |
93 | // Mamic the key usages table using SHOW PRIMARY KEY and SHOW FOREIGN KEYS
94 | const { data: primaryKeyConstraint } = await this.query<{
95 | schema_name: string;
96 | table_name: string;
97 | column_name: string;
98 | constraint_name: string;
99 | }>({ query: `SHOW PRIMARY KEYS;` });
100 |
101 | const { data: foreignKeyConstraint } = await this.query<{
102 | pk_schema_name: string;
103 | pk_table_name: string;
104 | pk_column_name: string;
105 | fk_schema_name: string;
106 | fk_table_name: string;
107 | fk_column_name: string;
108 | fk_name: string;
109 | }>({ query: `SHOW IMPORTED KEYS;` });
110 |
111 | // Postgres structure is similar to MySQL, so we can reuse the MySQL schema builder
112 | // by just mapping the column names
113 | return buildMySQLDatabaseSchmea({
114 | schemaList,
115 | tableList,
116 | columnList: columnList.map((column) => ({
117 | COLUMN_TYPE: column.DATA_TYPE,
118 | ...column,
119 | COLUMN_KEY: '',
120 | EXTRA: '',
121 | })),
122 | constraintsList,
123 | constraintColumnsList: [
124 | ...primaryKeyConstraint.map(
125 | (constraint): MySQLConstraintColumnResult => ({
126 | TABLE_SCHEMA: constraint.schema_name,
127 | TABLE_NAME: constraint.table_name,
128 | COLUMN_NAME: constraint.column_name,
129 | CONSTRAINT_NAME: constraint.constraint_name,
130 | REFERENCED_TABLE_SCHEMA: '',
131 | REFERENCED_TABLE_NAME: '',
132 | REFERENCED_COLUMN_NAME: '',
133 | })
134 | ),
135 | ...foreignKeyConstraint.map(
136 | (constraint): MySQLConstraintColumnResult => ({
137 | TABLE_SCHEMA: constraint.fk_schema_name,
138 | TABLE_NAME: constraint.fk_table_name,
139 | COLUMN_NAME: constraint.fk_column_name,
140 | CONSTRAINT_NAME: constraint.fk_name,
141 | REFERENCED_TABLE_SCHEMA: constraint.pk_schema_name,
142 | REFERENCED_TABLE_NAME: constraint.pk_table_name,
143 | REFERENCED_COLUMN_NAME: constraint.pk_column_name,
144 | })
145 | ),
146 | ],
147 | });
148 | }
149 |
150 | createTable(
151 | schemaName: string | undefined,
152 | tableName: string,
153 | columns: TableColumn[]
154 | ): Promise {
155 | const tempColumns = structuredClone(columns);
156 | for (const column of tempColumns) {
157 | if (column.definition.references) {
158 | column.definition.references.table = schemaName
159 | ? `${schemaName}.${column.definition.references.table}`
160 | : column.definition.references.table;
161 | }
162 | }
163 |
164 | return super.createTable(schemaName, tableName, tempColumns);
165 | }
166 |
167 | async renameTable(
168 | schemaName: string | undefined,
169 | tableName: string,
170 | newTableName: string
171 | ): Promise {
172 | // Schema is required for rename
173 | return super.renameTable(
174 | schemaName,
175 | tableName,
176 | schemaName ? `${schemaName}.${newTableName}` : newTableName
177 | );
178 | }
179 |
180 | async internalQuery>(
181 | query: Query
182 | ): Promise> {
183 | try {
184 | const [err, headers, rows] = await new Promise<
185 | [snowflake.SnowflakeError | undefined, string[], unknown[][]]
186 | >((resolve) => {
187 | this.db.execute({
188 | sqlText: query.query,
189 | binds: query.parameters as snowflake.Binds,
190 | rowMode: 'array',
191 | complete: (err, stmt, rows) => {
192 | resolve([
193 | err,
194 | err
195 | ? []
196 | : stmt.getColumns().map((col) => col.getName()),
197 | rows as unknown[][],
198 | ]);
199 | },
200 | });
201 | });
202 |
203 | if (err) return createErrorResult(err.message) as QueryResult;
204 | return transformFromSdkTransform({
205 | ...transformArrayBasedResult({
206 | headers,
207 | rows,
208 | headersMapper: (header) => ({
209 | name: header,
210 | displayName: header,
211 | originalType: null,
212 | }),
213 | }),
214 | stat: {
215 | queryDurationMs: 0,
216 | rowsAffected: 0,
217 | rowsRead: 0,
218 | rowsWritten: 0,
219 | },
220 | }) as QueryResult;
221 | } catch (e) {
222 | return createErrorResult('Unknown error') as QueryResult;
223 | }
224 | }
225 | }
226 |
227 | /*
228 | headers,
229 | (header) => ({
230 | name: header,
231 | displayName: header,
232 | originalType: null,
233 | }),
234 | rows
235 | */
236 |
--------------------------------------------------------------------------------
/src/connections/sql-base.ts:
--------------------------------------------------------------------------------
1 | import { Query } from '../query';
2 | import {
3 | Connection,
4 | ConnectionSelectOptions,
5 | Outerbase,
6 | QueryResult,
7 | } from '..';
8 | import { AbstractDialect, ColumnDataType } from './../query-builder';
9 | import { TableColumn, TableColumnDefinition } from './../models/database';
10 | import {
11 | namedPlaceholder,
12 | toNumberedPlaceholders,
13 | } from './../utils/placeholder';
14 |
/**
 * Shared base for all SQL-speaking connections. Provides placeholder
 * normalization, a query-builder-backed CRUD/DDL surface, and a simple
 * connectivity test; concrete drivers implement `internalQuery`.
 *
 * NOTE(review): generic type parameters appear to have been elided from this
 * chunk by extraction (e.g. `internalQuery>(`, `Promise>`); restore from the
 * upstream source when editing signatures.
 */
export abstract class SqlConnection extends Connection {
    // Dialect that renders provider-specific SQL for the query builder.
    abstract dialect: AbstractDialect;

    // When true, positional "?" placeholders are rewritten to numbered
    // placeholders ($1, $2, ...) before execution (e.g. for PostgreSQL).
    protected numberedPlaceholder = false;

    // Executes a fully-prepared Query (SQL text + bound parameters)
    // against the underlying driver. Implemented per provider.
    abstract internalQuery>(
        query: Query
    ): Promise>;

    /**
     * Maps an abstract column data type to this dialect's concrete SQL type.
     * The default maps the ID and NUMBER pseudo-types to INTEGER and passes
     * everything else through; subclasses override for their provider.
     */
    mapDataType(dataType: string): string {
        if (dataType === ColumnDataType.ID) return 'INTEGER';
        if (dataType === ColumnDataType.NUMBER) return 'INTEGER';
        return dataType;
    }

    /**
     * This is a deprecated function, use raw instead. We keep this for
     * backward compatibility.
     *
     * @deprecated
     * @param query - prepared query object (SQL text plus parameters)
     * @returns the result of executing the query via `raw`
     */
    async query>(
        query: Query
    ): Promise> {
        return (await this.raw(
            query.query,
            query.parameters
        )) as QueryResult;
    }

    /**
     * Executes a raw SQL string. Parameters may be positional (array) or
     * named (object); either form is normalized to the placeholder style
     * the driver expects before being handed to `internalQuery`.
     */
    async raw(
        query: string,
        params?: Record | unknown[]
    ): Promise {
        // No parameters: run the SQL as-is.
        if (!params) return await this.internalQuery({ query });

        // Positional placeholder
        if (Array.isArray(params)) {
            if (this.numberedPlaceholder) {
                // Rewrite "?" placeholders to $1, $2, ... for drivers that
                // require numbered bindings.
                const { query: newQuery, bindings } = toNumberedPlaceholders(
                    query,
                    params
                );

                return await this.internalQuery({
                    query: newQuery,
                    parameters: bindings,
                });
            }

            return await this.internalQuery({ query, parameters: params });
        }

        // Named placeholder
        // NOTE(review): the `params!` assertion is redundant — the early
        // return above already guarantees params is defined.
        const { query: newQuery, bindings } = namedPlaceholder(
            query,
            params!,
            this.numberedPlaceholder
        );
        return await this.internalQuery({
            query: newQuery,
            parameters: bindings,
        });
    }

    /**
     * Runs a SELECT against the given table, applying optional limit,
     * offset, where and orderBy clauses, and optionally a total row count.
     */
    async select(
        schemaName: string,
        tableName: string,
        options: ConnectionSelectOptions
    ): Promise {
        const query = Outerbase(this)
            .select()
            .from(schemaName ? `${schemaName}.${tableName}` : tableName);

        // NOTE(review): falsy checks mean limit 0 / offset 0 are silently
        // ignored — confirm that is intended.
        if (options.limit) {
            query.limit(options.limit);
        }

        if (options.offset) {
            query.offset(options.offset);
        }

        if (options.where) {
            for (const where of options.where) {
                query.where(where.name, where.operator, where.value);
            }
        }

        if (options.orderBy) {
            for (const orderBy of options.orderBy) {
                // Entries are either [column, direction] or a bare column.
                if (Array.isArray(orderBy)) {
                    query.orderBy(orderBy[0], orderBy[1]);
                } else {
                    query.orderBy(orderBy);
                }
            }
        }

        let count: number | undefined = undefined;
        const result = await this.query(query.toQuery());

        if (options.includeCounting) {
            // Re-issue the same builder as a COUNT query for the total.
            // NOTE(review): this mutates the shared builder after the first
            // execution — presumably count() replaces the projection; verify
            // limit/offset do not skew the reported total.
            const { data: countResult } = await this.query<{
                total_rows: number;
            }>(query.count('total_rows').toQuery());

            if (countResult && countResult.length === 1) {
                count = Number(countResult[0].total_rows);
            }
        }

        return {
            ...result,
            count,
        };
    }

    /**
     * Inserts a single row into the given table.
     */
    async insert(
        schemaName: string | undefined,
        tableName: string,
        data: Record
    ): Promise {
        const qb = Outerbase(this);

        return await this.query(
            qb
                .insert(data)
                .into(schemaName ? `${schemaName}.${tableName}` : tableName)
                .toQuery()
        );
    }

    /**
     * Inserts multiple rows, one INSERT statement per row.
     * NOTE(review): not transactional and not batched; a synthetic empty
     * result is returned regardless of the individual statements' stats.
     */
    async insertMany(
        schemaName: string | undefined,
        tableName: string,
        data: Record[]
    ): Promise {
        const qb = Outerbase(this);

        for (const item of data) {
            await this.query(
                qb
                    .insert(item)
                    .into(schemaName ? `${schemaName}.${tableName}` : tableName)
                    .toQuery()
            );
        }

        return {
            data: [],
            error: null,
            query: '',
            headers: [],
            stat: {
                queryDurationMs: 0,
                rowsAffected: 0,
                rowsRead: 0,
                rowsWritten: 0,
            },
        };
    }

    /**
     * Updates rows matching `where` with the values in `data`.
     */
    async update(
        schemaName: string | undefined,
        tableName: string,
        data: Record,
        where: Record
    ): Promise {
        const qb = Outerbase(this);

        return await this.query(
            qb
                .update(data)
                .into(schemaName ? `${schemaName}.${tableName}` : tableName)
                .where(where)
                .toQuery()
        );
    }

    /**
     * Deletes rows matching `where` from the given table.
     */
    async delete(
        schemaName: string,
        tableName: string,
        where: Record
    ): Promise {
        const qb = Outerbase(this);

        return await this.query(
            qb
                .delete()
                .from(schemaName ? `${schemaName}.${tableName}` : tableName)
                .where(where)
                .toQuery()
        );
    }

    /**
     * Creates a table with the given columns; abstract column types are
     * mapped through `mapDataType` before rendering.
     */
    createTable(
        schemaName: string | undefined,
        tableName: string,
        columns: TableColumn[]
    ): Promise {
        const qb = Outerbase(this).createTable(
            schemaName ? `${schemaName}.${tableName}` : tableName
        );

        for (const column of columns) {
            qb.column(column.name, {
                ...column.definition,
                type: this.mapDataType(column.definition.type),
            });
        }

        return this.query(qb.toQuery());
    }

    /**
     * Drops the given table.
     */
    dropTable(
        schemaName: string | undefined,
        tableName: string
    ): Promise {
        const qb = Outerbase(this);

        return this.query(
            qb
                .dropTable(
                    schemaName ? `${schemaName}.${tableName}` : tableName
                )
                .toQuery()
        );
    }

    /**
     * Renames a column on the given table.
     */
    async renameColumn(
        schemaName: string | undefined,
        tableName: string,
        columnName: string,
        newColumnName: string
    ): Promise {
        const qb = Outerbase(this);

        return await this.query(
            qb
                .alterTable(
                    schemaName ? `${schemaName}.${tableName}` : tableName
                )
                .renameColumn(columnName, newColumnName)
                .toQuery()
        );
    }

    /**
     * Renames the given table.
     */
    async renameTable(
        schemaName: string | undefined,
        tableName: string,
        newTableName: string
    ): Promise {
        const qb = Outerbase(this);

        return await this.query(
            qb
                .alterTable(
                    schemaName ? `${schemaName}.${tableName}` : tableName
                )
                .renameTable(newTableName)
                .toQuery()
        );
    }

    /**
     * Alters an existing column to the given definition (type mapped
     * through `mapDataType`).
     * NOTE(review): parameter name `defintion` is a typo of `definition`;
     * left as-is here since renaming is a code change.
     */
    async alterColumn(
        schemaName: string | undefined,
        tableName: string,
        columnName: string,
        defintion: TableColumnDefinition
    ): Promise {
        const qb = Outerbase(this);

        return await this.query(
            qb
                .alterTable(
                    schemaName ? `${schemaName}.${tableName}` : tableName
                )
                .alterColumn(columnName, {
                    ...defintion,
                    type: this.mapDataType(defintion.type),
                })
                .toQuery()
        );
    }

    /**
     * Adds a new column with the given definition (type mapped through
     * `mapDataType`).
     */
    async addColumn(
        schemaName: string | undefined,
        tableName: string,
        columnName: string,
        defintion: TableColumnDefinition
    ): Promise {
        const qb = Outerbase(this);

        return await this.query(
            qb
                .alterTable(
                    schemaName ? `${schemaName}.${tableName}` : tableName
                )
                .addColumn(columnName, {
                    ...defintion,
                    type: this.mapDataType(defintion.type),
                })
                .toQuery()
        );
    }

    /**
     * Drops a column from the given table.
     */
    async dropColumn(
        schemaName: string | undefined,
        tableName: string,
        columnName: string
    ): Promise {
        const qb = Outerbase(this);

        return await this.query(
            qb
                .alterTable(
                    schemaName ? `${schemaName}.${tableName}` : tableName
                )
                .dropColumn(columnName)
                .toQuery()
        );
    }

    /**
     * Verifies connectivity by connecting, running `SELECT 1;`, and
     * disconnecting. Returns an error message instead of throwing.
     */
    async testConnection(): Promise<{ error?: string }> {
        try {
            await this.connect();
            const { error } = await this.raw('SELECT 1;');
            await this.disconnect();
            return { error: error ? error.message : undefined };
        } catch (error) {
            if (error instanceof Error) {
                return { error: error.message };
            }
            return { error: 'Unexpected error' };
        }
    }
}
353 |
--------------------------------------------------------------------------------
/src/connections/sqlite/base.ts:
--------------------------------------------------------------------------------
1 | import { Database, Table, TableColumn } from './../../models/database';
2 | import { SqlConnection } from '../sql-base';
3 | import { AbstractDialect } from './../../query-builder';
4 | import { SqliteDialect } from './../../query-builder/dialects/sqlite-dialect';
5 | export abstract class SqliteBaseConnection extends SqlConnection {
6 | dialect: AbstractDialect = new SqliteDialect();
7 |
8 | public async fetchDatabaseSchema(): Promise {
9 | const { data: tableList } = await this.query<{
10 | type: string;
11 | name: string;
12 | tbl_name: string;
13 | }>({
14 | query: `SELECT * FROM sqlite_master WHERE type = 'table' AND (name NOT LIKE 'sqlite_%' OR name NOT LIKE '_cf_%')`,
15 | });
16 |
17 | const { data: columnList } = await this.query<{
18 | cid: number;
19 | name: string;
20 | type: string;
21 | notnull: 0 | 1;
22 | dflt_value: string | null;
23 | pk: 0 | 1;
24 | tbl_name: string;
25 | ref_table_name: string | null;
26 | ref_column_name: string | null;
27 | }>({
28 | query: `WITH master AS (SELECT tbl_name FROM sqlite_master WHERE type = 'table' AND tbl_name NOT LIKE 'sqlite_%' AND tbl_name NOT LIKE '_cf_%')
29 | SELECT columns.*, fk."table" AS ref_table_name, fk."to" AS ref_column_name
30 | FROM
31 | (SELECT fields.*, tbl_name FROM master CROSS JOIN pragma_table_info (master.tbl_name) fields) AS columns LEFT JOIN
32 | (SELECT fk.*, tbl_name FROM master CROSS JOIN pragma_foreign_key_list (master.tbl_name) fk) AS fk
33 | ON fk."from" = columns.name AND fk.tbl_name = columns.tbl_name;`,
34 | });
35 |
36 | const tableLookup = tableList.reduce(
37 | (acc, table) => {
38 | acc[table.tbl_name] = {
39 | name: table.name,
40 | columns: [],
41 | indexes: [],
42 | constraints: [],
43 | };
44 | return acc;
45 | },
46 | {} as Record
47 | );
48 |
49 | for (const column of columnList) {
50 | const currentTable = tableLookup[column.tbl_name];
51 | if (!currentTable) continue;
52 |
53 | currentTable.columns.push({
54 | name: column.name,
55 | position: column.cid,
56 | definition: {
57 | type: column.type,
58 | nullable: column.notnull === 0,
59 | default: column.dflt_value,
60 | primaryKey: column.pk === 1,
61 | unique: false,
62 | references:
63 | column.ref_table_name && column.ref_column_name
64 | ? {
65 | table: column.ref_table_name,
66 | column: [column.ref_column_name],
67 | }
68 | : undefined,
69 | },
70 | } as TableColumn);
71 |
72 | if (column.ref_table_name && column.ref_column_name) {
73 | currentTable.constraints.push({
74 | name: `fk_${column.tbl_name}_${column.name}`,
75 | schema: 'main',
76 | tableName: column.tbl_name,
77 | type: 'FOREIGN KEY',
78 | referenceTableName: column.ref_table_name,
79 | columns: [
80 | {
81 | columnName: column.name,
82 | referenceColumnName: column.ref_column_name,
83 | },
84 | ],
85 | });
86 | }
87 | }
88 |
89 | // Building primary key constraint
90 | Object.values(tableLookup).forEach((table) => {
91 | const primaryKeyColumns = table.columns
92 | .filter((column) => column.definition.primaryKey)
93 | .map((column) => column.name);
94 |
95 | if (primaryKeyColumns.length) {
96 | table.constraints.push({
97 | name: `pk_${table.name}`,
98 | schema: 'main',
99 | tableName: table.name,
100 | type: 'PRIMARY KEY',
101 | columns: primaryKeyColumns.map((columnName) => ({
102 | columnName,
103 | })),
104 | });
105 | }
106 | });
107 |
108 | // Sqlite default schema is "main", since we don't support
109 | // ATTACH, we don't need to worry about other schemas
110 | return {
111 | main: tableLookup,
112 | };
113 | }
114 | }
115 |
--------------------------------------------------------------------------------
/src/connections/sqlite/cloudflare.ts:
--------------------------------------------------------------------------------
1 | import { QueryType } from '../../query-params';
2 | import { Query } from '../../query';
3 | import { DefaultDialect } from '../../query-builder/dialects/default';
4 | import { SqliteBaseConnection } from './base';
5 | import { Database } from './../../models/database';
6 | import {
7 | createErrorResult,
8 | transformFromSdkTransform,
9 | } from './../../utils/transformer';
10 | import { QueryResult } from '..';
11 | import { transformCloudflareD1 } from '@outerbase/sdk-transform';
12 |
// Shape of a single statement result returned by the D1 /raw endpoint:
// column names plus row tuples, with execution metadata.
interface CloudflareResult {
    results: {
        columns: string[];
        rows: unknown[][];
    };

    // Execution statistics reported by D1 for this statement.
    meta: {
        duration: number;
        changes: number;
        last_row_id: number;
        rows_read: number;
        rows_written: number;
    };
}

// Top-level D1 HTTP response envelope. On failure `success` is falsy and
// either the flat `error` string or the structured `errors` array carries
// the message(s).
interface CloudflareResponse {
    success?: boolean;
    result: CloudflareResult[];
    error?: string;
    errors?: { code: number; message: string }[];
}

// Credentials required to reach a D1 database via the Cloudflare REST API.
export type CloudflareD1ConnectionDetails = {
    apiKey: string;
    accountId: string;
    databaseId: string;
};
40 |
41 | export class CloudflareD1Connection extends SqliteBaseConnection {
42 | // The Cloudflare API key with D1 access
43 | apiKey: string | undefined;
44 | accountId: string | undefined;
45 | databaseId: string | undefined;
46 |
47 | // Default query type to positional for Cloudflare
48 | queryType = QueryType.positional;
49 |
50 | // Default dialect for Cloudflare
51 | dialect = new DefaultDialect();
52 |
53 | /**
54 | * Creates a new CloudflareD1Connection object with the provided API key,
55 | * account ID, and database ID.
56 | *
57 | * @param apiKey - The API key to be used for authentication.
58 | * @param accountId - The account ID to be used for authentication.
59 | * @param databaseId - The database ID to be used for querying.
60 | */
61 | constructor(private _: CloudflareD1ConnectionDetails) {
62 | super();
63 | this.apiKey = _.apiKey;
64 | this.accountId = _.accountId;
65 | this.databaseId = _.databaseId;
66 | }
67 |
68 | /**
69 | * Performs a connect action on the current Connection object.
70 | * In this particular use case Cloudflare is a REST API and
71 | * requires an API key for authentication.
72 | *
73 | * @param details - Unused in the Cloudflare scenario.
74 | * @returns Promise
75 | */
76 | async connect(): Promise {
77 | return Promise.resolve();
78 | }
79 |
80 | /**
81 | * Performs a disconnect action on the current Connection object.
82 | * In this particular use case Cloudflare is a REST API and does
83 | * not require a disconnect action.
84 | *
85 | * @returns Promise
86 | */
87 | async disconnect(): Promise {
88 | return Promise.resolve();
89 | }
90 |
91 | /**
92 | * Triggers a query action on the current Connection object. The query
93 | * is a SQL query that will be executed on a D1 database in the Cloudflare
94 | * account. The query is sent to the Cloudflare API and the response
95 | * is returned.
96 | *
97 | * The parameters object is sent along with the query to be used in the
98 | * query. By default if the query has parameters the SQL statement will
99 | * produce a string with `:property` values that the parameters object
100 | * keys should map to, and will be replaced by.
101 | *
102 | * @param query - The SQL query to be executed.
103 | * @param parameters - An object containing the parameters to be used in the query.
104 | * @returns Promise<{ data: any, error: Error | null }>
105 | */
106 | async internalQuery>(
107 | query: Query
108 | ): Promise> {
109 | if (!this.apiKey) throw new Error('Cloudflare API key is not set');
110 | if (!this.accountId)
111 | throw new Error('Cloudflare account ID is not set');
112 | if (!this.databaseId)
113 | throw new Error('Cloudflare database ID is not set');
114 | if (!query) throw new Error('A SQL query was not provided');
115 |
116 | const response = await fetch(
117 | `https://api.cloudflare.com/client/v4/accounts/${this.accountId}/d1/database/${this.databaseId}/raw`,
118 | {
119 | method: 'POST',
120 | headers: {
121 | 'Content-Type': 'application/json',
122 | Authorization: `Bearer ${this.apiKey}`,
123 | },
124 | body: JSON.stringify({
125 | sql: query.query,
126 | params: query.parameters,
127 | }),
128 | }
129 | );
130 |
131 | const json: CloudflareResponse = await response.json();
132 |
133 | if (json.success) {
134 | return transformFromSdkTransform(
135 | transformCloudflareD1(json.result[0])
136 | );
137 | }
138 |
139 | return createErrorResult(
140 | json.error ??
141 | json.errors?.map((e) => e.message).join(', ') ??
142 | 'Unknown error'
143 | );
144 | }
145 |
146 | public async fetchDatabaseSchema(): Promise {
147 | const result = await super.fetchDatabaseSchema();
148 | delete result.main['_cf_KV'];
149 | return result;
150 | }
151 | }
152 |
--------------------------------------------------------------------------------
/src/connections/sqlite/starbase.ts:
--------------------------------------------------------------------------------
1 | import { QueryType } from '../../query-params';
2 | import { Query, constructRawQuery } from '../../query';
3 | import { DefaultDialect } from '../../query-builder/dialects/default';
4 | import { SqliteBaseConnection } from './base';
5 | import { QueryResult } from '..';
6 | import {
7 | createErrorResult,
8 | transformFromSdkTransform,
9 | } from '../../utils/transformer';
10 |
11 | import { transformStarbaseResult } from '@outerbase/sdk-transform';
12 |
// Connection parameters for a Starbase durable-object database.
export type StarbaseConnectionDetails = {
    url: string;
    apiKey: string;
};

// Shape of a single statement result from the Starbase /query/raw endpoint.
interface StarbaseResult {
    columns: string[];
    rows: unknown[][];
    // Execution statistics reported by Starbase for this statement.
    meta: {
        rows_read: number;
        rows_written: number;
    };
}

// Top-level Starbase HTTP response envelope; `result` may be a single
// result object or an array of them.
interface StarbaseResponse {
    result: StarbaseResult | StarbaseResult[];
    error?: string;
}
31 |
32 | export class StarbaseConnection extends SqliteBaseConnection {
33 | // The Starbase API key with
34 | url: string | undefined;
35 | apiKey: string | undefined;
36 |
37 | // Default query type to positional for Starbase
38 | queryType = QueryType.positional;
39 |
40 | // Default dialect for Starbase
41 | dialect = new DefaultDialect();
42 |
43 | /**
44 | * Creates a new StarbaseConnection object with the provided API key,
45 | * account ID, and database ID.
46 | *
47 | * @param apiKey - The API key to be used for authentication.
48 | * @param accountId - The account ID to be used for authentication.
49 | * @param databaseId - The database ID to be used for querying.
50 | */
51 | constructor(private _: StarbaseConnectionDetails) {
52 | super();
53 | this.url = _.url;
54 | this.apiKey = _.apiKey;
55 | }
56 |
57 | /**
58 | * Performs a connect action on the current Connection object.
59 | * In this particular use case Starbase is a REST API and
60 | * requires an API key for authentication.
61 | *
62 | * @param details - Unused in the Starbase scenario.
63 | * @returns Promise
64 | */
65 | async connect(): Promise {
66 | return Promise.resolve();
67 | }
68 |
69 | /**
70 | * Performs a disconnect action on the current Connection object.
71 | * In this particular use case Starbase is a REST API and does
72 | * not require a disconnect action.
73 | *
74 | * @returns Promise
75 | */
76 | async disconnect(): Promise {
77 | return Promise.resolve();
78 | }
79 |
80 | /**
81 | * Triggers a query action on the current Connection object. The query
82 | * is a SQL query that will be executed on a Starbase durable object
83 | * database. The query is sent to the Starbase API and the response
84 | * is returned.
85 | *
86 | * The parameters object is sent along with the query to be used in the
87 | * query. By default if the query has parameters the SQL statement will
88 | * produce a string with `:property` values that the parameters object
89 | * keys should map to, and will be replaced by.
90 | *
91 | * @param query - The SQL query to be executed.
92 | * @param parameters - An object containing the parameters to be used in the query.
93 | * @returns Promise<{ data: any, error: Error | null }>
94 | */
95 | async internalQuery>(
96 | query: Query
97 | ): Promise> {
98 | if (!this.url) throw new Error('Starbase URL is not set');
99 | if (!this.apiKey) throw new Error('Starbase API key is not set');
100 | if (!query) throw new Error('A SQL query was not provided');
101 |
102 | const response = await fetch(new URL('/query/raw', this.url).href, {
103 | method: 'POST',
104 | headers: {
105 | 'Content-Type': 'application/json',
106 | Authorization: `Bearer ${this.apiKey}`,
107 | },
108 | body: JSON.stringify({
109 | sql: query.query,
110 | params: query.parameters,
111 | }),
112 | });
113 |
114 | const json: StarbaseResponse = await response.json();
115 |
116 | if (json.error) {
117 | return createErrorResult(json.error) as QueryResult;
118 | }
119 |
120 | if (json.result) {
121 | const items = Array.isArray(json.result)
122 | ? json.result[0]
123 | : json.result;
124 |
125 | return transformFromSdkTransform(transformStarbaseResult(items));
126 | }
127 |
128 | return createErrorResult('ss') as QueryResult;
129 | }
130 | }
131 |
--------------------------------------------------------------------------------
/src/connections/sqlite/turso.ts:
--------------------------------------------------------------------------------
1 | import { Client, InValue } from '@libsql/client';
2 |
3 | import { AbstractDialect } from './../../query-builder';
4 | import { QueryResult } from '..';
5 | import { Query } from '../../query';
6 | import { PostgresDialect } from './../../query-builder/dialects/postgres';
7 | import { SqliteBaseConnection } from './base';
8 | import {
9 | createErrorResult,
10 | transformFromSdkTransform,
11 | } from './../../utils/transformer';
12 | import { transformTursoResult } from '@outerbase/sdk-transform';
13 |
14 | export class TursoConnection extends SqliteBaseConnection {
15 | client: Client;
16 | dialect: AbstractDialect = new PostgresDialect();
17 |
18 | constructor(client: any) {
19 | super();
20 | this.client = client;
21 | }
22 |
23 | async internalQuery>(
24 | query: Query
25 | ): Promise> {
26 | try {
27 | const result = await this.client.execute({
28 | sql: query.query,
29 | args: (query.parameters ?? []) as InValue[],
30 | });
31 |
32 | return transformFromSdkTransform(transformTursoResult(result));
33 | } catch (e) {
34 | if (e instanceof Error) {
35 | return createErrorResult(e.message) as QueryResult;
36 | } else {
37 | return createErrorResult('Unknown error') as QueryResult;
38 | }
39 | }
40 | }
41 |
42 | async connect() {}
43 | async disconnect() {}
44 | }
45 |
--------------------------------------------------------------------------------
/src/generators/generate-models.backup.txt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 | import pkg from 'handlebars';
3 | const { compile } = pkg;
4 | import { promises as fs } from 'fs';
5 | import { API_URL } from '../connections/outerbase.backup';
6 |
7 | const path = require('path');
8 | const handlebars = require('handlebars');
9 |
10 | handlebars.registerHelper('capitalize', function (str: string) {
11 | return str?.charAt(0).toUpperCase() + str?.slice(1);
12 | });
13 |
14 | handlebars.registerHelper('camelCase', function (str: string) {
15 | return str?.replace(/[-_](.)/g, (_, c) => c?.toUpperCase());
16 | });
17 |
18 | handlebars.registerHelper('neq', (a: any, b: any) => a !== b);
19 |
20 | function parseArgs(args: any[]): { API_KEY?: string; PATH?: string } {
21 | const argsMap: Record = {};
22 | args.slice(2).forEach((arg: { split: (arg0: string) => [any, any] }) => {
23 | const [key, value] = arg.split('=');
24 | argsMap[key] = value;
25 | });
26 |
27 | return argsMap;
28 | }
29 |
/**
 * Entry point for the model generator: fetches the database schema from the
 * Outerbase API, converts each table's column types to TypeScript types,
 * renders one model file per table from the handlebars template, and writes
 * an index file re-exporting every generated model.
 *
 * NOTE(review): generic type parameters appear elided in this chunk
 * (`Array`, `Record` below lack their type arguments); restore upstream.
 */
async function main() {
    const args = parseArgs(process.argv);
    const apiKey = args.API_KEY || '';
    const folderPath = args.PATH || './';

    try {
        await fs.mkdir(folderPath, { recursive: true });

        // Load templates
        const modelTemplateSource = await fs.readFile(
            path.resolve(__dirname, 'model-template.handlebars'),
            'utf-8'
        );
        const indexTemplateSource = await fs.readFile(
            path.resolve(__dirname, 'index-template.handlebars'),
            'utf-8'
        );

        // Compile templates
        const modelTemplate = compile(modelTemplateSource);
        const indexTemplate = compile(indexTemplateSource);

        // NOTE(review): no response.ok / HTTP status check before using the
        // body — a failed request surfaces as an opaque error downstream.
        const response = await fetch(`${API_URL}/api/v1/ezql/schema`, {
            method: 'GET',
            headers: {
                'Content-Type': 'application/json',
                'X-Source-Token': apiKey,
            },
        });

        let json = await response.json();
        let schemaResponse = json.response;
        let tables: Array = [];

        // Tracks which output folders have already been wiped this run, so
        // each schema folder is cleared at most once.
        const deletedPathsMap: Record = {};

        // Schema response maps schema names to arrays of table metadata.
        for (let key in schemaResponse) {
            // Tables in "public" go to the output root; other schemas get
            // their own subfolder.
            let isPublic = key.toLowerCase() === 'public';

            if (Array.isArray(schemaResponse[key])) {
                for (let table of schemaResponse[key]) {
                    if (table.type !== 'table') continue;

                    // References will capture all columns that have foreign key constraints in this table
                    table.references = [];

                    // Loop through all columns in the table
                    for (let column of table.columns) {
                        // Flag the column if a PRIMARY KEY constraint names it.
                        const isPrimaryKey = table.constraints?.find(
                            (constraint: { type: string; column: any }) =>
                                constraint.type?.toUpperCase() ===
                                    'PRIMARY KEY' &&
                                constraint.column === column.name
                        );
                        column.primary = isPrimaryKey ? true : false;

                        // Flag the column if a UNIQUE constraint names it.
                        const isUnique = table.constraints?.find(
                            (constraint: { type: string; column: any }) =>
                                constraint.type?.toUpperCase() === 'UNIQUE' &&
                                constraint.column === column.name
                        );
                        column.unique = isUnique ? true : false;

                        // Find a FOREIGN KEY constraint for this column; as a
                        // side effect, record the referenced table in
                        // table.references (deduplicated) for template imports.
                        // NOTE(review): doing this inside a find() predicate
                        // relies on the predicate running for every candidate
                        // until a match — side-effecting predicates are fragile.
                        const foreignKey = table.constraints?.find(
                            (constraint: {
                                type: string;
                                column: any;
                                columns: string | any[];
                            }) => {
                                if (
                                    constraint.type?.toUpperCase() ===
                                        'FOREIGN KEY' &&
                                    constraint.column === column.name &&
                                    constraint.columns?.length > 0
                                ) {
                                    const firstColumn = constraint.columns[0];

                                    const referenceExists =
                                        table.references.some(
                                            (ref: {
                                                name: string;
                                                table: any;
                                                schema: any;
                                            }) =>
                                                ref.name === firstColumn.name &&
                                                ref.table ===
                                                    firstColumn.table &&
                                                ref.schema ===
                                                    firstColumn.schema
                                        );
                                    if (!referenceExists) {
                                        table.references.push({
                                            name: firstColumn.name,
                                            table: firstColumn.table,
                                            schema: firstColumn.schema,
                                        });
                                    }
                                }

                                return (
                                    constraint.type?.toUpperCase() ===
                                        'FOREIGN KEY' &&
                                    constraint.column === column.name
                                );
                            }
                        );
                        column.reference = foreignKey?.columns[0]?.table
                            ? foreignKey?.columns[0]?.table
                            : undefined;

                        let currentType = column.type?.toLowerCase();

                        // Convert `currentType` from database column types to TypeScript types
                        switch (column.type?.toLowerCase()) {
                            case 'int':
                            case 'integer':
                            case 'smallint':
                            case 'tinyint':
                            case 'mediumint':
                            case 'bigint':
                                currentType =
                                    column.type?.toLowerCase() === 'bigint'
                                        ? 'bigint'
                                        : 'number';
                                break;
                            case 'decimal':
                            case 'numeric':
                            case 'float':
                            case 'double':
                            case 'real':
                                currentType = 'number';
                                break;
                            case 'varchar':
                            case 'char':
                            case 'character varying':
                            case 'text':
                            case 'tinytext':
                            case 'mediumtext':
                            case 'longtext':
                                currentType = 'string';
                                break;
                            case 'timestamp':
                            case 'datetime':
                            case 'date':
                            case 'time':
                                currentType = 'Date';
                                break;
                            case 'boolean':
                                currentType = 'boolean';
                                break;
                            case 'json':
                            case 'jsonb':
                                // NOTE(review): likely was 'Record<string, any>'
                                // before generic elision — confirm upstream.
                                currentType = 'Record';
                                break;
                            case 'binary':
                            case 'varbinary':
                            case 'blob':
                            case 'tinyblob':
                            case 'mediumblob':
                            case 'longblob':
                                currentType = 'Blob';
                                break;
                            case 'enum':
                            case 'set':
                                currentType = 'string';
                                break;
                            case 'uuid':
                                currentType = 'string';
                                break;
                            case 'bit':
                                currentType = 'number';
                                break;
                            case 'array':
                                currentType = 'any[]';
                                break;
                            case 'geometry':
                            case 'geography':
                                currentType = 'GeoJSON.Geometry';
                                break;
                            default:
                                currentType = 'any';
                                break;
                        }

                        column.type = currentType;
                    }

                    const currentFolderPath =
                        folderPath + `${isPublic ? '' : '/' + key}`;
                    const model = modelTemplate(table);
                    const modelPath = path.resolve(
                        currentFolderPath,
                        `${table.name}.ts`
                    );

                    // Remove the existing models directory and create a new one if it doesn't exist
                    // but only if it hasn't been deleted already.
                    // NOTE(review): fs.rmdir's recursive option is deprecated
                    // (use fs.rm), and on modern Node this may reject when the
                    // directory does not exist yet — TODO confirm.
                    if (!deletedPathsMap[currentFolderPath]) {
                        await fs.rmdir(currentFolderPath, { recursive: true });
                        deletedPathsMap[currentFolderPath] = true;
                    }

                    await fs.mkdir(currentFolderPath, { recursive: true });
                    await fs.writeFile(modelPath, model);

                    tables.push({
                        name: isPublic ? table.name : `${key}/${table.name}`,
                    });
                }
            }
        }

        console.log('Generated models for tables:', tables);

        // Generate index file
        const index = indexTemplate({ tables: tables });
        const indexPath = path.resolve(folderPath, 'index.ts');

        // Write generated files
        await fs.writeFile(indexPath, index);

        console.log('Models generated successfully');
    } catch (error) {
        console.error('Error generating models:', error);
    }
}

// Script entry: fire and forget (errors are handled inside main).
main();
258 |
--------------------------------------------------------------------------------
/src/generators/index-template.handlebars:
--------------------------------------------------------------------------------
1 | {{#tables}}
2 | export * from './{{ name }}';
3 | {{/tables}}
--------------------------------------------------------------------------------
/src/generators/model-template.handlebars:
--------------------------------------------------------------------------------
1 | import { BaseTable, Column } from '@outerbase/sdk';
2 | {{#each references}}
3 | import { {{ capitalize (camelCase table) }} } from './{{#if (neq schema "public")}}{{schema}}/{{/if}}{{ camelCase table }}';
4 | {{/each}}
5 |
6 | export class {{ capitalize (camelCase name) }} extends BaseTable {
7 | {{#each columns}}
8 | @Column({ name: "{{name}}"{{#if is_nullable}}, nullable: true{{/if}}{{#if primary}}, primary: true{{/if}}{{#if unique}}, unique: true{{/if}}{{#if reference}}, relation: {{capitalize (camelCase reference)}}{{/if}} })
9 | {{ camelCase name }}{{#if is_nullable}}?{{/if}}: {{{ type }}};
10 |
11 | {{/each}}
12 |
13 | constructor(data: any) {
14 | super({
15 | _name: "{{name}}",
16 | _schema: "{{schema}}",
17 | _original: data
18 | });
19 |
20 | {{#each columns}}
21 | this.{{ camelCase name }} = data.{{ name }};
22 | {{/each}}
23 | }
24 | }
--------------------------------------------------------------------------------
/src/index.ts:
--------------------------------------------------------------------------------
1 | export * from './connections';
2 | export * from './connections/sqlite/cloudflare';
3 | export * from './connections/motherduck';
4 | export * from './connections/bigquery';
5 | export * from './connections/sqlite/starbase';
6 | export * from './connections/mongodb';
7 | export * from './connections/mysql';
8 | export * from './connections/postgre/postgresql';
9 | export * from './connections/sqlite/turso';
10 | export * from './connections/snowflake/snowflake';
11 | export * from './client';
12 | export * from './models/decorators';
13 |
--------------------------------------------------------------------------------
/src/models/database.ts:
--------------------------------------------------------------------------------
1 | type SchemaName = string;
2 | export type Database = Record;
3 |
4 | type TableName = string;
5 | export type Schema = Record;
6 |
7 | export type Table = {
8 | name: string;
9 | columns: TableColumn[];
10 | indexes: TableIndex[];
11 | constraints: Constraint[];
12 | };
13 |
14 | export type ConstraintColumn = {
15 | columnName: string;
16 | referenceColumnName?: string;
17 | };
18 |
19 | export type Constraint = {
20 | name: string;
21 | schema: string;
22 | tableName: string;
23 | type: string;
24 | referenceTableName?: string;
25 | referenceSchema?: string;
26 | columns: ConstraintColumn[];
27 | };
28 |
29 | export interface TableColumn {
30 | name: string;
31 | position?: number;
32 | definition: TableColumnDefinition;
33 | }
34 |
35 | export type TableReference = {
36 | table: string;
37 | column: string;
38 | };
39 |
40 | export type TableRecord = Record;
41 | export type TableCondition = {
42 | column: string;
43 | value: any;
44 | // condition: 'eq' | 'neq' | 'gt' | 'lt' | 'gte' | 'lte' | 'like' | 'in' | 'nin'
45 | };
46 |
47 | export enum TableIndexType {
48 | PRIMARY = 'primary',
49 | UNIQUE = 'unique',
50 | INDEX = 'index',
51 | }
52 | export type TableIndex = {
53 | name: string;
54 | type: TableIndexType;
55 | columns: string[];
56 | };
57 |
58 | // This definition is trying to fit all database providers
59 | // - MySQL: https://dev.mysql.com/doc/refman/8.4/en/create-table.html
60 | // - Sqlite: https://www.sqlite.org/lang_createtable.html
61 | // - PostgreSQL: https://www.postgresql.org/docs/current/sql-createtable.html
62 | // - Motherduck: https://duckdb.org/docs/sql/statements/create_table.html
63 | // - SQL Server: https://learn.microsoft.com/en-us/sql/t-sql/statements/create-table-transact-sql?view=sql-server-ver16
64 | export interface TableColumnDefinition extends TableColumnConstraint {
65 | type: string;
66 |
67 | // An invisible column is normally hidden to queries,
68 | // but can be accessed if explicitly referenced
69 | // Supported: MySQL
70 | invisible?: boolean;
71 |
72 | // In MySQL: DISK, MEMORY
73 | // In PostgreSQL: PLAIN, EXTERNAL, EXTENDED, MAIN, DEFAULT
74 | storage?: boolean;
75 |
76 | // PostgreSQL: pglz, lz4, default
77 | compression?: string;
78 |
79 | // Other
80 | collate?: string;
81 | comment?: string;
82 | }
83 |
// Column-level constraint options shared by TableColumnDefinition. Most
// fields map onto clauses of a CREATE TABLE column definition.
export interface TableColumnConstraint {
    nullable?: boolean;
    // Conflict clause for NOT NULL (looks like SQLite `ON CONFLICT …`) —
    // TODO confirm against the dialect implementations.
    nullConflict?: string;

    // Default value (literal) vs. a raw default expression
    default?: string | null;
    defaultExpression?: string;
    autoIncrement?: boolean;

    // Column Constraints
    constraintName?: string;

    unique?: boolean;
    uniqueConflict?: string;

    primaryKey?: boolean;
    primaryKeyConflict?: string;
    primaryKeyOrder?: string;

    // Foreign key clause; see TableReferenceDefinition.
    references?: TableReferenceDefinition;
    checkExpression?: string;

    // Generative columns
    generatedExpression?: string;
    generatedType?: 'VIRTUAL' | 'STORED';
}
110 |
// The REFERENCES clause of a foreign key: target table, referenced column
// list, and optional MATCH / ON DELETE / ON UPDATE actions (kept as raw
// strings since each dialect accepts different keywords).
export interface TableReferenceDefinition {
    table: string;
    column: string[];
    match?: string;
    onDelete?: string;
    onUpdate?: string;
}
118 |
--------------------------------------------------------------------------------
/src/models/decorators.ts:
--------------------------------------------------------------------------------
1 | /**
2 | * A registry of metadata for classes decorated with the @Entity decorator.
3 | * The metadata is stored as a Map where the key is the class constructor and
4 | * the value is an object with the following properties:
5 | * - columns: an object where the keys are property keys and the values are objects with column options
6 | * - primaryKey: the property key of the primary key column
7 | *
8 | * @type {Map}
9 | */
10 | export const metadataRegistry = new Map();
11 |
12 | export function Column(options?: {
13 | unique?: boolean;
14 | primary?: boolean;
15 | nullable?: boolean;
16 | name?: string;
17 | relation?: any;
18 | }): PropertyDecorator {
19 | return function (target: any, propertyKey: string | symbol): void {
20 | const constructor = target.constructor;
21 | if (!metadataRegistry.has(constructor)) {
22 | metadataRegistry.set(constructor, {
23 | columns: {},
24 | primaryKey: undefined,
25 | });
26 | }
27 |
28 | const classMetadata = metadataRegistry.get(constructor);
29 |
30 | const columnName = options?.name || propertyKey.toString();
31 | const relationName = options?.relation || propertyKey.toString();
32 |
33 | // Initialize the column metadata if it doesn't exist
34 | if (!classMetadata.columns[propertyKey]) {
35 | classMetadata.columns[propertyKey] = {};
36 | }
37 |
38 | // Update the column metadata with new options
39 | classMetadata.columns[propertyKey] = {
40 | ...classMetadata.columns[propertyKey],
41 | ...options,
42 | name: columnName,
43 | relation: relationName,
44 | };
45 |
46 | if (options?.primary) {
47 | if (classMetadata.primaryKey) {
48 | throw new Error(
49 | `Multiple primary keys are not allowed: ${constructor.name} already has a primary key on property '${String(classMetadata.primaryKey)}'.`
50 | );
51 | }
52 | classMetadata.primaryKey = propertyKey;
53 | }
54 | };
55 | }
56 |
57 | /**
58 | * Indicates that the provided property name is a valid column in the database
59 | * for this model class (database table).
60 | *
61 | * @param targetClass
62 | * @param propertyName
63 | * @returns Boolean – whether the property is a column
64 | */
65 | export function isColumn(targetClass: Function, propertyName: string): boolean {
66 | const metadata = metadataRegistry.get(targetClass);
67 | return metadata && metadata.columns[propertyName];
68 | }
69 |
70 | /**
71 | * Indicates whether a column is a unique column in the database.
72 | *
73 | * @param targetClass
74 | * @param propertyName
75 | * @returns Boolean – whether the column is a unique column
76 | */
77 | export function isPropertyUnique(
78 | targetClass: Function,
79 | propertyName: string
80 | ): boolean {
81 | const metadata = metadataRegistry.get(targetClass);
82 | return metadata && metadata[propertyName] && metadata[propertyName].unique;
83 | }
84 |
85 | /**
86 | * Indicates whether a column can be set as a null value in the database.
87 | *
88 | * @param targetClass
89 | * @param propertyName
90 | * @returns Boolean – whether the column can be null
91 | */
92 | export function isColumnNullable(
93 | targetClass: Function,
94 | propertyName: string
95 | ): boolean {
96 | const metadata = metadataRegistry.get(targetClass);
97 | if (
98 | metadata &&
99 | metadata.columns[propertyName] &&
100 | metadata.columns[propertyName].hasOwnProperty('nullable')
101 | ) {
102 | return metadata.columns[propertyName].nullable;
103 | }
104 | return false;
105 | }
106 |
107 | /**
108 | * Retrieve the primary key column names for a given model class
109 | * based on the metadata stored by the decorators.
110 | *
111 | * @param targetClass
112 | * @returns Array of strings – the primary key column names
113 | */
114 | export function getPrimaryKeys(targetClass: Function): string[] {
115 | const metadata = metadataRegistry.get(targetClass);
116 | if (metadata) {
117 | // Return the actual column name as it is stored in the database
118 | const primaryKeyColumnsNames = Object.keys(metadata.columns)
119 | .filter((key) => metadata.columns[key].primary)
120 | .map((key) => metadata.columns[key].name);
121 |
122 | return primaryKeyColumnsNames;
123 | }
124 | return [];
125 | }
126 |
127 | /**
128 | * Based on the column name value, however it is stored in the database, get the actual
129 | * key name of the property in the class.
130 | *
131 | * @param targetClass
132 | * @param columnName
133 | * @returns String – the property name
134 | */
135 | export function getColumnValueFromName(
136 | targetClass: Function,
137 | columnName: string
138 | ): string | null {
139 | const metadata = metadataRegistry.get(targetClass);
140 | if (metadata) {
141 | for (const key in metadata.columns) {
142 | if (metadata.columns[key].name === columnName) {
143 | return key;
144 | }
145 | }
146 | }
147 | return null;
148 | }
149 |
150 | /**
151 | * Based on the actual property name, usually camel cased, get the column name value
152 | * that is stored in the database.
153 | *
154 | * @param targetClass
155 | * @param propertyName
156 | * @returns String – the column name
157 | */
158 | export function getColumnValueFromProperty(
159 | targetClass: Function,
160 | propertyName: string
161 | ): string | null {
162 | const metadata = metadataRegistry.get(targetClass);
163 | if (metadata) {
164 | return metadata.columns[propertyName].name;
165 | }
166 | return null;
167 | }
168 |
--------------------------------------------------------------------------------
/src/models/index.backup.txt:
--------------------------------------------------------------------------------
1 | import { Connection } from 'src/connections';
2 | import { Outerbase } from '../client';
3 | import {
4 | getColumnValueFromName,
5 | getColumnValueFromProperty,
6 | getPrimaryKeys,
7 | } from './decorators';
8 |
9 | const RESERVED_PROPERTIES = ['_name', '_schema', '_original', '_connection'];
10 |
/**
 * NOTE(review): this lives in a .backup.txt snapshot. Generic type
 * arguments appear to have been stripped (`Record`, `Promise` have no
 * parameters) and every query-builder call below is commented out, so the
 * CRUD methods are currently no-ops beyond their guard checks.
 */
export class BaseTable {
    _name: string; // table name
    _schema?: string; // optional schema the table belongs to
    _original?: Record; // last row state as loaded from the database
    _connection?: Connection; // set via attachConnection()

    constructor(_: {
        _name: string;
        _schema?: string;
        _original?: Record;
    }) {
        this._name = _._name;
        this._schema = _._schema;
        this._original = _._original;
    }

    /**
     * Attaches a connection object to the model. This enables the model to perform
     * actions on the database utilizing the query builder. See `pull`, `update`,
     * `insert`, and `delete` methods for examples.
     *
     * @param connection
     */
    attachConnection(connection: Connection) {
        this._connection = connection;
    }

    /**
     * Constructs the where clause for the current model based on the primary keys.
     * This WHERE clause is used to uniquely map to this specific model in the database.
     *
     * NOTE(review): the real mapping is commented out below, so this
     * currently always returns an empty array after the guard checks.
     *
     * @returns string[]
     */
    getCurrentWhereClause(): string[] {
        if (!this._original) {
            throw new Error('Original data not found');
        }

        const primaryKeys = getPrimaryKeys(this.constructor);

        if (primaryKeys?.length === 0) {
            throw new Error('No primary keys found');
        }

        return [];
        // return primaryKeys.map((key) => {
        //     const columnValue = getColumnValueFromName(this.constructor, key)
        //     if (columnValue === null) return ''

        //     return equals(key, this._original?.[columnValue])
        // })
    }

    /**
     * Returns the current values of the model. If `omitPrimaryKeys` is true, the primary
     * keys will be omitted from the returned object. Use this to get the current values
     * of the model to be used in an update query.
     *
     * NOTE(review): values are read from `_original`, not from the
     * (possibly modified) instance properties — confirm this is intended
     * before reviving the update flow. The console.log is debug residue.
     *
     * @param _ An object with a boolean value to omit primary keys from the current values.
     * @returns Record
     */
    getCurrentValues(_: { omitPrimaryKeys: boolean }): Record {
        if (!this._original) {
            throw new Error('Original data not found');
        }

        const columns = Object.keys(this).filter((key) => {
            if (RESERVED_PROPERTIES.includes(key)) {
                return false;
            }

            if (_.omitPrimaryKeys) {
                const primaryKeys = getPrimaryKeys(this.constructor);
                if (primaryKeys?.length > 0) {
                    return !primaryKeys.includes(key);
                }
            }

            return true;
        });

        let object: Record = {};
        columns.forEach((key) => {
            const columnName = getColumnValueFromProperty(
                this.constructor,
                key
            );

            if (columnName) {
                object[columnName] = this._original?.[columnName];
                console.log(columnName + ' = ' + this._original?.[columnName]);
            }
        });

        return object;
    }

    /**
     * Converts a string to camel case. For most of the model properties, the column
     * names are usually stored in snake case in the database. This method converts
     * the snake case column names to camel case for use in the model.
     *
     * @param str
     * @returns string
     */
    stringToCamelCase(str: string) {
        return str?.replace(/[-_](.)/g, (_, c) => c?.toUpperCase());
    }

    /**
     * Fetches the latest version of this model from the database.
     * When you want to make sure this model represents the latest
     * version of the data in the database, you can call this method.
     *
     * NOTE(review): the actual fetch is commented out, so today this only
     * re-copies the existing `_original` values onto matching camel-cased
     * instance properties.
     *
     * @returns Promise
     */
    async pull(): Promise {
        if (!this._connection) {
            throw new Error('Connection not attached');
        }

        const conditions = this.getCurrentWhereClause();
        const db = Outerbase(this._connection);

        // let { data, error } = await db
        //     .selectFrom([
        //         { schema: this._schema, table: this._name, columns: ['*'] },
        //     ])
        //     //.where(conditions)
        //     .limit(1)
        //     .query();

        // If an error occurs, exit early.
        // if (error) return;

        // // The response from the query builder call above is an array of results
        // // that match the query. We only want the first result.
        // this._original = data[0];

        for (let key in this._original) {
            if (typeof this._original[key] === 'function') {
                continue;
            }

            // If `key` is any of the reserved properties, we skip it.
            if (RESERVED_PROPERTIES.includes(key)) {
                continue;
            }

            const preparedKey = this.stringToCamelCase(key) ?? '';
            for (let prop in this) {
                if (prop === preparedKey) {
                    this[prop] = this._original[key];
                }
            }
        }

        return;
    }

    /**
     * Deletes the current model from the database. This method will delete the
     * model from the database based on the primary keys of the model.
     *
     * NOTE(review): delete call is commented out — currently a no-op.
     *
     * @returns Promise
     */
    async delete(): Promise {
        if (!this._connection) {
            throw new Error('Connection not attached');
        }

        const conditions = this.getCurrentWhereClause();
        const db = Outerbase(this._connection);

        // const { data } = await db
        //     .deleteFrom(this._name)
        //     // .where(conditions)
        //     .query();

        //return data;
    }

    /**
     * Updates the current model in the database. This method will update the
     * model in the database based on the primary keys of the model.
     *
     * NOTE(review): update call is commented out — currently a no-op.
     *
     * @returns Promise
     */
    async update(): Promise {
        if (!this._connection) {
            throw new Error('Connection not attached');
        }

        const conditions = this.getCurrentWhereClause();
        const db = Outerbase(this._connection);
        const currentValues = this.getCurrentValues({ omitPrimaryKeys: true });

        // let { data, error } = await db
        //     .update(currentValues)
        //     .into(this._name)
        //     // .where(conditions)
        //     .query();

        // Update the original data with the new data
        // if (!error) {
        //     this._original = {
        //         ...this._original,
        //         ...currentValues,
        //     };
        // }

        // return data;
    }

    /**
     * Inserts the current model into the database. This method will insert the
     * model into the database.
     *
     * NOTE(review): insert call is commented out — currently a no-op.
     *
     * @returns Promise
     */
    async insert(): Promise {
        if (!this._connection) {
            throw new Error('Connection not attached');
        }

        const db = Outerbase(this._connection);

        // let { data } = await db
        //     .insert(this.getCurrentValues({ omitPrimaryKeys: true }))
        //     .into(this._name)
        //     //.returning(['*'])
        //     .query();

        // return data;
    }
}
247 |
--------------------------------------------------------------------------------
/src/playground.ts:
--------------------------------------------------------------------------------
1 | // import { createConnection } from 'mysql2';
2 | // import { Connection, MySQLConnection, QueryResult } from '.';
3 |
4 | // function log(result: QueryResult) {
5 | // console.log('Result');
6 | // console.table(result.data);
7 |
8 | // console.log('Headers');
9 | // console.table(result.headers);
10 | // }
11 |
12 | // async function run(db: Connection, sql: string) {
13 | // console.log('------------------------------');
14 | // console.log(`\x1b[32m${sql}\x1b[0m`);
15 | // console.log('------------------------------');
16 |
17 | // log(await db.raw(sql));
18 | // }
19 |
20 | // async function main() {
21 | // const db = new MySQLConnection(
22 | // createConnection({
23 | // host: 'localhost',
24 | // user: 'root',
25 | // password: '123456',
26 | // database: 'testing',
27 | // })
28 | // );
29 |
30 | // await run(db, 'SELECT 1 AS `a`, 2 AS `a`;');
31 |
32 | // await run(
33 | // db,
34 | // 'SELECT * FROM students INNER JOIN teachers ON (students.teacher_id = teachers.id)'
35 | // );
36 | // }
37 |
38 | // main()
39 | // .then()
40 | // .finally(() => process.exit());
41 |
42 | import duckDB from 'duckdb';
43 |
44 | const client = new duckDB.Database('md:my_db', {
45 | motherduck_token:
46 | 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJlbWFpbCI6ImludmlzYWxAZ21haWwuY29tIiwic2Vzc2lvbiI6ImludmlzYWwuZ21haWwuY29tIiwicGF0IjoiVkdfZ1BmRXdaWjN5M29zY0VFemRLWElMVVJ4ZmxFdUpxbktZM3RkVjEtUSIsInVzZXJJZCI6ImVkZjQ4NjAyLTJlZmMtNGU0Ny04Y2VmLWNhNGU5NzQ3OTQ0MSIsImlzcyI6Im1kX3BhdCIsImlhdCI6MTcyOTEzMDcxMX0.ysqXODqC9BpMeOBeedjQW0y6GfiMdpOgHBy1OihUtKI',
47 | });
48 |
49 | client
50 | .connect()
51 | .prepare('SELECT 1;')
52 | .all((err, res) => {
53 | console.log(res);
54 | process.exit();
55 | });
56 |
--------------------------------------------------------------------------------
/src/query-builder/dialects/bigquery.ts:
--------------------------------------------------------------------------------
1 | import { MySQLDialect } from './mysql';
/**
 * BigQuery SQL dialect. Extends the MySQL dialect (both quote identifiers
 * with backticks) and overrides only what differs.
 */
export class BigQueryDialect extends MySQLDialect {
    // NOTE(review): the "ALWAY" spelling must match the base-class field it
    // overrides — do not rename here without changing AbstractDialect too.
    // Presumably makes constraints emit as NOT ENFORCED; confirm in base.
    protected ALWAY_NO_ENFORCED_CONSTRAINT = true;

    // Unlike the MySQL parent, the whole identifier (including any dots) is
    // wrapped in a single pair of backticks and embedded backticks are not
    // escaped — NOTE(review): verify this is intentional for
    // `project.dataset.table` paths.
    escapeId(identifier: string): string {
        return `\`${identifier}\``;
    }
}
9 |
--------------------------------------------------------------------------------
/src/query-builder/dialects/default.ts:
--------------------------------------------------------------------------------
1 | import { AbstractDialect } from '../index';
2 |
// Fallback dialect: inherits every behavior from AbstractDialect unchanged.
export class DefaultDialect extends AbstractDialect {

}
--------------------------------------------------------------------------------
/src/query-builder/dialects/duckdb.ts:
--------------------------------------------------------------------------------
1 | import { AbstractDialect } from '../index';
2 |
export class DuckDbDialect extends AbstractDialect {
    // Emits only the bare table name, dropping any schema qualifier —
    // NOTE(review): presumably because DuckDB/MotherDuck connections here
    // operate inside a single attached database; confirm schemas are never
    // required for `md:` connections.
    formatSchemaAndTable(schema: string | undefined, table: string): string {
        return table;
    }
}
--------------------------------------------------------------------------------
/src/query-builder/dialects/mysql.ts:
--------------------------------------------------------------------------------
1 | import { AbstractDialect, ColumnDataType } from '../index';
2 |
3 | export class MySQLDialect extends AbstractDialect {
4 | escapeId(identifier: string): string {
5 | return identifier
6 | .split('.')
7 | .map((str) => {
8 | if (str === '*') return '*';
9 | return '`' + str.replace(/`/g, '``') + '`';
10 | })
11 | .join('.');
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/src/query-builder/dialects/postgres.ts:
--------------------------------------------------------------------------------
1 | import { AbstractDialect, ColumnDataType } from '../index';
2 | import { Query } from '../../query';
3 | import { QueryBuilderInternal } from '../../client';
4 | import { QueryType } from '../../query-params';
5 |
/**
 * PostgreSQL dialect. Currently identical to AbstractDialect; the
 * RETURNING support below is kept as a commented-out sketch.
 */
export class PostgresDialect extends AbstractDialect {
    // NOTE(review): if this sketch is revived, tidy the guard's precedence:
    // `builder.returning?.length ?? 0 > 0` parses as `length ?? (0 > 0)`.
    // It happens to behave (0 is falsy, undefined yields false) but the
    // intended grouping is `(builder.returning?.length ?? 0) > 0`.
    // insert(
    //     builder: QueryBuilderInternal,
    //     type: QueryType,
    //     query: Query
    // ): Query {
    //     query = super.insert(builder, type, query);
    //     if (builder.returning?.length ?? 0 > 0) {
    //         query.query += ` RETURNING ${builder.returning?.join(', ')}`;
    //     }
    //     return query;
    // }
}
19 |
--------------------------------------------------------------------------------
/src/query-builder/dialects/sqlite-dialect.ts:
--------------------------------------------------------------------------------
1 | import { AbstractDialect } from '..';
2 |
export class SqliteDialect extends AbstractDialect {
    // SQLite spells the keyword without an underscore (AUTOINCREMENT) —
    // presumably overrides the AbstractDialect default; confirm in base.
    protected AUTO_INCREMENT_KEYWORD = 'AUTOINCREMENT';
}
6 |
--------------------------------------------------------------------------------
/src/query-params.ts:
--------------------------------------------------------------------------------
/**
 * Database queries are constructed either by named or positional parameters.
 * Named parameters are used to identify the parameter by name in the query
 * and are replaced by the value of the parameter. Positional parameters are
 * used to identify the parameter by position in the query and are replaced
 * by the value of the parameter.
 */
export enum QueryType {
    // e.g. `SELECT * FROM t WHERE id = :id`
    named = 'named',
    // e.g. `SELECT * FROM t WHERE id = ?`
    positional = 'positional',
}
12 |
13 | export type QueryParamsNamed = Record
14 | export type QueryParamsPositional = any[]
15 | export type QueryParams = QueryParamsNamed | QueryParamsPositional
16 |
17 | export function isQueryParamsNamed(
18 | params?: QueryParams
19 | ): params is QueryParamsNamed {
20 | return typeof params === 'object' && !Array.isArray(params)
21 | }
22 |
23 | export function isQueryParamsPositional(
24 | params?: QueryParams
25 | ): params is QueryParamsPositional {
26 | return Array.isArray(params)
27 | }
28 |
--------------------------------------------------------------------------------
/src/query.ts:
--------------------------------------------------------------------------------
1 | import { QueryParamsPositional } from './query-params';
2 |
3 | export type Query = {
4 | query: string;
5 | parameters?: unknown[];
6 | };
7 |
8 | function rawQueryFromPositionalParams(query: Query): string {
9 | const params = query.parameters as QueryParamsPositional;
10 | let queryWithParams = query.query;
11 |
12 | for (let i = 0; i < params.length; i++) {
13 | const currentParam = params[i];
14 |
15 | if (typeof currentParam === 'string') {
16 | queryWithParams = queryWithParams.replace('?', `'${params[i]}'`);
17 | } else {
18 | queryWithParams = queryWithParams.replace('?', params[i]);
19 | }
20 | }
21 |
22 | return queryWithParams;
23 | }
24 |
25 | export function constructRawQuery(query: Query) {
26 | return rawQueryFromPositionalParams(query);
27 | }
28 |
--------------------------------------------------------------------------------
/src/utils/placeholder.ts:
--------------------------------------------------------------------------------
// Matches either a positional `?` placeholder or a named/numbered
// `:name` / `:123` placeholder (captured without the leading colon).
const RE_PARAM = /(?:\?)|(?::(\d+|(?:[a-zA-Z][a-zA-Z0-9_]*)))/g,
    DQUOTE = 34, // "
    SQUOTE = 39, // '
    BSLASH = 92; // \

/**
 * This code is based on https://github.com/mscdex/node-mariasql/blob/master/lib/Client.js#L296-L420
 * License: https://github.com/mscdex/node-mariasql/blob/master/LICENSE
 *
 * Splits a SQL string on its placeholders, skipping any `?` / `:name`
 * sequences that sit inside single- or double-quoted literals.
 *
 * @param query
 * @returns `[query]` when no placeholders were found; otherwise
 *          `[fragments, tokens]` where `tokens[i]` is a number (counter for
 *          a positional `?`) or a string (name of a `:param`) that sits
 *          between `fragments[i]` and `fragments[i + 1]`.
 */
function parse(query: string): [string] | [string[], (string | number)[]] {
    let ppos = RE_PARAM.exec(query);
    let curpos = 0;
    let start = 0;
    let end;
    const parts = [];
    let inQuote = false;
    let escape = false;
    let qchr;
    const tokens = [];
    let qcnt = 0;
    let lastTokenEndPos = 0;
    let i;

    if (ppos) {
        do {
            // Scan the text between the previous match and this one to keep
            // the in-quote / escape state up to date.
            for (i = curpos, end = ppos.index; i < end; ++i) {
                let chr = query.charCodeAt(i);
                if (chr === BSLASH) escape = !escape;
                else {
                    if (escape) {
                        escape = false;
                        continue;
                    }
                    if (inQuote && chr === qchr) {
                        if (query.charCodeAt(i + 1) === qchr) {
                            // quote escaped via "" or ''
                            ++i;
                            continue;
                        }
                        inQuote = false;
                    } else if (!inQuote && (chr === DQUOTE || chr === SQUOTE)) {
                        inQuote = true;
                        qchr = chr;
                    }
                }
            }
            // Only treat the match as a real placeholder when it is not
            // inside a quoted string literal.
            if (!inQuote) {
                parts.push(query.substring(start, end));
                tokens.push(ppos[0].length === 1 ? qcnt++ : ppos[1]);
                start = end + ppos[0].length;
                lastTokenEndPos = start;
            }
            curpos = end + ppos[0].length;
        } while ((ppos = RE_PARAM.exec(query)));
        // Note: running exec() until it returns null resets the shared
        // RE_PARAM.lastIndex, so reuse across calls is safe.

        if (tokens.length) {
            // Append any trailing SQL after the last placeholder.
            if (curpos < query.length) {
                parts.push(query.substring(lastTokenEndPos));
            }
            return [parts, tokens];
        }
    }
    return [query];
}
68 |
69 | export function namedPlaceholder(
70 | query: string,
71 | params: Record,
72 | numbered = false
73 | ): { query: string; bindings: unknown[] } {
74 | const parts = parse(query);
75 |
76 | if (parts.length === 1) {
77 | return { query, bindings: [] };
78 | }
79 |
80 | const bindings = [];
81 | let newQuery = '';
82 |
83 | const [sqlFragments, placeholders] = parts;
84 |
85 | // If placeholders contains any number, then it's a mix of named and numbered placeholders
86 | if (placeholders.some((p) => typeof p === 'number')) {
87 | throw new Error(
88 | 'Mixing named and positional placeholder should throw error'
89 | );
90 | }
91 |
92 | for (let i = 0; i < sqlFragments.length; i++) {
93 | newQuery += sqlFragments[i];
94 |
95 | if (i < placeholders.length) {
96 | const key = placeholders[i];
97 |
98 | if (numbered) {
99 | newQuery += `$${i + 1}`;
100 | } else {
101 | newQuery += `?`;
102 | }
103 |
104 | const placeholderValue = params[key];
105 | if (placeholderValue === undefined) {
106 | throw new Error(`Missing value for placeholder ${key}`);
107 | }
108 |
109 | bindings.push(params[key]);
110 | }
111 | }
112 |
113 | return { query: newQuery, bindings };
114 | }
115 |
116 | export function toNumberedPlaceholders(
117 | query: string,
118 | params: unknown[]
119 | ): {
120 | query: string;
121 | bindings: unknown[];
122 | } {
123 | const parts = parse(query);
124 |
125 | if (parts.length === 1) {
126 | return { query, bindings: [] };
127 | }
128 |
129 | const bindings = [];
130 | let newQuery = '';
131 |
132 | const [sqlFragments, placeholders] = parts;
133 |
134 | if (placeholders.length !== params.length) {
135 | throw new Error(
136 | 'Number of positional placeholder should match with the number of values'
137 | );
138 | }
139 |
140 | // Mixing named and numbered placeholders should throw error
141 | if (placeholders.some((p) => typeof p === 'string')) {
142 | throw new Error(
143 | 'Mixing named and positional placeholder should throw error'
144 | );
145 | }
146 |
147 | for (let i = 0; i < sqlFragments.length; i++) {
148 | newQuery += sqlFragments[i];
149 |
150 | if (i < placeholders.length) {
151 | newQuery += `$${i + 1}`;
152 | bindings.push(params[i]);
153 | }
154 | }
155 |
156 | return { query: newQuery, bindings };
157 | }
158 |
--------------------------------------------------------------------------------
/src/utils/transformer.ts:
--------------------------------------------------------------------------------
1 | /**
2 | * Provides several functions to help transform common
3 | * database result format into our own query result formation
4 | */
5 |
6 | import { ColumnHeader, ResultSet } from '@outerbase/sdk-transform';
7 | import { QueryResult } from './../connections';
8 |
9 | export function transformFromSdkTransform(
10 | result: ResultSet
11 | ): QueryResult {
12 | const { rows, ...rest } = result;
13 |
14 | return {
15 | data: rows as T[],
16 | error: null,
17 | query: '',
18 | ...rest,
19 | };
20 | }
21 |
22 | export function createErrorResult(
23 | message: string
24 | ): QueryResult {
25 | return {
26 | data: [],
27 | error: { message, name: 'Error' },
28 | query: '',
29 | headers: [],
30 | stat: {
31 | queryDurationMs: 0,
32 | rowsAffected: 0,
33 | rowsRead: 0,
34 | rowsWritten: 0,
35 | },
36 | };
37 | }
38 |
39 | export function transformObjectBasedResult(
40 | arr: Record[]
41 | ): QueryResult {
42 | const usedColumnName = new Set();
43 | const columns: ColumnHeader[] = [];
44 |
45 | // Build the headers based on rows
46 | arr.forEach((row) => {
47 | Object.keys(row).forEach((key) => {
48 | if (!usedColumnName.has(key)) {
49 | usedColumnName.add(key);
50 | columns.push({
51 | name: key,
52 | displayName: key,
53 | originalType: null,
54 | });
55 | }
56 | });
57 | });
58 |
59 | return {
60 | data: arr,
61 | headers: columns,
62 | error: null,
63 | query: '',
64 | stat: {
65 | queryDurationMs: 0,
66 | rowsAffected: 0,
67 | rowsRead: 0,
68 | rowsWritten: 0,
69 | },
70 | };
71 | }
72 |
73 | export function transformObjectBasedResultFirstRow(
74 | arr: Record[]
75 | ): QueryResult {
76 | if (arr.length === 0) {
77 | return {
78 | data: [],
79 | headers: [],
80 | error: null,
81 | query: '',
82 | stat: {
83 | queryDurationMs: 0,
84 | rowsAffected: 0,
85 | rowsRead: 0,
86 | rowsWritten: 0,
87 | },
88 | };
89 | }
90 |
91 | const row = arr[0];
92 | const columns: ColumnHeader[] = [];
93 |
94 | return {
95 | data: arr,
96 | headers: Object.keys(row).map((key) => ({
97 | name: key,
98 | displayName: key,
99 | originalType: null,
100 | })),
101 | error: null,
102 | query: '',
103 | stat: {
104 | queryDurationMs: 0,
105 | rowsAffected: 0,
106 | rowsRead: 0,
107 | rowsWritten: 0,
108 | },
109 | };
110 | }
111 |
112 | export function createOkResult() {
113 | return {
114 | data: [],
115 | error: null,
116 | query: '',
117 | headers: [],
118 | stat: {
119 | queryDurationMs: 0,
120 | rowsAffected: 0,
121 | rowsRead: 0,
122 | rowsWritten: 0,
123 | },
124 | };
125 | }
126 |
--------------------------------------------------------------------------------
/tests/connections/connection.test.ts:
--------------------------------------------------------------------------------
1 | import { ColumnDataType } from '../../src/query-builder';
2 | import createTestClient from './create-test-connection';
const { client: db, defaultSchema: DEFAULT_SCHEMA } = createTestClient();

// Some drivers are just too slow such as Cloudflare and BigQuery
jest.setTimeout(10000);

// Start from a clean slate: connect once, then drop every table a previous
// run may have left in the default schema.
beforeAll(async () => {
    await db.connect();

    // Clean up all tables
    const schemaList = await db.fetchDatabaseSchema();
    const currentSchema = schemaList[DEFAULT_SCHEMA] ?? {};

    for (const table of Object.values(currentSchema)) {
        await db.dropTable(DEFAULT_SCHEMA, table.name)
    }
});

afterAll(async () => {
    await db.disconnect();
});

// Pace the slow/quota-bound cloud drivers between tests.
afterEach(async () => {
    if (
        ['cloudflare', 'bigquery'].includes(process.env.CONNECTION_TYPE ?? '')
    ) {
        // 3 seconds delay on each operation
        await new Promise((r) => setTimeout(r, 3000));
    }
});
32 |
33 | function cleanup(data: Record[]) {
34 | // Remove some database specified fields
35 | return data.map((d) => {
36 | const { _id, ...rest } = d;
37 | return rest;
38 | });
39 | }
40 |
41 | describe('Database Connection', () => {
42 | test('Support named parameters', async () => {
43 | if (process.env.CONNECTION_TYPE === 'mongodb') return;
44 |
45 | const sql =
46 | process.env.CONNECTION_TYPE === 'mysql'
47 | ? 'SELECT CONCAT(:hello, :world) AS testing_word'
48 | : 'SELECT (:hello || :world) AS testing_word';
49 |
50 | const { data } = await db.raw(sql, {
51 | hello: 'hello ',
52 | world: 'world',
53 | });
54 |
55 | if (process.env.CONNECTION_TYPE === 'snowflake') {
56 | expect(data).toEqual([{ TESTING_WORD: 'hello world' }]);
57 | } else {
58 | expect(data).toEqual([{ testing_word: 'hello world' }]);
59 | }
60 | });
61 |
62 | test('Support positional placeholder', async () => {
63 | if (process.env.CONNECTION_TYPE === 'mongodb') return;
64 |
65 | const sql =
66 | process.env.CONNECTION_TYPE === 'mysql'
67 | ? 'SELECT CONCAT(?, ?) AS testing_word'
68 | : 'SELECT (? || ?) AS testing_word';
69 |
70 | const { data } = await db.raw(sql, ['hello ', 'world']);
71 |
72 | if (process.env.CONNECTION_TYPE === 'snowflake') {
73 | expect(data).toEqual([{ TESTING_WORD: 'hello world' }]);
74 | } else {
75 | expect(data).toEqual([{ testing_word: 'hello world' }]);
76 | }
77 | });
78 |
79 | test('Create table', async () => {
80 | const { error: createTableTeamError } = await db.createTable(
81 | DEFAULT_SCHEMA,
82 | 'teams',
83 | [
84 | {
85 | name: 'id',
86 | definition: {
87 | type: ColumnDataType.NUMBER,
88 | primaryKey: true,
89 | },
90 | },
91 | {
92 | name: 'name',
93 | definition: {
94 | type: ColumnDataType.STRING,
95 | },
96 | },
97 | ]
98 | );
99 |
100 | const { error: createTablePersonError } = await db.createTable(
101 | DEFAULT_SCHEMA,
102 | 'persons',
103 | [
104 | {
105 | name: 'id',
106 | definition: {
107 | type: ColumnDataType.NUMBER,
108 | primaryKey: true,
109 | },
110 | },
111 | {
112 | name: 'name',
113 | definition: {
114 | type: ColumnDataType.STRING,
115 | },
116 | },
117 | { name: 'age', definition: { type: ColumnDataType.NUMBER } },
118 | {
119 | name: 'team_id',
120 | definition: {
121 | type: ColumnDataType.NUMBER,
122 | references: {
123 | column: ['id'],
124 | table: 'teams',
125 | },
126 | },
127 | },
128 | ]
129 | );
130 |
131 | expect(createTableTeamError).not.toBeTruthy();
132 | expect(createTablePersonError).not.toBeTruthy();
133 | });
134 |
135 | test('Insert data', async () => {
136 | const { error: insertError } = await db.insertMany(
137 | DEFAULT_SCHEMA,
138 | 'teams',
139 | [{ id: 1, name: 'Avenger' }]
140 | );
141 |
142 | const { error: insertError2 } = await db.insertMany(
143 | DEFAULT_SCHEMA,
144 | 'persons',
145 | [
146 | { id: 1, name: 'Visal', age: 25, team_id: 1 },
147 | { id: 2, name: 'Outerbase', age: 30, team_id: 1 },
148 | ]
149 | );
150 |
151 | expect(insertError).not.toBeTruthy();
152 | expect(insertError2).not.toBeTruthy();
153 | }, 20000);
154 |
155 | // Check schema must be done AFTER insert data
156 | // because some NoSQL database does not have schema
157 | // their schema is based on the data in the collection
158 | test('Check the schema', async () => { // Verifies fetchDatabaseSchema() reports the columns, FK and PKs created earlier.
159 | const schemas = await db.fetchDatabaseSchema();
160 |
161 | // Column names are sorted for easier comparison
162 | const expectedSchema = {
163 | [DEFAULT_SCHEMA]: {
164 | persons: {
165 | columns: [
166 | // MongoDB comes with _id by default
167 | process.env.CONNECTION_TYPE === 'mongodb'
168 | ? '_id'
169 | : undefined,
170 |
171 | // Actual columns
172 | 'age',
173 | 'id',
174 | 'name',
175 | 'team_id',
176 | ].filter(Boolean),
177 | },
178 | teams: {
179 | columns: [
180 | process.env.CONNECTION_TYPE === 'mongodb'
181 | ? '_id'
182 | : undefined,
183 | 'id',
184 | 'name',
185 | ].filter(Boolean),
186 | },
187 | },
188 | };
189 |
190 | // We only care about the columns for this test
191 | const actualSchema = Object.entries(schemas).reduce(
192 | (a, [schemaName, schemaTables]) => {
193 | a[schemaName] = Object.entries(schemaTables).reduce(
194 | (b, [tableName, table]) => {
195 | b[tableName] = {
196 | columns: table.columns
197 | .map((column) => column.name)
198 | .sort(),
199 | };
200 | return b;
201 | },
202 | {} as Record<string, { columns: string[] }> // restored generic args (stripped in the dump; `{} as Record` is invalid TS)
203 | );
204 |
205 | return a;
206 | },
207 | {} as Record<string, Record<string, { columns: string[] }>> // restored generic args for the outer accumulator
208 | );
209 |
210 | expect(actualSchema).toEqual(expectedSchema);
211 |
212 | // Check teams and persons table reference
213 | if (
214 | !['bigquery', 'mongodb', 'motherduck'].includes(
215 | process.env.CONNECTION_TYPE!
216 | )
217 | ) {
218 | expect(
219 | schemas[DEFAULT_SCHEMA].persons!.columns!.find(
220 | (c) => c.name === 'team_id'
221 | )!.definition.references
222 | ).toEqual({
223 | column: ['id'],
224 | table: 'teams',
225 | });
226 |
227 | // This is to make sure it works with Outerbase ERD
228 | const fkConstraint = schemas[
229 | DEFAULT_SCHEMA
230 | ].persons.constraints.find((c) => c.type === 'FOREIGN KEY');
231 | expect(fkConstraint).toBeTruthy();
232 |
233 | expect(fkConstraint!.columns[0].columnName).toBe('team_id');
234 | expect(fkConstraint!.referenceTableName).toBe('teams');
235 | expect(fkConstraint!.columns[0].referenceColumnName).toBe('id');
236 | }
237 |
238 | // Check the primary key
239 | if (process.env.CONNECTION_TYPE !== 'mongodb') {
240 | const pkList = Object.values(schemas[DEFAULT_SCHEMA])
241 | .map((c) => c.constraints)
242 | .flat()
243 | .filter((c) => c.type === 'PRIMARY KEY')
244 | .map((constraint) =>
245 | constraint.columns.map(
246 | (column) =>
247 | `${constraint.tableName}.${column.columnName}`
248 | )
249 | )
250 | .flat()
251 | .sort(); // sorted so the ordering of tables/constraints does not matter
252 |
253 | expect(pkList).toEqual(['persons.id', 'teams.id']);
254 | }
255 | });
256 |
257 | test('Select data', async () => { // Reads back the seeded rows; cleanup() presumably strips backend-specific fields — TODO confirm.
258 | const { data, count } = await db.select(DEFAULT_SCHEMA, 'persons', {
259 | orderBy: ['id'],
260 | });
261 |
262 | expect(cleanup(data)).toEqual([
263 | { id: 1, name: 'Visal', age: 25, team_id: 1 },
264 | { id: 2, name: 'Outerbase', age: 30, team_id: 1 },
265 | ]);
266 |
267 | // For mongodb, there is _id column. It should be string
268 | if (process.env.CONNECTION_TYPE === 'mongodb') {
269 | expect(typeof data[0]._id).toBe('string');
270 |
271 | // We should able to select data via _id
272 | const { data: dataById } = await db.select(
273 | DEFAULT_SCHEMA,
274 | 'persons',
275 | {
276 | where: [{ name: '_id', operator: '=', value: data[0]._id }],
277 | }
278 | );
279 |
280 | expect(dataById).toEqual([data[0]]);
281 | }
282 |
283 | expect(count).toBeUndefined(); // count is only populated when includeCounting is requested (see next test).
284 | });
285 |
286 | test('[Mongodb] Execute raw query', async () => { // MongoDB-only: raw() accepts a mongo-shell style query string.
287 | if (process.env.CONNECTION_TYPE !== 'mongodb') return;
288 |
289 | const { data } = await db.raw('db.persons.find()');
290 | expect(cleanup(data)).toEqual([
291 | { id: 1, name: 'Visal', age: 25, team_id: 1 },
292 | { id: 2, name: 'Outerbase', age: 30, team_id: 1 },
293 | ]);
294 | });
295 |
296 | test('Select data with count', async () => { // includeCounting flips on the total-row count in the result.
297 | const { count } = await db.select(DEFAULT_SCHEMA, 'persons', {
298 | includeCounting: true,
299 | });
300 | expect(count).toEqual(2);
301 | }, 10000); // counting may need an extra round-trip; allow more time than the default.
302 |
303 | test('Select from non-existing table should return error', async () => { // Errors must surface via the result object, not a throw.
304 | // MongoDB does not show error when selecting from non-existing collection
305 | if (process.env.CONNECTION_TYPE === 'mongodb') return;
306 |
307 | const { error } = await db.select(
308 | DEFAULT_SCHEMA,
309 | 'non_existing_table',
310 | {}
311 | );
312 |
313 | expect(error).toBeTruthy();
314 |
315 | // It should contain friendly text error message, instead of just
316 | // some generic error message
317 | expect(error?.message).toContain('non_existing_table');
318 | });
319 |
320 | test('Update data', async () => { // update(schema, table, values, where): renames person #1 and verifies the other row is untouched.
321 | await db.update(
322 | DEFAULT_SCHEMA,
323 | 'persons',
324 | { name: 'Visal In' },
325 | { id: 1 }
326 | );
327 |
328 | const { data } = await db.select(DEFAULT_SCHEMA, 'persons', {
329 | orderBy: ['id'],
330 | limit: 1000,
331 | offset: 0,
332 | });
333 |
334 | expect(cleanup(data)).toEqual([
335 | { id: 1, name: 'Visal In', age: 25, team_id: 1 },
336 | { id: 2, name: 'Outerbase', age: 30, team_id: 1 },
337 | ]);
338 | });
339 |
340 | test('Rename table column', async () => { // name -> full_name; later tests rely on the column keeping this new name.
341 | const { error } = await db.renameColumn(
342 | DEFAULT_SCHEMA,
343 | 'persons',
344 | 'name',
345 | 'full_name'
346 | );
347 |
348 | expect(error).not.toBeTruthy();
349 |
350 | const { data } = await db.select(DEFAULT_SCHEMA, 'persons', {
351 | orderBy: ['id'],
352 | limit: 1000,
353 | offset: 0,
354 | });
355 |
356 | expect(cleanup(data)).toEqual([
357 | { id: 1, full_name: 'Visal In', age: 25, team_id: 1 }, // data is preserved under the renamed column
358 | { id: 2, full_name: 'Outerbase', age: 30, team_id: 1 },
359 | ]);
360 | });
361 |
362 | test('Add and drop table column', async () => { // Round-trips addColumn/dropColumn and checks row data both ways.
363 | // Skip Mongodb because it does not have schema
364 | if (process.env.CONNECTION_TYPE === 'mongodb') return;
365 |
366 | const { error } = await db.addColumn(
367 | DEFAULT_SCHEMA,
368 | 'persons',
369 | 'email',
370 | {
371 | type: ColumnDataType.STRING,
372 | }
373 | );
374 |
375 | expect(error).not.toBeTruthy();
376 |
377 | const { data } = await db.select(DEFAULT_SCHEMA, 'persons', {
378 | orderBy: ['id'],
379 | });
380 |
381 | expect(cleanup(data)).toEqual([
382 | {
383 | id: 1,
384 | full_name: 'Visal In',
385 | age: 25,
386 | email: null, // newly added column is expected to be NULL on existing rows
387 | team_id: 1,
388 | },
389 | {
390 | id: 2,
391 | full_name: 'Outerbase',
392 | age: 30,
393 | email: null,
394 | team_id: 1,
395 | },
396 | ]);
397 |
398 | // Remove the column
399 | await db.dropColumn(DEFAULT_SCHEMA, 'persons', 'email');
400 |
401 | const { data: data2 } = await db.select(DEFAULT_SCHEMA, 'persons', {
402 | orderBy: ['id'],
403 | });
404 |
405 | expect(cleanup(data2)).toEqual([
406 | {
407 | id: 1,
408 | full_name: 'Visal In',
409 | age: 25,
410 | team_id: 1,
411 | },
412 | {
413 | id: 2,
414 | full_name: 'Outerbase',
415 | age: 30,
416 | team_id: 1,
417 | },
418 | ]);
419 | });
420 |
421 | test('Rename table name', async () => { // persons -> people and back, so later tests still see 'persons'.
422 | // Skip BigQuery because you cannot rename table with
423 | // primary key column
424 | if (process.env.CONNECTION_TYPE === 'bigquery') return;
425 |
426 | const { error } = await db.renameTable(
427 | DEFAULT_SCHEMA,
428 | 'persons',
429 | 'people'
430 | );
431 |
432 | expect(error).not.toBeTruthy();
433 |
434 | const { data } = await db.select(DEFAULT_SCHEMA, 'people', {
435 | orderBy: ['id'],
436 | });
437 |
438 | expect(cleanup(data).length).toEqual(2); // rows survive the rename
439 |
440 | // Revert the operation back
441 | await db.renameTable(DEFAULT_SCHEMA, 'people', 'persons');
442 | });
443 |
444 | test('Delete a row', async () => { // delete(schema, table, where): removes person #1, leaving only #2.
445 | await db.delete(DEFAULT_SCHEMA, 'persons', { id: 1 });
446 |
447 | const { data } = await db.select(DEFAULT_SCHEMA, 'persons', {
448 | orderBy: ['id'],
449 | });
450 |
451 | expect(cleanup(data)).toEqual([
452 | {
453 | id: 2,
454 | full_name: 'Outerbase',
455 | age: 30,
456 | team_id: 1,
457 | },
458 | ]);
459 | });
460 | });
461 |
--------------------------------------------------------------------------------
/tests/connections/create-test-connection.ts:
--------------------------------------------------------------------------------
1 | import { Client as PgClient } from 'pg';
2 | import { BigQuery } from '@google-cloud/bigquery';
3 | import duckDB from 'duckdb';
4 | import snowflake from 'snowflake-sdk';
5 | import { createClient as createTursoConnection } from '@libsql/client';
6 | import { createConnection as createMySqlConnection } from 'mysql2';
7 | import {
8 | Connection,
9 | PostgreSQLConnection,
10 | MySQLConnection,
11 | BigQueryConnection,
12 | TursoConnection,
13 | CloudflareD1Connection,
14 | MongoDBConnection,
15 | DuckDBConnection,
16 | StarbaseConnection,
17 | SnowflakeConnection,
18 | } from '../../src';
19 | import { MongoClient } from 'mongodb';
20 | /** Builds the { client, defaultSchema } pair under test from CONNECTION_TYPE and its backend-specific env credentials. */
21 | export default function createTestClient(): {
22 | client: Connection;
23 | defaultSchema: string;
24 | } {
25 | if (process.env.CONNECTION_TYPE === 'postgres') {
26 | const client = new PostgreSQLConnection(
27 | new PgClient({
28 | host: process.env.POSTGRES_HOST,
29 | port: Number(process.env.POSTGRES_PORT),
30 | user: process.env.POSTGRES_USER,
31 | password: process.env.POSTGRES_PASSWORD,
32 | database: process.env.POSTGRES_DB,
33 | })
34 | );
35 |
36 | return {
37 | client,
38 | defaultSchema: process.env.POSTGRES_DEFAULT_SCHEMA || 'public',
39 | };
40 | } else if (process.env.CONNECTION_TYPE === 'mysql') {
41 | const client = new MySQLConnection(
42 | createMySqlConnection({
43 | host: process.env.MYSQL_HOST,
44 | port: Number(process.env.MYSQL_PORT),
45 | user: process.env.MYSQL_USER,
46 | password: process.env.MYSQL_PASSWORD,
47 | database: process.env.MYSQL_DB,
48 | })
49 | );
50 | return {
51 | client,
52 | defaultSchema: process.env.MYSQL_DEFAULT_SCHEMA || 'public',
53 | };
54 | } else if (process.env.CONNECTION_TYPE === 'bigquery') {
55 | const client = new BigQueryConnection(
56 | new BigQuery({
57 | projectId: process.env.BIGQUERY_PROJECT_ID,
58 | credentials: {
59 | client_email: process.env.BIGQUERY_CLIENT_EMAIL,
60 | private_key: process.env.BIGQUERY_PRIVATE_KEY,
61 | },
62 | })
63 | );
64 |
65 | return {
66 | client,
67 | defaultSchema: process.env.BIGQUERY_DEFAULT_SCHEMA || 'public',
68 | };
69 | } else if (process.env.CONNECTION_TYPE === 'turso') {
70 | const client = new TursoConnection(
71 | createTursoConnection({ url: ':memory:' }) // in-memory libSQL: no external service needed
72 | );
73 | return { client, defaultSchema: 'main' };
74 | } else if (process.env.CONNECTION_TYPE === 'cloudflare') {
75 | const client = new CloudflareD1Connection({
76 | apiKey: process.env.CLOUDFLARE_API_KEY as string,
77 | accountId: process.env.CLOUDFLARE_ACCOUNT_ID as string,
78 | databaseId: process.env.CLOUDFLARE_DATABASE_ID as string,
79 | });
80 | return { client, defaultSchema: 'main' };
81 | } else if (process.env.CONNECTION_TYPE === 'mongodb') {
82 | const client = new MongoDBConnection(
83 | new MongoClient(process.env.MONGODB_URI as string),
84 | process.env.MONGODB_DB_NAME as string
85 | );
86 | return { client, defaultSchema: process.env.MONGODB_DB_NAME as string };
87 | } else if (process.env.CONNECTION_TYPE === 'motherduck') {
88 | const client = new DuckDBConnection(
89 | process.env.MOTHERDUCK_PATH // fall back to a local in-memory DuckDB when no MotherDuck path is configured
90 | ? new duckDB.Database(process.env.MOTHERDUCK_PATH, {
91 | motherduck_token: process.env.MOTHERDUCK_TOKEN as string,
92 | })
93 | : new duckDB.Database(':memory:')
94 | );
95 | return { client, defaultSchema: 'main' };
96 | } else if (process.env.CONNECTION_TYPE === 'starbase') {
97 | const client = new StarbaseConnection({
98 | apiKey: process.env.STARBASEDB_TOKEN as string,
99 | url: process.env.STARBASEDB_URL as string,
100 | });
101 |
102 | return { client, defaultSchema: 'main' };
103 | } else if (process.env.CONNECTION_TYPE === 'snowflake') {
104 | const client = new SnowflakeConnection(snowflake.createConnection({
105 | database: process.env.SNOWFLAKE_DATABASE as string,
106 | username: process.env.SNOWFLAKE_USERNAME as string,
107 | password: process.env.SNOWFLAKE_PASSWORD as string,
108 | account: process.env.SNOWFLAKE_ACCOUNT_ID as string,
109 | warehouse: process.env.SNOWFLAKE_WAREHOUSE as string, // fixed typo: was SNOKWFLAKE_WAREHOUSE, which always read undefined
110 | }));
111 |
112 | return { client, defaultSchema: 'PUBLIC' }; // quotes/semicolon normalized to match the rest of the file
113 | }
114 |
115 | throw new Error('Invalid connection type');
116 | }
117 |
--------------------------------------------------------------------------------
/tests/connections/postgres.test.ts:
--------------------------------------------------------------------------------
1 | import createTestClient from './create-test-connection';
2 | const { client: db, defaultSchema: DEFAULT_SCHEMA } = createTestClient(); // Shared connection for this suite, selected by the CONNECTION_TYPE env var.
3 |
4 | // Open the shared connection once up front; non-postgres runs skip this suite entirely.
5 | beforeAll(async () => {
6 | if (process.env.CONNECTION_TYPE === 'postgres') await db.connect();
7 | });
8 |
9 | // Close the shared connection again once the suite has finished.
10 | afterAll(async () => {
11 | if (process.env.CONNECTION_TYPE === 'postgres') await db.disconnect();
12 | });
13 |
14 | describe("Postgres Specified Tests", () => { // Postgres-only type-mapping checks; every test self-skips on other backends.
15 | test("Test timestamp data type", async () => { // TIMESTAMP/DATE values should round-trip as plain strings, not JS Dates.
16 | if (process.env.CONNECTION_TYPE !== 'postgres') return;
17 |
18 | await db.raw(`CREATE TABLE table_ts(
19 | id SERIAL PRIMARY KEY,
20 | ts TIMESTAMP,
21 | date_column DATE
22 | )`)
23 |
24 | await db.insert(DEFAULT_SCHEMA, 'table_ts', {
25 | id: 123,
26 | ts: '2022-10-10 11:30:30',
27 | date_column: '2022-10-10 00:00:00'
28 | });
29 |
30 | await db.insert(DEFAULT_SCHEMA, 'table_ts', {
31 | id: 124,
32 | ts: null,
33 | date_column: null
34 | });
35 |
36 | const rows = await db.select(DEFAULT_SCHEMA, 'table_ts', {});
37 |
38 | expect(rows.data.find(row => row.id === 123)).toEqual({
39 | id: 123,
40 | date_column: '2022-10-10', // DATE comes back date-only, time portion dropped
41 | ts:
42 | '2022-10-10 11:30:30'
43 | });
44 |
45 | expect(rows.data.find(row => row.id === 124)).toEqual({
46 | id: 124,
47 | date_column: null,
48 | ts: null
49 | });
50 | });
51 |
52 | test("Test JSON data type", async () => { // JSON columns should round-trip as the exact serialized string.
53 | if (process.env.CONNECTION_TYPE !== 'postgres') return;
54 |
55 | await db.raw(`CREATE TABLE table_json(
56 | id SERIAL PRIMARY KEY,
57 | data_json JSON
58 | )`)
59 |
60 | const jsonData = JSON.stringify({
61 | name: 'Outerbase',
62 | age: 1000
63 | })
64 |
65 | await db.insert(DEFAULT_SCHEMA, 'table_json', {
66 | id: 123,
67 | data_json: jsonData
68 | });
69 |
70 | await db.insert(DEFAULT_SCHEMA, 'table_json', {
71 | id: 124,
72 | data_json: null
73 | });
74 |
75 | const rows = await db.select(DEFAULT_SCHEMA, 'table_json', {});
76 |
77 | expect(rows.data.find(row => row.id === 123)).toEqual({
78 | id: 123,
79 | data_json: jsonData
80 | });
81 |
82 | expect(rows.data.find(row => row.id === 124)).toEqual({
83 | id: 124,
84 | data_json: null,
85 | });
86 | });
87 | }) // NOTE(review): created tables are never dropped — presumably fine for throwaway CI databases; confirm.
--------------------------------------------------------------------------------
/tests/units/placeholder.test.ts:
--------------------------------------------------------------------------------
1 | import {
2 | namedPlaceholder,
3 | toNumberedPlaceholders,
4 | } from './../../src/utils/placeholder';
5 |
6 | test('Positional placeholder', () => { // Despite the title, this converts *named* params (:id) into positional '?' markers.
7 | expect(
8 | namedPlaceholder('SELECT * FROM users WHERE id = :id AND age > :age', {
9 | id: 1,
10 | age: 50,
11 | })
12 | ).toEqual({
13 | query: 'SELECT * FROM users WHERE id = ? AND age > ?',
14 | bindings: [1, 50],
15 | });
16 | });
17 |
18 | test('Positional placeholder inside the string should be ignored', () => { // :email sits inside a quoted literal, so it must survive untouched.
19 | expect(
20 | namedPlaceholder(
21 | 'SELECT * FROM users WHERE name = :name AND email = ":email"',
22 | {
23 | name: 'John',
24 | }
25 | )
26 | ).toEqual({
27 | query: 'SELECT * FROM users WHERE name = ? AND email = ":email"',
28 | bindings: ['John'],
29 | });
30 | });
31 |
32 | test('Named placeholder to number placeholder', () => { // third argument `true` switches output to $1/$2-style numbered markers.
33 | expect(
34 | namedPlaceholder(
35 | 'SELECT * FROM users WHERE id = :id AND age > :age',
36 | {
37 | id: 1,
38 | age: 30,
39 | },
40 | true
41 | )
42 | ).toEqual({
43 | query: 'SELECT * FROM users WHERE id = $1 AND age > $2',
44 | bindings: [1, 30],
45 | });
46 | });
47 |
48 | test('Named placeholder to number placeholder with string', () => {
49 | expect(
50 | namedPlaceholder(
51 | 'SELECT * FROM users WHERE id = :id AND email = ":email"',
52 | {
53 | id: 1,
54 | },
55 | true
56 | )
57 | ).toEqual({
58 | query: 'SELECT * FROM users WHERE id = $1 AND email = ":email"',
59 | bindings: [1],
60 | });
61 | });
62 |
63 | test('Named placeholder with missing value should throw an error', () => { // :age has no value supplied
64 | expect(() =>
65 | namedPlaceholder('SELECT * FROM users WHERE id = :id AND age > :age', {
66 | id: 1,
67 | })
68 | ).toThrow();
69 | });
70 |
71 | test('Number of positional placeholder should match with the number of values', () => { // two '?' but only one binding
72 | expect(() =>
73 | toNumberedPlaceholders('SELECT * FROM users WHERE id = ? AND age > ?', [
74 | 1,
75 | ])
76 | ).toThrow();
77 | });
78 |
79 | test('Mixing named and positional placeholder should throw error', () => { // a query may use one placeholder style, never both
80 | expect(() =>
81 | namedPlaceholder('SELECT * FROM users WHERE id = :id AND age > ?', {
82 | id: 1,
83 | })
84 | ).toThrow();
85 |
86 | expect(() => {
87 | toNumberedPlaceholders(
88 | `SELECT * FROM users WHERE id = ? AND age > :age`,
89 | [1, 30]
90 | );
91 | }).toThrow();
92 | });
93 |
94 | test('Convert positional placeholder to numbered placeholder', () => { // '?' inside quoted literals must be left alone
95 | expect(
96 | toNumberedPlaceholders(
97 | `SELECT * FROM users WHERE id = ? AND email = '?' AND name = 'Outer""base' AND age > ?`,
98 | [1, 30]
99 | )
100 | ).toEqual({
101 | query: `SELECT * FROM users WHERE id = $1 AND email = '?' AND name = 'Outer""base' AND age > $2`,
102 | bindings: [1, 30],
103 | });
104 | });
105 |
--------------------------------------------------------------------------------
/tests/units/query-builder/postgre.test.ts:
--------------------------------------------------------------------------------
1 | import { describe, expect, test } from '@jest/globals';
2 | import { Outerbase } from '../../../src';
3 | import { DefaultDialect } from '../../../src/query-builder/dialects/default';
4 | import { SqlConnection } from '../../../src/connections/sql-base';
5 |
6 | // Fresh query builder bound to the default dialect for each test case.
7 | function qb() {
8 | const fakeConnection = { dialect: new DefaultDialect() } as SqlConnection;
9 | return Outerbase(fakeConnection);
10 | }
11 |
12 | describe('Query Builder - Postgre Dialect', () => { // Pure SQL-generation tests: assert query text + ordered parameter bindings.
13 | test('Select query without any filter', () => {
14 | const { query, parameters } = qb()
15 | .select()
16 | .from('public.users')
17 | .toQuery();
18 |
19 | expect(query).toBe('SELECT * FROM "public"."users"'); // 'schema.table' input is split and each part double-quoted
20 | expect(parameters).toEqual([]);
21 | });
22 |
23 | test('Select with where equals conditions', () => {
24 | const { query, parameters } = qb()
25 | .select()
26 | .from('public.users')
27 | .where({ id: 'visal' })
28 | .toQuery();
29 |
30 | expect(query).toBe('SELECT * FROM "public"."users" WHERE "id" = ?');
31 | expect(parameters).toEqual(['visal']);
32 | });
33 |
34 | test('Select with limit and offset', () => {
35 | const { query, parameters } = qb()
36 | .select()
37 | .from('public.users')
38 | .where({ id: 'visal' })
39 | .offset(10)
40 | .limit(20)
41 | .toQuery();
42 |
43 | expect(query).toBe(
44 | 'SELECT * FROM "public"."users" WHERE "id" = ? LIMIT ? OFFSET ?'
45 | );
46 | expect(parameters).toEqual(['visal', 20, 10]); // LIMIT binds before OFFSET regardless of builder call order
47 | });
48 |
49 | test('Select with where with custom condition', () => {
50 | const { query, parameters } = qb()
51 | .select('id', 'name')
52 | .from('users')
53 | .where('age', '>', 18)
54 | .toQuery();
55 |
56 | expect(query).toBe('SELECT "id", "name" FROM "users" WHERE "age" > ?');
57 | expect(parameters).toEqual([18]);
58 | });
59 |
60 | test('Select with where with OR', () => { // q.or()/q.and() compose condition trees for a single where()
61 | const q = qb();
62 | const { query, parameters } = q
63 | .select('id', 'name')
64 | .from('users')
65 | .where(
66 | q.or(q.where('age', '>', 18), q.where('gender', '=', 'female'))
67 | )
68 | .toQuery();
69 |
70 | expect(query).toBe(
71 | 'SELECT "id", "name" FROM "users" WHERE "age" > ? OR "gender" = ?'
72 | );
73 | expect(parameters).toEqual([18, 'female']);
74 | });
75 |
76 | test('Select with where with OR and AND', () => {
77 | const q = qb();
78 | const { query, parameters } = q
79 | .select('id', 'name')
80 | .from('users')
81 | .where(
82 | q.or(
83 | q.where('age', '>', 18),
84 | q.where('gender', '=', 'female'),
85 | q.and(q.where('active', '=', 1), q.where('deleted', '=', 0))
86 | )
87 | )
88 | .toQuery();
89 |
90 | expect(query).toBe(
91 | 'SELECT "id", "name" FROM "users" WHERE "age" > ? OR "gender" = ? OR ("active" = ? AND "deleted" = ?)'
92 | );
93 | expect(parameters).toEqual([18, 'female', 1, 0]);
94 | });
95 |
96 | test('Select with where simplify nested OR', () => { // OR nested inside OR must be flattened, not parenthesized
97 | const q = qb();
98 | const { query, parameters } = q
99 | .select('id', 'name')
100 | .from('users')
101 | .where(
102 | q.or(
103 | q.where('age', '>', 18),
104 | q.and(
105 | q.where('active', '=', 1),
106 | q.where('deleted', '=', 0)
107 | ),
108 | q.or(
109 | q.where('gender', '=', 'female'),
110 | q.where('planet', '=', 'earth')
111 | )
112 | )
113 | )
114 | .toQuery();
115 |
116 | expect(query).toBe(
117 | 'SELECT "id", "name" FROM "users" WHERE "age" > ? OR ("active" = ? AND "deleted" = ?) OR "gender" = ? OR "planet" = ?'
118 | );
119 | expect(parameters).toEqual([18, 1, 0, 'female', 'earth']);
120 | });
121 |
122 | test('Select with where simplify nested AND', () => { // AND-in-AND collapses to one flat chain
123 | const q = qb();
124 | const { query, parameters } = q
125 | .select('id', 'name')
126 | .from('users')
127 | .where(
128 | q.and(
129 | q.where('active', '=', 1),
130 | q.and(
131 | q.where('deleted', '=', 0),
132 | q.and(q.where('age', '=', 18))
133 | )
134 | )
135 | )
136 | .toQuery();
137 |
138 | expect(query).toBe(
139 | 'SELECT "id", "name" FROM "users" WHERE "active" = ? AND "deleted" = ? AND "age" = ?'
140 | );
141 | expect(parameters).toEqual([1, 0, 18]);
142 | });
143 |
144 | test('Select with where simplify OR and AND', () => { // single-child or() degrades to its condition; multiple where() calls join with AND
145 | const q = qb();
146 | const { query, parameters } = q
147 | .select('id', 'name')
148 | .from('users')
149 | .where(q.or(q.where('age', '>', 18)))
150 | .where(q.or(q.where('gender', '=', 'female')))
151 | .toQuery();
152 |
153 | expect(query).toBe(
154 | 'SELECT "id", "name" FROM "users" WHERE "age" > ? AND "gender" = ?'
155 | );
156 | expect(parameters).toEqual([18, 'female']);
157 | });
158 |
159 | test('Select with where, order by and limit', () => {
160 | const { query, parameters } = qb()
161 | .select('id', 'name')
162 | .from('users')
163 | .where('age', '>', 18)
164 | .orderBy('age', 'DESC')
165 | .limit(10)
166 | .toQuery();
167 |
168 | expect(query).toBe(
169 | `SELECT "id", "name" FROM "users" WHERE "age" > ? ORDER BY "age" DESC LIMIT ?`
170 | );
171 | expect(parameters).toEqual([18, 10]);
172 | });
173 |
174 | test('Update query without where condition', () => {
175 | const { query, parameters } = qb()
176 | .update({ last_name: 'Visal', banned: null, first_name: 'In' })
177 | .into('persons')
178 | .toQuery();
179 |
180 | expect(query).toBe(
181 | 'UPDATE "persons" SET "last_name" = ?, "banned" = NULL, "first_name" = ?' // null is inlined as NULL, not bound
182 | );
183 | expect(parameters).toEqual(['Visal', 'In']);
184 | });
185 |
186 | test('Update query where condition', () => {
187 | const { query, parameters } = qb()
188 | .update({ last_name: 'Visal', first_name: 'In' })
189 | .into('persons')
190 | .where({
191 | id: 123,
192 | active: 1,
193 | })
194 | .where('banned', 'IS', null) // IS NULL is rendered literally rather than bound
195 | .toQuery();
196 |
197 | expect(query).toBe(
198 | 'UPDATE "persons" SET "last_name" = ?, "first_name" = ? WHERE "id" = ? AND "active" = ? AND "banned" IS NULL'
199 | );
200 | expect(parameters).toEqual(['Visal', 'In', 123, 1]);
201 | });
202 |
203 | test('Update without data SHOULD throw error', () => { // empty object and undefined-only values both count as "no data"
204 | expect(() => {
205 | qb().update({}).into('persons').toQuery();
206 | }).toThrowError();
207 |
208 | expect(() => {
209 | qb().update({ first_name: undefined }).into('persons').toQuery();
210 | }).toThrowError();
211 | });
212 |
213 | test('Insert data', () => {
214 | const { query, parameters } = qb()
215 | .insert({ last_name: 'Visal', banned: null, first_name: 'In' }) // null inlined as NULL here too
216 | .into('persons')
217 | .toQuery();
218 |
219 | expect(query).toBe(
220 | 'INSERT INTO "persons"("last_name", "banned", "first_name") VALUES(?, NULL, ?)'
221 | );
222 | expect(parameters).toEqual(['Visal', 'In']);
223 | });
224 |
225 | test('Insert data empty data SHOULD throw error', () => {
226 | expect(() => {
227 | qb().insert({}).into('persons').toQuery();
228 | }).toThrowError();
229 |
230 | expect(() => {
231 | qb().insert({ first_name: undefined }).into('persons').toQuery();
232 | }).toThrowError();
233 | });
234 |
235 | test('Create table', () => {
236 | // Create table test for postgresql
237 | const { query } = qb()
238 | .createTable('persons')
239 | .column('id', { type: 'SERIAL', primaryKey: true })
240 | .column('first_name', { type: 'VARCHAR(50)' })
241 | .column('last_name', { type: 'VARCHAR(50)' })
242 | .toQuery();
243 |
244 | expect(query).toBe(
245 | 'CREATE TABLE IF NOT EXISTS "persons" ("id" SERIAL PRIMARY KEY, "first_name" VARCHAR(50), "last_name" VARCHAR(50))'
246 | );
247 | });
248 |
249 | test('Drop table', () => {
250 | const { query } = qb().dropTable('persons').toQuery();
251 | expect(query).toBe('DROP TABLE IF EXISTS "persons"');
252 | });
253 |
254 | test('Rename column', () => {
255 | const { query } = qb()
256 | .alterTable('persons')
257 | .renameColumn('first_name', 'full_name')
258 | .toQuery();
259 |
260 | expect(query).toBe(
261 | 'ALTER TABLE "persons" RENAME COLUMN "first_name" TO "full_name"'
262 | );
263 | });
264 | });
265 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "target": "es2017",
4 | "module": "commonjs",
5 | "moduleResolution": "node",
6 | "esModuleInterop": true,
7 | "forceConsistentCasingInFileNames": true,
8 | "skipLibCheck": true,
9 | "declaration": true,
10 | "declarationMap": true,
11 | "outDir": "./dist",
12 | "experimentalDecorators": true,
13 | "emitDecoratorMetadata": true,
14 | "strict": true,
15 | "baseUrl": "./",
16 | "paths": {
17 | "src/*": ["src/*"],
18 | "src": ["src"]
19 | }
20 | },
21 | "include": ["src/**/*"],
22 | "exclude": ["node_modules"]
23 | }
24 |
--------------------------------------------------------------------------------