├── .DS_Store ├── .github └── workflows │ └── deploy.yml ├── .gitignore ├── .vitepress ├── config.ts ├── knexDialects.ts └── theme │ ├── AlgoliaSearchBox.vue │ ├── Layout.vue │ ├── SqlDialectSelector.vue │ ├── SqlOutput.vue │ ├── ToggleDark.vue │ ├── dialect.js │ ├── index.js │ └── styles.css ├── README.md ├── package.json ├── scripts └── deploy-doc.sh ├── src ├── changelog.md ├── faq │ ├── index.md │ ├── recipes.md │ └── support.md ├── guide │ ├── extending.md │ ├── index.md │ ├── interfaces.md │ ├── migrations.md │ ├── query-builder.md │ ├── raw.md │ ├── ref.md │ ├── schema-builder.md │ ├── transactions.md │ └── utility.md ├── index.md └── public │ └── knex-logo.png └── yarn.lock /.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/knex/documentation/9b49f59e34297c212d10005e84770a8ffb7856a0/.DS_Store -------------------------------------------------------------------------------- /.github/workflows/deploy.yml: -------------------------------------------------------------------------------- 1 | name: Knex / Documentation deployment 2 | 3 | on: 4 | push: 5 | branches: 6 | - 'main' 7 | 8 | concurrency: 9 | group: ${{ github.workflow }} 10 | cancel-in-progress: true 11 | 12 | jobs: 13 | deploy: 14 | runs-on: ubuntu-latest 15 | 16 | steps: 17 | - uses: actions/checkout@v3 18 | - uses: actions/setup-node@v3 19 | with: 20 | node-version: 16.x 21 | 22 | # cache node_modules 23 | - name: Restore cached dependencies 24 | uses: actions/cache@v3 25 | id: yarn-cache 26 | with: 27 | path: | 28 | **/node_modules 29 | key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} 30 | restore-keys: | 31 | ${{ runner.os }}-yarn- 32 | 33 | # install dependencies if the cache did not hit 34 | - name: Install dependencies 35 | if: steps.yarn-cache.outputs.cache-hit != 'true' 36 | run: yarn --frozen-lockfile 37 | 38 | - name: Build documentation 39 | run: yarn build 40 | 41 | - name: Deploy to gh-pages 42 | uses: 
peaceiris/actions-gh-pages@v3 43 | with: 44 | github_token: ${{ secrets.GITHUB_TOKEN }} 45 | publish_dir: .vitepress/dist 46 | cname: knexjs.org 47 | force_orphan: true 48 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | .vitepress/dist 3 | -------------------------------------------------------------------------------- /.vitepress/config.ts: -------------------------------------------------------------------------------- 1 | import { defineConfig } from 'vitepress' 2 | import KnexDialectsPlugins from './knexDialects' 3 | 4 | export default defineConfig({ 5 | title: 'Knex.js', 6 | description: 'Beta knex.js documentation.', 7 | base: '/', 8 | srcDir: 'src', 9 | head: [ 10 | ["link", { rel: "icon", type: "image/png", href: "/knex-logo.png" }], 11 | ], 12 | themeConfig: { 13 | logo: '/knex-logo.png', 14 | repo: 'knex/knex', 15 | docsRepo: 'knex/documentation', 16 | docsDir: 'src', 17 | docsBranch: 'main', 18 | editLinks: true, 19 | editLinkText: 'Edit this page on GitHub', 20 | lastUpdated: 'Last Updated', 21 | nav: [ 22 | { text: 'Guide', link: '/guide/', activeMatch: '^/guide/' }, 23 | { 24 | text: 'F.A.Q.', 25 | link: '/faq/', 26 | }, 27 | { 28 | text: 'Changelog', 29 | link: '/changelog.html', 30 | } 31 | ], 32 | sidebar: { 33 | '/guide/': getGuideSidebar(), 34 | '/faq/': getFaqSidebar(), 35 | }, 36 | algolia: { 37 | appId: 'V7E3EHUPD6', 38 | apiKey: '44b5077836c1c8fba0f364383dde7fb4', 39 | indexName: 'knex', 40 | initialQuery: '', 41 | } 42 | }, 43 | vite: { 44 | plugins: [ 45 | KnexDialectsPlugins() 46 | ] 47 | } 48 | }) 49 | 50 | function getGuideSidebar() { 51 | return [ 52 | { 53 | text: 'Installation', 54 | link: '/guide/' 55 | }, 56 | { 57 | text: 'Query Builder', 58 | link: '/guide/query-builder' 59 | }, 60 | { 61 | text: 'Transactions', 62 | link: '/guide/transactions' 63 | }, 64 | { 65 | text: 'Schema 
Builder', 66 | link: '/guide/schema-builder' 67 | }, 68 | { 69 | text: 'Raw', 70 | link: '/guide/raw' 71 | }, 72 | { 73 | text: 'Ref', 74 | link: '/guide/ref' 75 | }, 76 | { 77 | text: 'Utility', 78 | link: '/guide/utility' 79 | }, 80 | { 81 | text: 'Interfaces', 82 | link: '/guide/interfaces' 83 | }, 84 | { 85 | text: 'Migrations', 86 | link: '/guide/migrations' 87 | }, 88 | { 89 | text: 'Extending', 90 | link: '/guide/extending' 91 | } 92 | ] 93 | } 94 | function getFaqSidebar() { 95 | return [ 96 | { 97 | text: 'F.A.Q.', 98 | link: '/faq/' 99 | }, 100 | { 101 | text: 'Recipes', 102 | link: '/faq/recipes' 103 | }, 104 | { 105 | text: 'Support', 106 | link: '/faq/support' 107 | }, 108 | ] 109 | } 110 | -------------------------------------------------------------------------------- /.vitepress/knexDialects.ts: -------------------------------------------------------------------------------- 1 | 2 | import Knex from 'knex' 3 | import type { PluginOption } from 'vite' 4 | 5 | const dialects = { 6 | 'better-sqlite3': Knex({ client: 'better-sqlite3' }), 7 | cockroachdb: Knex({ client: 'cockroachdb' }), 8 | mssql: Knex({ client: 'mssql' }), 9 | mysql: Knex({ client: 'mysql' }), 10 | mysql2: Knex({ client: 'mysql2' }), 11 | oracledb: Knex({ client: 'oracledb' }), 12 | pgnative: Knex({ client: 'pgnative' }), 13 | postgres: Knex({ client: 'postgres' }), 14 | redshift: Knex({ client: 'redshift' }), 15 | sqlite3: Knex({ client: 'sqlite3' }), 16 | } 17 | 18 | export default function knexDialects (): PluginOption { 19 | const regex = //ig 20 | 21 | return { 22 | name: 'transform-file', 23 | enforce: 'pre', 24 | 25 | transform(src, id) { 26 | if (id.endsWith('.md')) { 27 | const matches = src.matchAll(regex) 28 | for (const match of matches) { 29 | let markdown = '' 30 | const getCode = Function("knex", `return knex.raw(${match[1]});`); 31 | 32 | for (const dialect in dialects) { 33 | const knex = dialects[dialect] 34 | const { sql } = getCode(knex) 35 | const output = 
sql.toString() 36 | 37 | markdown += `
\n\n\`\`\`sql\n${output}\n\`\`\`\n\n
\n` 38 | } 39 | 40 | src = src.replace(match[0], markdown) 41 | } 42 | } 43 | 44 | return src 45 | } 46 | } 47 | } -------------------------------------------------------------------------------- /.vitepress/theme/AlgoliaSearchBox.vue: -------------------------------------------------------------------------------- 1 | 142 | 143 | 146 | 147 | -------------------------------------------------------------------------------- /.vitepress/theme/Layout.vue: -------------------------------------------------------------------------------- 1 | 16 | 17 | -------------------------------------------------------------------------------- /.vitepress/theme/SqlDialectSelector.vue: -------------------------------------------------------------------------------- 1 | 23 | 24 | 40 | 41 | 52 | -------------------------------------------------------------------------------- /.vitepress/theme/SqlOutput.vue: -------------------------------------------------------------------------------- 1 | 4 | 5 | 10 | -------------------------------------------------------------------------------- /.vitepress/theme/ToggleDark.vue: -------------------------------------------------------------------------------- 1 | 22 | 23 | 31 | 32 | -------------------------------------------------------------------------------- /.vitepress/theme/dialect.js: -------------------------------------------------------------------------------- 1 | import { watch, ref, nextTick, inject } from "vue"; 2 | 3 | export function createDialect(app) { 4 | const dialect = ref('mysql') 5 | 6 | if (!import.meta.url) { 7 | watch(dialect, (value) => { 8 | localStorage.setItem("sql-dialect", value); 9 | }) 10 | nextTick(() => { 11 | const value = localStorage.getItem("sql-dialect"); 12 | if (value) { 13 | dialect.value = value; 14 | } 15 | }) 16 | } 17 | 18 | // provide for later inject 19 | app.provide('dialect', dialect) 20 | 21 | // expose $dialect to templates 22 | Object.defineProperty(app.config.globalProperties, '$dialect', { 23 | 
get() { 24 | return dialect.value 25 | } 26 | }) 27 | 28 | return { 29 | dialect 30 | } 31 | } 32 | 33 | export function useDialect() { 34 | const dialect = inject('dialect') 35 | 36 | return { 37 | dialect 38 | } 39 | } -------------------------------------------------------------------------------- /.vitepress/theme/index.js: -------------------------------------------------------------------------------- 1 | import defaultTheme from "vitepress/theme"; 2 | import Layout from "./Layout.vue"; 3 | import { createDialect } from "./dialect"; 4 | import SqlOutput from "./SqlOutput.vue"; 5 | import "./styles.css"; 6 | 7 | // @todo: hack, vite.config.ts define option seem not to work 8 | globalThis.process = globalThis.process || { 9 | env: {} 10 | } 11 | 12 | export default { 13 | Layout, 14 | NotFound: defaultTheme.NotFound, 15 | 16 | enhanceApp({ app }) { 17 | createDialect(app) 18 | app.component("SqlOutput", SqlOutput); 19 | }, 20 | }; 21 | -------------------------------------------------------------------------------- /.vitepress/theme/styles.css: -------------------------------------------------------------------------------- 1 | @import url('https://fonts.googleapis.com/css2?family=Poppins:wght@400;700&family=Source+Code+Pro&display=swap'); 2 | 3 | :root { 4 | --c-brand: #d26b38; 5 | --c-white-dark: #f5f5f5; 6 | --c-white-darker: #c5bab5; 7 | --c-brand-light: #ff8144; 8 | --font-family-base: 'Poppins', -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Fira Sans', 'Droid Sans', 'Helvetica Neue', sans-serif; 9 | --font-family-mono: 'Source Code Pro', source-code-pro, Menlo, Monaco, Consolas, 'Courier New', monospace; 10 | 11 | --code-font-size: 14px; 12 | --code-line-height: 22px; 13 | /* --c-text-light-1: #c5bab5; */ 14 | --c-text-light-2: #cd7244; 15 | --c-text-light-3: #d26a38; 16 | --c-text-dark-1: #7f7f7f; 17 | --code-bg-color: #362f2d; 18 | --code-inline-bg-color: rgb(68 52 47 / 5%); 19 | } 20 | html { 21 | font-size: 14px; 
22 | } 23 | .dark { 24 | --c-white: #2a2420; 25 | --c-white-dark: #342c27; 26 | --c-white-darker: #201d1c; 27 | --c-black: #ffffff; 28 | --c-text-light-1: #c5bab5; 29 | --c-text-light-2: #dfa486; 30 | --c-text-light-3: #d26a38; 31 | --c-text-dark-1: #9f9f9f; 32 | --code-bg-color: #201d1c; 33 | --code-inline-bg-color: #201d1c; 34 | 35 | color-scheme: dark; 36 | } 37 | 38 | .nav-bar .item { 39 | font-size: 1rem; 40 | } 41 | .nav-bar .nav-bar-title { 42 | font-size: 1.4rem; 43 | } 44 | .sidebar > .sidebar-links > .sidebar-link + .sidebar-link { 45 | padding-top: .3rem; 46 | } 47 | .sidebar > .sidebar-links > .sidebar-link > a.sidebar-link-item { 48 | font-weight: 600; 49 | } 50 | .sidebar > .sidebar-links > .sidebar-link > .sidebar-link-item { 51 | padding: 0.35rem 1.5rem 0.35rem 1.25rem; 52 | } 53 | 54 | a.header-anchor { 55 | float: left; 56 | margin-top: 0.125em; 57 | margin-left: -1.1em; 58 | padding-right: 0.23em; 59 | font-size: 0.85em; 60 | opacity: 0; 61 | } 62 | .custom-block.warning { 63 | border-color: var(--c-brand); 64 | color: #914926; 65 | background-color: rgb(210 106 56 / 15%); 66 | } 67 | .custom-block.warning .custom-block-title { 68 | color: #d66026; 69 | } 70 | .custom-block.info { 71 | background-color: #fbf6f4; 72 | } 73 | .dark .custom-block.info { 74 | background-color: #3a3532; 75 | } 76 | .dark .custom-block.warning { 77 | color: var(--c-text-light-1); 78 | background-color: #462414; 79 | } 80 | .dark .custom-block.warning .custom-block-title { 81 | color: #d66026; 82 | } 83 | 84 | .home-hero { 85 | max-width: 42rem; 86 | margin-left: auto !important; 87 | margin-right: auto !important; 88 | } 89 | .home-hero .image { 90 | animation: spin 5s linear infinite; 91 | animation-play-state: paused; 92 | } 93 | .home-hero:hover .image { 94 | animation-play-state: running; 95 | } 96 | .theme .container { 97 | max-width: 54rem; 98 | } 99 | .container-home { 100 | max-width: 42rem; 101 | margin: 0 auto; 102 | background: var(--c-white-dark); 103 | 
padding: .5rem 2rem; 104 | border-radius: 6px; 105 | } 106 | @keyframes spin { 107 | from { 108 | transform: rotate(0deg); 109 | } 110 | to { 111 | transform: rotate(360deg); 112 | } 113 | } 114 | 115 | [data-dialect]::before { 116 | content: attr(data-dialect); 117 | } 118 | 119 | .language-sql code { 120 | color: #ccc; 121 | } -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # knex.js documentation 2 | 3 | > [!IMPORTANT] 4 | > The documentation has been moved to the [knex](https://github.com/knex/knex) repo under the [`docs`](https://github.com/knex/knex/tree/master/docs) folder 5 | 6 | The vitepress documentation for [http://knexjs.org](http://knexjs.org) 7 | 8 | #### Development: 9 | 10 | ```bash 11 | yarn dev # or npm run dev 12 | 13 | ``` 14 | npm run dev 15 | 16 | ```bash 17 | yarn install # or npm i 18 | yarn dev # or npm run dev 19 | 20 | 21 | ``` 22 | 23 | #### Production: 24 | 25 | ```bash 26 | yarn build # or npm run build 27 | ``` 28 | 29 | #### License: 30 | 31 | MIT 32 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@knex/documentation", 3 | "private": true, 4 | "version": "0.1.0", 5 | "description": "Knex Documentation Builder", 6 | "scripts": { 7 | "dev": "vitepress dev .", 8 | "build": "vitepress build .", 9 | "serve": "vitepress serve ." 
10 | }, 11 | "devDependencies": { 12 | "knex": "^2.4.0", 13 | "typescript": "^4.6.3", 14 | "vitepress": "^0.22.4" 15 | }, 16 | "dependencies": {}, 17 | "author": { 18 | "name": "Tim Griesser", 19 | "web": "https://github.com/tgriesser" 20 | }, 21 | "license": "MIT" 22 | } 23 | -------------------------------------------------------------------------------- /scripts/deploy-doc.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | # abort on errors 4 | set -e 5 | 6 | # build 7 | npm run build 8 | 9 | # navigate into the build output directory 10 | cd .vitepress/dist 11 | 12 | # if you are deploying to a custom domain 13 | # echo 'www.example.com' > CNAME 14 | 15 | git init 16 | git add -A 17 | git commit -m 'deploy' 18 | 19 | git push -f git@github.com:knex/documentation.git master:gh-pages 20 | 21 | cd - 22 | -------------------------------------------------------------------------------- /src/faq/index.md: -------------------------------------------------------------------------------- 1 | 2 | # F.A.Q. 3 | 4 | ## How do I help contribute? 5 | 6 | Glad you asked! Pull requests, or feature requests, though not always implemented, are a great way to help make Knex even better than it is now. If you're looking for something specific to help out with, there's a number of unit tests that aren't implemented yet, the library could never have too many of those. If you want to submit a fix or feature, take a look at the [Contributing](https://github.com/knex/knex/blob/master/CONTRIBUTING.md) readme in the Github and go ahead and open a ticket. 7 | 8 | ## How do I debug? 9 | 10 | Knex is beginning to make use of the [debug](https://github.com/visionmedia/debug) module internally, so you can set the `DEBUG` environment variable to `knex:*` to see all debugging, or select individual namespaces `DEBUG=knex:query,knex:tx` to constrain a bit. 
11 | 12 | If you pass `{debug: true}` as one of the options in your initialize settings, you can see all of the query calls being made. Sometimes you need to dive a bit further into the various calls and see what all is going on behind the scenes. I'd recommend [node-inspector](https://github.com/dannycoates/node-inspector), which allows you to debug code with `debugger` statements like you would in the browser. 13 | 14 | At the start of your application code will catch any errors not otherwise caught in the normal promise chain handlers, which is very helpful in debugging. 15 | 16 | ## How do I run the test suite? 17 | 18 | The test suite looks for an environment variable called `KNEX_TEST` for the path to the database configuration. If you run the following command: 19 | 20 | ```bash 21 | $ export KNEX_TEST='/path/to/your/knex_config.js' 22 | $ npm test 23 | ``` 24 | 25 | replacing with the path to your config file, and the config file is valid, the test suite should run properly. 26 | 27 | ## My tests are failing because slow DB connection and short test timeouts! How to extend test timeouts? 28 | 29 | Sometimes, e.g. when running CI on travis, test suite's default timeout of 5 seconds might be too short. In such cases an alternative test timeout value in milliseconds can be specified using the `KNEX_TEST_TIMEOUT` environment variable. 30 | 31 | ```bash 32 | $ export KNEX_TEST_TIMEOUT=30000 33 | $ npm test 34 | ``` 35 | 36 | ## I found something broken with Amazon Redshift! Can you help? 37 | 38 | Because there is no testing platform available for Amazon Redshift, be aware that it is included as a dialect but is unsupported. With that said, please file an issue if something is found to be broken that is not noted in the documentation, and we will do our best. 
39 | -------------------------------------------------------------------------------- /src/faq/recipes.md: -------------------------------------------------------------------------------- 1 | # Recipes 2 | 3 | ## Using non-standard database that is compatible with PostgreSQL wire protocol (such as CockroachDB) 4 | 5 | Specify PostgreSQL version that database you are using is compatible with protocol-wise using `version` option, e. g.: 6 | 7 | ```js 8 | const knex = require('knex')({ 9 | client: 'pg', 10 | version: '7.2', 11 | connection: { 12 | host: '127.0.0.1', 13 | user: 'your_database_user', 14 | password: 'your_database_password', 15 | database: 'myapp_test' 16 | } 17 | }); 18 | ``` 19 | 20 | Note that value of `version` option should be not the version of the database that you are using, but version of PostgreSQL that most closely matches functionality of the database that you are using. If not provided by database vendor, try using '7.2' as a baseline and keep increasing (within the range of existing PostgreSQL versions) until it starts (or stops) working. 21 | 22 | There are also known incompatibilities with migrations for databases that do not support select for update. See https://github.com/tgriesser/knex/issues/2002 for a workaround. 
23 | 24 | ## Connecting to MSSQL on Azure SQL Database 25 | 26 | `{encrypt: true}` should be included in options branch of connection configuration: 27 | 28 | 29 | ```js 30 | knex({ 31 | client : 'mssql', 32 | connection: { 33 | database: 'mydatabase', 34 | server: 'myserver.database.windows.net', 35 | user: 'myuser', 36 | password: 'mypass', 37 | port: 1433, 38 | connectionTimeout: 30000, 39 | options: { 40 | encrypt: true 41 | } 42 | } 43 | }); 44 | ``` 45 | 46 | [See all of node-mssql's connection options](https://github.com/tediousjs/node-mssql#configuration-1) 47 | 48 | ## Adding a full-text index for PostgreSQL 49 | 50 | ```js 51 | exports.up = (knex) => { 52 | return knex.schema.createTable('foo', (table) => { 53 | table.increments('id'); 54 | table.specificType('fulltext', 'tsvector'); 55 | table.index('fulltext', null, 'gin'); 56 | }) 57 | }; 58 | ``` 59 | 60 | ## DB access using SQLite and SQLCipher 61 | 62 | After you build the SQLCipher source and the npm SQLite3 package, and encrypt your DB (look elsewhere for these things), then anytime you open your database, you need to provide your encryption key using the SQL statement: 63 | 64 | ```sql 65 | PRAGMA KEY = 'secret' 66 | ``` 67 | 68 | This PRAGMA is more completely documented in the SQLCipher site. When working with Knex this is best done when opening the DB, via the following: 69 | 70 | ```js 71 | const myDBConfig = { 72 | client: "sqlite3", 73 | connection: { 74 | filename: "myEncryptedSQLiteDbFile.db" 75 | }, 76 | pool: { 77 | afterCreate: function(conn, done) { 78 | conn.run("PRAGMA KEY = 'secret'"); 79 | done(); 80 | } 81 | } 82 | }; 83 | const knex = require('knex')(myDBConfig); 84 | ``` 85 | 86 | Of course embedding the key value in your code is a poor security practice. Instead, retrieve the 'secret' from elsewhere. 87 | 88 | The key Knex thing to note here is the "afterCreate" function. 
This is documented in the knexjs.org site, but is not in the Table of Contents at this time, so do a browser find when on the site to get to it. It allows auto-updating DB settings when creating any new pool connections (of which there will only ever be one per file for Knex-SQLite). 89 | 90 | If you don't use the "afterCreate" configuration, then you will need to run a knex.raw statement with each and every SQL you execute, something like as follows: 91 | 92 | ```js 93 | return knex.raw("PRAGMA KEY = 'secret'") 94 | .then(() => knex('some_table') 95 | .select() 96 | .on('query-error', function(ex, obj) { 97 | console.log( 98 | "KNEX select from some_table ERR ex:", 99 | ex, 100 | "obj:", 101 | obj 102 | ); 103 | }) 104 | ); 105 | ``` 106 | 107 | ## Maintaining changelog for seeds (version >= 0.16.0-next1) 108 | 109 | In case you would like to use Knex.js changelog functionality to ensure your environments are only seeded once, but don't want to mix seed files with migration files, you can specify multiple directories as a source for your migrations: 110 | 111 | ```ts 112 | await knex.migrate.latest({ 113 | directory: [ 114 | 'src/services/orders/database/migrations', 115 | 'src/services/orders/database/seeds' 116 | ], 117 | sortDirsSeparately: true, 118 | tableName: 'orders_migrations', 119 | schemaName: 'orders', 120 | }) 121 | ``` 122 | 123 | ## Using explicit transaction management together with async code 124 | 125 | ```ts 126 | await knex.transaction(trx => { 127 | async function stuff() { 128 | trx.rollback(new Error('Foo')); 129 | }; 130 | stuff() 131 | .then(() => { 132 | // do something 133 | }); 134 | }); 135 | ``` 136 | 137 | Or alternatively: 138 | 139 | ```ts 140 | try { 141 | await knex.transaction(trx => { 142 | async function stuff() { 143 | trx.rollback(new Error('always explicit rollback this time')); 144 | } 145 | stuff(); 146 | }); 147 | // transaction was committed 148 | } catch (err) { 149 | // transaction was rolled back 150 | } 151 | ``` 
152 | (note that promise for `knex.transaction` resolves after transaction is rolled back or committed) 153 | 154 | ## Using parentheses with AND operator 155 | 156 | In order to generate query along the lines of 157 | 158 | ```sql 159 | SELECT "firstName", "lastName", "status" 160 | FROM "userInfo" 161 | WHERE "status" = 'active' 162 | AND ("firstName" ILIKE '%Ali%' OR "lastName" ILIKE '%Ali%'); 163 | ``` 164 | 165 | you need to use following approach: 166 | 167 | ```js 168 | queryBuilder 169 | .where('status', status.uuid) 170 | .andWhere((qB) => qB 171 | .where('firstName', 'ilike', `%${q}%`) 172 | .orWhere('lastName', 'ilike', `%${q}%`) 173 | ) 174 | ``` 175 | 176 | ## Calling an oracle stored procedure with bindout variables 177 | 178 | How to call and retrieve output from an oracle stored procedure 179 | 180 | ```ts 181 | const oracle = require('oracledb'); 182 | const bindVars = { 183 | input_var1: 6, 184 | input_var2: 7, 185 | output_var: { 186 | dir: oracle.BIND_OUT 187 | }, 188 | output_message: { 189 | dir: oracle.BIND_OUT 190 | } 191 | }; 192 | 193 | const sp = 'BEGIN MULTIPLY_STORED_PROCEDURE(:input_var1, :input_var2, :output_var, :output_message); END;'; 194 | const results = await knex.raw(sp, bindVars); 195 | console.log(results[0]); // 42 196 | console.log(results[1]); // 6 * 7 is the answer to life 197 | ``` 198 | 199 | ## Node instance doesn't stop after using knex 200 | 201 | Make sure to close knex instance after execution to avoid Node process hanging due to open connections: 202 | 203 | ```js 204 | async function migrate() { 205 | try { 206 | await knex.migrate.latest({/**config**/}) 207 | } catch (e) { 208 | process.exit(1) 209 | } finally { 210 | try { 211 | knex.destroy() 212 | } catch (e) { 213 | // ignore 214 | } 215 | } 216 | } 217 | 218 | migrate() 219 | ``` 220 | 221 | ## Manually Closing Streams 222 | 223 | When using Knex's [stream interface](/guide/interfaces#streams), you can typically just `pipe` the return stream to any writable 
stream. However, with [`HTTPIncomingMessage`](http://nodejs.org/api/http.html#http_http_incomingmessage), you'll need to take special care to handle aborted requests. 224 | 225 | An `HTTPIncomingMessage` object is typically called `request`. This is the first argument in `'request'` events emitted on `http.Server` instances. [Express's `req`](http://expressjs.com/4x/api.html#request) implements a compatible interface and Hapi exposes this object on [its request objects](http://hapijs.com/api#request-object) as `request.raw.req`. 226 | 227 | You need to explicitly handle the case where an `HTTPIncomingMessage` is closed prematurely when streaming from a database with Knex. The easiest way to cause this is: 228 | 229 | 1. Visit an endpoint that takes several seconds to fully transmit a response 230 | 2. Close the browser window immediately after beginning the request 231 | 232 | When this happens while you are streaming a query to a client, you need to manually tell Knex that it can release the database connection in use back to the connection pool. 233 | 234 | ```js 235 | server.on('request', function (request, response) { 236 | const stream = knex.select('*').from('items').stream(); 237 | request.on('close', stream.end.bind(stream)); 238 | }); 239 | ``` 240 | 241 | -------------------------------------------------------------------------------- /src/faq/support.md: -------------------------------------------------------------------------------- 1 | # Support 2 | 3 | Have questions about the library? Come join us in the [#bookshelf freenode IRC](http://webchat.freenode.net/?channels=bookshelf) channel for support on knex.js and [bookshelf.js](http://bookshelfjs.org), or post an issue on [Stack Overflow](http://stackoverflow.com/questions/tagged/knex.js) or in the GitHub [issue tracker](https://github.com/knex/knex/issues). 
4 | -------------------------------------------------------------------------------- /src/guide/extending.md: -------------------------------------------------------------------------------- 1 | # Extending 2 | 3 | To extend knex's builders, we have the following methods 4 | 5 | ```js 6 | knex.SchemaBuilder.extend("functionName", function() { 7 | console.log('Custom Schema Builder Function'); 8 | return this; 9 | }); 10 | knex.TableBuilder.extend("functionName", function() { 11 | console.log('Custom Table Builder Function'); 12 | return this; 13 | }); 14 | knex.ViewBuilder.extend("functionName", function() { 15 | console.log('Custom View Builder Function'); 16 | return this; 17 | }); 18 | knex.ColumnBuilder.extend("functionName", function() { 19 | console.log('Custom Column Builder Function'); 20 | return this; 21 | }); 22 | ``` 23 | 24 | 25 | To add typescript support you can add the following (.d.ts): 26 | ```ts 27 | import "knex"; 28 | declare module "knex" { 29 | namespace Knex { 30 | interface SchemaBuilder { 31 | functionName (): Knex.SchemaBuilder; 32 | } 33 | interface TableBuilder { 34 | functionName (): Knex.TableBuilder; 35 | } 36 | interface ViewBuilder { 37 | functionName (): Knex.ViewBuilder; 38 | } 39 | interface ColumnBuilder { 40 | functionName (): Knex.ColumnBuilder; 41 | } 42 | } 43 | } 44 | ``` -------------------------------------------------------------------------------- /src/guide/index.md: -------------------------------------------------------------------------------- 1 | # Installation 2 | 3 | 4 | Knex can be used as an SQL query builder in both Node.JS and the browser, limited to WebSQL's constraints (like the inability to drop tables or read schemas). Composing SQL queries in the browser for execution on the server is highly discouraged, as this can be the cause of serious security vulnerabilities. 
The browser builds outside of WebSQL are primarily for learning purposes - for example, you can pop open the console and build queries on this page using the **knex** object. 5 | 6 | ## Node.js 7 | 8 | The primary target environment for Knex is Node.js, you will need to install the `knex` library, and then install the appropriate database library: [`pg`](https://github.com/brianc/node-postgres) for PostgreSQL, CockroachDB and Amazon Redshift, [`pg-native`](https://github.com/brianc/node-pg-native) for PostgreSQL with native C++ `libpq` bindings (requires PostgresSQL installed to link against), [`mysql`](https://github.com/felixge/node-mysql) for MySQL or MariaDB, [`sqlite3`](https://github.com/mapbox/node-sqlite3) for SQLite3, or [`tedious`](https://github.com/tediousjs/tedious) for MSSQL. 9 | 10 | ```bash 11 | $ npm install knex --save 12 | 13 | # Then add one of the following (adding a --save) flag: 14 | $ npm install pg 15 | $ npm install pg-native 16 | $ npm install sqlite3 17 | $ npm install better-sqlite3 18 | $ npm install mysql 19 | $ npm install mysql2 20 | $ npm install oracledb 21 | $ npm install tedious 22 | ``` 23 | 24 | _If you want to use CockroachDB or Redshift instance, you can use the `pg` driver._ 25 | 26 | _If you want to use a MariaDB instance, you can use the `mysql` driver._ 27 | 28 | ## Browser 29 | 30 | Knex can be built using a JavaScript build tool such as [browserify](http://browserify.org/) or [webpack](https://github.com/webpack/webpack). In fact, this documentation uses a webpack build which [includes knex](https://github.com/knex/documentation/blob/a4de1b2eb50d6699f126be8d134f3d1acc4fc69d/components/Container.jsx#L3). View source on this page to see the browser build in-action (the global `knex` variable). 31 | 32 | ## Configuration Options 33 | 34 | The `knex` module is itself a function which takes a configuration object for Knex, accepting a few parameters. 
The `client` parameter is required and determines which client adapter will be used with the library. 35 | 36 | ```js 37 | const knex = require('knex')({ 38 | client: 'mysql', 39 | connection: { 40 | host : '127.0.0.1', 41 | port : 3306, 42 | user : 'your_database_user', 43 | password : 'your_database_password', 44 | database : 'myapp_test' 45 | } 46 | }); 47 | ``` 48 | 49 | The connection options are passed directly to the appropriate database client to create the connection, and may be either an object, a connection string, or a function returning an object: 50 | 51 | ::: info PostgreSQL 52 | Knex's PostgreSQL client allows you to set the initial search path for each connection automatically using an additional option "searchPath" as shown below. 53 | 54 | ```js 55 | const pg = require('knex')({ 56 | client: 'pg', 57 | connection: process.env.PG_CONNECTION_STRING, 58 | searchPath: ['knex', 'public'], 59 | }); 60 | ``` 61 | ::: 62 | 63 | When using the PostgreSQL driver, another usage pattern for instantiating the Knex configuration object could be to use a `connection: {}` object details to specify various flags such as enabling SSL, a connection string, and individual connection configuration fields all in the same object. Consider the following example: 64 | 65 | ::: info PostgreSQL 66 | If `connectionString` is highest priority to use. If left unspecified then connection details will be determined using the individual connection fields (`host`, `port`, etc), and finally an SSL configuration will be enabled based on a truthy value of `config["DB_SSL"]` which will also accept self-signed certificates. 67 | 68 | ```js 69 | const pg = require('knex')({ 70 | client: 'pg', 71 | connection: { 72 | connectionString: config.DATABASE_URL, 73 | host: config["DB_HOST"], 74 | port: config["DB_PORT"], 75 | user: config["DB_USER"], 76 | database: config["DB_NAME"], 77 | password: config["DB_PASSWORD"], 78 | ssl: config["DB_SSL"] ? 
{ rejectUnauthorized: false } : false, 79 | } 80 | }); 81 | ``` 82 | ::: 83 | 84 | The following are SQLite usage patterns for instantiating the Knex configuration object: 85 | 86 | ::: info SQLite3 or Better-SQLite3 87 | When you use the SQLite3 or Better-SQLite3 adapter, there is a filename required, not a network connection. For example: 88 | 89 | ```js 90 | const knex = require('knex')({ 91 | client: 'sqlite3', // or 'better-sqlite3' 92 | connection: { 93 | filename: "./mydb.sqlite" 94 | } 95 | }); 96 | ``` 97 | 98 | You can also run either SQLite3 or Better-SQLite3 with an in-memory database by providing `:memory:` as the filename. For example: 99 | 100 | ```js 101 | const knex = require('knex')({ 102 | client: 'sqlite3', // or 'better-sqlite3' 103 | connection: { 104 | filename: ":memory:" 105 | } 106 | }); 107 | ``` 108 | ::: 109 | 110 | ::: info SQLite3 111 | When you use the SQLite3 adapter, you can set flags used to open the connection. For example: 112 | 113 | ```js 114 | const knex = require('knex')({ 115 | client: 'sqlite3', 116 | connection: { 117 | filename: "file:memDb1?mode=memory&cache=shared", 118 | flags: ['OPEN_URI', 'OPEN_SHAREDCACHE'] 119 | } 120 | }); 121 | ``` 122 | ::: 123 | 124 | 125 | ::: info Better-SQLite3 126 | With the Better-SQLite3 adapter, you can use `options.nativeBinding` to specify the location of the adapter's compiled C++ addon. This can be useful when your build system does a lot of transformation/relocation of files. 
127 | 128 | Example use: 129 | 130 | ```js 131 | const knex = require('knex')({ 132 | client: 'better-sqlite3', 133 | connection: { 134 | filename: ":memory:", 135 | options: { 136 | nativeBinding: "/path/to/better_sqlite3.node", 137 | }, 138 | }, 139 | }); 140 | ``` 141 | 142 | Additionally, you can open the database in read-only mode using `options.readonly`: 143 | 144 | ```js 145 | const knex = require('knex')({ 146 | client: 'better-sqlite3', 147 | connection: { 148 | filename: "/path/to/db.sqlite3", 149 | options: { 150 | readonly: true, 151 | }, 152 | }, 153 | }); 154 | ``` 155 | 156 | For more information, see the [Better-SQLite3 documentation](https://github.com/WiseLibs/better-sqlite3/blob/master/docs/api.md#new-databasepath-options) on database connection options. 157 | 158 | ::: 159 | 160 | ::: info MSSQL 161 | When you use the MSSQL client, you can define a `mapBinding` function to define your own logic for mapping from knex query parameters to `tedious` types. 162 | Returning undefined from the function will fallback to the default mapping. 163 | ```js 164 | import { TYPES } from 'tedious'; 165 | 166 | const knex = require('knex')({ 167 | client: 'mssql', 168 | connection: { 169 | options: { 170 | mapBinding: value => { 171 | // bind all strings to varchar instead of nvarchar 172 | if (typeof value === 'string') { 173 | return { 174 | type: TYPES.VarChar, 175 | value 176 | }; 177 | } 178 | 179 | // allow devs to pass tedious type at query time 180 | if (value != null && value.type) { 181 | return { 182 | type: value.type, 183 | value: value.value 184 | }; 185 | } 186 | 187 | // undefined is returned; falling back to default mapping function 188 | } 189 | } 190 | } 191 | }); 192 | ``` 193 | ::: 194 | 195 | ::: info 196 | The database version can be added in knex configuration, when you use the PostgreSQL adapter to connect a non-standard database. 
197 | 198 | ```js 199 | const knex = require('knex')({ 200 | client: 'pg', 201 | version: '7.2', 202 | connection: { 203 | host : '127.0.0.1', 204 | port : 5432, 205 | user : 'your_database_user', 206 | password : 'your_database_password', 207 | database : 'myapp_test' 208 | } 209 | }); 210 | ``` 211 | 212 | ```js 213 | const knex = require('knex')({ 214 | client: 'mysql', 215 | version: '5.7', 216 | connection: { 217 | host : '127.0.0.1', 218 | port : 3306, 219 | user : 'your_database_user', 220 | password : 'your_database_password', 221 | database : 'myapp_test' 222 | } 223 | }); 224 | ``` 225 | ::: 226 | 227 | ::: info 228 | When using a custom PostgreSQL client like `knex-aurora-data-api-client`, you can explicitly state if it supports jsonb column types 229 | 230 | ```js 231 | const knex = require('knex')({ 232 | client: require('knex-aurora-data-api-client').postgres, 233 | connection: { resourceArn, secretArn, database: `mydb` }, 234 | version: 'data-api', 235 | jsonbSupport: true 236 | }) 237 | ``` 238 | ::: 239 | 240 | A function can be used to determine the connection configuration dynamically. This function receives no parameters, and returns either a configuration object or a promise for a configuration object. 241 | 242 | ```js 243 | const knex = require('knex')({ 244 | client: 'sqlite3', 245 | connection: () => ({ 246 | filename: process.env.SQLITE_FILENAME 247 | }) 248 | }); 249 | ``` 250 | 251 | By default, the configuration object received via a function is cached and reused for all connections. To change this behavior, an `expirationChecker` function can be returned as part of the configuration object. The `expirationChecker` is consulted before trying to create new connections, and in case it returns `true`, a new configuration object is retrieved. 
For example, to work with an authentication token that has a limited lifespan: 252 | 253 | ```js 254 | const knex = require('knex')({ 255 | client: 'postgres', 256 | connection: async () => { 257 | const { 258 | token, 259 | tokenExpiration 260 | } = await someCallToGetTheToken(); 261 | 262 | return { 263 | host : 'your_host', 264 | port : 5432, 265 | user : 'your_database_user', 266 | password : token, 267 | database : 'myapp_test', 268 | expirationChecker: () => { 269 | return tokenExpiration <= Date.now(); 270 | } 271 | }; 272 | } 273 | }); 274 | ``` 275 | 276 | You can also connect via a unix domain socket, which will ignore host and port. 277 | 278 | ```js 279 | const knex = require('knex')({ 280 | client: 'mysql', 281 | connection: { 282 | socketPath : '/path/to/socket.sock', 283 | user : 'your_database_user', 284 | password : 'your_database_password', 285 | database : 'myapp_test' 286 | } 287 | }); 288 | ``` 289 | 290 | `userParams` is an optional parameter that allows you to pass arbitrary parameters which will be accessible via `knex.userParams` property: 291 | 292 | ```js 293 | const knex = require('knex')({ 294 | client: 'mysql', 295 | connection: { 296 | host : '127.0.0.1', 297 | port : 3306, 298 | user : 'your_database_user', 299 | password : 'your_database_password', 300 | database : 'myapp_test' 301 | }, 302 | userParams: { 303 | userParam1: '451' 304 | } 305 | }); 306 | ``` 307 | 308 | Initializing the library should normally only ever happen once in your application, as it creates a connection pool for the current database, you should use the instance returned from the initialize call throughout your library. 309 | 310 | Specify the client for the particular flavour of SQL you are interested in. 
311 | 312 | ```js 313 | const pg = require('knex')({client: 'pg'}); 314 | 315 | knex('table') 316 | .insert({a: 'b'}) 317 | .returning('*') 318 | .toString(); 319 | // "insert into "table" ("a") values ('b')" 320 | 321 | pg('table') 322 | .insert({a: 'b'}) 323 | .returning('*') 324 | .toString(); 325 | // "insert into "table" ("a") values ('b') returning *" 326 | ``` 327 | 328 | ### withUserParams 329 | 330 | You can call method `withUserParams` on a Knex instance if you want to get a copy (with same connections) with custom parameters (e. g. to execute same migrations with different parameters) 331 | 332 | ```js 333 | const knex = require('knex')({ 334 | // Params 335 | }); 336 | 337 | const knexWithParams = knex.withUserParams({ 338 | customUserParam: 'table1' 339 | }); 340 | const customUserParam = knexWithParams 341 | .userParams 342 | .customUserParam; 343 | ``` 344 | 345 | ### debug 346 | 347 | Passing a `debug: true` flag on your initialization object will turn on [debugging](/guide/query-builder.html#debug) for all queries. 348 | 349 | ### asyncStackTraces 350 | 351 | Passing an `asyncStackTraces: true` flag on your initialization object will turn on stack trace capture for all query builders, raw queries and schema builders. When a DB driver returns an error, this previously captured stack trace is thrown instead of a new one. This helps to mitigate default behaviour of `await` in node.js/V8 which blows the stack away. This has small performance overhead, so it is advised to use only for development. Turned off by default. 352 | 353 | ### pool 354 | 355 | The client created by the configuration initializes a connection pool, using the [tarn.js](https://github.com/vincit/tarn.js) library. This connection pool has a default setting of a `min: 2, max: 10` for the MySQL and PG libraries, and a single connection for sqlite3 (due to issues with utilizing multiple connections on a single file). 
To change the config settings for the pool, pass a `pool` option as one of the keys in the initialize block. 356 | 357 | Note that the default value of `min` is 2 only for historical reasons. It can result in problems with stale connections, despite tarn's default idle connection timeout of 30 seconds, which is only applied when there are more than `min` active connections. It is recommended to set `min: 0` so all idle connections can be terminated. 358 | 359 | Check out the [tarn.js](https://github.com/vincit/tarn.js) library for more information. 360 | 361 | ```js 362 | const knex = require('knex')({ 363 | client: 'mysql', 364 | connection: { 365 | host : '127.0.0.1', 366 | port : 3306, 367 | user : 'your_database_user', 368 | password : 'your_database_password', 369 | database : 'myapp_test' 370 | }, 371 | pool: { min: 0, max: 7 } 372 | }); 373 | ``` 374 | 375 | If you ever need to explicitly tear down the connection pool, you may use `knex.destroy([callback])`. You may use `knex.destroy` by passing a callback, or by chaining as a promise, just not both. To manually initialize a destroyed connection pool, you may use knex.initialize(\[config\]), if no config is passed, it will use the first knex configuration used. 376 | 377 | ### afterCreate 378 | 379 | `afterCreate` callback (rawDriverConnection, done) is called when the pool acquires a new connection from the database server. done(err, connection) callback must be called for `knex` to be able to decide if the connection is ok or if it should be discarded right away from the pool. 
380 | 381 | ```js 382 | const knex = require('knex')({ 383 | client: 'pg', 384 | connection: {/*...*/}, 385 | pool: { 386 | afterCreate: function (conn, done) { 387 | // in this example we use pg driver's connection API 388 | conn.query('SET timezone="UTC";', function (err) { 389 | if (err) { 390 | // first query failed, 391 | // return error and don't try to make next query 392 | done(err, conn); 393 | } else { 394 | // do the second query... 395 | conn.query( 396 | 'SELECT set_limit(0.01);', 397 | function (err) { 398 | // if err is not falsy, 399 | // connection is discarded from pool 400 | // if connection aquire was triggered by a 401 | // query the error is passed to query promise 402 | done(err, conn); 403 | }); 404 | } 405 | }); 406 | } 407 | } 408 | }); 409 | ``` 410 | 411 | ### acquireConnectionTimeout 412 | 413 | `acquireConnectionTimeout` defaults to 60000ms and is used to determine how long knex should wait before throwing a timeout error when acquiring a connection is not possible. The most common cause for this is using up all the pool for transaction connections and then attempting to run queries outside of transactions while the pool is still full. The error thrown will provide information on the query the connection was for to simplify the job of locating the culprit. 414 | 415 | ```js 416 | const knex = require('knex')({ 417 | client: 'pg', 418 | connection: {/*...*/}, 419 | pool: {/*...*/}, 420 | acquireConnectionTimeout: 10000 421 | }); 422 | ``` 423 | 424 | ### fetchAsString 425 | 426 | Utilized by Oracledb. An array of types. The valid types are 'DATE', 'NUMBER' and 'CLOB'. When any column having one of the specified types is queried, the column data is returned as a string instead of the default representation. 
427 | 428 | ```js 429 | const knex = require('knex')({ 430 | client: 'oracledb', 431 | connection: {/*...*/}, 432 | fetchAsString: [ 'number', 'clob' ] 433 | }); 434 | ``` 435 | 436 | ### migrations 437 | 438 | For convenience, any migration configuration may be specified when initializing the library. Read the [Migrations](/guide/migrations.html) section for more information and a full list of configuration options. 439 | 440 | ```js 441 | const knex = require('knex')({ 442 | client: 'mysql', 443 | connection: { 444 | host : '127.0.0.1', 445 | port : 3306, 446 | user : 'your_database_user', 447 | password : 'your_database_password', 448 | database : 'myapp_test' 449 | }, 450 | migrations: { 451 | tableName: 'migrations' 452 | } 453 | }); 454 | ``` 455 | 456 | ### postProcessResponse 457 | 458 | Hook for modifying returned rows, before passing them forward to user. One can do for example snake\_case -> camelCase conversion for returned columns with this hook. The `queryContext` is only available if configured for a query builder instance via [queryContext](/guide/schema-builder.html#querycontext). 459 | 460 | ```js 461 | const knex = require('knex')({ 462 | client: 'mysql', 463 | // overly simplified snake_case -> camelCase converter 464 | postProcessResponse: (result, queryContext) => { 465 | // TODO: add special case for raw results 466 | // (depends on dialect) 467 | if (Array.isArray(result)) { 468 | return result.map(row => convertToCamel(row)); 469 | } else { 470 | return convertToCamel(result); 471 | } 472 | } 473 | }); 474 | ``` 475 | 476 | ### wrapIdentifier 477 | 478 | Knex supports transforming identifier names automatically to quoted versions for each dialect. For example `'Table.columnName as foo'` for PostgreSQL is converted to "Table"."columnName" as "foo". 479 | 480 | With `wrapIdentifier` one may override the way how identifiers are transformed. 
It can be used to override default functionality and for example to help doing `camelCase` -> `snake_case` conversion. 481 | 482 | Conversion function `wrapIdentifier(value, dialectImpl, context): string` gets each part of the identifier as a single `value`, the original conversion function from the dialect implementation and the `queryContext`, which is only available if configured for a query builder instance via [builder.queryContext](/guide/query-builder.html#querycontext), and for schema builder instances via [schema.queryContext](/guide/schema-builder.html#querycontext) or [table.queryContext](/guide/schema-builder.html#querycontext-1). For example, with the query builder, `knex('table').withSchema('foo').select('table.field as otherName').where('id', 1)` will call `wrapIdentifier` converter for following values `'table'`, `'foo'`, `'table'`, `'field'`, `'otherName'` and `'id'`. 483 | 484 | ```js 485 | const knex = require('knex')({ 486 | client: 'mysql', 487 | // overly simplified camelCase -> snake_case converter 488 | wrapIdentifier: ( 489 | value, 490 | origImpl, 491 | queryContext 492 | ) => origImpl(convertToSnakeCase(value)) 493 | }); 494 | ``` 495 | 496 | ### log 497 | 498 | Knex contains some internal log functions for printing warnings, errors, deprecations, and debug information when applicable. These log functions typically log to the console, but can be overwritten using the log option and providing alternative functions. Different log functions can be used for separate knex instances. 499 | 500 | ```js 501 | const knex = require('knex')({ 502 | log: { 503 | warn(message) { 504 | }, 505 | error(message) { 506 | }, 507 | deprecate(message) { 508 | }, 509 | debug(message) { 510 | }, 511 | } 512 | }); 513 | ``` 514 | 515 | ### compileSqlOnError 516 | 517 | Knex builds an error message in case of query error. By default Knex adds compiled SQL (`SELECT * FROM users WHERE password = 'myPassword'`) to the error message. 
This can be changed to parameterized SQL (`SELECT * FROM users WHERE password = ?`) by setting `compileSqlOnError` to `false`. 518 | 519 | ```js 520 | const knex = require('knex')({ 521 | compileSqlOnError: false 522 | }); 523 | ``` 524 | 525 | ## TypeScript 526 | 527 | While knex is written in JavaScript, officially supported TypeScript bindings are available (within the knex npm package). 528 | 529 | However it is to be noted that TypeScript support is currently best-effort. Knex has a very flexible API and not all usage patterns can be type-checked and in most such cases we err on the side of flexibility. In particular, lack of type errors doesn't currently guarantee that the generated queries will be correct and therefore writing tests for them is recommended even if you are using TypeScript. 530 | 531 | Many of the APIs accept `TRecord` and `TResult` type parameters, using which we can specify the type of a row in the database table and the type of the result of the query respectively. This is helpful for auto-completion when using TypeScript-aware editors like VSCode. 532 | 533 | To reduce boilerplate and add inferred types, you can augment `Tables` interface in `'knex/types/tables'` module. 534 | 535 | ```ts 536 | import { Knex } from 'knex'; 537 | 538 | declare module 'knex/types/tables' { 539 | interface User { 540 | id: number; 541 | name: string; 542 | created_at: string; 543 | updated_at: string; 544 | } 545 | 546 | interface Tables { 547 | // This is same as specifying `knex('users')` 548 | users: User; 549 | // For more advanced types, you can specify separate type 550 | // for base model, "insert" type and "update" type. 551 | // But first: notice that if you choose to use this, 552 | // the basic typing showed above can be ignored. 
553 | // So, this is like specifying 554 | // knex 555 | // .insert<{ name: string }>({ name: 'name' }) 556 | // .into<{ name: string, id: number }>('users') 557 | users_composite: Knex.CompositeTableType< 558 | // This interface will be used for return type and 559 | // `where`, `having` etc where full type is required 560 | User, 561 | // Specifying "insert" type will also make sure 562 | // data matches interface in full. Meaning 563 | // if interface is `{ a: string, b: string }`, 564 | // `insert({ a: '' })` will complain about missing fields. 565 | // 566 | // For example, this will require only "name" field when inserting 567 | // and make created_at and updated_at optional. 568 | // And "id" can't be provided at all. 569 | // Defaults to "base" type. 570 | Pick<User, 'name'> & Partial<Pick<User, 'created_at' | 'updated_at'>>, 571 | // This interface is used for "update()" calls. 572 | // As opposed to regular specifying interface only once, 573 | // when specifying separate update interface, user will be 574 | // required to match it exactly. So it's recommended to 575 | // provide partial interfaces for "update". Unless you want to always 576 | // require some field (e.g., `Partial<User> & { updated_at: string }` 577 | // will allow updating any field for User but require updated_at to be 578 | // always provided as well). 579 | // 580 | // For example, this will allow updating all fields except "id". 581 | // "id" will still be usable for `where` clauses so 582 | // knex('users_composite') 583 | // .update({ name: 'name2' }) 584 | // .where('id', 10)` 585 | // will still work. 586 | // Defaults to Partial "insert" type 587 | Partial<Omit<User, 'id'>> 588 | >; 589 | } 590 | } 591 | ``` 592 | 593 | When TypeScript is configured to use a modern module resolution setting (`node16`, `nodenext`, etc.), the compiler expects that the declared module name ends with a `.js` file type. 
You will need to declare your inferred types as follows instead: 594 | 595 | ```ts 596 | // The trailing `.js` is required by the TypeScript compiler in certain configs: 597 | declare module 'knex/types/tables.js' { // <----- Different module path!!! 598 | interface Tables { 599 | // ... 600 | } 601 | } 602 | ``` 603 | -------------------------------------------------------------------------------- /src/guide/interfaces.md: -------------------------------------------------------------------------------- 1 | # Interfaces 2 | 3 | Knex.js provides several options to deal with query output. The following methods are present on the query builder, schema builder, and the raw builder: 4 | 5 | ## Promises 6 | 7 | [Promises](https://github.com/petkaantonov/bluebird#what-are-promises-and-why-should-i-use-them) are the preferred way of dealing with queries in knex, as they allow you to return values from a fulfillment handler, which in turn become the value of the promise. The main benefit of promises are the ability to catch thrown errors without crashing the node app, making your code behave like a **.try / .catch / .finally** in synchronous code. 8 | 9 | ```js 10 | knex.select('name') 11 | .from('users') 12 | .where('id', '>', 20) 13 | .andWhere('id', '<', 200) 14 | .limit(10) 15 | .offset(x) 16 | .then(function(rows) { 17 | return _.pluck(rows, 'name'); 18 | }) 19 | .then(function(names) { 20 | return knex.select('id') 21 | .from('nicknames') 22 | .whereIn('nickname', names); 23 | }) 24 | .then(function(rows) { 25 | console.log(rows); 26 | }) 27 | .catch(function(error) { 28 | console.error(error) 29 | }); 30 | ``` 31 | 32 | ### then 33 | 34 | **.then(onFulfilled, [onRejected])** 35 | 36 | Coerces the current query builder chain into a promise state, accepting the resolve and reject handlers as specified by the Promises/A+ spec. 
As stated in the spec, more than one call to the then method for the current query chain will resolve with the same value, in the order they were called; the query will not be executed multiple times. 37 | 38 | ```js 39 | knex.select('*') 40 | .from('users') 41 | .where({name: 'Tim'}) 42 | .then(function(rows) { 43 | return knex 44 | .insert({user_id: rows[0].id, name: 'Test'}, 'id') 45 | .into('accounts'); 46 | }) 47 | .then(function(id) { 48 | console.log('Inserted Account ' + id); 49 | }) 50 | .catch(function(error) { console.error(error); }); 51 | ``` 52 | 53 | ### catch 54 | 55 | **.catch(onRejected)** 56 | 57 | Coerces the current query builder into a promise state, catching any error thrown by the query, the same as calling .then(null, onRejected). 58 | 59 | ```js 60 | return knex.insert({id: 1, name: 'Test'}, 'id') 61 | .into('accounts') 62 | .catch(function(error) { 63 | console.error(error); 64 | }) 65 | .then(function() { 66 | return knex.select('*') 67 | .from('accounts') 68 | .where('id', 1); 69 | }) 70 | .then(function(rows) { 71 | console.log(rows[0]); 72 | }) 73 | .catch(function(error) { 74 | console.error(error); 75 | }); 76 | ``` 77 | 78 | ## Callbacks 79 | 80 | ### asCallback 81 | 82 | **.asCallback(callback)** 83 | 84 | If you'd prefer a callback interface over promises, the asCallback function accepts a standard node style callback for executing the query chain. Note that as with the then method, subsequent calls to the same query chain will return the same result. 
85 | 86 | ```js 87 | knex.select('name').from('users') 88 | .where('id', '>', 20) 89 | .andWhere('id', '<', 200) 90 | .limit(10) 91 | .offset(x) 92 | .asCallback(function(err, rows) { 93 | if (err) return console.error(err); 94 | knex.select('id') 95 | .from('nicknames') 96 | .whereIn('nickname', _.pluck(rows, 'name')) 97 | .asCallback(function(err, rows) { 98 | if (err) return console.error(err); 99 | console.log(rows); 100 | }); 101 | }); 102 | ``` 103 | 104 | ## Streams 105 | 106 | Streams are a powerful way of piping data through as it comes in, rather than all at once. You can read more about streams [here at substack's stream handbook](https://github.com/substack/stream-handbook). See the following for example uses of stream & pipe. If you wish to use streams with PostgreSQL, you must also install the [pg-query-stream](https://github.com/brianc/node-pg-query-stream) module. If you wish to use streams with the `pgnative` dialect, please be aware that the results will not be streamed as they are received, but rather streamed after the entire result set has returned. On an HTTP server, make sure to [manually close your streams](https://github.com/knex/knex/wiki/Manually-Closing-Streams) if a request is aborted. 107 | 108 | ### stream 109 | 110 | **.stream([options], [callback])** 111 | 112 | If called with a callback, the callback is passed the stream and a promise is returned. Otherwise, the readable stream is returned. 113 | When the stream is consumed as an [iterator](https://nodejs.org/api/stream.html#readablesymbolasynciterator), if the loop terminates with a `break`, `return`, or a `throw`, the stream will be destroyed. In other terms, iterating over a stream will consume the stream fully. 
114 | 115 | ```js 116 | // Retrieve the stream: 117 | const stream = knex.select('*') 118 | .from('users') 119 | .stream(); 120 | stream.pipe(writableStream); 121 | ``` 122 | 123 | ```js 124 | // With options: 125 | const stream = knex.select('*') 126 | .from('users') 127 | .stream({highWaterMark: 5}); 128 | stream.pipe(writableStream); 129 | ``` 130 | 131 | ```js 132 | // Use as an iterator 133 | const stream = knex.select('*') 134 | .from('users') 135 | .stream(); 136 | 137 | for await (const row of stream) { 138 | /* ... */ 139 | } 140 | ``` 141 | 142 | ```js 143 | // Use as a promise: 144 | const stream = knex.select('*') 145 | .from('users') 146 | .where(knex.raw('id = ?', [1])) 147 | .stream(function(stream) { 148 | stream.pipe(writableStream); 149 | }) 150 | .then(function() { /* ... */ }) 151 | .catch(function(e) { console.error(e); }); 152 | ``` 153 | 154 | ### pipe 155 | 156 | **.pipe(writableStream)** 157 | 158 | Pipe a stream for the current query to a writableStream. 159 | 160 | ```js 161 | const stream = knex.select('*') 162 | .from('users') 163 | .pipe(writableStream); 164 | ``` 165 | 166 | ## Events 167 | 168 | ### query 169 | 170 | A query event is fired just before a query takes place, providing data about the query, including the connection's `__knexUid` / `__knexTxId` properties and any other information about the query as described in toSQL. Useful for logging all queries throughout your application. 171 | 172 | ```js 173 | knex.select('*') 174 | .from('users') 175 | .on('query', function(data) { 176 | app.log(data); 177 | }) 178 | .then(function() { 179 | // ... 180 | }); 181 | ``` 182 | 183 | ### query-error 184 | 185 | A query-error event is fired when an error occurs when running a query, providing the error object and data about the query, including the connection's `__knexUid` / `__knexTxId` properties and any other information about the query as described in toSQL. Useful for logging all query errors throughout your application. 
186 | 187 | ```js 188 | knex.select(['NonExistentColumn']) 189 | .from('users') 190 | .on('query-error', function(error, obj) { 191 | app.log(error); 192 | }) 193 | .then(function() { /* ... */ }) 194 | .catch(function(error) { 195 | // Same error object as the query-error event provides. 196 | }); 197 | ``` 198 | 199 | ### query-response 200 | 201 | A query-response event is fired when a successful query has been run, providing the response of the query and data about the query, including the connection's `__knexUid` / `__knexTxId` properties and any other information about the query as described in toSQL, and finally the query builder used for the query. 202 | 203 | ```js 204 | knex.select('*') 205 | .from('users') 206 | .on('query-response', function(response, obj, builder) { 207 | // ... 208 | }) 209 | .then(function(response) { 210 | // Same response as the emitted event 211 | }) 212 | .catch(function(error) { }); 213 | ``` 214 | 215 | ### start 216 | 217 | A `start` event is fired right before a query-builder is compiled. 218 | 219 | ::: info 220 | While this event can be used to alter a builders state prior to compilation it is not to be recommended. Future goals include ways of doing this in a different manner such as hooks. 221 | ::: 222 | 223 | ```js 224 | knex.select('*') 225 | .from('users') 226 | .on('start', function(builder) { 227 | builder 228 | .where('IsPrivate', 0) 229 | }) 230 | .then(function(Rows) { 231 | //Only contains Rows where IsPrivate = 0 232 | }) 233 | .catch(function(error) { }); 234 | ``` 235 | 236 | ## Other 237 | 238 | ### toString 239 | 240 | **.toString()** 241 | 242 | Returns an array of query strings filled out with the correct values based on bindings, etc. Useful for debugging, but should not be used to create queries for running them against DB. 
243 | 244 | ```js 245 | const toStringQuery = knex.select('*') 246 | .from('users') 247 | .where('id', 1) 248 | .toString(); 249 | 250 | // Outputs: console.log(toStringQuery); 251 | // select * from "users" where "id" = 1 252 | ``` 253 | 254 | ### toSQL 255 | 256 | **.toSQL()** 257 | **.toSQL().toNative()** 258 | 259 | Returns an array of query strings filled out with the correct values based on bindings, etc. Useful for debugging and building queries for running them manually with DB driver. `.toSQL().toNative()` outputs object with sql string and bindings in a dialects format in the same way that knex internally sends them to underlying DB driver. 260 | 261 | ```js 262 | knex.select('*') 263 | .from('users') 264 | .where(knex.raw('id = ?', [1])) 265 | .toSQL() 266 | // Outputs: 267 | // { 268 | // bindings: [1], 269 | // method: 'select', 270 | // sql: 'select * from "users" where id = ?', 271 | // options: undefined, 272 | // toNative: function () {} 273 | // } 274 | 275 | knex.select('*') 276 | .from('users') 277 | .where(knex.raw('id = ?', [1])) 278 | .toSQL() 279 | .toNative() 280 | // Outputs for postgresql dialect: 281 | // { 282 | // bindings: [1], 283 | // sql: 'select * from "users" where id = $1', 284 | // } 285 | ``` 286 | -------------------------------------------------------------------------------- /src/guide/migrations.md: -------------------------------------------------------------------------------- 1 | # Migrations 2 | 3 | Migrations allow for you to define sets of schema changes so upgrading a database is a breeze. 4 | 5 | ## Migration CLI 6 | 7 | The migration CLI is bundled with the knex install, and is driven by the [node-liftoff](https://github.com/tkellen/node-liftoff) module. To install globally, run: 8 | 9 | ```bash 10 | $ npm install knex -g 11 | ``` 12 | 13 | The migration CLI accepts the following general command-line options. You can view help text and additional options for each command using `--help`. E.g. 
`knex migrate:latest --help`. 14 | 15 | - `--debug`: Run with debugging 16 | - `--knexfile [path]`: Specify the knexfile path 17 | - `--knexpath [path]`: Specify the path to the knex instance 18 | - `--cwd [path]`: Specify the working directory 19 | - `--client [name]`: Set the DB client 20 | - `--connection [address]`: Set the DB connection 21 | - `--migrations-table-name`: Set the migration table name 22 | - `--migrations-directory`: Set the migrations directory 23 | - `--env`: environment, default: `process.env.NODE_ENV || development` 24 | - `--esm`: [Enables ESM module interoperability](#ecmascript-modules-esm-interoperability) 25 | - `--help`: Display help text for a particular command and exit. 26 | 27 | Migrations use a **knexfile**, which specifies various configuration settings for the module. To create a new knexfile, run the following: 28 | 29 | ```bash 30 | $ knex init 31 | 32 | # or for .ts 33 | 34 | $ knex init -x ts 35 | ``` 36 | 37 | will create a sample knexfile.js - the file which contains our various database configurations. Once you have a knexfile.js, you can use the migration tool to create migration files to the specified directory (default migrations). Creating new migration files can be achieved by running: 38 | 39 | ```bash 40 | $ knex migrate:make migration_name 41 | 42 | # or for .ts 43 | 44 | $ knex migrate:make migration_name -x ts 45 | ``` 46 | 47 | - you can also create your migration using a specific stub file, this serves as a migration template to speed up development for common migration operations 48 | - if the --stub option is not passed, the CLI will use either the knex default stub for the chosen extension, or the config.stub file 49 | 50 | ```bash 51 | $ knex migrate:make --stub <relative/path/from/knexfile> 52 | 53 | # or 54 | 55 | $ knex migrate:make --stub <name> 56 | ``` 57 | 58 | - if a stub path is provided, it must be relative to the knexfile.\[js, ts, etc\] location 59 | - if a `<name>` is used, the stub is selected by its file name. 
The CLI will look for this file in the config.migrations.directory folder. If the config.migrations.directory is not defined, this operation will fail 60 | 61 | Once you have finished writing the migrations, you can update the database matching your `NODE_ENV` by running: 62 | 63 | ```bash 64 | $ knex migrate:latest 65 | ``` 66 | 67 | You can also pass the `--env` flag or set `NODE_ENV` to select an alternative environment: 68 | 69 | ```bash 70 | $ knex migrate:latest --env production 71 | 72 | # or 73 | 74 | $ NODE_ENV=production knex migrate:latest 75 | ``` 76 | 77 | To rollback the last batch of migrations: 78 | 79 | ```bash 80 | $ knex migrate:rollback 81 | ``` 82 | 83 | To rollback all the completed migrations: 84 | 85 | ```bash 86 | $ knex migrate:rollback --all 87 | ``` 88 | 89 | To run the next migration that has not yet been run 90 | 91 | ```bash 92 | $ knex migrate:up 93 | ``` 94 | 95 | To run the specified migration that has not yet been run 96 | 97 | ```bash 98 | $ knex migrate:up 001_migration_name.js 99 | ``` 100 | 101 | To undo the last migration that was run 102 | 103 | ```bash 104 | $ knex migrate:down 105 | ``` 106 | 107 | To undo the specified migration that was run 108 | 109 | ```bash 110 | $ knex migrate:down 001_migration_name.js 111 | ``` 112 | 113 | To list both completed and pending migrations: 114 | 115 | ```bash 116 | $ knex migrate:list 117 | ``` 118 | 119 | ## Seed files 120 | 121 | Seed files allow you to populate your database with test or seed data independent of your migration files. 122 | 123 | ### Seed CLI 124 | 125 | To create a seed file, run: 126 | 127 | ```bash 128 | $ knex seed:make seed_name 129 | ``` 130 | 131 | Seed files are created in the directory specified in your knexfile.js for the current environment. A sample seed configuration looks like: 132 | 133 | ```js 134 | module.exports = { 135 | // ... 136 | development: { 137 | client: {/* ... */}, 138 | connection: {/* ... 
*/}, 139 | seeds: { 140 | directory: './seeds/dev' 141 | } 142 | } 143 | // ... 144 | } 145 | ``` 146 | 147 | If no `seeds.directory` is defined, files are created in `./seeds`. Note that the seed directory needs to be a relative path. Absolute paths are not supported (nor is it good practice). 148 | 149 | To run seed files, execute: 150 | 151 | ```bash 152 | $ knex seed:run 153 | ``` 154 | 155 | Seed files are executed in alphabetical order. Unlike migrations, _every_ seed file will be executed when you run the command. You should design your seed files to reset tables as needed before inserting data. 156 | 157 | To run specific seed files, execute: 158 | 159 | ```bash 160 | $ knex seed:run --specific=seed-filename.js --specific=another-seed-filename.js 161 | ``` 162 | 163 | ## knexfile.js 164 | 165 | A knexfile.js generally contains all of the configuration for your database. It can optionally provide different configuration for different environments. You may pass a `--knexfile` option to any of the command line statements to specify an alternate path to your knexfile. 166 | 167 | ### Basic configuration 168 | 169 | ```js 170 | module.exports = { 171 | client: 'pg', 172 | connection: process.env.DATABASE_URL || { 173 | user: 'me', 174 | database: 'my_app' 175 | } 176 | }; 177 | ``` 178 | 179 | You can also use an async function to get connection details for your configuration. This is useful when you need to fetch credentials from a secure location like vault. 
180 | 181 | ```js 182 | const getPassword = async () => { 183 | // TODO: implement me 184 | return 'my_pass' 185 | } 186 | 187 | module.exports = { 188 | client: 'pg', 189 | connection: async () => { 190 | const password = await getPassword() 191 | return { user: 'me', password } 192 | }, 193 | migrations: {} 194 | }; 195 | ``` 196 | 197 | ### Environment configuration 198 | 199 | ```js 200 | module.exports = { 201 | development: { 202 | client: 'pg', 203 | connection: { user: 'me', database: 'my_app' } 204 | }, 205 | production: { 206 | client: 'pg', 207 | connection: process.env.DATABASE_URL 208 | } 209 | }; 210 | ``` 211 | 212 | ### Custom migration 213 | 214 | You may provide a custom migration stub to be used in place of the default option. 215 | 216 | ```js 217 | module.exports = { 218 | client: 'pg', 219 | migrations: { 220 | stub: 'migration.stub' 221 | } 222 | }; 223 | ``` 224 | 225 | ### Custom migration name 226 | 227 | You may provide a custom migration name to be used in place of the default option. 228 | 229 | ```js 230 | module.exports = { 231 | client: 'pg', 232 | migrations: { 233 | getNewMigrationName: (name) => { 234 | return `${+new Date()}-${name}.js`; 235 | } 236 | } 237 | }; 238 | ``` 239 | 240 | ### Generated migration extension 241 | 242 | You can control extension of generated migrations. 243 | 244 | ```js 245 | module.exports = { 246 | client: 'pg', 247 | migrations: { 248 | extension: 'ts' 249 | } 250 | }; 251 | ``` 252 | 253 | ### Knexfile in other languages 254 | 255 | Knex uses [Liftoff](https://github.com/js-cli/js-liftoff) to support knexfile written in other compile-to-js languages. 256 | 257 | Depending on the language, this may require you to install additional dependencies. The complete list of dependencies for each supported language can be found [here](https://github.com/gulpjs/interpret#extensions). 
258 | 259 | Most common cases are typescript (for which [typescript](https://www.npmjs.com/package/typescript) and [ts-node](https://www.npmjs.com/package/ts-node) packages are recommended), and coffeescript (for which [coffeescript](https://www.npmjs.com/package/coffeescript) dependency is required). 260 | 261 | If you don't specify the extension explicitly, the extension of generated migrations/seed files will be inferred from the knexfile extension 262 | 263 | ## Migration API 264 | 265 | `knex.migrate` is the class utilized by the knex migrations cli. 266 | 267 | Each method takes an optional `config` object, which may specify the following properties: 268 | 269 | - `directory`: a relative path to the directory containing the migration files. Can be an array of paths (default `./migrations`) 270 | - `extension`: the file extension used for the generated migration files (default `js`) 271 | - `tableName`: the table name used for storing the migration state (default `knex_migrations`) 272 | - `schemaName`: the schema name used for storing the table with migration state (optional parameter, only works on DBs that support multiple schemas in a single DB, such as PostgreSQL) 273 | - `disableTransactions`: don't run migrations inside transactions (default `false`) 274 | - `disableMigrationsListValidation`: do not validate that all the already executed migrations are still present in migration directories (default `false`) 275 | - `sortDirsSeparately`: if true and multiple directories are specified, all migrations from a single directory will be executed before executing migrations in the next folder (default `false`) 276 | - `loadExtensions`: array of file extensions which knex will treat as migrations. For example, if you have typescript transpiled into javascript in the same folder, you want to execute only javascript migrations. In this case, set `loadExtensions` to `['.js']` (Notice the dot!) 
(default `['.co', '.coffee', '.eg', '.iced', '.js', '.litcoffee', '.ls', '.ts']`) 277 | - `migrationSource`: specify a custom migration source, see [Custom Migration Source](#custom-migration-sources) for more info (default filesystem) 278 | 279 | ### Transactions in migrations 280 | 281 | By default, each migration is run inside a transaction. Whenever needed, one can disable transactions for all migrations via the common migration config option `config.disableTransactions` or per-migration, via exposing a boolean property `config.transaction` from a migration file: 282 | 283 | ```js 284 | exports.up = function(knex) { 285 | return knex.schema 286 | .createTable('users', function (table) { 287 | table.increments('id'); 288 | table.string('first_name', 255).notNullable(); 289 | table.string('last_name', 255).notNullable(); 290 | }) 291 | .createTable('products', function (table) { 292 | table.increments('id'); 293 | table.decimal('price').notNullable(); 294 | table.string('name', 1000).notNullable(); 295 | }); 296 | }; 297 | 298 | exports.down = function(knex) { 299 | return knex.schema 300 | .dropTable("products") 301 | .dropTable("users"); 302 | }; 303 | 304 | exports.config = { transaction: false }; 305 | ``` 306 | 307 | The same config property can be used for enabling transaction per-migration in case the common configuration has `disableTransactions: true`. 308 | 309 | ### make 310 | 311 | **knex.migrate.make(name, [config])** 312 | 313 | Creates a new migration, with the name of the migration being added. 314 | 315 | ### latest 316 | 317 | **knex.migrate.latest([config])** 318 | 319 | Runs all migrations that have not yet been run. 
320 | 321 | If you need to run something only after all migrations have finished their execution, you can do something like this: 322 | 323 | ```js 324 | knex.migrate.latest() 325 | .then(function() { 326 | return knex.seed.run(); 327 | }) 328 | .then(function() { 329 | // migrations are finished 330 | }); 331 | ``` 332 | 333 | ### rollback 334 | 335 | **knex.migrate.rollback([config], [all])** 336 | 337 | Rolls back the latest migration group. If the `all` parameter is truthy, all applied migrations will be rolled back instead of just the last batch. The default value for this parameter is `false`. 338 | 339 | ### up 340 | 341 | **knex.migrate.up([config])** 342 | 343 | Runs the specified (by `config.name` parameter) or the next chronological migration that has not yet been run. 344 | 345 | ### down 346 | 347 | **knex.migrate.down([config])** 348 | 349 | Will undo the specified (by `config.name` parameter) or the last migration that was run. 350 | 351 | ### currentVersion 352 | 353 | **knex.migrate.currentVersion([config])** 354 | 355 | Retrieves and returns the current migration version, as a promise. If there aren't any migrations run yet, returns "none" as the value for the currentVersion. 356 | 357 | ### list 358 | 359 | **knex.migrate.list([config])** 360 | 361 | Will return a list of completed and pending migrations 362 | 363 | ### unlock 364 | 365 | **knex.migrate.forceFreeMigrationsLock([config])** 366 | 367 | Forcibly unlocks the migrations lock table, and ensures that there is only one row in it. 368 | 369 | ## Notes about locks 370 | 371 | A lock system is there to prevent multiple processes from running the same migration batch at the same time. When a batch of migrations is about to be run, the migration system first tries to get a lock using a `SELECT ... FOR UPDATE` statement (preventing race conditions from happening). If it can get a lock, the migration batch will run. If it can't, it will wait until the lock is released.
372 | 373 | Please note that if your process unfortunately crashes, the lock will have to be _manually_ removed with `knex migrate:unlock` in order to let migrations run again. 374 | 375 | The locks are saved in a table called "`tableName`\_lock"; it has a column called `is_locked` that `knex migrate:unlock` sets to `0` in order to release the lock. The `index` column in the lock table exists for compatibility with some database clusters that require a primary key, but is otherwise unused. There must be only one row in this table, or an error will be thrown when running migrations: "Migration table is already locked". Run `knex migrate:unlock` to ensure that there is only one row in the table. 376 | 377 | ## Custom migration sources 378 | 379 | Knex supports custom migration sources, allowing you full control of where your migrations come from. This can be useful for custom folder structures, when bundling with webpack/browserify and other scenarios. 380 | 381 | ```js 382 | // Create a custom migration source class 383 | class MyMigrationSource { 384 | // Must return a Promise containing a list of migrations. 385 | // Migrations can be whatever you want, 386 | // they will be passed as arguments to getMigrationName 387 | // and getMigration 388 | getMigrations() { 389 | // In this example we are just returning migration names 390 | return Promise.resolve(['migration1']) 391 | } 392 | 393 | getMigrationName(migration) { 394 | return migration; 395 | } 396 | 397 | getMigration(migration) { 398 | switch(migration) { 399 | case 'migration1': 400 | return { 401 | up(knex) { /* ... */ }, 402 | down(knex) { /* ... */ }, 403 | } 404 | } 405 | } 406 | } 407 | 408 | // pass an instance of your migration source as knex config 409 | knex.migrate.latest({ 410 | migrationSource: new MyMigrationSource() 411 | }) 412 | ``` 413 | 414 | ### Webpack migration source example 415 | 416 | An example of how to create a migration source where migrations are included in a webpack bundle. 
417 | 418 | ```js 419 | const path = require('path') 420 | 421 | class WebpackMigrationSource { 422 | constructor(migrationContext) { 423 | this.migrationContext = migrationContext 424 | } 425 | 426 | getMigrations() { 427 | return Promise.resolve( 428 | this.migrationContext.keys().sort() 429 | ) 430 | } 431 | 432 | getMigrationName(migration) { 433 | return path.parse(migration).base 434 | } 435 | 436 | getMigration(migration) { 437 | return this.migrationContext(migration) 438 | } 439 | } 440 | 441 | // pass an instance of your migration source as knex config 442 | knex.migrate.latest({ 443 | migrationSource: new WebpackMigrationSource( 444 | require.context('./migrations', false, /.js$/) 445 | ) 446 | }) 447 | 448 | // with webpack >=5, require.context will add 449 | // both the relative and absolute paths to the context 450 | // to avoid duplicate migration errors, you'll need 451 | // to filter out one or the other this example filters 452 | // out absolute paths, leaving only the relative 453 | // ones(./migrations/*.js): 454 | knex.migrate.latest({ 455 | migrationSource: new WebpackMigrationSource( 456 | require.context('./migrations', false, /^\.\/.*\.js$/) 457 | ) 458 | }) 459 | ``` 460 | 461 | ## ECMAScript modules (ESM) Interoperability 462 | 463 | ECMAScript Module support for knex CLI's configuration, migration and seeds 464 | enabled by the `--esm` flag, ECMAScript Interoperability is provided by the [_'esm'_](https://github.com/standard-things/esm) module. 465 | You can find [here](https://github.com/standard-things/esm) more information about 'esm' superpowers. 466 | 467 | Node 'mjs' files are handled by NodeJS own import mechanics 468 | and do not require the use of the '--esm' flag. 469 | But you might need it anyway for Node v10 under certain scenarios. 
470 | You can find details about NodeJS ECMAScript modules [here](https://nodejs.org/api/esm.html) 471 | 472 | While it is possible to mix and match different module formats (extensions) 473 | between your knexfile, seeds and migrations, 474 | some format combinations will require specific NodeJS versions, 475 | _Notably mjs/cjs files will follow NodeJS import and require restrictions._ 476 | You can see [here](https://github.com/knex/knex/blob/master/test/cli/esm-interop.spec.js) many possible scenarios, 477 | and [here](https://github.com/knex/knex/tree/master/test/jake-util/knexfile-imports) some sample configurations 478 | 479 | Node v10.\* require the use of the '--experimental-module' flag in order to use the 'mjs' or 'cjs' extension. 480 | 481 | ```bash 482 | # launching knex on Node v10 to use mjs/cjs modules 483 | node --experimental-modules ./node_modules/.bin/knex $@ 484 | ``` 485 | 486 | When using migration and seed files with '.cjs' or '.mjs' extensions, you will need to specify that explicitly: 487 | 488 | ```ts 489 | /** 490 | * knexfile.mjs 491 | */ 492 | export default { 493 | migrations: { 494 | // ... client, connection,etc .... 495 | directory: './migrations', 496 | loadExtensions: ['.mjs'] // 497 | } 498 | } 499 | ``` 500 | 501 | When using '.mjs' extensions for your knexfile and '.js' for the seeds/migrations, you will need to specify that explicitly. 502 | 503 | ```ts 504 | /** 505 | * knexfile.mjs 506 | */ 507 | export default { 508 | migrations: { 509 | // ... client, connection,etc .... 510 | directory: './migrations', 511 | loadExtensions: ['.js'] // knex will search for 'mjs' file by default 512 | } 513 | } 514 | ``` 515 | 516 | For the knexfile you can use a default export, 517 | it will take precedence over named export. 
518 | 519 | ```ts 520 | /** 521 | * filename: knexfile.js 522 | * For the knexfile you can use a default export 523 | **/ 524 | export default { 525 | client: 'sqlite3', 526 | connection: { 527 | filename: '../test.sqlite3', 528 | }, 529 | migrations: { 530 | directory: './migrations', 531 | }, 532 | seeds: { 533 | directory: './seeds', 534 | }, 535 | } 536 | 537 | /** 538 | * filename: knexfile.js 539 | * Let knex find the configuration by providing named exports, 540 | * but if exported a default, it will take precedence, and it will be used instead 541 | **/ 542 | const config = { 543 | client: 'sqlite3', 544 | connection: { 545 | filename: '../test.sqlite3', 546 | }, 547 | migrations: { 548 | directory: './migrations', 549 | }, 550 | seeds: { 551 | directory: './seeds', 552 | }, 553 | }; 554 | /** this will be used, it has precedence over named export */ 555 | export default config; 556 | /** Named exports, will be used if you didn't provide a default export */ 557 | export const { client, connection, migrations, seeds } = config; 558 | ``` 559 | 560 | Seed and migration files need to follow Knex conventions 561 | 562 | ```ts 563 | // file: seed.js 564 | /** 565 | * Same as with the CommonJS modules 566 | * You will need to export a "seed" named function. 567 | * */ 568 | export function seed(knex) { 569 | // ... seed logic here 570 | } 571 | 572 | // file: migration.js 573 | /** 574 | * Same as the CommonJS version, the miration file should export 575 | * "up" and "down" named functions 576 | */ 577 | export function up(knex) { 578 | // ... migration logic here 579 | } 580 | export function down(knex) { 581 | // ... migration logic here 582 | } 583 | ``` 584 | 585 | ## Seed API 586 | 587 | `knex.seed` is the class utilized by the knex seed CLI. 588 | 589 | Each method takes an optional `config` object, which may specify the following properties: 590 | 591 | - `directory`: a relative path to the directory containing the seed files. 
Can be an array of paths (default `./seeds`) 592 | - `loadExtensions`: array of file extensions which knex will treat as seeds. For example, if you have typescript transpiled into javascript in the same folder, you want to execute only javascript seeds. In this case, set `loadExtensions` to `['.js']` (Notice the dot!) (default `['.co', '.coffee', '.eg', '.iced', '.js', '.litcoffee', '.ls', '.ts']`) 593 | - `recursive`: if true, will find seed files recursively in the directory / directories specified 594 | - `specific`: a specific seed file or an array of seed files to run from the seeds directory, if its value is `undefined` it will run all the seeds (default `undefined`). If an array is specified, seed files will be run in the same order as the array 595 | - `sortDirsSeparately`: if true and multiple directories are specified, all seeds from a single directory will be executed before executing seeds in the next folder (default `false`) 596 | - `seedSource`: specify a custom seed source, see [Custom Seed Source](#custom-seed-sources) for more info (default filesystem) 597 | - `extension`: extension to be used for newly generated seeds (default `js`) 598 | - `timestampFilenamePrefix`: whether timestamp should be added as a prefix for newly generated seeds (default `false`) 599 | 600 | 601 | ### make 602 | 603 | **knex.seed.make(name, [config])** 604 | 605 | Creates a new seed file, with the name of the seed file being added. If the seed directory config is an array of paths, the seed file will be generated in the latest specified. 606 | 607 | ### run 608 | 609 | **knex.seed.run([config])** 610 | 611 | Runs all seed files for the current environment. 612 | 613 | ## Custom seed sources 614 | 615 | Knex supports custom seed sources, allowing you full control of where your seeds come from. This can be useful for custom folder structures, when bundling with webpack/browserify and other scenarios. 
616 | 617 | ```js 618 | // Create a custom seed source class 619 | class MySeedSource { 620 | // Must return a Promise containing a list of seeds. 621 | // Seeds can be whatever you want, they will be passed as 622 | // arguments to getSeed 623 | getSeeds() { 624 | // In this example we are just returning seed names 625 | return Promise.resolve(['seed1']) 626 | } 627 | 628 | getSeed(seed) { 629 | switch(seed) { 630 | case 'seed1': 631 | return (knex) => { /* ... */ } 632 | } 633 | } 634 | } 635 | 636 | // pass an instance of your seed source as knex config 637 | knex.seed.run({ seedSource: new MySeedSource() }) 638 | ``` 639 | -------------------------------------------------------------------------------- /src/guide/raw.md: -------------------------------------------------------------------------------- 1 | # Raw 2 | 3 | 4 | Sometimes you may need to use a raw expression in a query. Raw query object may be injected pretty much anywhere you want, and using proper bindings can ensure your values are escaped properly, preventing SQL-injection attacks. 5 | 6 | ## Raw Parameter Binding 7 | 8 | One can parameterize sql given to `knex.raw(sql, bindings)`. Parameters can be positional named. One can also choose if parameter should be treated as value or as sql identifier e.g. in case of `'TableName.ColumnName'` reference. 9 | 10 | ```js 11 | knex('users') 12 | .select(knex.raw('count(*) as user_count, status')) 13 | .where(knex.raw(1)) 14 | .orWhere(knex.raw('status <> ?', [1])) 15 | .groupBy('status') 16 | ``` 17 | 18 | Positional bindings `?` are interpreted as values and `??` are interpreted as identifiers. 19 | 20 | ```js 21 | knex('users').where(knex.raw('?? = ?', ['user.name', 1])) 22 | ``` 23 | 24 | Named bindings such as `:name` are interpreted as values and `:name:` interpreted as identifiers. Named bindings are processed so long as the value is anything other than `undefined`. 
25 | 26 | ```js 27 | const raw = ':name: = :thisGuy or :name: = :otherGuy or :name: = :undefinedBinding' 28 | 29 | knex('users') 30 | .where( 31 | knex.raw(raw, { 32 | name: 'users.name', 33 | thisGuy: 'Bob', 34 | otherGuy: 'Jay', 35 | undefinedBinding: undefined 36 | })) 37 | ``` 38 | 39 | For simpler queries where one only has a single binding, `.raw` can accept said binding as its second parameter. 40 | 41 | ```js 42 | knex('users') 43 | .where( 44 | knex.raw('LOWER("login") = ?', 'knex') 45 | ) 46 | .orWhere( 47 | knex.raw('accesslevel = ?', 1) 48 | ) 49 | .orWhere( 50 | knex.raw('updtime = ?', '01-01-2016') 51 | ) 52 | ``` 53 | 54 | Since there is no unified syntax for array bindings, instead you need to treat them as multiple values by adding `?` directly in your query. 55 | 56 | ```js 57 | const myArray = [1,2,3] 58 | knex.raw('select * from users where id in (' + myArray.map(_ => '?').join(',') + ')', [...myArray]); 59 | 60 | ``` 61 | query will become: 62 | 63 | ```sql 64 | select * from users where id in (?, ?, ?) /* with bindings [1,2,3] */ 65 | ``` 66 | 67 | To prevent replacement of `?` one can use the escape sequence `\\?`. 68 | 69 | ```js 70 | knex.select('*') 71 | .from('users') 72 | .where('id', '=', 1) 73 | .whereRaw('?? \\? ?', ['jsonColumn', 'jsonKey']) 74 | ``` 75 | 76 | To prevent replacement of named bindings one can use the escape sequence `\\:`. 77 | 78 | ```js 79 | knex.select('*') 80 | .from('users') 81 | .whereRaw(":property: = '\\:value' OR \\:property: = :value", { 82 | property: 'name', 83 | value: 'Bob' 84 | }) 85 | ``` 86 | 87 | ## Raw Expressions 88 | 89 | Raw expressions are created by using `knex.raw(sql, [bindings])` and passing this as a value for any value in the query chain. 
90 | 91 | ```js 92 | knex('users').select(knex.raw('count(*) as user_count, status')) 93 | .where(knex.raw(1)) 94 | .orWhere(knex.raw('status <> ?', [1])) 95 | .groupBy('status') 96 | ``` 97 | 98 | ## Raw Queries 99 | 100 | The `knex.raw` may also be used to build a full query and execute it, as a standard query builder query would be executed. The benefit of this is that it uses the connection pool and provides a standard interface for the different client libraries. 101 | 102 | ```js 103 | knex.raw('select * from users where id = ?', [1]) 104 | .then(function(resp) { /*...*/ }); 105 | ``` 106 | 107 | Note that the response will be whatever the underlying sql library would typically return on a normal query, so you may need to look at the documentation for the base library the queries are executing against to determine how to handle the response. 108 | 109 | ## Wrapped Queries 110 | 111 | The raw query builder also comes with a `wrap` method, which allows wrapping the query in a value: 112 | 113 | ```js 114 | const subcolumn = knex.raw( 115 | 'select avg(salary) from employee where dept_no = e.dept_no' 116 | ) 117 | .wrap('(', ') avg_sal_dept'); 118 | 119 | knex.select('e.lastname', 'e.salary', subcolumn) 120 | .from('employee as e') 121 | .whereRaw('dept_no = e.dept_no') 122 | ``` 123 | 124 | Note that the example above can be achieved more easily using the [as](/guide/query-builder#as) method. 125 | 126 | ```js 127 | const subcolumn = knex.avg('salary') 128 | .from('employee') 129 | .whereRaw('dept_no = e.dept_no') 130 | .as('avg_sal_dept'); 131 | 132 | knex.select('e.lastname', 'e.salary', subcolumn) 133 | .from('employee as e') 134 | .whereRaw('dept_no = e.dept_no') 135 | ``` 136 | -------------------------------------------------------------------------------- /src/guide/ref.md: -------------------------------------------------------------------------------- 1 | # Ref 2 | 3 | 4 | Can be used to create references in a query, such as column- or tablenames.
This is a good and shorter alternative to using `knex.raw('??', 'tableName.columName')` which essentially does the same thing. 5 | 6 | ## Usage 7 | 8 | `knex.ref` can be used essentially anywhere in a build-chain. Here is an example: 9 | 10 | ```js 11 | knex(knex.ref('Users').withSchema('TenantId')) 12 | .where(knex.ref('Id'), 1) 13 | .orWhere(knex.ref('Name'), 'Admin') 14 | .select(['Id', knex.ref('Name').as('Username')]) 15 | ``` 16 | 17 | 21 | 22 | ### withSchema 23 | 24 | The Ref function supports schema using `.withSchema(string)`: 25 | 26 | ```js 27 | knex(knex.ref('users').withSchema('TenantId')).select() 28 | ``` 29 | 30 | ### alias 31 | 32 | Alias is supported using `.alias(string)` 33 | 34 | ```js 35 | knex('users') 36 | .select(knex.ref('Id').as('UserId')) 37 | ``` 38 | 39 | 41 | -------------------------------------------------------------------------------- /src/guide/schema-builder.md: -------------------------------------------------------------------------------- 1 | 2 | # Schema Builder 3 | 4 | The `knex.schema` is a **getter function**, which returns a stateful object containing the query. Therefore be sure to obtain a new instance of the `knex.schema` for every query. These methods return [promises](/guide/interfaces.html#promises). 5 | 6 | ## Essentials 7 | 8 | ### withSchema 9 | 10 | **knex.schema.withSchema([schemaName])** 11 | 12 | Specifies the schema to be used when using the schema-building commands. 13 | 14 | ```js 15 | knex.schema.withSchema('public').createTable('users', function (table) { 16 | table.increments(); 17 | }) 18 | ``` 19 | 20 | ### createTable 21 | 22 | **knex.schema.createTable(tableName, callback)** 23 | 24 | Creates a new table on the database, with a callback function to modify the table's structure, using the schema-building commands. 
25 | 26 | ```js 27 | knex.schema.createTable('users', function (table) { 28 | table.increments(); 29 | table.string('name'); 30 | table.timestamps(); 31 | }) 32 | ``` 33 | 34 | ### createTableLike 35 | 36 | **knex.schema.createTableLike(tableName, tableNameToCopy, [callback])** 37 | 38 | Creates a new table on the database based on another table. Copies only the structure: columns, keys and indexes (except on SQL Server, which only copies columns) and not the data. Callback function can be specified to add columns in the duplicated table. 39 | 40 | ```js 41 | knex.schema.createTableLike('new_users', 'users') 42 | 43 | // "new_users" table contains columns 44 | // of users and two new columns 'age' and 'last_name'. 45 | knex.schema.createTableLike('new_users', 'users', (table) => { 46 | table.integer('age'); 47 | table.string('last_name'); 48 | }) 49 | ``` 50 | 51 | ### dropTable 52 | 53 | **knex.schema.dropTable(tableName)** 54 | 55 | Drops a table, specified by tableName. 56 | 57 | ```js 58 | knex.schema.dropTable('users') 59 | ``` 60 | 61 | ### dropTableIfExists 62 | 63 | **knex.schema.dropTableIfExists(tableName)** 64 | 65 | Drops a table conditionally if the table exists, specified by tableName. 66 | 67 | ```js 68 | knex.schema.dropTableIfExists('users') 69 | ``` 70 | 71 | ### renameTable 72 | 73 | **knex.schema.renameTable(from, to)** 74 | 75 | Renames a table from a current tableName to another. 76 | 77 | ```js 78 | knex.schema.renameTable('old_users', 'users') 79 | ``` 80 | 81 | ### hasTable 82 | 83 | **knex.schema.hasTable(tableName)** 84 | 85 | Checks for a table's existence by tableName, resolving with a boolean to signal if the table exists.
86 | 87 | ```js 88 | knex.schema.hasTable('users').then(function(exists) { 89 | if (!exists) { 90 | return knex.schema.createTable('users', function(t) { 91 | t.increments('id').primary(); 92 | t.string('first_name', 100); 93 | t.string('last_name', 100); 94 | t.text('bio'); 95 | }); 96 | } 97 | }); 98 | ``` 99 | 100 | ### hasColumn 101 | 102 | **knex.schema.hasColumn(tableName, columnName)** 103 | 104 | Checks if a column exists in the current table, resolves the promise with a boolean, true if the column exists, false otherwise. 105 | 106 | ### table 107 | 108 | **knex.schema.table(tableName, callback)** 109 | 110 | Chooses a database table, and then modifies the table, using the Schema Building functions inside of the callback. 111 | 112 | ```js 113 | knex.schema.table('users', function (table) { 114 | table.dropColumn('name'); 115 | table.string('first_name'); 116 | table.string('last_name'); 117 | }) 118 | ``` 119 | 120 | ### alterTable 121 | 122 | **knex.schema.alterTable(tableName, callback)** 123 | 124 | Chooses a database table, and then modifies the table, using the Schema Building functions inside of the callback. 125 | 126 | ```js 127 | knex.schema.alterTable('users', function (table) { 128 | table.dropColumn('name'); 129 | table.string('first_name'); 130 | table.string('last_name'); 131 | }) 132 | ``` 133 | 134 | ### createView 135 | 136 | **knex.schema.createView(tableName, callback)** 137 | 138 | Creates a new view on the database, with a callback function to modify the view's structure, using the schema-building commands. 
139 | 140 | ```js 141 | knex.schema.createView('users_view', function (view) { 142 | view.columns(['first_name']); 143 | view.as(knex('users').select('first_name').where('age','>', '18')); 144 | }) 145 | ``` 146 | 147 | ### createViewOrReplace 148 | 149 | **knex.schema.createViewOrReplace(tableName, callback)** 150 | 151 | Creates a new view or replace it on the database, with a callback function to modify the view's structure, using the schema-building commands. You need to specify at least the same columns in same order (you can add extra columns). In SQLite, this function generate drop/create view queries (view columns can be different). 152 | 153 | ```js 154 | knex.schema.createViewOrReplace('users_view', function (view) { 155 | view.columns(['first_name']); 156 | view.as(knex('users').select('first_name').where('age','>', '18')); 157 | }) 158 | ``` 159 | 160 | ### createMaterializedView 161 | 162 | **knex.schema.createMaterializedView(viewName, callback)** 163 | 164 | Creates a new materialized view on the database, with a callback function to modify the view's structure, using the schema-building commands. Only on PostgreSQL, CockroachDb, Redshift and Oracle. 165 | 166 | ```js 167 | knex.schema.createMaterializedView('users_view', function (view) { 168 | view.columns(['first_name']); 169 | view.as(knex('users').select('first_name').where('age','>', '18')); 170 | }) 171 | ``` 172 | 173 | ### refreshMaterializedView 174 | 175 | **knex.schema.refreshMaterializedView(viewName)** 176 | 177 | Refresh materialized view on the database. Only on PostgreSQL, CockroachDb, Redshift and Oracle. 178 | 179 | ```js 180 | knex.schema.refreshMaterializedView('users_view') 181 | ``` 182 | 183 | ### dropView 184 | 185 | **knex.schema.dropView(viewName)** 186 | 187 | Drop view on the database. 
188 | 189 | ```js 190 | knex.schema.dropView('users_view') 191 | ``` 192 | 193 | ### dropViewIfExists 194 | 195 | **knex.schema.dropViewIfExists(viewName)** 196 | 197 | Drop view on the database if it exists. 198 | 199 | ```js 200 | knex.schema.dropViewIfExists('users_view') 201 | ``` 202 | 203 | ### dropMaterializedView 204 | 205 | **knex.schema.dropMaterializedView(viewName)** 206 | 207 | Drop materialized view on the database. Only on PostgreSQL, CockroachDb, Redshift and Oracle. 208 | 209 | ```js 210 | knex.schema.dropMaterializedView('users_view') 211 | ``` 212 | 213 | ### dropMaterializedViewIfExists 214 | 215 | **knex.schema.dropMaterializedViewIfExists(viewName)** 216 | 217 | Drop materialized view on the database if it exists. Only on PostgreSQL, CockroachDb, Redshift and Oracle. 218 | 219 | ```js 220 | knex.schema.dropMaterializedViewIfExists('users_view') 221 | ``` 222 | 223 | ### renameView 224 | 225 | **knex.schema.renameView(viewName)** 226 | 227 | Rename an existing view in the database. Not supported by Oracle and SQLite. 228 | 229 | ```js 230 | knex.schema.renameView('users_view') 231 | ``` 232 | 233 | ### alterView 234 | 235 | **knex.schema.alterView(viewName)** 236 | 237 | Alter view to rename columns or change default values. Only available on PostgreSQL, MSSQL and Redshift. 238 | 239 | ```js 240 | knex.schema.alterView('view_test', function (view) { 241 | view.column('first_name').rename('name_user'); 242 | view.column('bio').defaultTo('empty'); 243 | }) 244 | ``` 245 | 246 | ### generateDdlCommands 247 | 248 | **knex.schema.generateDdlCommands()** 249 | 250 | Generates complete SQL commands for applying described schema changes, without executing anything. Useful when knex is being used purely as a query builder. 
Generally produces the same result as .toSQL(), with a notable exception with SQLite, which relies on asynchronous calls to the database for building part of its schema modification statements 251 | 252 | ```js 253 | const ddlCommands = knex.schema.alterTable( 254 | 'users', 255 | (table) => { 256 | table 257 | .foreign('companyId') 258 | .references('company.companyId') 259 | .withKeyName('fk_fkey_company'); 260 | } 261 | ).generateDdlCommands(); 262 | ``` 263 | 264 | ### raw 265 | 266 | **knex.schema.raw(statement)** 267 | 268 | Run an arbitrary sql query in the schema builder chain. 269 | 270 | ```js 271 | knex.schema.raw("SET sql_mode='TRADITIONAL'") 272 | .table('users', function (table) { 273 | table.dropColumn('name'); 274 | table.string('first_name'); 275 | table.string('last_name'); 276 | }) 277 | ``` 278 | 279 | ### queryContext 280 | 281 | **knex.schema.queryContext(context)** 282 | 283 | Allows configuring a context to be passed to the [wrapIdentifier](/guide/#wrapidentifier) hook. The context can be any kind of value and will be passed to `wrapIdentifier` without modification. 284 | 285 | ```js 286 | knex.schema.queryContext({ foo: 'bar' }) 287 | .table('users', function (table) { 288 | table.string('first_name'); 289 | table.string('last_name'); 290 | }) 291 | ``` 292 | 293 | The context configured will be passed to `wrapIdentifier` for each identifier that needs to be formatted, including the table and column names. However, a different context can be set for the column names via [table.queryContext](/guide/query-builder#querycontext). 294 | 295 | Calling `queryContext` with no arguments will return any context configured for the schema builder instance. 296 | 297 | ### dropSchema 298 | 299 | **knex.schema.dropSchema(schemaName, [cascade])** 300 | 301 | Drop a schema, specified by the schema's name, with optional cascade option (defaults to false). Only supported by PostgreSQL. 
302 | 303 | ```js 304 | //drop schema 'public' 305 | knex.schema.dropSchema('public') 306 | //drop schema 'public' cascade 307 | knex.schema.dropSchema('public', true) 308 | ``` 309 | 310 | ### dropSchemaIfExists 311 | 312 | **knex.schema.dropSchemaIfExists(schemaName, [cascade])** 313 | 314 | Drop a schema conditionally if the schema exists, specified by the schema's name, with optional cascade option (defaults to false). Only supported by PostgreSQL. 315 | 316 | ```js 317 | //drop schema if exists 'public' 318 | knex.schema.dropSchemaIfExists('public') 319 | //drop schema if exists 'public' cascade 320 | knex.schema.dropSchemaIfExists('public', true) 321 | ``` 322 | 323 | ## Schema Building 324 | 325 | ### dropColumn 326 | 327 | **table.dropColumn(name)** 328 | 329 | Drops a column, specified by the column's name 330 | 331 | ### dropColumns 332 | 333 | **table.dropColumns(columns)** 334 | 335 | Drops multiple columns, taking a variable number of column names. 336 | 337 | ### renameColumn 338 | 339 | **table.renameColumn(from, to)** 340 | 341 | Renames a column from one name to another. 342 | 343 | ### increments 344 | 345 | **table.increments(name, options={[primaryKey: boolean = true]})** 346 | 347 | Adds an auto incrementing column. In PostgreSQL this is a serial; in Amazon Redshift an integer identity(1,1). This will be used as the primary key for the table if the column isn't in another primary key. Also available is a bigIncrements if you wish to add a bigint incrementing number (in PostgreSQL bigserial). Note that a primary key is created by default if the column isn't in primary key (with primary function), but you can override this behaviour by passing the `primaryKey` option. If you use this function with primary function, the column is added to the composite primary key. With SQLite, an autoincrement column needs to be a primary key, so if primary function is used, primary keys are transformed in unique index. 
MySQL doesn't support autoincrement columns without a primary key, so multiple queries are generated to create int column, add increments column to composite primary key then modify the column to autoincrement column. 348 | 349 | ```js 350 | // create table 'users' 351 | // with a primary key using 'increments()' 352 | knex.schema.createTable('users', function (table) { 353 | table.increments('userId'); 354 | table.string('name'); 355 | }); 356 | 357 | // create table 'users' 358 | // with a composite primary key ('userId', 'name'). 359 | // increments doesn't generate primary key. 360 | knex.schema.createTable('users', function (table) { 361 | table.primary(['userId', 'name']); 362 | table.increments('userId'); 363 | table.string('name'); 364 | }); 365 | 366 | // reference the 'users' primary key in new table 'posts' 367 | knex.schema.createTable('posts', function (table) { 368 | table.integer('author').unsigned().notNullable(); 369 | table.string('title', 30); 370 | table.string('content'); 371 | 372 | table.foreign('author').references('userId').inTable('users'); 373 | }); 374 | ``` 375 | 376 | A primaryKey option may be passed, to disable the automatic primary key creation: 377 | 378 | ```js 379 | // create table 'users' 380 | // with a primary key using 'increments()' 381 | // but also increments field 'other_id' 382 | // that does not need primary key 383 | knex.schema.createTable('users', function (table) { 384 | table.increments('id'); 385 | table.increments('other_id', { primaryKey: false }); 386 | }); 387 | ``` 388 | 389 | ### integer 390 | 391 | **table.integer(name, length)** 392 | 393 | Adds an integer column. On PostgreSQL you cannot adjust the length, you need to use other options such as bigInteger, etc 394 | 395 | ### bigInteger 396 | 397 | **table.bigInteger(name)** 398 | 399 | In MySQL or PostgreSQL, adds a bigint column, otherwise adds a normal integer. 
Note that bigint data is returned as a string in queries because JavaScript may be unable to parse them without loss of precision. 400 | 401 | ### tinyint 402 | 403 | **table.tinyint(name, length)** 404 | 405 | Adds a tinyint column 406 | 407 | ### smallint 408 | 409 | **table.smallint(name)** 410 | 411 | Adds a smallint column 412 | 413 | ### mediumint 414 | 415 | **table.mediumint(name)** 416 | 417 | Adds a mediumint column 418 | 419 | ### bigint 420 | 421 | **table.bigint(name)** 422 | 423 | Adds a bigint column 424 | 425 | ### text 426 | 427 | **table.text(name, [textType])** 428 | 429 | Adds a text column, with optional textType for MySql text datatype preference. textType may be mediumtext or longtext, otherwise defaults to text. 430 | 431 | ### string 432 | 433 | **table.string(name, [length])** 434 | 435 | Adds a string column, with optional length defaulting to 255. 436 | 437 | ### float 438 | 439 | **table.float(column, [precision], [scale])** 440 | 441 | Adds a float column, with optional precision (defaults to 8) and scale (defaults to 2). 442 | 443 | ### double 444 | 445 | **table.double(column, [precision], [scale])** 446 | 447 | Adds a double column, with optional precision (defaults to 8) and scale (defaults to 2). In SQLite/MSSQL this is a float with no precision/scale; In PostgreSQL this is a double precision; In Oracle this is a number with matching precision/scale. 448 | 449 | ### decimal 450 | 451 | **table.decimal(column, [precision], [scale])** 452 | 453 | Adds a decimal column, with optional precision (defaults to 8) and scale (defaults to 2). Specifying NULL as precision creates a decimal column that can store numbers of any precision and scale. (Only supported for Oracle, SQLite, Postgres) 454 | 455 | ### boolean 456 | 457 | **table.boolean(name)** 458 | 459 | Adds a boolean column. 460 | 461 | ### date 462 | 463 | **table.date(name)** 464 | 465 | Adds a date column. 
466 | 467 | ### datetime 468 | 469 | **table.datetime(name, options={[useTz: boolean], [precision: number]})** 470 | 471 | Adds a datetime column. By default PostgreSQL creates column with timezone (timestamptz type). This behaviour can be overridden by passing the useTz option (which is by default true for PostgreSQL). MySQL and MSSQL do not have useTz option. 472 | 473 | A precision option may be passed: 474 | 475 | ```js 476 | table.datetime('some_time', { precision: 6 }).defaultTo(knex.fn.now(6)) 477 | ``` 478 | 479 | ### time 480 | 481 | **table.time(name, [precision])** 482 | 483 | Adds a time column, with optional precision for MySQL. Not supported on Amazon Redshift. 484 | 485 | In MySQL a precision option may be passed: 486 | 487 | ```js 488 | table.time('some_time', { precision: 6 }) 489 | ``` 490 | 491 | ### timestamp 492 | 493 | **table.timestamp(name, options={[useTz: boolean], [precision: number]})** 494 | 495 | Adds a timestamp column. By default PostgreSQL creates column with timezone (timestamptz type) and MSSQL does not (datetime2). This behaviour can be overridden by passing the useTz option (which is by default false for MSSQL and true for PostgreSQL). MySQL does not have useTz option. 496 | 497 | ```js 498 | table.timestamp('created_at').defaultTo(knex.fn.now()); 499 | ``` 500 | 501 | In PostgreSQL and MySQL a precision option may be passed: 502 | 503 | ```js 504 | table.timestamp('created_at', { precision: 6 }).defaultTo(knex.fn.now(6)); 505 | ``` 506 | 507 | In PostgreSQL and MSSQL a timezone option may be passed: 508 | 509 | ```js 510 | table.timestamp('created_at', { useTz: true }); 511 | ``` 512 | 513 | ### timestamps 514 | 515 | **table.timestamps([useTimestamps], [defaultToNow], [useCamelCase])** 516 | 517 | Adds created\_at and updated\_at columns on the database, setting each to datetime types. When true is passed as the first argument a timestamp type is used instead. 
Both columns default to being not null and using the current timestamp when true is passed as the second argument. Note that on MySQL the .timestamps() only has seconds precision, to get better precision use the .datetime or .timestamp methods directly with precision. If useCamelCase is true, the names of the columns are createdAt and updatedAt. 518 | 519 | ::: info 520 | PostgreSQL `updated_at` field will not automatically be updated. Please see this [issue](https://github.com/knex/knex/issues/1928 "issue") for details 521 | ::: 522 | 523 | ### dropTimestamps 524 | 525 | **table.dropTimestamps([useCamelCase])** 526 | 527 | Drops the columns created\_at and updated\_at from the table, which can be created via timestamps. If useCamelCase is true, the names of the columns are createdAt and updatedAt. 528 | 529 | ### binary 530 | 531 | **table.binary(name, [length])** 532 | 533 | Adds a binary column, with optional length argument for MySQL. 534 | 535 | ### enum / enu 536 | 537 | **table.enu(col, values, [options])** 538 | 539 | Adds an enum column, (aliased to enu, as enum is a reserved word in JavaScript). Implemented as unchecked varchar(255) on Amazon Redshift. Note that the second argument is an array of values. Example: 540 | 541 | ```js 542 | table.enu('column', ['value1', 'value2']) 543 | ``` 544 | 545 | For Postgres, an additional options argument can be provided to specify whether or not to use Postgres's native TYPE: 546 | 547 | ```js 548 | table.enu('column', ['value1', 'value2'], { useNative: true, enumName: 'foo_type' }) 549 | ``` 550 | 551 | It will use the values provided to generate the appropriate TYPE. 
Example: 552 | 553 | ```sql 554 | CREATE TYPE "foo_type" AS ENUM ('value1', 'value2'); 555 | ``` 556 | 557 | To use an existing native type across columns, specify 'existingType' in the options (this assumes the type has already been created): 558 | 559 | ::: info 560 | Since the enum values aren't utilized for a native && existing type, the type being passed in for values is immaterial. 561 | ::: 562 | 563 | ```js 564 | table.enu('column', null, { useNative: true, existingType: true, enumName: 'foo_type' }) 565 | ``` 566 | 567 | If you want to use existing enums from a schema, different from the schema of your current table, specify 'schemaName' in the options: 568 | 569 | ```js 570 | table.enu('column', null, { useNative: true, existingType: true, enumName: 'foo_type', schemaName: 'public' }) 571 | ``` 572 | 573 | Knex does not provide any way to alter enumerations after creation. To change an enumeration later on you must use Knex.raw, and the appropriate command for your database. 574 | 575 | ### json 576 | 577 | **table.json(name)** 578 | 579 | Adds a json column, using the built-in json type in PostgreSQL, MySQL and SQLite, defaulting to a text column in older versions or in unsupported databases. 580 | 581 | For PostgreSQL, due to incompatibility between native array and json types, when setting an array (or a value that could be an array) as the value of a json or jsonb column, you should use JSON.stringify() to convert your value to a string prior to passing it to the query builder, e.g. 582 | 583 | ```js 584 | knex.table('users') 585 | .where({id: 1}) 586 | .update({json_data: JSON.stringify(mightBeAnArray)}); 587 | ``` 588 | 589 | ### jsonb 590 | 591 | **table.jsonb(name)** 592 | 593 | Adds a jsonb column. Works similar to table.json(), but uses native jsonb type if possible. 
594 | 595 | ### uuid 596 | 597 | **table.uuid(name, options=({[useBinaryUuid:boolean],[primaryKey:boolean]}))** 598 | 599 | Adds a uuid column - this uses the built-in uuid type in PostgreSQL, and falls back to a char(36) in other databases by default. 600 | If useBinaryUuid is true, binary(16) is used. See uuidToBin function to convert uuid in binary before inserting and binToUuid to convert binary uuid to uuid. 601 | If primaryKey is true, then for PostgreSQL the field will be configured as `uuid primary key`, for CockroachDB an additional `default gen_random_uuid()` is set on the type. 602 | 603 | You may set the default value to the uuid helper function. Not supported by Redshift. 604 | 605 | ```js 606 | knex.schema.createTable(tblName, (table) => { 607 | table.uuid('uuidColumn').defaultTo(knex.fn.uuid()); 608 | }); 609 | ``` 610 | 611 | ### geometry 612 | 613 | **table.geometry(name)** 614 | 615 | Adds a geometry column. Supported by SQLite, MSSQL and PostgreSQL. 616 | 617 | ```js 618 | knex.schema.createTable(tblName, (table) => { 619 | table.geometry('geometryColumn'); 620 | }); 621 | ``` 622 | 623 | ### geography 624 | 625 | **table.geography(name)** 626 | 627 | Adds a geography column. Supported by SQLite, MSSQL and PostgreSQL (in PostGIS extension). 628 | 629 | ```js 630 | knex.schema.createTable(tblName, (table) => { 631 | table.geography('geographyColumn'); 632 | }); 633 | ``` 634 | 635 | ### point 636 | 637 | **table.point(name)** 638 | 639 | Add a point column. Not supported by CockroachDB and MSSQL. 640 | 641 | ```js 642 | knex.schema.createTable(tblName, (table) => { 643 | table.point('pointColumn'); 644 | }); 645 | ``` 646 | 647 | ### comment 648 | 649 | **table.comment(value)** 650 | 651 | Sets the comment for a table. 652 | 653 | ### engine 654 | 655 | **table.engine(val)** 656 | 657 | Sets the engine for the database table, only available within a createTable call, and only applicable to MySQL. 
658 | 659 | ### charset 660 | 661 | **table.charset(val)** 662 | 663 | Sets the charset for the database table, only available within a createTable call, and only applicable to MySQL. 664 | 665 | ### collate 666 | 667 | **table.collate(val)** 668 | 669 | Sets the collation for the database table, only available within a createTable call, and only applicable to MySQL. 670 | 671 | ### inherits 672 | 673 | **table.inherits(val)** 674 | 675 | Sets the tables that this table inherits, only available within a createTable call, and only applicable to PostgreSQL. 676 | 677 | ### specificType 678 | 679 | **table.specificType(name, type)** 680 | 681 | Sets a specific type for the column creation, if you'd like to add a column type that isn't supported here. 682 | 683 | ### index 684 | 685 | **table.index(columns, [indexName], options=({[indexType: string], [storageEngineIndexType: 'btree'|'hash'], [predicate: QueryBuilder]}))** 686 | 687 | Adds an index to a table over the given columns. A default index name using the columns is used unless indexName is specified. In MySQL, the storage engine index type may be 'btree' or 'hash' index types, more info in Index Options section : [https://dev.mysql.com/doc/refman/8.0/en/create-index.html](https://dev.mysql.com/doc/refman/8.0/en/create-index.html). The indexType can be optionally specified for PostgreSQL and MySQL. Amazon Redshift does not allow creating an index. In PostgreSQL, SQLite and MSSQL a partial index can be specified by setting a 'where' predicate. 688 | 689 | ```js 690 | knex.table('users', function (table) { 691 | table.index(['name', 'last_name'], 'idx_name_last_name', { 692 | indexType: 'FULLTEXT', 693 | storageEngineIndexType: 'hash', 694 | predicate: knex.whereNotNull('email'), 695 | }); 696 | }); 697 | ``` 698 | 699 | ### dropIndex 700 | 701 | **table.dropIndex(columns, [indexName])** 702 | 703 | Drops an index from a table. 
A default index name using the columns is used unless indexName is specified (in which case columns is ignored). Amazon Redshift does not allow creating an index. 704 | 705 | ### setNullable 706 | 707 | **table.setNullable(column)** 708 | 709 | Makes table column nullable. 710 | 711 | ### dropNullable 712 | 713 | **table.dropNullable(column)** 714 | 715 | Makes table column not nullable. Note that this operation will fail if there are already null values in this column. 716 | 717 | ### primary 718 | 719 | **table.primary(columns, options=({[constraintName:string],[deferrable:'not deferrable'|'deferred'|'immediate']}))** 720 | 721 | Create a primary key constraint on table using input `columns`. If you need to create a composite primary key, pass an array of columns to `columns`. Constraint name defaults to `tablename_pkey` unless `constraintName` is specified. On Amazon Redshift, all columns included in a primary key must be not nullable. Deferrable primary constraints are supported on Postgres and Oracle and can be set by passing deferrable option to options object. 722 | 723 | ```js 724 | knex.schema.alterTable('users', function(t) { 725 | t.unique('email') 726 | }) 727 | knex.schema.alterTable('job', function(t) { 728 | t.primary('email',{constraintName:'users_primary_key',deferrable:'deferred'}) 729 | }) 730 | ``` 731 | 732 | ::: info 733 | If you want to chain primary() while creating new column you can use [primary](#primary-1) 734 | ::: 735 | 736 | ### unique 737 | 738 | **table.unique(columns, options={[indexName: string], [deferrable:'not deferrable'|'immediate'|'deferred'], [storageEngineIndexType:'btree'|'hash'], [useConstraint:true|false], [predicate: QueryBuilder]})** 739 | 740 | Adds a unique index to a table over the given `columns`. 
In MySQL, the storage engine index type may be 'btree' or 'hash' index types, more info in Index Options section : [https://dev.mysql.com/doc/refman/8.0/en/create-index.html](https://dev.mysql.com/doc/refman/8.0/en/create-index.html). A default index name using the columns is used unless indexName is specified. If you need to create a composite index, pass an array of columns to `columns`. Deferrable unique constraints are supported on Postgres and Oracle and can be set by passing deferrable option to options object. In MSSQL and Postgres, you can set the `useConstraint` option to true to create a unique constraint instead of a unique index (defaults to false for MSSQL, true for Postgres without `predicate`, false for Postgres with `predicate`). In PostgreSQL, SQLite and MSSQL a partial unique index can be specified by setting a 'where' predicate. 741 | 742 | ```js 743 | knex.schema.alterTable('users', function(t) { 744 | t.unique('email') 745 | }) 746 | knex.schema.alterTable('job', function(t) { 747 | t.unique(['account_id', 'program_id'], {indexName: 'job_composite_index', deferrable: 'deferred', storageEngineIndexType: 'hash'}) 748 | }) 749 | knex.schema.alterTable('job', function(t) { 750 | t.unique(['account_id', 'program_id'], {indexName: 'job_composite_index', useConstraint: true}) 751 | }) 752 | knex.schema.alterTable('job', function(t) { 753 | t.unique(['account_id', 'program_id'], {indexName: 'job_composite_index', predicate: knex.whereNotNull('account_id')}) 754 | }) 755 | ``` 756 | 757 | ::: info 758 | If you want to chain unique() while creating new column you can use [unique](#unique-1) 759 | ::: 760 | 761 | ### foreign 762 | 763 | **table.foreign(columns, [foreignKeyName])[.onDelete(statement).onUpdate(statement).withKeyName(foreignKeyName).deferrable(type)]** 764 | 765 | Adds a foreign key constraint to a table for an existing column using `table.foreign(column).references(column)` or multiple columns using 
`table.foreign(columns).references(columns).inTable(table)`. 766 | 767 | A default key name using the columns is used unless `foreignKeyName` is specified. 768 | 769 | You can also chain `onDelete()` and/or `onUpdate()` to set the reference option `(RESTRICT, CASCADE, SET NULL, NO ACTION)` for the operation. You can also chain `withKeyName()` to override default key name that is generated from table and column names (result is identical to specifying second parameter to function `foreign()`). 770 | 771 | Deferrable foreign constraint is supported on Postgres and Oracle and can be set by chaining `.deferrable(type)` 772 | 773 | Note that using `foreign()` is the same as `column.references(column)` but it works for existing columns. 774 | 775 | ```js 776 | knex.schema.table('users', function (table) { 777 | table.integer('user_id').unsigned() 778 | table.foreign('user_id').references('Items.user_id_in_items').deferrable('deferred') 779 | }) 780 | ``` 781 | 782 | ### dropForeign 783 | 784 | **table.dropForeign(columns, [foreignKeyName])** 785 | 786 | Drops a foreign key constraint from a table. A default foreign key name using the columns is used unless foreignKeyName is specified (in which case columns is ignored). 787 | 788 | ### dropUnique 789 | 790 | **table.dropUnique(columns, [indexName])** 791 | 792 | Drops a unique key constraint from a table. A default unique key name using the columns is used unless indexName is specified (in which case columns is ignored). 793 | 794 | ### dropPrimary 795 | 796 | **table.dropPrimary([constraintName])** 797 | 798 | Drops the primary key constraint on a table. Defaults to tablename\_pkey unless constraintName is specified. 799 | 800 | ### queryContext 801 | 802 | **table.queryContext(context)** 803 | 804 | Allows configuring a context to be passed to the [wrapIdentifier](/guide/#wrapidentifier) hook for formatting table builder identifiers. 
The context can be any kind of value and will be passed to `wrapIdentifier` without modification. 805 | 806 | ```js 807 | knex.schema.table('users', function (table) { 808 | table.queryContext({ foo: 'bar' }); 809 | table.string('first_name'); 810 | table.string('last_name'); 811 | }) 812 | ``` 813 | 814 | This method also enables overwriting the context configured for a schema builder instance via [schema.queryContext](/guide/schema-builder#querycontext): 815 | 816 | ```js 817 | knex.schema.queryContext('schema context') 818 | .table('users', function (table) { 819 | table.queryContext('table context'); 820 | table.string('first_name'); 821 | table.string('last_name'); 822 | }) 823 | ``` 824 | 825 | Note that it's also possible to overwrite the table builder context for any column in the table definition: 826 | 827 | ```js 828 | knex.schema.queryContext('schema context') 829 | .table('users', function (table) { 830 | table.queryContext('table context'); 831 | table.string('first_name').queryContext('first_name context'); 832 | table.string('last_name').queryContext('last_name context'); 833 | }) 834 | ``` 835 | 836 | Calling `queryContext` with no arguments will return any context configured for the table builder instance. 837 | 838 | ## Chainable Methods 839 | 840 | The following three methods may be chained on the schema building methods, as modifiers to the column. 841 | 842 | ### alter 843 | 844 | **column.alter(options={[alterNullable: boolean = true, alterType: boolean = true]})** 845 | 846 | Marks the column as an alter / modify, instead of the default add. 847 | 848 | ::: warning 849 | This only works in .alterTable() and is not supported by SQLite or Amazon Redshift. Alter is _not_ done incrementally over older column type so if you like to add `notNullable` and keep the old default value, the alter statement must contain both `.notNullable().defaultTo(1).alter()`. If one just tries to add `.notNullable().alter()` the old default value will be dropped. 
Nullable alterations are done only if alterNullable is true. Type alterations are done only if alterType is true. 850 | ::: 851 | 852 | ```js 853 | knex.schema.alterTable('user', function(t) { 854 | t.increments().primary(); // add 855 | // drops previous default value from column, 856 | // change type to string and add not nullable constraint 857 | t.string('username', 35).notNullable().alter(); 858 | // drops both not null constraint and the default value 859 | t.integer('age').alter(); 860 | // if alterNullable is false, drops only the default value 861 | t.integer('age').alter({alterNullable : false}); 862 | // if alterType is false, type of column is not altered. 863 | t.integer('age').alter({alterType : false}); 864 | }); 865 | ``` 866 | 867 | ### index 868 | 869 | **column.index([indexName], options=({[indexType: string], [storageEngineIndexType: 'btree'|'hash'], [predicate: QueryBuilder]}))** 870 | 871 | Specifies a field as an index. If an indexName is specified, it is used in place of the standard index naming convention of tableName\_columnName. In MySQL, the storage engine index type may be 'btree' or 'hash' index types, more info in Index Options section : [https://dev.mysql.com/doc/refman/8.0/en/create-index.html](https://dev.mysql.com/doc/refman/8.0/en/create-index.html). The indexType can be optionally specified for PostgreSQL and MySQL. No-op if this is chained off of a field that cannot be indexed. In PostgreSQL, SQLite and MSSQL a partial index can be specified by setting a 'where' predicate. 872 | 873 | ### primary 874 | 875 | **column.primary(options=({[constraintName:string],[deferrable:'not deferrable'|'deferred'|'immediate']}));** 876 | 877 | Sets a primary key constraint on `column`. Constraint name defaults to `tablename_pkey` unless `constraintName` is specified. On Amazon Redshift, all columns included in a primary key must be not nullable. 
Deferrable primary constraints are supported on Postgres and Oracle and can be set by passing deferrable option to options object. 878 | 879 | ```js 880 | knex.schema.table('users', function (table) { 881 | table.integer('user_id').primary('email',{constraintName:'users_primary_key',deferrable:'deferred'}) 882 | }) 883 | ``` 884 | 885 | ::: info 886 | If you want to create primary constraint on existing column use [primary](#primary) 887 | ::: 888 | 889 | ### unique 890 | 891 | **column.unique(options={[indexName:string],[deferrable:'not deferrable'|'immediate'|'deferred']})** 892 | 893 | Sets the `column` as unique. On Amazon Redshift, this constraint is not enforced, but it is used by the query planner. Deferrable unique constraints are supported on Postgres and Oracle and can be set by passing deferrable option to options object. 894 | 895 | ```js 896 | knex.schema.table('users', function (table) { 897 | table.integer('user_id').unique({indexName:'user_unique_id', deferrable:'immediate'}) 898 | }) 899 | ``` 900 | 901 | ::: info 902 | If you want to create unique constraint on existing column use [unique](#unique) 903 | ::: 904 | 905 | ### references 906 | 907 | **column.references(column)** 908 | 909 | Sets the "column" that the current column references as a foreign key. "column" can either be "." syntax, or just the column name followed up with a call to inTable to specify the table. 910 | 911 | ### inTable 912 | 913 | **column.inTable(table)** 914 | 915 | Sets the "table" where the foreign key column is located after calling column.references. 916 | 917 | ### onDelete 918 | 919 | **column.onDelete(command)** 920 | 921 | Sets the SQL command to be run "onDelete". 922 | 923 | ### onUpdate 924 | 925 | **column.onUpdate(command)** 926 | 927 | Sets the SQL command to be run "onUpdate". 928 | 929 | ### defaultTo 930 | 931 | **column.defaultTo(value, options={[constraintName: string = undefined]})** 932 | 933 | Sets the default value for the column on an insert. 
934 | 935 | In MSSQL a constraintName option may be passed to ensure a specific constraint name: 936 | 937 | ```js 938 | column.defaultTo('value', { constraintName: 'df_table_value' }); 939 | ``` 940 | 941 | ### unsigned 942 | 943 | **column.unsigned()** 944 | 945 | Specifies a number as unsigned. Only for numeric values. 946 | 947 | ### notNullable 948 | 949 | **column.notNullable()** 950 | 951 | Adds a not null on the current column being created. 952 | 953 | ### nullable 954 | 955 | **column.nullable()** 956 | 957 | Default on column creation, this explicitly sets a field to be nullable. 958 | 959 | ### first 960 | 961 | **column.first()** 962 | 963 | Sets the column to be inserted on the first position, only used in MySQL alter tables. 964 | 965 | ### after 966 | 967 | **column.after(field)** 968 | 969 | Sets the column to be inserted after another, only used in MySQL alter tables. 970 | 971 | ### comment 972 | 973 | **column.comment(value)** 974 | 975 | Sets the comment for a column. 976 | 977 | ```js 978 | knex.schema.createTable('accounts', function(t) { 979 | t.increments().primary(); 980 | t.string('email').unique().comment('This is the email field'); 981 | }); 982 | ``` 983 | 984 | ### collate 985 | 986 | **column.collate(collation)** 987 | 988 | Sets the collation for a column (only works in MySQL). Here is a list of all available collations: [https://dev.mysql.com/doc/refman/5.5/en/charset-charsets.html](https://dev.mysql.com/doc/refman/5.5/en/charset-charsets.html) 989 | 990 | ```js 991 | knex.schema.createTable('users', function(t) { 992 | t.increments(); 993 | t.string('email').unique().collate('utf8_unicode_ci'); 994 | }); 995 | ``` 996 | 997 | ## View 998 | 999 | ### columns 1000 | 1001 | **view.columns([columnNames])** 1002 | 1003 | Specify the columns of the view. 
1004 | 1005 | ```js 1006 | knex.schema.createView('users_view', function (view) { 1007 | view.columns(['first_name', 'last_name']); 1008 | view.as(knex('users').select('first_name').where('age','>', '18')); 1009 | }); 1010 | ``` 1011 | 1012 | ### as 1013 | 1014 | **view.as(selectQuery)** 1015 | 1016 | Specify the select query of the view. 1017 | 1018 | ### checkOption 1019 | 1020 | **view.checkOption()** 1021 | 1022 | Add check option on the view definition. On OracleDb, MySQL, PostgreSQL and Redshift. 1023 | 1024 | ### localCheckOption 1025 | 1026 | **view.localCheckOption()** 1027 | 1028 | Add local check option on the view definition. On MySQL, PostgreSQL and Redshift. 1029 | 1030 | ### cascadedCheckOption 1031 | 1032 | **view.cascadedCheckOption()** 1033 | 1034 | Add cascaded check option on the view definition. On MySQL, PostgreSQL and Redshift. 1035 | 1036 | ## Checks 1037 | 1038 | ### check 1039 | 1040 | **table.check(checkPredicate, [bindings], [constraintName]))** 1041 | 1042 | Specify a check on table or column with raw predicate. 1043 | 1044 | ```js 1045 | knex.schema.createTable('product', function (table) { 1046 | table.integer('price_min'); 1047 | table.integer('price'); 1048 | table.check('?? >= ??', ['price', 'price_min']); 1049 | }) 1050 | ``` 1051 | 1052 | ### checkPositive 1053 | 1054 | **column.checkPositive([constraintName])** 1055 | 1056 | Specify a check on column that test if the value of column is positive. 1057 | 1058 | ```js 1059 | knex.schema.createTable('product', function (table) { 1060 | table.integer('price').checkPositive(); 1061 | }) 1062 | ``` 1063 | 1064 | ### checkNegative 1065 | 1066 | **column.checkNegative([constraintName])** 1067 | 1068 | Specify a check on column that test if the value of column is negative. 
1069 | 1070 | ```js 1071 | knex.schema.createTable('product', function (table) { 1072 | table.integer('price_decrease').checkNegative(); 1073 | }) 1074 | ``` 1075 | 1076 | ### checkIn 1077 | 1078 | **column.checkIn(values, [constraintName])** 1079 | 1080 | Specify a check on column that test if the value of column is contained in a set of specified values. 1081 | 1082 | ```js 1083 | knex.schema.createTable('product', function (table) { 1084 | table.string('type').checkIn(['table', 'chair', 'sofa']); 1085 | }) 1086 | ``` 1087 | 1088 | ### checkNotIn 1089 | 1090 | **column.checkNotIn(values, [constraintName])** 1091 | 1092 | Specify a check on column that test if the value of column is not contains in a set of specified values. 1093 | 1094 | ```js 1095 | knex.schema.createTable('product', function (table) { 1096 | table.string('type').checkNotIn(['boot', 'shoe']); 1097 | }) 1098 | ``` 1099 | 1100 | ### checkBetween 1101 | 1102 | **column.checkBetween(values, [constraintName])** 1103 | 1104 | Specify a check on column that test if the value of column is within a range of values. 1105 | 1106 | ```js 1107 | knex.schema.createTable('product', function (table) { 1108 | table.integer('price').checkBetween([0, 100]); 1109 | }) 1110 | // You can add checks on multiple intervals 1111 | knex.schema.createTable('product', function (table) { 1112 | table.integer('price').checkBetween([ [0, 20], [30,40] ]); 1113 | }) 1114 | ``` 1115 | 1116 | ### checkLength 1117 | 1118 | **column.checkLength(operator, length, [constraintName])** 1119 | 1120 | Specify a check on column that test if the length of a string match the predicate. 
1121 | 1122 | ```js 1123 | knex.schema.createTable('product', function (table) { 1124 | // operator can be =, !=, <=, >=, <, > 1125 | t.varchar('phone').checkLength('=', 8); 1126 | }) 1127 | ``` 1128 | 1129 | ### checkRegex 1130 | 1131 | **column.checkRegex(regex, [constraintName])** 1132 | 1133 | Specify a check on column that test if the value match the specified regular expression. In MSSQL only simple pattern matching in supported but not regex syntax. 1134 | 1135 | ```js 1136 | knex.schema.createTable('product', function (table) { 1137 | table.string('phone').checkRegex('[0-9]{8}'); 1138 | // In MSSQL, {8} syntax don't work, 1139 | // you need to duplicate [0-9]. 1140 | table.string('phone').checkRegex('[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]'); 1141 | }) 1142 | ``` 1143 | 1144 | ### dropChecks 1145 | 1146 | **table.dropChecks([checkConstraintNames])** 1147 | 1148 | Drop checks constraint given an array of constraint names. 1149 | 1150 | ```js 1151 | knex.schema.createTable('product', function (table) { 1152 | table.integer('price').checkPositive('price_check') 1153 | table.integer('price_proportion').checkBetween([0, 100],'price_proportion_check') 1154 | table.dropChecks(['price_check', 'price_proportion_check']); 1155 | }) 1156 | ``` 1157 | -------------------------------------------------------------------------------- /src/guide/transactions.md: -------------------------------------------------------------------------------- 1 | # Transactions 2 | 3 | Transactions are an important feature of relational databases, as they allow correct recovery from failures and keep a database consistent even in cases of system failure. All queries within a transaction are executed on the same database connection, and run the entire set of queries as a single unit of work. Any failure will mean the database will rollback any queries executed on that connection to the pre-transaction state. 
4 | 5 | Transactions are handled by passing a handler function into `knex.transaction`. The handler function accepts a single argument, an object which may be used in two ways: 6 | 7 | 1. As the "promise aware" knex connection 8 | 2. As an object passed into a query with [transacting](/guide/query-builder#transacting) and eventually call commit or rollback. 9 | 10 | Consider these two examples: 11 | 12 | ```js 13 | // Using trx as a query builder: 14 | knex.transaction(function(trx) { 15 | 16 | const books = [ 17 | {title: 'Canterbury Tales'}, 18 | {title: 'Moby Dick'}, 19 | {title: 'Hamlet'} 20 | ]; 21 | 22 | return trx 23 | .insert({name: 'Old Books'}, 'id') 24 | .into('catalogues') 25 | .then(function(ids) { 26 | books.forEach((book) => book.catalogue_id = ids[0]); 27 | return trx('books').insert(books); 28 | }); 29 | }) 30 | .then(function(inserts) { 31 | console.log(inserts.length + ' new books saved.'); 32 | }) 33 | .catch(function(error) { 34 | // If we get here, that means that 35 | // neither the 'Old Books' catalogues insert, 36 | // nor any of the books inserts will have taken place. 37 | console.error(error); 38 | }); 39 | ``` 40 | 41 | And then this example: 42 | 43 | ```js 44 | // Using trx as a transaction object: 45 | knex.transaction(function(trx) { 46 | 47 | const books = [ 48 | {title: 'Canterbury Tales'}, 49 | {title: 'Moby Dick'}, 50 | {title: 'Hamlet'} 51 | ]; 52 | 53 | knex.insert({name: 'Old Books'}, 'id') 54 | .into('catalogues') 55 | .transacting(trx) 56 | .then(function(ids) { 57 | books.forEach((book) => book.catalogue_id = ids[0]); 58 | return knex('books').insert(books).transacting(trx); 59 | }) 60 | .then(trx.commit) 61 | .catch(trx.rollback); 62 | }) 63 | .then(function(inserts) { 64 | console.log(inserts.length + ' new books saved.'); 65 | }) 66 | .catch(function(error) { 67 | // If we get here, that means that 68 | // neither the 'Old Books' catalogues insert, 69 | // nor any of the books inserts will have taken place. 
70 | console.error(error); 71 | }); 72 | ``` 73 | 74 | Same example as above using await/async: 75 | 76 | ```ts 77 | try { 78 | await knex.transaction(async trx => { 79 | 80 | const books = [ 81 | {title: 'Canterbury Tales'}, 82 | {title: 'Moby Dick'}, 83 | {title: 'Hamlet'} 84 | ]; 85 | 86 | const ids = await trx('catalogues') 87 | .insert({ 88 | name: 'Old Books' 89 | }, 'id') 90 | 91 | books.forEach((book) => book.catalogue_id = ids[0]) 92 | const inserts = await trx('books').insert(books) 93 | 94 | console.log(inserts.length + ' new books saved.') 95 | }) 96 | } catch (error) { 97 | // If we get here, that means that neither the 'Old Books' catalogues insert, 98 | // nor any of the books inserts will have taken place. 99 | console.error(error); 100 | } 101 | ``` 102 | 103 | Same example as above using another await/async approach: 104 | 105 | ```ts 106 | try { 107 | await knex.transaction(async trx => { 108 | 109 | const books = [ 110 | {title: 'Canterbury Tales'}, 111 | {title: 'Moby Dick'}, 112 | {title: 'Hamlet'} 113 | ]; 114 | 115 | const ids = await knex('catalogues') 116 | .insert({ 117 | name: 'Old Books' 118 | }, 'id') 119 | .transacting(trx) 120 | 121 | books.forEach(book => book.catalogue_id = ids[0]) 122 | await knex('books') 123 | .insert(books) 124 | .transacting(trx) 125 | 126 | console.log(inserts.length + ' new books saved.') 127 | }) 128 | } catch (error) { 129 | console.error(error); 130 | } 131 | ``` 132 | 133 | Throwing an error directly from the transaction handler function automatically rolls back the transaction, same as returning a rejected promise. 134 | 135 | Notice that if a promise is not returned within the handler, it is up to you to ensure `trx.commit`, or `trx.rollback` are called, otherwise the transaction connection will hang. 136 | 137 | Calling `trx.rollback` will return a rejected Promise. 
If you don't pass any argument to `trx.rollback`, a generic `Error` object will be created and passed in to ensure the Promise always rejects with something. 138 | 139 | Note that Amazon Redshift does not support savepoints in transactions. 140 | 141 | In some cases you may prefer to create transaction but only execute statements in it later. In such case call method `transaction` without a handler function: 142 | 143 | ```ts 144 | // Using trx as a transaction object: 145 | const trx = await knex.transaction(); 146 | 147 | const books = [ 148 | {title: 'Canterbury Tales'}, 149 | {title: 'Moby Dick'}, 150 | {title: 'Hamlet'} 151 | ]; 152 | 153 | trx('catalogues') 154 | .insert({name: 'Old Books'}, 'id') 155 | .then(function(ids) { 156 | books.forEach((book) => book.catalogue_id = ids[0]); 157 | return trx('books').insert(books); 158 | }) 159 | .then(trx.commit) 160 | .catch(trx.rollback); 161 | ``` 162 | 163 | If you want to create a reusable transaction instance, but do not want to actually start it until it is used, you can create a transaction provider instance. 
It will start transaction after being called for the first time, and return same transaction on subsequent calls: 164 | 165 | ```ts 166 | // Does not start a transaction yet 167 | const trxProvider = knex.transactionProvider(); 168 | 169 | const books = [ 170 | {title: 'Canterbury Tales'}, 171 | {title: 'Moby Dick'}, 172 | {title: 'Hamlet'} 173 | ]; 174 | 175 | // Starts a transaction 176 | const trx = await trxProvider(); 177 | const ids = await trx('catalogues') 178 | .insert({name: 'Old Books'}, 'id') 179 | books.forEach((book) => book.catalogue_id = ids[0]); 180 | await trx('books').insert(books); 181 | 182 | // Reuses same transaction 183 | const sameTrx = await trxProvider(); 184 | const ids2 = await sameTrx('catalogues') 185 | .insert({name: 'New Books'}, 'id') 186 | books.forEach((book) => book.catalogue_id = ids2[0]); 187 | await sameTrx('books').insert(books); 188 | ``` 189 | 190 | You can access the promise that gets resolved after transaction is rolled back explicitly by user or committed, or rejected if it gets rolled back by DB itself, when using either way of creating transaction, from field `executionPromise`: 191 | 192 | ```ts 193 | const trxProvider = knex.transactionProvider(); 194 | const trx = await trxProvider(); 195 | const trxPromise = trx.executionPromise; 196 | 197 | const trx2 = await knex.transaction(); 198 | const trx2Promise = trx2.executionPromise; 199 | 200 | const trxInitPromise = new Promise(async (resolve, reject) => { 201 | knex.transaction((transaction) => { 202 | resolve(transaction); 203 | }); 204 | }); 205 | const trx3 = await trxInitPromise; 206 | const trx3Promise = trx3.executionPromise; 207 | ``` 208 | 209 | You can check if a transaction has been committed or rolled back with the method `isCompleted`: 210 | 211 | ```ts 212 | const trx = await knex.transaction(); 213 | trx.isCompleted(); // false 214 | await trx.commit(); 215 | trx.isCompleted(); // true 216 | 217 | const trx2 = knex.transactionProvider(); 218 | await 
trx2.rollback(); 219 | trx2.isCompleted(); // true 220 | ``` 221 | 222 | You can check the property `knex.isTransaction` to see if the current knex instance you are working with is a transaction. 223 | 224 | ## Transaction Modes 225 | 226 | In case you need to specify an isolation level for your transaction, you can use a config parameter `isolationLevel`. Not supported by oracle and sqlite, options are `read uncommitted`, `read committed`, `repeatable read`, `snapshot` (mssql only), `serializable`. 227 | 228 | ```ts 229 | // Simple read skew example 230 | const isolationLevel = 'read committed'; 231 | const trx = await knex.transaction({isolationLevel}); 232 | const result1 = await trx(tableName).select(); 233 | await knex(tableName).insert({ id: 1, value: 1 }); 234 | const result2 = await trx(tableName).select(); 235 | await trx.commit(); 236 | // result1 may or may not deep equal result2 depending on isolation level 237 | ``` 238 | 239 | You may also set the transaction mode as `read only` using the `readOnly` config parameter. It is currently only supported on mysql, postgres, and redshift. 240 | 241 | ```ts 242 | const trx = await knex.transaction({ readOnly: true }); 243 | // 💥 Cannot `INSERT` while inside a `READ ONLY` transaction 244 | const result = await trx(tableName).insert({ id: 1, foo: 'bar' }); 245 | ``` 246 | -------------------------------------------------------------------------------- /src/guide/utility.md: -------------------------------------------------------------------------------- 1 | # Utility 2 | 3 | A collection of utilities that the knex library provides for convenience. 4 | 5 | ## batchInsert 6 | **knex.batchInsert(tableName)** 7 | 8 | The `batchInsert` utility will insert a batch of rows wrapped inside a transaction _(which is automatically created unless explicitly given a transaction using [transacting](/guide/query-builder#transacting))_, at a given `chunkSize`. 
9 | 10 | It's primarily designed to be used when you have thousands of rows to insert into a table. 11 | 12 | By default, the `chunkSize` is set to 1000. 13 | 14 | BatchInsert also allows for [returning values](/guide/query-builder#returning) and supplying transactions using [transacting](/guide/query-builder#transacting). 15 | 16 | ```js 17 | const rows = [{/*...*/}, {/*...*/}]; 18 | const chunkSize = 30; 19 | knex.batchInsert('TableName', rows, chunkSize) 20 | .returning('id') 21 | .then(function(ids) { /*...*/ }) 22 | .catch(function(error) { /*...*/ }); 23 | 24 | knex.transaction(function(tr) { 25 | return knex.batchInsert('TableName', rows, chunkSize) 26 | .transacting(tr) 27 | }) 28 | .then(function() { /*...*/ }) 29 | .catch(function(error) { /*...*/ }); 30 | ``` 31 | 32 | ## now 33 | 34 | **knex.fn.now(precision)** 35 | 36 | Return the current timestamp with a precision (optional) 37 | 38 | ```js 39 | table.datetime('some_time', { precision: 6 }).defaultTo(knex.fn.now(6)) 40 | ``` 41 | 42 | ## uuid 43 | 44 | **knex.fn.uuid()** 45 | 46 | Return a uuid generation function. 
Not supported by Redshift 47 | 48 | ```js 49 | table.uuid('uuid').defaultTo(knex.fn.uuid()) 50 | ``` 51 | 52 | ## uuidToBin 53 | 54 | **knex.fn.uuidToBin(uuid)** 55 | 56 | Convert a string uuid (char(36)) to a binary uuid (binary(16)) 57 | 58 | ```js 59 | knex.schema.createTable('uuid_table', (t) => { 60 | t.uuid('uuid_col_binary', { useBinaryUuid: true }); 61 | }); 62 | knex('uuid_table').insert({ 63 | uuid_col_binary: knex.fn.uuidToBin('3f06af63-a93c-11e4-9797-00505690773f'), 64 | }); 65 | ``` 66 | 67 | ## binToUuid 68 | 69 | **knex.fn.binToUuid(binaryUuid)** 70 | 71 | Convert a binary uuid (binary(16)) to a string uuid (char(36)) 72 | 73 | ```js 74 | const res = await knex('uuid_table').select('uuid_col_binary'); 75 | knex.fn.binToUuid(res[0].uuid_col_binary) 76 | ``` 77 | -------------------------------------------------------------------------------- /src/index.md: -------------------------------------------------------------------------------- 1 | --- 2 | home: true 3 | heroImage: /knex-logo.png 4 | heroAlt: Logo knex 5 | heroText: Knex.js 6 | tagline: SQL query builder 7 | actionText: View guide 8 | actionLink: /guide/ 9 | altActionText: Star on GitHub 10 | altActionLink: https://github.com/knex/knex 11 | title: SQL Query Builder for Javascript 12 | --- 13 | 14 |
15 | 16 | **Knex.js** (pronounced [/kəˈnɛks/](https://youtu.be/19Av0Lxml-I?t=521)) is a "batteries included" SQL query builder for **PostgreSQL**, **CockroachDB**, **MSSQL**, **MySQL**, **MariaDB**, **SQLite3**, **Better-SQLite3**, **Oracle**, and **Amazon Redshift** designed to be flexible, portable, and fun to use. 17 | 18 | It features both traditional node style [callbacks](/guide/interfaces#callbacks) as well as a [promise](/guide/interfaces#promises) interface for cleaner async flow control, [a stream interface](/guide/interfaces#streams), full-featured [query](/guide/query-builder) and [schema](/guide/schema-builder) builders, [**transaction support (with savepoints)**](/guide/transactions), connection [pooling](/guide/#pool) and standardized responses between different query clients and dialects. 19 | 20 |
21 | -------------------------------------------------------------------------------- /src/public/knex-logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/knex/documentation/9b49f59e34297c212d10005e84770a8ffb7856a0/src/public/knex-logo.png -------------------------------------------------------------------------------- /yarn.lock: -------------------------------------------------------------------------------- 1 | # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 2 | # yarn lockfile v1 3 | 4 | 5 | "@algolia/autocomplete-core@1.5.2": 6 | version "1.5.2" 7 | resolved "https://registry.yarnpkg.com/@algolia/autocomplete-core/-/autocomplete-core-1.5.2.tgz#ec0178e07b44fd74a057728ac157291b26cecf37" 8 | integrity sha512-DY0bhyczFSS1b/CqJlTE/nQRtnTAHl6IemIkBy0nEWnhDzRDdtdx4p5Uuk3vwAFxwEEgi1WqKwgSSMx6DpNL4A== 9 | dependencies: 10 | "@algolia/autocomplete-shared" "1.5.2" 11 | 12 | "@algolia/autocomplete-preset-algolia@1.5.2": 13 | version "1.5.2" 14 | resolved "https://registry.yarnpkg.com/@algolia/autocomplete-preset-algolia/-/autocomplete-preset-algolia-1.5.2.tgz#36c5638cc6dba6ea46a86e5a0314637ca40a77ca" 15 | integrity sha512-3MRYnYQFJyovANzSX2CToS6/5cfVjbLLqFsZTKcvF3abhQzxbqwwaMBlJtt620uBUOeMzhdfasKhCc40+RHiZw== 16 | dependencies: 17 | "@algolia/autocomplete-shared" "1.5.2" 18 | 19 | "@algolia/autocomplete-shared@1.5.2": 20 | version "1.5.2" 21 | resolved "https://registry.yarnpkg.com/@algolia/autocomplete-shared/-/autocomplete-shared-1.5.2.tgz#e157f9ad624ab8fd940ff28bd2094cdf199cdd79" 22 | integrity sha512-ylQAYv5H0YKMfHgVWX0j0NmL8XBcAeeeVQUmppnnMtzDbDnca6CzhKj3Q8eF9cHCgcdTDdb5K+3aKyGWA0obug== 23 | 24 | "@algolia/cache-browser-local-storage@4.13.0": 25 | version "4.13.0" 26 | resolved "https://registry.yarnpkg.com/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.13.0.tgz#f8aa4fe31104b19d616ea392f9ed5c2ea847d964" 27 | integrity 
sha512-nj1vHRZauTqP/bluwkRIgEADEimqojJgoTRCel5f6q8WCa9Y8QeI4bpDQP28FoeKnDRYa3J5CauDlN466jqRhg== 28 | dependencies: 29 | "@algolia/cache-common" "4.13.0" 30 | 31 | "@algolia/cache-common@4.13.0": 32 | version "4.13.0" 33 | resolved "https://registry.yarnpkg.com/@algolia/cache-common/-/cache-common-4.13.0.tgz#27b83fd3939d08d72261b36a07eeafc4cb4d2113" 34 | integrity sha512-f9mdZjskCui/dA/fA/5a+6hZ7xnHaaZI5tM/Rw9X8rRB39SUlF/+o3P47onZ33n/AwkpSbi5QOyhs16wHd55kA== 35 | 36 | "@algolia/cache-in-memory@4.13.0": 37 | version "4.13.0" 38 | resolved "https://registry.yarnpkg.com/@algolia/cache-in-memory/-/cache-in-memory-4.13.0.tgz#10801a74550cbabb64b59ff08c56bce9c278ff2d" 39 | integrity sha512-hHdc+ahPiMM92CQMljmObE75laYzNFYLrNOu0Q3/eyvubZZRtY2SUsEEgyUEyzXruNdzrkcDxFYa7YpWBJYHAg== 40 | dependencies: 41 | "@algolia/cache-common" "4.13.0" 42 | 43 | "@algolia/client-account@4.13.0": 44 | version "4.13.0" 45 | resolved "https://registry.yarnpkg.com/@algolia/client-account/-/client-account-4.13.0.tgz#f8646dd40d1e9e3353e10abbd5d6c293ea92a8e2" 46 | integrity sha512-FzFqFt9b0g/LKszBDoEsW+dVBuUe1K3scp2Yf7q6pgHWM1WqyqUlARwVpLxqyc+LoyJkTxQftOKjyFUqddnPKA== 47 | dependencies: 48 | "@algolia/client-common" "4.13.0" 49 | "@algolia/client-search" "4.13.0" 50 | "@algolia/transporter" "4.13.0" 51 | 52 | "@algolia/client-analytics@4.13.0": 53 | version "4.13.0" 54 | resolved "https://registry.yarnpkg.com/@algolia/client-analytics/-/client-analytics-4.13.0.tgz#a00bd02df45d71becb9dd4c5c993d805f2e1786d" 55 | integrity sha512-klmnoq2FIiiMHImkzOm+cGxqRLLu9CMHqFhbgSy9wtXZrqb8BBUIUE2VyBe7azzv1wKcxZV2RUyNOMpFqmnRZA== 56 | dependencies: 57 | "@algolia/client-common" "4.13.0" 58 | "@algolia/client-search" "4.13.0" 59 | "@algolia/requester-common" "4.13.0" 60 | "@algolia/transporter" "4.13.0" 61 | 62 | "@algolia/client-common@4.13.0": 63 | version "4.13.0" 64 | resolved "https://registry.yarnpkg.com/@algolia/client-common/-/client-common-4.13.0.tgz#8bc373d164dbdcce38b4586912bbe162492bcb86" 65 | integrity 
sha512-GoXfTp0kVcbgfSXOjfrxx+slSipMqGO9WnNWgeMmru5Ra09MDjrcdunsiiuzF0wua6INbIpBQFTC2Mi5lUNqGA== 66 | dependencies: 67 | "@algolia/requester-common" "4.13.0" 68 | "@algolia/transporter" "4.13.0" 69 | 70 | "@algolia/client-personalization@4.13.0": 71 | version "4.13.0" 72 | resolved "https://registry.yarnpkg.com/@algolia/client-personalization/-/client-personalization-4.13.0.tgz#10fb7af356422551f11a67222b39c52306f1512c" 73 | integrity sha512-KneLz2WaehJmNfdr5yt2HQETpLaCYagRdWwIwkTqRVFCv4DxRQ2ChPVW9jeTj4YfAAhfzE6F8hn7wkQ/Jfj6ZA== 74 | dependencies: 75 | "@algolia/client-common" "4.13.0" 76 | "@algolia/requester-common" "4.13.0" 77 | "@algolia/transporter" "4.13.0" 78 | 79 | "@algolia/client-search@4.13.0": 80 | version "4.13.0" 81 | resolved "https://registry.yarnpkg.com/@algolia/client-search/-/client-search-4.13.0.tgz#2d8ff8e755c4a37ec89968f3f9b358eed005c7f0" 82 | integrity sha512-blgCKYbZh1NgJWzeGf+caKE32mo3j54NprOf0LZVCubQb3Kx37tk1Hc8SDs9bCAE8hUvf3cazMPIg7wscSxspA== 83 | dependencies: 84 | "@algolia/client-common" "4.13.0" 85 | "@algolia/requester-common" "4.13.0" 86 | "@algolia/transporter" "4.13.0" 87 | 88 | "@algolia/logger-common@4.13.0": 89 | version "4.13.0" 90 | resolved "https://registry.yarnpkg.com/@algolia/logger-common/-/logger-common-4.13.0.tgz#be2606e71aae618a1ff1ea9a1b5f5a74284b35a8" 91 | integrity sha512-8yqXk7rMtmQJ9wZiHOt/6d4/JDEg5VCk83gJ39I+X/pwUPzIsbKy9QiK4uJ3aJELKyoIiDT1hpYVt+5ia+94IA== 92 | 93 | "@algolia/logger-console@4.13.0": 94 | version "4.13.0" 95 | resolved "https://registry.yarnpkg.com/@algolia/logger-console/-/logger-console-4.13.0.tgz#f28028a760e3d9191e28a10b12925e48f6c9afde" 96 | integrity sha512-YepRg7w2/87L0vSXRfMND6VJ5d6699sFJBRWzZPOlek2p5fLxxK7O0VncYuc/IbVHEgeApvgXx0WgCEa38GVuQ== 97 | dependencies: 98 | "@algolia/logger-common" "4.13.0" 99 | 100 | "@algolia/requester-browser-xhr@4.13.0": 101 | version "4.13.0" 102 | resolved 
"https://registry.yarnpkg.com/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.13.0.tgz#e2483f4e8d7f09e27cd0daf6c77711d15c5a919f" 103 | integrity sha512-Dj+bnoWR5MotrnjblzGKZ2kCdQi2cK/VzPURPnE616NU/il7Ypy6U6DLGZ/ZYz+tnwPa0yypNf21uqt84fOgrg== 104 | dependencies: 105 | "@algolia/requester-common" "4.13.0" 106 | 107 | "@algolia/requester-common@4.13.0": 108 | version "4.13.0" 109 | resolved "https://registry.yarnpkg.com/@algolia/requester-common/-/requester-common-4.13.0.tgz#47fb3464cfb26b55ba43676d13f295d812830596" 110 | integrity sha512-BRTDj53ecK+gn7ugukDWOOcBRul59C4NblCHqj4Zm5msd5UnHFjd/sGX+RLOEoFMhetILAnmg6wMrRrQVac9vw== 111 | 112 | "@algolia/requester-node-http@4.13.0": 113 | version "4.13.0" 114 | resolved "https://registry.yarnpkg.com/@algolia/requester-node-http/-/requester-node-http-4.13.0.tgz#7d981bbd31492f51dd11820a665f9d8906793c37" 115 | integrity sha512-9b+3O4QFU4azLhGMrZAr/uZPydvzOR4aEZfSL8ZrpLZ7fbbqTO0S/5EVko+QIgglRAtVwxvf8UJ1wzTD2jvKxQ== 116 | dependencies: 117 | "@algolia/requester-common" "4.13.0" 118 | 119 | "@algolia/transporter@4.13.0": 120 | version "4.13.0" 121 | resolved "https://registry.yarnpkg.com/@algolia/transporter/-/transporter-4.13.0.tgz#f6379e5329efa2127da68c914d1141f5f21dbd07" 122 | integrity sha512-8tSQYE+ykQENAdeZdofvtkOr5uJ9VcQSWgRhQ9h01AehtBIPAczk/b2CLrMsw5yQZziLs5cZ3pJ3478yI+urhA== 123 | dependencies: 124 | "@algolia/cache-common" "4.13.0" 125 | "@algolia/logger-common" "4.13.0" 126 | "@algolia/requester-common" "4.13.0" 127 | 128 | "@babel/parser@^7.16.4": 129 | version "7.17.10" 130 | resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.17.10.tgz#873b16db82a8909e0fbd7f115772f4b739f6ce78" 131 | integrity sha512-n2Q6i+fnJqzOaq2VkdXxy2TCPCWQZHiCo0XqmrCvDWcZQKRyZzYi4Z0yxlBuN0w+r2ZHmre+Q087DSrw3pbJDQ== 132 | 133 | "@docsearch/css@3.0.0", "@docsearch/css@^3.0.0": 134 | version "3.0.0" 135 | resolved "https://registry.yarnpkg.com/@docsearch/css/-/css-3.0.0.tgz#fe57b474802ffd706d3246eab25d52fac8aa3698" 136 | 
integrity sha512-1kkV7tkAsiuEd0shunYRByKJe3xQDG2q7wYg24SOw1nV9/2lwEd4WrUYRJC/ukGTl2/kHeFxsaUvtiOy0y6fFA== 137 | 138 | "@docsearch/js@^3.0.0": 139 | version "3.0.0" 140 | resolved "https://registry.yarnpkg.com/@docsearch/js/-/js-3.0.0.tgz#394a99f68895503d57faf523ecec0b25b02f638c" 141 | integrity sha512-j3tUJWlgW3slYqzGB8fm7y05kh2qqrIK1dZOXHeMUm/5gdKE85fiz/ltfCPMDFb/MXF+bLZChJXSMzqY0Ck30Q== 142 | dependencies: 143 | "@docsearch/react" "3.0.0" 144 | preact "^10.0.0" 145 | 146 | "@docsearch/react@3.0.0": 147 | version "3.0.0" 148 | resolved "https://registry.yarnpkg.com/@docsearch/react/-/react-3.0.0.tgz#d02ebdc67573412185a6a4df13bc254c7c0da491" 149 | integrity sha512-yhMacqS6TVQYoBh/o603zszIb5Bl8MIXuOc6Vy617I74pirisDzzcNh0NEaYQt50fVVR3khUbeEhUEWEWipESg== 150 | dependencies: 151 | "@algolia/autocomplete-core" "1.5.2" 152 | "@algolia/autocomplete-preset-algolia" "1.5.2" 153 | "@docsearch/css" "3.0.0" 154 | algoliasearch "^4.0.0" 155 | 156 | "@vitejs/plugin-vue@^2.3.2": 157 | version "2.3.3" 158 | resolved "https://registry.yarnpkg.com/@vitejs/plugin-vue/-/plugin-vue-2.3.3.tgz#fbf80cc039b82ac21a1acb0f0478de8f61fbf600" 159 | integrity sha512-SmQLDyhz+6lGJhPELsBdzXGc+AcaT8stgkbiTFGpXPe8Tl1tJaBw1A6pxDqDuRsVkD8uscrkx3hA7QDOoKYtyw== 160 | 161 | "@vue/compiler-core@3.2.33": 162 | version "3.2.33" 163 | resolved "https://registry.yarnpkg.com/@vue/compiler-core/-/compiler-core-3.2.33.tgz#e915d59cce85898f5c5cfebe4c09e539278c3d59" 164 | integrity sha512-AAmr52ji3Zhk7IKIuigX2osWWsb2nQE5xsdFYjdnmtQ4gymmqXbjLvkSE174+fF3A3kstYrTgGkqgOEbsdLDpw== 165 | dependencies: 166 | "@babel/parser" "^7.16.4" 167 | "@vue/shared" "3.2.33" 168 | estree-walker "^2.0.2" 169 | source-map "^0.6.1" 170 | 171 | "@vue/compiler-dom@3.2.33": 172 | version "3.2.33" 173 | resolved "https://registry.yarnpkg.com/@vue/compiler-dom/-/compiler-dom-3.2.33.tgz#6db84296f949f18e5d3e7fd5e80f943dbed7d5ec" 174 | integrity sha512-GhiG1C8X98Xz9QUX/RlA6/kgPBWJkjq0Rq6//5XTAGSYrTMBgcLpP9+CnlUg1TFxnnCVughAG+KZl28XJqw8uQ== 175 | 
dependencies: 176 | "@vue/compiler-core" "3.2.33" 177 | "@vue/shared" "3.2.33" 178 | 179 | "@vue/compiler-sfc@3.2.33": 180 | version "3.2.33" 181 | resolved "https://registry.yarnpkg.com/@vue/compiler-sfc/-/compiler-sfc-3.2.33.tgz#7ce01dc947a8b76c099811dc6ca58494d4dc773d" 182 | integrity sha512-H8D0WqagCr295pQjUYyO8P3IejM3vEzeCO1apzByAEaAR/WimhMYczHfZVvlCE/9yBaEu/eu9RdiWr0kF8b71Q== 183 | dependencies: 184 | "@babel/parser" "^7.16.4" 185 | "@vue/compiler-core" "3.2.33" 186 | "@vue/compiler-dom" "3.2.33" 187 | "@vue/compiler-ssr" "3.2.33" 188 | "@vue/reactivity-transform" "3.2.33" 189 | "@vue/shared" "3.2.33" 190 | estree-walker "^2.0.2" 191 | magic-string "^0.25.7" 192 | postcss "^8.1.10" 193 | source-map "^0.6.1" 194 | 195 | "@vue/compiler-ssr@3.2.33": 196 | version "3.2.33" 197 | resolved "https://registry.yarnpkg.com/@vue/compiler-ssr/-/compiler-ssr-3.2.33.tgz#3e820267e4eea48fde9519f006dedca3f5e42e71" 198 | integrity sha512-XQh1Xdk3VquDpXsnoCd7JnMoWec9CfAzQDQsaMcSU79OrrO2PNR0ErlIjm/mGq3GmBfkQjzZACV+7GhfRB8xMQ== 199 | dependencies: 200 | "@vue/compiler-dom" "3.2.33" 201 | "@vue/shared" "3.2.33" 202 | 203 | "@vue/reactivity-transform@3.2.33": 204 | version "3.2.33" 205 | resolved "https://registry.yarnpkg.com/@vue/reactivity-transform/-/reactivity-transform-3.2.33.tgz#286063f44ca56150ae9b52f8346a26e5913fa699" 206 | integrity sha512-4UL5KOIvSQb254aqenW4q34qMXbfZcmEsV/yVidLUgvwYQQ/D21bGX3DlgPUGI3c4C+iOnNmDCkIxkILoX/Pyw== 207 | dependencies: 208 | "@babel/parser" "^7.16.4" 209 | "@vue/compiler-core" "3.2.33" 210 | "@vue/shared" "3.2.33" 211 | estree-walker "^2.0.2" 212 | magic-string "^0.25.7" 213 | 214 | "@vue/reactivity@3.2.33": 215 | version "3.2.33" 216 | resolved "https://registry.yarnpkg.com/@vue/reactivity/-/reactivity-3.2.33.tgz#c84eedb5225138dbfc2472864c151d3efbb4b673" 217 | integrity sha512-62Sq0mp9/0bLmDuxuLD5CIaMG2susFAGARLuZ/5jkU1FCf9EDbwUuF+BO8Ub3Rbodx0ziIecM/NsmyjardBxfQ== 218 | dependencies: 219 | "@vue/shared" "3.2.33" 220 | 221 | 
"@vue/runtime-core@3.2.33": 222 | version "3.2.33" 223 | resolved "https://registry.yarnpkg.com/@vue/runtime-core/-/runtime-core-3.2.33.tgz#2df8907c85c37c3419fbd1bdf1a2df097fa40df2" 224 | integrity sha512-N2D2vfaXsBPhzCV3JsXQa2NECjxP3eXgZlFqKh4tgakp3iX6LCGv76DLlc+IfFZq+TW10Y8QUfeihXOupJ1dGw== 225 | dependencies: 226 | "@vue/reactivity" "3.2.33" 227 | "@vue/shared" "3.2.33" 228 | 229 | "@vue/runtime-dom@3.2.33": 230 | version "3.2.33" 231 | resolved "https://registry.yarnpkg.com/@vue/runtime-dom/-/runtime-dom-3.2.33.tgz#123b8969247029ea0d9c1983676d4706a962d848" 232 | integrity sha512-LSrJ6W7CZTSUygX5s8aFkraDWlO6K4geOwA3quFF2O+hC3QuAMZt/0Xb7JKE3C4JD4pFwCSO7oCrZmZ0BIJUnw== 233 | dependencies: 234 | "@vue/runtime-core" "3.2.33" 235 | "@vue/shared" "3.2.33" 236 | csstype "^2.6.8" 237 | 238 | "@vue/server-renderer@3.2.33": 239 | version "3.2.33" 240 | resolved "https://registry.yarnpkg.com/@vue/server-renderer/-/server-renderer-3.2.33.tgz#4b45d6d2ae10ea4e3d2cf8e676804cf60f331979" 241 | integrity sha512-4jpJHRD4ORv8PlbYi+/MfP8ec1okz6rybe36MdpkDrGIdEItHEUyaHSKvz+ptNEyQpALmmVfRteHkU9F8vxOew== 242 | dependencies: 243 | "@vue/compiler-ssr" "3.2.33" 244 | "@vue/shared" "3.2.33" 245 | 246 | "@vue/shared@3.2.33": 247 | version "3.2.33" 248 | resolved "https://registry.yarnpkg.com/@vue/shared/-/shared-3.2.33.tgz#69a8c99ceb37c1b031d5cc4aec2ff1dc77e1161e" 249 | integrity sha512-UBc1Pg1T3yZ97vsA2ueER0F6GbJebLHYlEi4ou1H5YL4KWvMOOWwpYo9/QpWq93wxKG6Wo13IY74Hcn/f7c7Bg== 250 | 251 | algoliasearch@^4.0.0: 252 | version "4.13.0" 253 | resolved "https://registry.yarnpkg.com/algoliasearch/-/algoliasearch-4.13.0.tgz#e36611fda82b1fc548c156ae7929a7f486e4b663" 254 | integrity sha512-oHv4faI1Vl2s+YC0YquwkK/TsaJs79g2JFg5FDm2rKN12VItPTAeQ7hyJMHarOPPYuCnNC5kixbtcqvb21wchw== 255 | dependencies: 256 | "@algolia/cache-browser-local-storage" "4.13.0" 257 | "@algolia/cache-common" "4.13.0" 258 | "@algolia/cache-in-memory" "4.13.0" 259 | "@algolia/client-account" "4.13.0" 260 | "@algolia/client-analytics" 
"4.13.0" 261 | "@algolia/client-common" "4.13.0" 262 | "@algolia/client-personalization" "4.13.0" 263 | "@algolia/client-search" "4.13.0" 264 | "@algolia/logger-common" "4.13.0" 265 | "@algolia/logger-console" "4.13.0" 266 | "@algolia/requester-browser-xhr" "4.13.0" 267 | "@algolia/requester-common" "4.13.0" 268 | "@algolia/requester-node-http" "4.13.0" 269 | "@algolia/transporter" "4.13.0" 270 | 271 | colorette@2.0.19: 272 | version "2.0.19" 273 | resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.19.tgz#cdf044f47ad41a0f4b56b3a0d5b4e6e1a2d5a798" 274 | integrity sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ== 275 | 276 | commander@^9.1.0: 277 | version "9.2.0" 278 | resolved "https://registry.yarnpkg.com/commander/-/commander-9.2.0.tgz#6e21014b2ed90d8b7c9647230d8b7a94a4a419a9" 279 | integrity sha512-e2i4wANQiSXgnrBlIatyHtP1odfUp0BbV5Y5nEGbxtIrStkEOAAzCUirvLBNXHLr7kwLvJl6V+4V3XV9x7Wd9w== 280 | 281 | csstype@^2.6.8: 282 | version "2.6.20" 283 | resolved "https://registry.yarnpkg.com/csstype/-/csstype-2.6.20.tgz#9229c65ea0b260cf4d3d997cb06288e36a8d6dda" 284 | integrity sha512-/WwNkdXfckNgw6S5R125rrW8ez139lBHWouiBvX8dfMFtcn6V81REDqnH7+CRpRipfYlyU1CmOnOxrmGcFOjeA== 285 | 286 | debug@4.3.4: 287 | version "4.3.4" 288 | resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" 289 | integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== 290 | dependencies: 291 | ms "2.1.2" 292 | 293 | esbuild-android-64@0.14.39: 294 | version "0.14.39" 295 | resolved "https://registry.yarnpkg.com/esbuild-android-64/-/esbuild-android-64-0.14.39.tgz#09f12a372eed9743fd77ff6d889ac14f7b340c21" 296 | integrity sha512-EJOu04p9WgZk0UoKTqLId9VnIsotmI/Z98EXrKURGb3LPNunkeffqQIkjS2cAvidh+OK5uVrXaIP229zK6GvhQ== 297 | 298 | esbuild-android-arm64@0.14.39: 299 | version "0.14.39" 300 | resolved 
"https://registry.yarnpkg.com/esbuild-android-arm64/-/esbuild-android-arm64-0.14.39.tgz#f608d00ea03fe26f3b1ab92a30f99220390f3071" 301 | integrity sha512-+twajJqO7n3MrCz9e+2lVOnFplRsaGRwsq1KL/uOy7xK7QdRSprRQcObGDeDZUZsacD5gUkk6OiHiYp6RzU3CA== 302 | 303 | esbuild-darwin-64@0.14.39: 304 | version "0.14.39" 305 | resolved "https://registry.yarnpkg.com/esbuild-darwin-64/-/esbuild-darwin-64-0.14.39.tgz#31528daa75b4c9317721ede344195163fae3e041" 306 | integrity sha512-ImT6eUw3kcGcHoUxEcdBpi6LfTRWaV6+qf32iYYAfwOeV+XaQ/Xp5XQIBiijLeo+LpGci9M0FVec09nUw41a5g== 307 | 308 | esbuild-darwin-arm64@0.14.39: 309 | version "0.14.39" 310 | resolved "https://registry.yarnpkg.com/esbuild-darwin-arm64/-/esbuild-darwin-arm64-0.14.39.tgz#247f770d86d90a215fa194f24f90e30a0bd97245" 311 | integrity sha512-/fcQ5UhE05OiT+bW5v7/up1bDsnvaRZPJxXwzXsMRrr7rZqPa85vayrD723oWMT64dhrgWeA3FIneF8yER0XTw== 312 | 313 | esbuild-freebsd-64@0.14.39: 314 | version "0.14.39" 315 | resolved "https://registry.yarnpkg.com/esbuild-freebsd-64/-/esbuild-freebsd-64-0.14.39.tgz#479414d294905055eb396ebe455ed42213284ee0" 316 | integrity sha512-oMNH8lJI4wtgN5oxuFP7BQ22vgB/e3Tl5Woehcd6i2r6F3TszpCnNl8wo2d/KvyQ4zvLvCWAlRciumhQg88+kQ== 317 | 318 | esbuild-freebsd-arm64@0.14.39: 319 | version "0.14.39" 320 | resolved "https://registry.yarnpkg.com/esbuild-freebsd-arm64/-/esbuild-freebsd-arm64-0.14.39.tgz#cedeb10357c88533615921ae767a67dc870a474c" 321 | integrity sha512-1GHK7kwk57ukY2yI4ILWKJXaxfr+8HcM/r/JKCGCPziIVlL+Wi7RbJ2OzMcTKZ1HpvEqCTBT/J6cO4ZEwW4Ypg== 322 | 323 | esbuild-linux-32@0.14.39: 324 | version "0.14.39" 325 | resolved "https://registry.yarnpkg.com/esbuild-linux-32/-/esbuild-linux-32-0.14.39.tgz#d9f008c4322d771f3958f59c1eee5a05cdf92485" 326 | integrity sha512-g97Sbb6g4zfRLIxHgW2pc393DjnkTRMeq3N1rmjDUABxpx8SjocK4jLen+/mq55G46eE2TA0MkJ4R3SpKMu7dg== 327 | 328 | esbuild-linux-64@0.14.39: 329 | version "0.14.39" 330 | resolved 
"https://registry.yarnpkg.com/esbuild-linux-64/-/esbuild-linux-64-0.14.39.tgz#ba58d7f66858913aeb1ab5c6bde1bbd824731795" 331 | integrity sha512-4tcgFDYWdI+UbNMGlua9u1Zhu0N5R6u9tl5WOM8aVnNX143JZoBZLpCuUr5lCKhnD0SCO+5gUyMfupGrHtfggQ== 332 | 333 | esbuild-linux-arm64@0.14.39: 334 | version "0.14.39" 335 | resolved "https://registry.yarnpkg.com/esbuild-linux-arm64/-/esbuild-linux-arm64-0.14.39.tgz#708785a30072702b5b1c16b65cf9c25c51202529" 336 | integrity sha512-23pc8MlD2D6Px1mV8GMglZlKgwgNKAO8gsgsLLcXWSs9lQsCYkIlMo/2Ycfo5JrDIbLdwgP8D2vpfH2KcBqrDQ== 337 | 338 | esbuild-linux-arm@0.14.39: 339 | version "0.14.39" 340 | resolved "https://registry.yarnpkg.com/esbuild-linux-arm/-/esbuild-linux-arm-0.14.39.tgz#4e8b5deaa7ab60d0d28fab131244ef82b40684f4" 341 | integrity sha512-t0Hn1kWVx5UpCzAJkKRfHeYOLyFnXwYynIkK54/h3tbMweGI7dj400D1k0Vvtj2u1P+JTRT9tx3AjtLEMmfVBQ== 342 | 343 | esbuild-linux-mips64le@0.14.39: 344 | version "0.14.39" 345 | resolved "https://registry.yarnpkg.com/esbuild-linux-mips64le/-/esbuild-linux-mips64le-0.14.39.tgz#6f3bf3023f711084e5a1e8190487d2020f39f0f7" 346 | integrity sha512-epwlYgVdbmkuRr5n4es3B+yDI0I2e/nxhKejT9H0OLxFAlMkeQZxSpxATpDc9m8NqRci6Kwyb/SfmD1koG2Zuw== 347 | 348 | esbuild-linux-ppc64le@0.14.39: 349 | version "0.14.39" 350 | resolved "https://registry.yarnpkg.com/esbuild-linux-ppc64le/-/esbuild-linux-ppc64le-0.14.39.tgz#900e718a4ea3f6aedde8424828eeefdd4b48d4b9" 351 | integrity sha512-W/5ezaq+rQiQBThIjLMNjsuhPHg+ApVAdTz2LvcuesZFMsJoQAW2hutoyg47XxpWi7aEjJGrkS26qCJKhRn3QQ== 352 | 353 | esbuild-linux-riscv64@0.14.39: 354 | version "0.14.39" 355 | resolved "https://registry.yarnpkg.com/esbuild-linux-riscv64/-/esbuild-linux-riscv64-0.14.39.tgz#dcbff622fa37047a75d2ff7a1d8d2949d80277e4" 356 | integrity sha512-IS48xeokcCTKeQIOke2O0t9t14HPvwnZcy+5baG13Z1wxs9ZrC5ig5ypEQQh4QMKxURD5TpCLHw2W42CLuVZaA== 357 | 358 | esbuild-linux-s390x@0.14.39: 359 | version "0.14.39" 360 | resolved 
"https://registry.yarnpkg.com/esbuild-linux-s390x/-/esbuild-linux-s390x-0.14.39.tgz#3f725a7945b419406c99d93744b28552561dcdfd" 361 | integrity sha512-zEfunpqR8sMomqXhNTFEKDs+ik7HC01m3M60MsEjZOqaywHu5e5682fMsqOlZbesEAAaO9aAtRBsU7CHnSZWyA== 362 | 363 | esbuild-netbsd-64@0.14.39: 364 | version "0.14.39" 365 | resolved "https://registry.yarnpkg.com/esbuild-netbsd-64/-/esbuild-netbsd-64-0.14.39.tgz#e10e40b6a765798b90d4eb85901cc85c8b7ff85e" 366 | integrity sha512-Uo2suJBSIlrZCe4E0k75VDIFJWfZy+bOV6ih3T4MVMRJh1lHJ2UyGoaX4bOxomYN3t+IakHPyEoln1+qJ1qYaA== 367 | 368 | esbuild-openbsd-64@0.14.39: 369 | version "0.14.39" 370 | resolved "https://registry.yarnpkg.com/esbuild-openbsd-64/-/esbuild-openbsd-64-0.14.39.tgz#935ec143f75ce10bd9cdb1c87fee00287eb0edbc" 371 | integrity sha512-secQU+EpgUPpYjJe3OecoeGKVvRMLeKUxSMGHnK+aK5uQM3n1FPXNJzyz1LHFOo0WOyw+uoCxBYdM4O10oaCAA== 372 | 373 | esbuild-sunos-64@0.14.39: 374 | version "0.14.39" 375 | resolved "https://registry.yarnpkg.com/esbuild-sunos-64/-/esbuild-sunos-64-0.14.39.tgz#0e7aa82b022a2e6d55b0646738b2582c2d72c3c0" 376 | integrity sha512-qHq0t5gePEDm2nqZLb+35p/qkaXVS7oIe32R0ECh2HOdiXXkj/1uQI9IRogGqKkK+QjDG+DhwiUw7QoHur/Rwg== 377 | 378 | esbuild-windows-32@0.14.39: 379 | version "0.14.39" 380 | resolved "https://registry.yarnpkg.com/esbuild-windows-32/-/esbuild-windows-32-0.14.39.tgz#3f1538241f31b538545f4b5841b248cac260fa35" 381 | integrity sha512-XPjwp2OgtEX0JnOlTgT6E5txbRp6Uw54Isorm3CwOtloJazeIWXuiwK0ONJBVb/CGbiCpS7iP2UahGgd2p1x+Q== 382 | 383 | esbuild-windows-64@0.14.39: 384 | version "0.14.39" 385 | resolved "https://registry.yarnpkg.com/esbuild-windows-64/-/esbuild-windows-64-0.14.39.tgz#b100c59f96d3c2da2e796e42fee4900d755d3e03" 386 | integrity sha512-E2wm+5FwCcLpKsBHRw28bSYQw0Ikxb7zIMxw3OPAkiaQhLVr3dnVO8DofmbWhhf6b97bWzg37iSZ45ZDpLw7Ow== 387 | 388 | esbuild-windows-arm64@0.14.39: 389 | version "0.14.39" 390 | resolved 
"https://registry.yarnpkg.com/esbuild-windows-arm64/-/esbuild-windows-arm64-0.14.39.tgz#00268517e665b33c89778d61f144e4256b39f631" 391 | integrity sha512-sBZQz5D+Gd0EQ09tZRnz/PpVdLwvp/ufMtJ1iDFYddDaPpZXKqPyaxfYBLs3ueiaksQ26GGa7sci0OqFzNs7KA== 392 | 393 | esbuild@^0.14.27: 394 | version "0.14.39" 395 | resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.14.39.tgz#c926b2259fe6f6d3a94f528fb42e103c5a6d909a" 396 | integrity sha512-2kKujuzvRWYtwvNjYDY444LQIA3TyJhJIX3Yo4+qkFlDDtGlSicWgeHVJqMUP/2sSfH10PGwfsj+O2ro1m10xQ== 397 | optionalDependencies: 398 | esbuild-android-64 "0.14.39" 399 | esbuild-android-arm64 "0.14.39" 400 | esbuild-darwin-64 "0.14.39" 401 | esbuild-darwin-arm64 "0.14.39" 402 | esbuild-freebsd-64 "0.14.39" 403 | esbuild-freebsd-arm64 "0.14.39" 404 | esbuild-linux-32 "0.14.39" 405 | esbuild-linux-64 "0.14.39" 406 | esbuild-linux-arm "0.14.39" 407 | esbuild-linux-arm64 "0.14.39" 408 | esbuild-linux-mips64le "0.14.39" 409 | esbuild-linux-ppc64le "0.14.39" 410 | esbuild-linux-riscv64 "0.14.39" 411 | esbuild-linux-s390x "0.14.39" 412 | esbuild-netbsd-64 "0.14.39" 413 | esbuild-openbsd-64 "0.14.39" 414 | esbuild-sunos-64 "0.14.39" 415 | esbuild-windows-32 "0.14.39" 416 | esbuild-windows-64 "0.14.39" 417 | esbuild-windows-arm64 "0.14.39" 418 | 419 | escalade@^3.1.1: 420 | version "3.1.1" 421 | resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" 422 | integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== 423 | 424 | esm@^3.2.25: 425 | version "3.2.25" 426 | resolved "https://registry.yarnpkg.com/esm/-/esm-3.2.25.tgz#342c18c29d56157688ba5ce31f8431fbb795cc10" 427 | integrity sha512-U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA== 428 | 429 | estree-walker@^2.0.2: 430 | version "2.0.2" 431 | resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-2.0.2.tgz#52f010178c2a4c117a7757cfe942adb7d2da4cac" 
432 | integrity sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w== 433 | 434 | fsevents@~2.3.2: 435 | version "2.3.2" 436 | resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" 437 | integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== 438 | 439 | function-bind@^1.1.1: 440 | version "1.1.1" 441 | resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" 442 | integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== 443 | 444 | get-package-type@^0.1.0: 445 | version "0.1.0" 446 | resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" 447 | integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== 448 | 449 | getopts@2.3.0: 450 | version "2.3.0" 451 | resolved "https://registry.yarnpkg.com/getopts/-/getopts-2.3.0.tgz#71e5593284807e03e2427449d4f6712a268666f4" 452 | integrity sha512-5eDf9fuSXwxBL6q5HX+dhDj+dslFGWzU5thZ9kNKUkcPtaPdatmUFKwHFrLb/uf/WpA4BHET+AX3Scl56cAjpA== 453 | 454 | has@^1.0.3: 455 | version "1.0.3" 456 | resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" 457 | integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== 458 | dependencies: 459 | function-bind "^1.1.1" 460 | 461 | interpret@^2.2.0: 462 | version "2.2.0" 463 | resolved "https://registry.yarnpkg.com/interpret/-/interpret-2.2.0.tgz#1a78a0b5965c40a5416d007ad6f50ad27c417df9" 464 | integrity sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw== 465 | 466 | is-core-module@^2.8.1: 467 | version "2.9.0" 468 | resolved 
"https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.9.0.tgz#e1c34429cd51c6dd9e09e0799e396e27b19a9c69" 469 | integrity sha512-+5FPy5PnwmO3lvfMb0AsoPaBG+5KHUI0wYFXOtYPnVVVspTFUuMZNfNaNVRt3FZadstu2c8x23vykRW/NBoU6A== 470 | dependencies: 471 | has "^1.0.3" 472 | 473 | knex@^2.4.0: 474 | version "2.4.0" 475 | resolved "https://registry.yarnpkg.com/knex/-/knex-2.4.0.tgz#7d33cc36f320cdac98741010544b4c6a98b8b19e" 476 | integrity sha512-i0GWwqYp1Hs2yvc2rlDO6nzzkLhwdyOZKRdsMTB8ZxOs2IXQyL5rBjSbS1krowCh6V65T4X9CJaKtuIfkaPGSA== 477 | dependencies: 478 | colorette "2.0.19" 479 | commander "^9.1.0" 480 | debug "4.3.4" 481 | escalade "^3.1.1" 482 | esm "^3.2.25" 483 | get-package-type "^0.1.0" 484 | getopts "2.3.0" 485 | interpret "^2.2.0" 486 | lodash "^4.17.21" 487 | pg-connection-string "2.5.0" 488 | rechoir "^0.8.0" 489 | resolve-from "^5.0.0" 490 | tarn "^3.0.2" 491 | tildify "2.0.0" 492 | 493 | lodash@^4.17.21: 494 | version "4.17.21" 495 | resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" 496 | integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== 497 | 498 | magic-string@^0.25.7: 499 | version "0.25.9" 500 | resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.25.9.tgz#de7f9faf91ef8a1c91d02c2e5314c8277dbcdd1c" 501 | integrity sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ== 502 | dependencies: 503 | sourcemap-codec "^1.4.8" 504 | 505 | ms@2.1.2: 506 | version "2.1.2" 507 | resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" 508 | integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== 509 | 510 | nanoid@^3.3.3: 511 | version "3.3.4" 512 | resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.4.tgz#730b67e3cd09e2deacf03c027c81c9d9dbc5e8ab" 513 | integrity 
sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw== 514 | 515 | path-parse@^1.0.7: 516 | version "1.0.7" 517 | resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" 518 | integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== 519 | 520 | pg-connection-string@2.5.0: 521 | version "2.5.0" 522 | resolved "https://registry.yarnpkg.com/pg-connection-string/-/pg-connection-string-2.5.0.tgz#538cadd0f7e603fc09a12590f3b8a452c2c0cf34" 523 | integrity sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ== 524 | 525 | picocolors@^1.0.0: 526 | version "1.0.0" 527 | resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" 528 | integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== 529 | 530 | postcss@^8.1.10, postcss@^8.4.13: 531 | version "8.4.13" 532 | resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.13.tgz#7c87bc268e79f7f86524235821dfdf9f73e5d575" 533 | integrity sha512-jtL6eTBrza5MPzy8oJLFuUscHDXTV5KcLlqAWHl5q5WYRfnNRGSmOZmOZ1T6Gy7A99mOZfqungmZMpMmCVJ8ZA== 534 | dependencies: 535 | nanoid "^3.3.3" 536 | picocolors "^1.0.0" 537 | source-map-js "^1.0.2" 538 | 539 | preact@^10.0.0: 540 | version "10.7.2" 541 | resolved "https://registry.yarnpkg.com/preact/-/preact-10.7.2.tgz#5c632ba194b87345dcaee6598b3b6529b58e6a12" 542 | integrity sha512-GLjn0I3r6ka+NvxJUppsVFqb4V0qDTEHT/QxHlidPuClGaxF/4AI2Qti4a0cv3XMh5n1+D3hLScW10LRIm5msQ== 543 | 544 | prismjs@^1.25.0: 545 | version "1.28.0" 546 | resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.28.0.tgz#0d8f561fa0f7cf6ebca901747828b149147044b6" 547 | integrity sha512-8aaXdYvl1F7iC7Xm1spqSaY/OJBpYW3v+KJ+F17iYxvdc8sfjW194COK5wVhMZX45tGteiBQgdvD/nhxcRwylw== 548 | 549 | rechoir@^0.8.0: 550 | version "0.8.0" 551 | resolved 
"https://registry.yarnpkg.com/rechoir/-/rechoir-0.8.0.tgz#49f866e0d32146142da3ad8f0eff352b3215ff22" 552 | integrity sha512-/vxpCXddiX8NGfGO/mTafwjq4aFa/71pvamip0++IQk3zG8cbCj0fifNPrjjF1XMXUne91jL9OoxmdykoEtifQ== 553 | dependencies: 554 | resolve "^1.20.0" 555 | 556 | resolve-from@^5.0.0: 557 | version "5.0.0" 558 | resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" 559 | integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== 560 | 561 | resolve@^1.20.0, resolve@^1.22.0: 562 | version "1.22.0" 563 | resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.0.tgz#5e0b8c67c15df57a89bdbabe603a002f21731198" 564 | integrity sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw== 565 | dependencies: 566 | is-core-module "^2.8.1" 567 | path-parse "^1.0.7" 568 | supports-preserve-symlinks-flag "^1.0.0" 569 | 570 | "rollup@>=2.59.0 <2.78.0": 571 | version "2.77.3" 572 | resolved "https://registry.yarnpkg.com/rollup/-/rollup-2.77.3.tgz#8f00418d3a2740036e15deb653bed1a90ee0cc12" 573 | integrity sha512-/qxNTG7FbmefJWoeeYJFbHehJ2HNWnjkAFRKzWN/45eNBBF/r8lo992CwcJXEzyVxs5FmfId+vTSTQDb+bxA+g== 574 | optionalDependencies: 575 | fsevents "~2.3.2" 576 | 577 | source-map-js@^1.0.2: 578 | version "1.0.2" 579 | resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.0.2.tgz#adbc361d9c62df380125e7f161f71c826f1e490c" 580 | integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw== 581 | 582 | source-map@^0.6.1: 583 | version "0.6.1" 584 | resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" 585 | integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== 586 | 587 | sourcemap-codec@^1.4.8: 588 | version "1.4.8" 589 | resolved 
"https://registry.yarnpkg.com/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz#ea804bd94857402e6992d05a38ef1ae35a9ab4c4" 590 | integrity sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA== 591 | 592 | supports-preserve-symlinks-flag@^1.0.0: 593 | version "1.0.0" 594 | resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" 595 | integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== 596 | 597 | tarn@^3.0.2: 598 | version "3.0.2" 599 | resolved "https://registry.yarnpkg.com/tarn/-/tarn-3.0.2.tgz#73b6140fbb881b71559c4f8bfde3d9a4b3d27693" 600 | integrity sha512-51LAVKUSZSVfI05vjPESNc5vwqqZpbXCsU+/+wxlOrUjk2SnFTt97v9ZgQrD4YmxYW1Px6w2KjaDitCfkvgxMQ== 601 | 602 | tildify@2.0.0: 603 | version "2.0.0" 604 | resolved "https://registry.yarnpkg.com/tildify/-/tildify-2.0.0.tgz#f205f3674d677ce698b7067a99e949ce03b4754a" 605 | integrity sha512-Cc+OraorugtXNfs50hU9KS369rFXCfgGLpfCfvlc+Ud5u6VWmUQsOAa9HbTvheQdYnrdJqqv1e5oIqXppMYnSw== 606 | 607 | typescript@^4.6.3: 608 | version "4.6.4" 609 | resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.6.4.tgz#caa78bbc3a59e6a5c510d35703f6a09877ce45e9" 610 | integrity sha512-9ia/jWHIEbo49HfjrLGfKbZSuWo9iTMwXO+Ca3pRsSpbsMbc7/IU8NKdCZVRRBafVPGnoJeFL76ZOAA84I9fEg== 611 | 612 | vite@^2.9.7: 613 | version "2.9.16" 614 | resolved "https://registry.yarnpkg.com/vite/-/vite-2.9.16.tgz#daf7ba50f5cc37a7bf51b118ba06bc36e97898e9" 615 | integrity sha512-X+6q8KPyeuBvTQV8AVSnKDvXoBMnTx8zxh54sOwmmuOdxkjMmEJXH2UEchA+vTMps1xw9vL64uwJOWryULg7nA== 616 | dependencies: 617 | esbuild "^0.14.27" 618 | postcss "^8.4.13" 619 | resolve "^1.22.0" 620 | rollup ">=2.59.0 <2.78.0" 621 | optionalDependencies: 622 | fsevents "~2.3.2" 623 | 624 | vitepress@^0.22.4: 625 | version "0.22.4" 626 | resolved 
"https://registry.yarnpkg.com/vitepress/-/vitepress-0.22.4.tgz#d4d778fb06decfc2c31c105f6a7a136843cdfb08" 627 | integrity sha512-oZUnLO/SpYdThaBKefDeOiVlr0Rie4Ppx3FzMnMyLtJnI5GlBMNjqYqMy/4+umm/iC+ZDJfI+IlDKxv5fZnYzA== 628 | dependencies: 629 | "@docsearch/css" "^3.0.0" 630 | "@docsearch/js" "^3.0.0" 631 | "@vitejs/plugin-vue" "^2.3.2" 632 | prismjs "^1.25.0" 633 | vite "^2.9.7" 634 | vue "^3.2.33" 635 | 636 | vue@^3.2.33: 637 | version "3.2.33" 638 | resolved "https://registry.yarnpkg.com/vue/-/vue-3.2.33.tgz#7867eb16a3293a28c4d190a837bc447878bd64c2" 639 | integrity sha512-si1ExAlDUrLSIg/V7D/GgA4twJwfsfgG+t9w10z38HhL/HA07132pUQ2KuwAo8qbCyMJ9e6OqrmWrOCr+jW7ZQ== 640 | dependencies: 641 | "@vue/compiler-dom" "3.2.33" 642 | "@vue/compiler-sfc" "3.2.33" 643 | "@vue/runtime-dom" "3.2.33" 644 | "@vue/server-renderer" "3.2.33" 645 | "@vue/shared" "3.2.33" 646 | --------------------------------------------------------------------------------