├── .devcontainer
├── Dockerfile
└── devcontainer.json
├── .dockerignore
├── .eslintrc.json
├── .gitignore
├── Dockerfile
├── LICENSE
├── README.md
├── config.js
├── content
├── announcements.mdx
├── api.mdx
├── api
│ ├── 1-pool.mdx
│ ├── 2-client.mdx
│ ├── 3-result.mdx
│ ├── 4-types.mdx
│ └── 5-cursor.mdx
├── features.mdx
├── features
│ ├── 1-connecting.mdx
│ ├── 2-queries.mdx
│ ├── 3-pooling.mdx
│ ├── 4-transactions.mdx
│ ├── 5-types.mdx
│ ├── 6-ssl.mdx
│ └── 7-native.mdx
├── guides.mdx
├── guides
│ ├── 1-project-structure.md
│ ├── 2-async-express.md
│ └── 3-upgrading.md
└── welcome.mdx
├── gatsby-config.js
├── gatsby-node.js
├── package.json
├── src
├── GithubLink.js
├── YoutubeEmbed.js
├── components
│ ├── Header.js
│ ├── NextPrevious.js
│ ├── container.js
│ ├── footer.js
│ ├── heading.js
│ ├── images
│ │ ├── github.svg
│ │ ├── logo.svg
│ │ └── twitter.svg
│ ├── index.js
│ ├── layout.js
│ ├── link.js
│ ├── mdxComponents
│ │ ├── anchor.js
│ │ ├── code.js
│ │ ├── codeBlock.js
│ │ ├── index.js
│ │ ├── pre.js
│ │ └── text.js
│ ├── notification.js
│ ├── rightSidebar.js
│ ├── sidebar.js
│ ├── styles.css
│ ├── theme.js
│ └── themeProvider.js
├── html.js
├── sortNavItems.js
└── templates
│ └── docs.js
├── static
├── crate-io.png
├── eaze.png
└── favicon.ico
└── yarn.lock
/.devcontainer/Dockerfile:
--------------------------------------------------------------------------------
1 | #-------------------------------------------------------------------------------------------------------------
2 | # Copyright (c) Microsoft Corporation. All rights reserved.
3 | # Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
4 | #-------------------------------------------------------------------------------------------------------------
5 |
6 | FROM node:12
7 |
8 | # Avoid warnings by switching to noninteractive
9 | ENV DEBIAN_FRONTEND=noninteractive
10 |
11 | # The node image includes a non-root user with sudo access. Use the "remoteUser"
12 | # property in devcontainer.json to use it. On Linux, the container user's GID/UIDs
13 | # will be updated to match your local UID/GID (when using the dockerFile property).
14 | # See https://aka.ms/vscode-remote/containers/non-root-user for details.
15 | ARG USERNAME=node
16 | ARG USER_UID=1000
17 | ARG USER_GID=$USER_UID
18 |
19 | # Configure apt and install packages
20 | RUN apt-get update \
21 | && apt-get -y install --no-install-recommends apt-utils dialog 2>&1 \
22 | #
23 | # Verify git and needed tools are installed
24 | && apt-get -y install git iproute2 procps \
25 | #
26 | # Remove outdated yarn from /opt and install via package
27 | # so it can be easily updated via apt-get upgrade yarn
28 | && rm -rf /opt/yarn-* \
29 | && rm -f /usr/local/bin/yarn \
30 | && rm -f /usr/local/bin/yarnpkg \
31 | && apt-get install -y curl apt-transport-https lsb-release \
32 | && curl -sS https://dl.yarnpkg.com/$(lsb_release -is | tr '[:upper:]' '[:lower:]')/pubkey.gpg | apt-key add - 2>/dev/null \
33 | && echo "deb https://dl.yarnpkg.com/$(lsb_release -is | tr '[:upper:]' '[:lower:]')/ stable main" | tee /etc/apt/sources.list.d/yarn.list \
34 | && apt-get update \
35 | && apt-get -y install --no-install-recommends yarn \
36 | #
37 | # Install eslint globally
38 | && npm install -g eslint \
39 | #
40 | # [Optional] Update a non-root user to UID/GID if needed.
41 | && if [ "$USER_GID" != "1000" ] || [ "$USER_UID" != "1000" ]; then \
42 | groupmod --gid $USER_GID $USERNAME \
43 | && usermod --uid $USER_UID --gid $USER_GID $USERNAME \
44 | && chown -R $USER_UID:$USER_GID /home/$USERNAME; \
45 | fi \
46 | # [Optional] Add add sudo support for non-root user
47 | && apt-get install -y sudo \
48 | && echo node ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/$USERNAME \
49 | && chmod 0440 /etc/sudoers.d/$USERNAME \
50 | #
51 | # Clean up
52 | && apt-get autoremove -y \
53 | && apt-get clean -y \
54 | && rm -rf /var/lib/apt/lists/*
55 |
56 | # Switch back to dialog for any ad-hoc use of apt-get
57 | ENV DEBIAN_FRONTEND=dialog
58 |
--------------------------------------------------------------------------------
/.devcontainer/devcontainer.json:
--------------------------------------------------------------------------------
1 | // For format details, see https://aka.ms/vscode-remote/devcontainer.json or the definition README at
2 | // https://github.com/microsoft/vscode-dev-containers/tree/master/containers/javascript-node-12
3 | {
4 | "name": "Node.js 12",
5 | "dockerFile": "Dockerfile",
6 |
7 | // Use 'settings' to set *default* container specific settings.json values on container create.
8 | // You can edit these settings after create using File > Preferences > Settings > Remote.
9 | "settings": {
10 | "terminal.integrated.shell.linux": "/bin/bash"
11 | },
12 |
13 | // Use 'appPort' to create a container with published ports. If the port isn't working, be sure
14 | // your server accepts connections from all interfaces (0.0.0.0 or '*'), not just localhost.
15 | // "appPort": [],
16 |
17 | // Uncomment the next line to run commands after the container is created.
18 | // "postCreateCommand": "yarn install",
19 |
20 | // Uncomment the next line to have VS Code connect as an existing non-root user in the container.
21 | // On Linux, by default, the container user's UID/GID will be updated to match your local user. See
22 | // https://aka.ms/vscode-remote/containers/non-root for details on adding a non-root user if none exist.
23 | // "remoteUser": "node",
24 |
25 | // Add the IDs of extensions you want installed when the container is created in the array below.
26 | "extensions": [
27 | "dbaeumer.vscode-eslint"
28 | ]
29 | }
30 |
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | public
3 | .cache
4 |
--------------------------------------------------------------------------------
/.eslintrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "env": {
3 | "browser": true,
4 | "es6": true
5 | },
6 | "globals": {
7 | "graphql": false
8 | }
9 | }
10 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | public
2 | .cache
3 | node_modules
4 | *DS_Store
5 | *.env
6 |
7 | .idea/
8 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
 1 | # NOTE(review): node:carbon is Node 8, which is end-of-life — consider upgrading to a supported LTS base image.
 2 | FROM node:carbon
 3 | 
 4 | # Create app directory
 5 | WORKDIR /app
 6 | 
 7 | # Install the Gatsby CLI globally so the `gatsby serve` command below is available at runtime
 8 | RUN npm -g install gatsby-cli
 9 | 
10 | # Copy manifests first so the dependency-install layer is cached independently of source changes
11 | COPY package*.json ./
12 | 
13 | # Install app dependencies (exact versions from the lockfile)
14 | RUN npm ci
15 | 
16 | # Bundle app source
17 | COPY . .
18 | 
19 | # Build static files
20 | RUN npm run build
21 | 
22 | # serve on port 8080
23 | CMD ["gatsby", "serve", "--verbose", "--prefix-paths", "-p", "8080", "--host", "0.0.0.0"]
24 | 
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2019 Hasura
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # node-postgres docs repo
2 |
3 | This repo holds the documentation running at [https://node-postgres.com](https://node-postgres.com).
4 |
5 | It's built on a gatsby template I found and modified. I'd like to go through and convert the gatsby scaffolding code to typescript at some point. If anything in the documentation is unclear please feel free to open an issue or just change it & submit a pull request!
6 |
7 | [](https://app.netlify.com/sites/hopeful-clarke-f36d36/deploys)
8 |
--------------------------------------------------------------------------------
/config.js:
--------------------------------------------------------------------------------
 1 | // Site-wide configuration consumed by the Gatsby scaffolding (gatsby-config.js / gatsby-node.js).
 2 | const config = {
 3 |   gatsby: {
 4 |     // uncomment this if not hosted at root
 5 |     //pathPrefix: '/node-postgres-docs/',
 6 |     pathPrefix: '/',
 7 |     siteUrl: 'https://node-postgres.com',
 8 |     // Google Analytics tracking id
 9 |     gaTrackingId: 'UA-100138145-1',
10 |   },
11 |   // Top navigation header; empty strings hide the corresponding element
12 |   header: {
13 |     logo: '',
14 |     title: 'node-postgres',
15 |     githubUrl: 'https://github.com/brianc/node-postgres',
16 |     helpUrl: '',
17 |     tweetText: '',
18 |     links: [{ text: '', link: '' }],
19 |   },
20 |   sidebar: {
21 |     // Fixed ordering of the top-level sections in the sidebar
22 |     forcedNavOrder: ['/', '/announcements', '/features', '/guides', '/api'],
23 |     links: [{ text: '', link: '' }],
24 |   },
25 |   siteMetadata: {
26 |     title: 'node-postgres',
27 |     description: 'Documentation for node-postgres, the postgres database client for node.js',
28 |     ogImage: null,
29 |     // Location of the markdown content on GitHub — presumably backs the "edit on GitHub" links; confirm in templates
30 |     docsLocation: 'https://github.com/brianc/node-postgres-docs/tree/master/content',
31 |     favicon: '/favicon.ico',
32 |   },
33 | }
34 | 
35 | module.exports = config
36 | 
--------------------------------------------------------------------------------
/content/announcements.mdx:
--------------------------------------------------------------------------------
1 | ## 2020-02-25
2 |
3 | ### pg@8.0 release
4 |
5 | `pg@8.0` is [being released](https://github.com/brianc/node-postgres/pull/2117) which contains a handful of breaking changes.
6 |
  7 | I will outline each breaking change here and try to give some historical context on them. Most of them are small and subtle and likely won't impact you; __however__, there is one larger breaking change you will likely run into:
8 |
9 | ___
10 |
11 | #### Support all `tls.connect` [options](https://nodejs.org/api/tls.html#tls_tls_connect_options_callback) being passed to the client/pool constructor under the `ssl` option.
12 |
 13 | Previously we whitelisted the parameters passed here and did slight massaging of some of them. The main __breaking__ change here is that now if you do this:
14 |
15 | ```
16 | const client = new Client({ ssl: true })
17 | ```
18 |
19 |
 20 | Now we will use the default ssl options to `tls.connect`, which include `rejectUnauthorized` being enabled. This means your connection attempt may fail if you are using a self-signed cert. To use the old behavior you should do this:
21 |
22 |
23 | ```
24 | const client = new Client({ ssl: { rejectUnauthorized: false } })
25 | ```
26 |
27 | This makes pg a bit more secure "out of the box" while still enabling you to opt in to the old behavior.
28 |
29 | ___
30 |
 31 | The rest of the changes are relatively minor & you likely won't need to do anything, but it's good to be aware of them nonetheless!
32 |
33 | #### change default database name
34 |
35 | If a database name is not specified, available in the environment at `PGDATABASE`, or available at `pg.defaults`, we used to use the username of the process user as the name of the database. Now we will use the `user` property supplied to the client as the database name, if it exists. What this means is this:
36 |
37 | ```
38 | new Client({
39 | user: 'foo'
40 | })
41 | ```
42 |
43 | `pg@7.x` will default the database name to the _process_ user. `pg@8.x` will use the `user` property supplied to the client. If you have not supplied `user` to the client, and it isn't available through any of its existing lookup mechanisms (environment variables, pg.defaults) then it will still use the process user for the database name.
44 |
45 |
46 | #### drop support for versions of node older than 8.0
47 |
 48 | Node@6.0 has been out of LTS for quite some time now, and I've removed it from our test matrix. `pg@8.0` _may_ still work on older versions of node, but it isn't a goal of the project anymore. Node@8.0 is actually no longer in the LTS support line, but pg will continue to test against and support 8.0 until there is a compelling reason to drop support for it. Any security vulnerability issues which come up I will back-port fixes to the `pg@7.x` line and do a release, but any other fixes or improvements will not be back-ported.
49 |
50 | #### prevent password from being logged accidentally
51 |
 52 | `pg@8.0` makes the password field on the pool and client non-enumerable. This means when you do `console.log(client)` you won't have your database password printed out unintentionally. You can still do `console.log(client.password)` if you really want to see it!
53 |
54 | #### make `pg.native` non-enumerable
55 |
 56 | You can use `pg.native.Client` to access the native client. The first time you access the `pg.native` getter it imports the native bindings...which must be installed. In some cases (such as webpacking the pg code for lambda deployment) the `.native` property would be traversed and trigger an import of the native bindings as a side-effect. Making this property non-enumerable will fix this issue. An easy fix, but it's technically a breaking change in cases where people _are_ relying on this side effect for any reason.
57 |
58 | #### make `pg.Pool` an es6 class
59 |
60 | This makes extending `pg.Pool` possible. Previously it was not a "proper" es6 class and `class MyPool extends pg.Pool` wouldn't work.
61 |
62 | #### make `Notice` messages _not_ an instance of a JavaScript error
63 |
 64 | The code path for parsing `notice` and `error` messages from the postgres backend is the same. Previously we created a JavaScript `Error` instance for _both_ of these message types. Now, only actual `errors` from the postgres backend will be an instance of an `Error`. The _shape_ and _properties_ of the two messages did not change outside of this.
65 |
66 | #### monorepo
67 |
 68 | While not technically a breaking change for the module itself, I have begun the process of [consolidating](https://github.com/brianc/node-pg-query-stream) [separate](https://github.com/brianc/node-pg-cursor/) [repos](https://github.com/brianc/node-pg-pool) into the main [repo](https://github.com/brianc/node-postgres) and converted it into a monorepo managed by lerna. This will help me stay on top of issues better (it was hard to bounce between 3-4 separate repos) and coordinate bug fixes and changes between dependent modules.
69 |
70 | Thanks for reading that! pg tries to be super pedantic about not breaking backwards-compatibility in non semver major releases....even for seemingly small things. If you ever notice a breaking change on a semver minor/patch release please stop by the [repo](https://github.com/brianc/node-postgres) and open an issue!
71 |
 72 | _If you find `pg` valuable to you or your business please consider [supporting](http://github.com/sponsors/brianc) its continued development! Big performance improvements, typescript, better docs, query pipelining and more are all in the works!_
73 |
74 | ## 2019-07-18
75 |
76 | ### New documentation
77 |
78 | After a _very_ long time on my todo list I've ported the docs from my old hand-rolled webapp running on route53 + elb + ec2 + dokku (I know, I went overboard!) to [gatsby](https://www.gatsbyjs.org/) hosted on [netlify](https://www.netlify.com/) which is _so_ much easier to manage. I've released the code at [https://github.com/brianc/node-postgres-docs](https://github.com/brianc/node-postgres-docs) and invite your contributions! Let's make this documentation better together. Any time changes are merged to master on the documentation repo it will automatically deploy.
79 |
80 | If you see an error in the docs, big or small, use the "edit on github" button to edit the page & submit a pull request right there. I'll get a new version out ASAP with your changes! If you want to add new pages of documentation open an issue if you need guidance, and I'll help you get started.
81 |
82 | I want to extend a special **thank you** to all the [supporters](https://github.com/brianc/node-postgres/blob/master/SPONSORS.md) and [contributors](https://github.com/brianc/node-postgres/graphs/contributors) to the project that have helped keep me going through times of burnout or life "getting in the way." ❤️
83 |
 84 | It's been quite a journey, and I look forward to continuing it for as long as I can provide value to all y'all. 🤠
85 |
86 | ## 2017-08-12
87 |
88 | ### code execution vulnerability
89 |
90 | Today [@sehrope](https://github.com/sehrope) found and reported a code execution vulnerability in node-postgres. This affects all versions from `pg@2.x` through `pg@7.1.0`.
91 |
92 | I have published a fix on the tip of each major version branch of all affected versions as well as a fix on each minor version branch of `pg@6.x` and `pg@7.x`:
93 |
94 | ### Fixes
95 |
96 | The following versions have been published to npm & contain a patch to fix the vulnerability:
97 |
98 | ```
99 | pg@2.11.2
100 | pg@3.6.4
101 | pg@4.5.7
102 | pg@5.2.1
103 | pg@6.0.5
104 | pg@6.1.6
105 | pg@6.2.5
106 | pg@6.3.3
107 | pg@6.4.2
108 | pg@7.0.3
109 | pg@7.1.2
110 | ```
111 |
112 | ### Example
113 |
114 | To demonstrate the issue & see if you are vulnerable execute the following in node:
115 |
116 | ```js
117 | const { Client } = require('pg')
118 | const client = new Client()
119 | client.connect()
120 |
121 | const sql = `SELECT 1 AS "\\'/*", 2 AS "\\'*/\n + console.log(process.env)] = null;\n//"`
122 |
123 | client.query(sql, (err, res) => {
124 | client.end()
125 | })
126 | ```
127 |
128 | You will see your environment variables printed to your console. An attacker can use this exploit to execute any arbitrary node code within your process.
129 |
130 | ### Impact
131 |
132 | This vulnerability _likely_ does not impact you if you are connecting to a database you control and not executing user-supplied sql. Still, you should **absolutely** upgrade to the most recent patch version as soon as possible to be safe.
133 |
134 | Two attack vectors we quickly thought of:
135 |
136 | - 1 - executing unsafe, user-supplied sql which contains a malicious column name like the one above.
137 | - 2 - connecting to an untrusted database and executing a query which returns results where any of the column names are malicious.
138 |
139 | ### Support
140 |
141 | I have created [an issue](https://github.com/brianc/node-postgres/issues/1408) you can use to discuss the vulnerability with me or ask questions, and I have reported this issue [on twitter](https://twitter.com/briancarlson) and directly to Heroku and [nodesecurity.io](https://nodesecurity.io/).
142 |
143 | I take security very seriously. If you or your company benefit from node-postgres **[please sponsor my work](https://www.patreon.com/node_postgres)**: this type of issue is one of the many things I am responsible for, and I want to be able to continue to tirelessly provide a world-class PostgreSQL experience in node for years to come.
144 |
--------------------------------------------------------------------------------
/content/api.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: API
3 | ---
4 |
--------------------------------------------------------------------------------
/content/api/1-pool.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: pg.Pool
3 | slug: /api/pool
4 | ---
5 |
6 | ## constructor
7 |
8 | ### new Pool([config: object])
9 |
10 | Every field of the `config` object is entirely optional. The config passed to the pool is also passed to every client instance within the pool when the pool creates that client.
11 |
12 | ```flow
13 | config = {
14 | // all valid client config options are also valid here
15 | // in addition here are the pool specific configuration parameters:
16 |
17 | // number of milliseconds to wait before timing out when connecting a new client
18 | // by default this is 0 which means no timeout
19 | connectionTimeoutMillis?: int,
20 |
21 | // number of milliseconds a client must sit idle in the pool and not be checked out
22 | // before it is disconnected from the backend and discarded
23 | // default is 10000 (10 seconds) - set to 0 to disable auto-disconnection of idle clients
24 | idleTimeoutMillis?: int,
25 |
26 | // maximum number of clients the pool should contain
27 | // by default this is set to 10.
28 | max?: int,
29 |
30 | // Default behavior is the pool will keep clients open & connected to the backend
31 | // until idleTimeoutMillis expire for each client and node will maintain a ref
32 | // to the socket on the client, keeping the event loop alive until all clients are closed
33 | // after being idle or the pool is manually shutdown with `pool.end()`.
34 | //
35 | // Setting `allowExitOnIdle: true` in the config will allow the node event loop to exit
36 | // as soon as all clients in the pool are idle, even if their socket is still open
37 | // to the postgres server. This can be handy in scripts & tests
38 | // where you don't want to wait for your clients to go idle before your process exits.
39 | allowExitOnIdle?: boolean
40 | }
41 | ```
42 |
43 | example to create a new pool with configuration:
44 |
45 | ```js
46 | const { Pool } = require('pg')
47 |
48 | const pool = new Pool({
49 | host: 'localhost',
50 | user: 'database-user',
51 | max: 20,
52 | idleTimeoutMillis: 30000,
53 | connectionTimeoutMillis: 2000,
54 | })
55 | ```
56 |
57 | ## pool.connect
58 |
59 | ### `pool.connect(callback: (err?: Error, client?: pg.Client, release?: releaseCallback) => void) => void`
60 |
61 | Acquires a client from the pool. If the pool is 'full' and all clients are currently checked out, this will wait in a FIFO queue until a client becomes available by it being released back to the pool. If there are idle clients in the pool it will be returned to the callback on `process.nextTick`. If the pool is not full a new client will be created & returned to this callback.
62 |
63 | ```js
64 | const { Pool } = require('pg')
65 |
66 | const pool = new Pool()
67 |
68 | pool.connect((err, client, release) => {
69 | if (err) {
70 | return console.error('Error acquiring client', err.stack)
71 | }
72 | client.query('SELECT NOW()', (err, result) => {
73 | release()
74 | if (err) {
75 | return console.error('Error executing query', err.stack)
76 | }
77 | console.log(result.rows)
78 | })
79 | })
80 | ```
81 |
82 | ### `pool.connect() => Promise`
83 |
84 | ```js
85 | const { Pool } = require('pg')
86 |
87 | const pool = new Pool()
88 |
89 | ;(async function() {
90 | const client = await pool.connect()
91 | await client.query('SELECT NOW()')
92 | client.release()
93 | })()
94 | ```
95 |
96 |
97 | You must call the releaseCallback or{' '}
98 | client.release (which points to the releaseCallback) when
99 | you are finished with a client. If you forget to release the client then your application will quickly exhaust
100 | available, idle clients in the pool and all further calls to pool.connect will timeout
101 | with an error or hang indefinitely if you have connectionTimeoutMillis configured to 0.
102 |
103 |
104 | ## releaseCallback
105 |
106 | ### release: (err?: Error)
107 |
108 | The `releaseCallback` releases an acquired client back to the pool. If you pass a truthy value in the `err` position to the callback, instead of releasing the client to the pool, the pool will be instructed to disconnect and destroy this client, leaving a space within itself for a new client.
109 |
110 | ```js
111 | const { Pool } = require('pg')
112 |
113 | const pool = new Pool()
114 | assert(pool.totalCount === 0)
115 | assert(pool.idleCount === 0)
116 | ;(async function() {
117 | const client = await pool.connect()
118 | await client.query('SELECT NOW()')
119 | assert(pool.totalCount === 1)
120 | assert(pool.idleCount === 0)
121 |
122 | // tell the pool to destroy this client
123 | client.release(true)
124 | assert(pool.idleCount === 0)
125 | assert(pool.totalCount === 0)
126 | })()
127 | ```
128 |
129 | Client instances returned from `pool.connect` will have a `release` method which will release them from the pool. This is the same method that is passed to the connect callback as the 3rd argument if you're using the pool with callbacks.
130 |
131 | ```js
132 | const { Pool } = require('pg')
133 |
134 | const pool = new Pool()
135 | pool.connect((err, client, release) => {
136 | assert(client.release === release)
137 | })
138 | ```
139 |
140 | ## pool.query
141 |
142 | Often we only need to run a single query on the database, so as a convenience the pool has a method to run a query on the first available idle client and return its result.
143 |
144 | ### pool.query(text: string, values?: Array, callback: (err?: Error, result?: pg.Result) => void) => void
145 |
146 | ```js
147 | const { Pool } = require('pg')
148 |
149 | const pool = new Pool()
150 |
151 | pool.query('SELECT $1::text as name', ['brianc'], (err, result) => {
152 | if (err) {
153 | return console.error('Error executing query', err.stack)
154 | }
155 | console.log(result.rows[0].name) // brianc
156 | })
157 | ```
158 |
159 | Promises are also supported:
160 |
161 | ### `pool.query() => Promise`
162 |
163 | ```js
164 | const { Pool } = require('pg')
165 |
166 | const pool = new Pool()
167 |
168 | pool
169 | .query('SELECT $1::text as name', ['brianc'])
170 | .then(res => console.log(res.rows[0].name)) // brianc
171 | .catch(err => console.error('Error executing query', err.stack))
172 | ```
173 |
174 | Notice in the example above no `releaseCallback` was necessary. The pool is doing the acquiring and releasing internally. I find `pool.query` to be a handy shortcut in a lot of situations.
175 |
176 |
177 | Do not use pool.query if you need transactional integrity: the pool will dispatch every query passed
178 | to pool.query on the first available idle client. Transactions within PostgreSQL are scoped to a single client and so
179 | dispatching individual queries within a single transaction across multiple, random clients will cause big problems in
180 | your app and not work. For more info please read transactions.
181 |
182 |
183 | ## pool.end
184 |
185 | Calling `pool.end` will drain the pool of all active clients, disconnect them, and shut down any internal timers in the pool. It is common to call this at the end of a script using the pool or when your process is attempting to shut down cleanly.
186 |
187 | ```js
188 | // again both promises and callbacks are supported:
189 | const { Pool } = require('pg')
190 |
191 | const pool = new Pool()
192 |
193 | // either this:
194 | pool.end(() => {
195 | console.log('pool has ended')
196 | })
197 |
198 | // or this:
199 | pool.end().then(() => console.log('pool has ended'))
200 | ```
201 |
202 | ## properties
203 |
204 | ### `pool.totalCount: int`
205 |
206 | The total number of clients existing within the pool.
207 |
208 | ### `pool.idleCount: int`
209 |
210 | The number of clients which are not checked out but are currently idle in the pool.
211 |
212 | ### `pool.waitingCount: int`
213 |
214 | The number of queued requests waiting on a client when all clients are checked out. It can be helpful to monitor this number to see if you need to adjust the size of the pool.
215 |
216 | ## events
217 |
218 | `Pool` instances are also instances of [`EventEmitter`](https://nodejs.org/api/events.html).
219 |
220 | ### `pool.on('connect', (client: Client) => void) => void`
221 |
222 | Whenever the pool establishes a new client connection to the PostgreSQL backend it will emit the `connect` event with the newly connected client. This presents an opportunity for you to run setup commands on a client.
223 |
224 | ```js
225 | const pool = new Pool()
226 | pool.on('connect', client => {
227 | client.query('SET DATESTYLE = iso, mdy')
228 | })
229 | ```
230 |
231 | ### `pool.on('acquire', (client: Client) => void) => void`
232 |
233 | Whenever a client is checked out from the pool the pool will emit the `acquire` event with the client that was acquired.
234 |
235 | ### `pool.on('error', (err: Error, client: Client) => void) => void`
236 |
237 | When a client is sitting idly in the pool it can still emit errors because it is connected to a live backend. If the backend goes down or a network partition is encountered all the idle, connected clients in your application will emit an error _through_ the pool's error event emitter. The error listener is passed the error as the first argument and the client upon which the error occurred as the 2nd argument. The client will be automatically terminated and removed from the pool, it is only passed to the error handler in case you want to inspect it.
238 |
239 |
240 | It is important you add an event listener to the pool to catch errors. Just like other event emitters, if a pool emits
241 | an error event and no listeners are added node will emit an uncaught error and
242 | potentially exit.
243 |
244 |
245 | ### `pool.on('remove', (client: Client) => void) => void`
246 |
247 | Whenever a client is closed & removed from the pool the pool will emit the `remove` event.
248 |
--------------------------------------------------------------------------------
/content/api/2-client.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: pg.Client
3 | slug: /api/client
4 | ---
5 |
6 | ## constructor
7 |
8 | ### new Client([config: object])
9 |
10 | Every field of the `config` object is entirely optional. A `Client` instance will use [environment variables](/features/connecting#environment-variables) for all missing values.
11 |
12 | ```flow
13 | config = {
14 | user?: string, // default process.env.PGUSER || process.env.USER
15 | password?: string or function, //default process.env.PGPASSWORD
16 | host?: string, // default process.env.PGHOST
17 | database?: string, // default process.env.PGDATABASE || user
18 | port?: number, // default process.env.PGPORT
19 | connectionString?: string, // e.g. postgres://user:password@host:5432/database
20 | ssl?: any, // passed directly to node.TLSSocket, supports all tls.connect options
21 | types?: any, // custom type parsers
22 | statement_timeout?: number, // number of milliseconds before a statement in query will time out, default is no timeout
23 | query_timeout?: number, // number of milliseconds before a query call will timeout, default is no timeout
24 | application_name?: string, // The name of the application that created this Client instance
25 | connectionTimeoutMillis?: number, // number of milliseconds to wait for connection, default is no timeout
26 | idle_in_transaction_session_timeout?: number // number of milliseconds before terminating any session with an open idle transaction, default is no timeout
27 | }
28 | ```
29 |
30 | example to create a client with specific connection information:
31 |
32 | ```js
33 | const { Client } = require('pg')
34 |
35 | const client = new Client({
36 | host: 'my.database-server.com',
37 | port: 5334,
38 | user: 'database-user',
39 | password: 'secretpassword!!',
40 | })
41 | ```
42 |
43 | ## client.connect
44 |
45 | ### `client.connect(callback: (err: Error) => void) => void`
46 |
47 | Calling `client.connect` with a callback:
48 |
49 | ```js
50 | const { Client } = require('pg')
51 | const client = new Client()
52 | client.connect(err => {
53 | if (err) {
54 | console.error('connection error', err.stack)
55 | } else {
56 | console.log('connected')
57 | }
58 | })
59 | ```
60 |
61 | ### `client.connect() => Promise`
62 |
63 | Calling `client.connect` without a callback yields a promise:
64 |
65 | ```js
66 | const { Client } = require('pg')
67 | const client = new Client()
68 | client
69 | .connect()
70 | .then(() => console.log('connected'))
71 | .catch(err => console.error('connection error', err.stack))
72 | ```
73 |
74 | _note: connect returning a promise is only available in pg@7.0 or above_
75 |
76 | ## client.query
77 |
78 | ### `client.query` - text, optional values, and callback.
79 |
80 | Passing query text, optional query parameters, and a callback to `client.query` results in a type-signature of:
81 |
82 | ```flow
83 | client.query(
84 | text: string,
85 | values?: Array,
86 | callback: (err: Error, result: Result) => void
87 | ) => void
88 | ```
89 |
90 | That is a kinda gross type signature but it translates out to this:
91 |
92 | **Plain text query with a callback:**
93 |
94 | ```js
95 | const { Client } = require('pg')
96 | const client = new Client()
97 | client.connect()
98 | client.query('SELECT NOW()', (err, res) => {
99 | if (err) throw err
100 | console.log(res)
101 | client.end()
102 | })
103 | ```
104 |
105 | **Parameterized query with a callback:**
106 |
107 | ```js
108 | const { Client } = require('pg')
109 | const client = new Client()
110 | client.connect()
111 | client.query('SELECT $1::text as name', ['brianc'], (err, res) => {
112 | if (err) throw err
113 | console.log(res)
114 | client.end()
115 | })
116 | ```
117 |
118 | ### `client.query` - text, optional values: Promise
119 |
120 | If you call `client.query` with query text and optional parameters but **don't** pass a callback, then you will receive a `Promise` for a query result.
121 |
122 | ```flow
123 | client.query(
124 | text: string,
125 | values?: Array
126 | ) => Promise
127 | ```
128 |
129 | **Plain text query with a promise**
130 |
131 | ```js
132 | const { Client } = require('pg')
133 | const client = new Client()
134 | client.connect()
135 | client
136 | .query('SELECT NOW()')
137 | .then(result => console.log(result))
138 | .catch(e => console.error(e.stack))
139 | .then(() => client.end())
140 | ```
141 |
142 | **Parameterized query with a promise**
143 |
144 | ```js
145 | const { Client } = require('pg')
146 | const client = new Client()
147 | client.connect()
148 | client
149 | .query('SELECT $1::text as name', ['brianc'])
150 | .then(result => console.log(result))
151 | .catch(e => console.error(e.stack))
152 | .then(() => client.end())
153 | ```
154 |
155 | ### `client.query(config: QueryConfig, callback: (err?: Error, result?: Result) => void) => void`
156 |
157 | ### `client.query(config: QueryConfig) => Promise`
158 |
159 | You can pass an object to `client.query` with the signature of:
160 |
161 | ```flow
162 | interface QueryConfig {
163 | // the raw query text
164 | text: string;
165 |
166 | // an array of query parameters
167 | values?: Array;
168 |
169 | // name of the query - used for prepared statements
170 | name?: string;
171 |
172 | // by default rows come out as a key/value pair for each row
173 | // pass the string 'array' here to receive rows as an array of values
174 | rowMode?: string;
175 |
176 | // custom type parsers just for this query result
177 | types?: Types;
178 | }
179 | ```
180 |
181 | **client.query with a QueryConfig and a callback**
182 |
183 | If you pass a `name` parameter to the `client.query` method, the client will create a [prepared statement](/features/queries#prepared-statements).
184 |
185 | ```js
186 | const query = {
187 | name: 'get-name',
188 | text: 'SELECT $1::text',
189 | values: ['brianc'],
190 | rowMode: 'array',
191 | }
192 |
193 | client.query(query, (err, res) => {
194 | if (err) {
195 | console.error(err.stack)
196 | } else {
197 | console.log(res.rows) // ['brianc']
198 | }
199 | })
200 | ```
201 |
202 | **client.query with a QueryConfig and a Promise**
203 |
204 | ```js
205 | const query = {
206 | name: 'get-name',
207 | text: 'SELECT $1::text',
208 | values: ['brianc'],
209 | rowMode: 'array',
210 | }
211 |
212 | // promise
213 | client
214 | .query(query)
215 | .then(res => {
216 | console.log(res.rows) // ['brianc']
217 | })
218 | .catch(e => {
219 | console.error(e.stack)
220 | })
221 | ```
222 |
223 | **client.query with a `Submittable`**
224 |
225 | If you pass an object to `client.query` and the object has a `.submit` function on it, the client will pass its PostgreSQL server connection to the object and delegate query dispatching to the supplied object. This is an advanced feature mostly intended for library authors. It is incidentally also currently how the callback and promise based queries above are handled internally, but this is subject to change. It is also how [pg-cursor](https://github.com/brianc/node-pg-cursor) and [pg-query-stream](https://github.com/brianc/node-pg-query-stream) work.
226 |
227 | ```js
228 | const Query = require('pg').Query
229 | const query = new Query('select $1::text as name', ['brianc'])
230 |
231 | const result = client.query(query)
232 |
233 | assert(query === result) // true
234 |
235 | query.on('row', row => {
236 | console.log('row!', row) // { name: 'brianc' }
237 | })
238 | query.on('end', () => {
239 | console.log('query done')
240 | })
241 | query.on('error', err => {
242 | console.error(err.stack)
243 | })
244 | ```
245 |
246 | ---
247 |
248 | ## client.end
249 |
250 | ### client.end(cb?: (err?: Error) => void) => void
251 |
252 | Disconnects the client from the PostgreSQL server.
253 |
254 | ```js
255 | client.end(err => {
256 | console.log('client has disconnected')
257 | if (err) {
258 | console.log('error during disconnection', err.stack)
259 | }
260 | })
261 | ```
262 |
263 | ### `client.end() => Promise`
264 |
265 | Calling end without a callback yields a promise:
266 |
267 | ```js
268 | client
269 | .end()
270 | .then(() => console.log('client has disconnected'))
271 | .catch(err => console.error('error during disconnection', err.stack))
272 | ```
273 |
274 | _note: end returning a promise is only available in pg@7.0 and above_
275 |
276 | ## events
277 |
278 | ### client.on('error', (err: Error) => void) => void
279 |
280 | When the client is in the process of connecting, dispatching a query, or disconnecting it will catch and forward errors from the PostgreSQL server to the respective `client.connect` `client.query` or `client.end` callback/promise; however, the client maintains a long-lived connection to the PostgreSQL back-end and due to network partitions, back-end crashes, fail-overs, etc the client can (and over a long enough time period _will_) eventually be disconnected while it is idle. To handle this you may want to attach an error listener to a client to catch errors. Here's a contrived example:
281 |
282 | ```js
283 | const client = new pg.Client()
284 | client.connect()
285 |
286 | client.on('error', err => {
287 | console.error('something bad has happened!', err.stack)
288 | })
289 |
290 | // walk over to server, unplug network cable
291 |
292 | // process output: 'something bad has happened!' followed by stacktrace :P
293 | ```
294 |
295 | ### client.on('end') => void
296 |
297 | When the client disconnects from the PostgreSQL server it will emit an end event once.
298 |
299 | ### client.on('notification', (notification: Notification) => void) => void
300 |
301 | Used for `listen/notify` events:
302 |
303 | ```flow
304 | type Notification {
305 | processId: int,
306 | channel: string,
307 | payload?: string
308 | }
309 | ```
310 |
311 | ```js
312 | const client = new pg.Client()
313 | client.connect()
314 |
315 | client.query('LISTEN foo')
316 |
317 | client.on('notification', msg => {
318 | console.log(msg.channel) // foo
319 | console.log(msg.payload) // bar!
320 | })
321 |
322 | client.query(`NOTIFY foo, 'bar!'`)
323 | ```
324 |
325 | ### client.on('notice', (notice: Error) => void) => void
326 |
327 | Used to log out [notice messages](https://www.postgresql.org/docs/9.6/static/plpgsql-errors-and-messages.html) from the PostgreSQL server.
328 |
329 | ```js
330 | client.on('notice', msg => console.warn('notice:', msg))
331 | ```
332 |
--------------------------------------------------------------------------------
/content/api/3-result.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: pg.Result
3 | slug: /api/result
4 | ---
5 |
6 | The `pg.Result` shape is returned for every successful query.
7 |
8 |
note: you cannot instantiate this directly
9 |
10 | ## properties
11 |
12 | ### `result.rows: Array`
13 |
14 | Every result will have a rows array. If no rows are returned the array will be empty. Otherwise the array will contain one item for each row returned from the query. By default node-postgres creates a map from the name to value of each column, giving you a json-like object back for each row.
15 |
16 | ### `result.fields: Array`
17 |
18 | Every result will have a fields array. This array contains the `name` and `dataTypeID` of each field in the result. These fields are ordered in the same order as the columns if you are using `arrayMode` for the query:
19 |
20 | ```js
21 | const { Pool } = require('pg')
22 |
23 | const pool = new Pool()
24 |
25 | const client = await pool.connect()
26 | const result = await client.query({
27 | rowMode: 'array',
28 | text: 'SELECT 1 as one, 2 as two;',
29 | })
30 | console.log(result.fields[0].name) // one
31 | console.log(result.fields[1].name) // two
32 | console.log(result.rows) // [ [ 1, 2 ] ]
33 | await client.end()
34 | ```
35 |
36 | ### `result.command: string`
37 |
38 | The command type last executed: `INSERT` `UPDATE` `CREATE` `SELECT` etc.
39 |
40 | ### `result.rowCount: int`
41 |
42 | The number of rows processed by the last command.
43 |
44 | _note: this does not reflect the number of rows __returned__ from a query. e.g. an update statement could update many rows (so high `result.rowCount` value) but `result.rows.length` would be zero. To check for an empty query response on a `SELECT` query use `result.rows.length === 0`_.
45 |
46 | [@sehrope](https://github.com/brianc/node-postgres/issues/2182#issuecomment-620553915) has a good explanation:
47 |
48 | The `rowCount` is populated from the command tag supplied by the PostgreSQL server. It's generally of the form: `COMMAND [OID] [ROWS]`
49 |
50 | For DML commands (INSERT, UPDATE, etc), it reflects how many rows the server modified to process the command. For SELECT or COPY commands it reflects how many rows were retrieved or copied. More info on the specifics here: https://www.postgresql.org/docs/current/protocol-message-formats.html (search for CommandComplete for the message type)
51 |
52 | The note in the docs about the difference is because that value is controlled by the server. It's possible for a non-standard server (ex: PostgreSQL fork) or a server version in the future to provide different information in some situations so it'd be best not to rely on it to assume that the rows array length matches the `rowCount`. It's fine to use it for DML counts though.
53 |
--------------------------------------------------------------------------------
/content/api/4-types.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Types
3 | slug: /api/types
4 | ---
5 |
6 | These docs are incomplete, for now please reference [pg-types docs](https://github.com/brianc/node-pg-types).
7 |
--------------------------------------------------------------------------------
/content/api/5-cursor.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: pg.Cursor
3 | slug: /api/cursor
4 | ---
5 |
6 | A cursor can be used to efficiently read through large result sets without loading the entire result-set into memory ahead of time. It's useful to simulate a 'streaming' style read of data, or exit early from a large result set. The cursor is passed to `client.query` and is dispatched internally in a way very similar to how normal queries are sent, but the API it presents for consuming the result set is different.
7 |
8 | ## install
9 |
10 | ```
11 | $ npm install pg pg-cursor
12 | ```
13 |
14 | ## constructor
15 |
16 | ### `new Cursor(text: String, values: Any[][, config: CursorQueryConfig])`
17 |
18 | Instantiates a new Cursor. A cursor is an instance of `Submittable` and should be passed directly to the `client.query` method.
19 |
20 | ```js
21 | const { Pool } = require('pg')
22 | const Cursor = require('pg-cursor')
23 |
24 | const pool = new Pool()
25 | const client = await pool.connect()
26 | const text = 'SELECT * FROM my_large_table WHERE something > $1'
27 | const values = [10]
28 |
29 | const cursor = client.query(new Cursor(text, values))
30 |
31 | cursor.read(100, (err, rows) => {
32 | cursor.close(() => {
33 | client.release()
34 | })
35 | })
36 | ```
37 |
38 | ```
39 | interface CursorQueryConfig {
40 | // by default rows come out as a key/value pair for each row
41 | // pass the string 'array' here to receive rows as an array of values
42 | rowMode?: string;
43 |
44 | // custom type parsers just for this query result
45 | types?: Types;
46 | }
47 | ```
48 |
49 | ## read
50 |
51 | ### `cursor.read(rowCount: Number, callback: (err: Error, rows: Row[], result: pg.Result) => void) => void`
52 |
53 | Read `rowCount` rows from the cursor instance. The callback will be called when the rows are available, loaded into memory, parsed, and converted to JavaScript types.
54 |
55 | If the cursor has read to the end of the result set, all subsequent calls to cursor#read will return a 0 length array of rows. Calling `read` on a cursor that has already read to the end is safe and will likewise yield a 0 length array of rows.
56 |
57 | Here is an example of reading to the end of a cursor:
58 |
59 | ```js
60 | const { Pool } = require('pg')
61 | const Cursor = require('pg-cursor')
62 |
63 | const pool = new Pool()
64 | const client = await pool.connect()
65 | const cursor = client.query(new Cursor('select * from generate_series(0, 5)'))
66 | cursor.read(100, (err, rows) => {
67 | if (err) {
68 | throw err
69 | }
70 | assert(rows.length == 6)
71 | cursor.read(100, (err, rows) => {
72 | assert(rows.length == 0)
73 | })
74 | })
75 | ```
76 |
77 | ## close
78 |
79 | ### `cursor.close(callback: () => void) => void`
80 |
81 | Used to close the cursor early. If you want to stop reading from the cursor before you get all of the rows returned, call this.
82 |
--------------------------------------------------------------------------------
/content/features.mdx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/brianc/node-postgres-docs/1e4fed04ff2f864968994fbc999b679b47cdf606/content/features.mdx
--------------------------------------------------------------------------------
/content/features/1-connecting.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Connecting
3 | slug: /features/connecting
4 | ---
5 |
6 | ## Environment variables
7 |
8 | node-postgres uses the same [environment variables](https://www.postgresql.org/docs/9.1/static/libpq-envars.html) as libpq to connect to a PostgreSQL server. Both individual clients & pools will use these environment variables. Here's a tiny program connecting node.js to the PostgreSQL server:
9 |
10 | ```js
11 | const { Pool, Client } = require('pg')
12 |
13 | // pools will use environment variables
14 | // for connection information
15 | const pool = new Pool()
16 |
17 | pool.query('SELECT NOW()', (err, res) => {
18 | console.log(err, res)
19 | pool.end()
20 | })
21 |
22 | // you can also use async/await
23 | const res = await pool.query('SELECT NOW()')
24 | await pool.end()
25 |
26 | // clients will also use environment variables
27 | // for connection information
28 | const client = new Client()
29 | await client.connect()
30 |
31 | const res = await client.query('SELECT NOW()')
32 | await client.end()
33 | ```
34 |
35 | To run the above program and specify which database to connect to we can invoke it like so:
36 |
37 | ```sh
38 | $ PGUSER=dbuser \
39 | PGHOST=database.server.com \
40 | PGPASSWORD=secretpassword \
41 | PGDATABASE=mydb \
42 | PGPORT=3211 \
43 | node script.js
44 | ```
45 |
46 | This allows us to write our programs without having to specify connection information in the program and lets us reuse them to connect to different databases without having to modify the code.
47 |
48 | The default values for the environment variables used are:
49 |
50 | ```
51 | PGHOST='localhost'
52 | PGUSER=process.env.USER
53 | PGDATABASE=process.env.USER
54 | PGPASSWORD=null
55 | PGPORT=5432
56 | ```
57 |
58 | ## Programmatic
59 |
60 | node-postgres also supports configuring a pool or client programmatically with connection information. Here's our same script from above modified to use programmatic (hard-coded in this case) values. This can be useful if your application already has a way to manage config values or you don't want to use environment variables.
61 |
62 | ```js
63 | const { Pool, Client } = require('pg')
64 |
65 | const pool = new Pool({
66 | user: 'dbuser',
67 | host: 'database.server.com',
68 | database: 'mydb',
69 | password: 'secretpassword',
70 | port: 3211,
71 | })
72 |
73 | pool.query('SELECT NOW()', (err, res) => {
74 | console.log(err, res)
75 | pool.end()
76 | })
77 |
78 | const client = new Client({
79 | user: 'dbuser',
80 | host: 'database.server.com',
81 | database: 'mydb',
82 | password: 'secretpassword',
83 | port: 3211,
84 | })
85 | client.connect()
86 |
87 | client.query('SELECT NOW()', (err, res) => {
88 | console.log(err, res)
89 | client.end()
90 | })
91 | ```
92 |
93 | Many cloud providers include alternative methods for connecting to database instances using short-lived authentication tokens. node-postgres supports dynamic passwords via a callback function, either synchronous or asynchronous. The callback must either return a string or return a promise that resolves to a string.
94 |
95 | ```js
96 | const { Pool } = require('pg')
97 | const { RDS } = require('aws-sdk')
98 |
99 | const signerOptions = {
100 | credentials: {
101 | accessKeyId: 'YOUR-ACCESS-KEY',
102 | secretAccessKey: 'YOUR-SECRET-ACCESS-KEY',
103 | },
104 | region: 'us-east-1',
105 | hostname: 'example.aslfdewrlk.us-east-1.rds.amazonaws.com',
106 | port: 5432,
107 | username: 'api-user',
108 | }
109 |
110 | const signer = new RDS.Signer()
111 |
112 | const getPassword = () => signer.getAuthToken(signerOptions)
113 |
114 | const pool = new Pool({
115 | host: signerOptions.hostname,
116 | port: signerOptions.port,
117 | user: signerOptions.username,
118 | database: 'my-db',
119 | password: getPassword,
120 | })
121 | ```
122 |
123 | ### Programmatic Connection to Sockets
124 |
125 | Connections to unix sockets can also be made. This can be useful on distros like Ubuntu, where authentication is managed via the socket connection instead of a password.
126 |
127 | ```js
128 | const { Client } = require('pg');
129 | const client = new Client({
130 | host: '/cloudsql/myproject:zone:mydb',
131 | user: 'username',
132 | password: 'password',
133 | database: 'database_name',
134 | });
135 | ```
136 |
137 | ## Connection URI
138 |
139 | You can initialize both a pool and a client with a connection string URI as well. This is common in environments like Heroku where the database connection string is supplied to your application dyno through an environment variable. Connection string parsing brought to you by [pg-connection-string](https://github.com/brianc/node-postgres/tree/master/packages/pg-connection-string).
140 |
141 | ```js
142 | const { Pool, Client } = require('pg')
143 | const connectionString = 'postgresql://dbuser:secretpassword@database.server.com:3211/mydb'
144 |
145 | const pool = new Pool({
146 | connectionString,
147 | })
148 |
149 | pool.query('SELECT NOW()', (err, res) => {
150 | console.log(err, res)
151 | pool.end()
152 | })
153 |
154 | const client = new Client({
155 | connectionString,
156 | })
157 | client.connect()
158 |
159 | client.query('SELECT NOW()', (err, res) => {
160 | console.log(err, res)
161 | client.end()
162 | })
163 | ```
164 |
--------------------------------------------------------------------------------
/content/features/2-queries.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Queries
3 | slug: /features/queries
4 | ---
5 |
6 | The api for executing queries supports both callbacks and promises. I'll provide an example for both _styles_ here. For the sake of brevity I am using the `client.query` method instead of the `pool.query` method - both methods support the same API. In fact, `pool.query` delegates directly to `client.query` internally.
7 |
8 | ## Text only
9 |
10 | If your query has no parameters you do not need to pass them to the query method:
11 |
12 | ```js
13 | // callback
14 | client.query('SELECT NOW() as now', (err, res) => {
15 | if (err) {
16 | console.log(err.stack)
17 | } else {
18 | console.log(res.rows[0])
19 | }
20 | })
21 |
22 | // promise
23 | client
24 | .query('SELECT NOW() as now')
25 | .then(res => console.log(res.rows[0]))
26 | .catch(e => console.error(e.stack))
27 | ```
28 |
29 | ## Parameterized query
30 |
31 | If you are passing parameters to your queries you will want to avoid string concatenating parameters into the query text directly. This can (and often does) lead to sql injection vulnerabilities. node-postgres supports parameterized queries, passing your query text _unaltered_ as well as your parameters to the PostgreSQL server where the parameters are safely substituted into the query with battle-tested parameter substitution code within the server itself.
32 |
33 | ```js
34 | const text = 'INSERT INTO users(name, email) VALUES($1, $2) RETURNING *'
35 | const values = ['brianc', 'brian.m.carlson@gmail.com']
36 |
37 | // callback
38 | client.query(text, values, (err, res) => {
39 | if (err) {
40 | console.log(err.stack)
41 | } else {
42 | console.log(res.rows[0])
43 | // { name: 'brianc', email: 'brian.m.carlson@gmail.com' }
44 | }
45 | })
46 |
47 | // promise
48 | client
49 | .query(text, values)
50 | .then(res => {
51 | console.log(res.rows[0])
52 | // { name: 'brianc', email: 'brian.m.carlson@gmail.com' }
53 | })
54 | .catch(e => console.error(e.stack))
55 |
56 | // async/await
57 | try {
58 | const res = await client.query(text, values)
59 | console.log(res.rows[0])
60 | // { name: 'brianc', email: 'brian.m.carlson@gmail.com' }
61 | } catch (err) {
62 | console.log(err.stack)
63 | }
64 | ```
65 |
66 |
67 | PostgreSQL does not support parameters for identifiers. If you need to have dynamic database, schema, table, or column names (e.g. in DDL statements) use pg-format package for handling escaping these values to ensure you do not have SQL injection!
68 |
69 |
70 | Parameters passed as the second argument to `query()` will be converted to raw data types using the following rules:
71 |
72 | **null and undefined**
73 |
74 | If parameterizing `null` and `undefined` then both will be converted to `null`.
75 |
76 | **Date**
77 |
78 | Custom conversion to a UTC date string.
79 |
80 | **Buffer**
81 |
82 | Buffer instances are unchanged.
83 |
84 | **Array**
85 |
86 | Converted to a string that describes a Postgres array. Each array item is recursively converted using the rules described here.
87 |
88 | **Object**
89 |
90 | If a parameterized value has the method `toPostgres` then it will be called and its return value will be used in the query.
91 | The signature of `toPostgres` is the following:
92 |
93 | ```
94 | toPostgres (prepareValue: (value) => any): any
95 | ```
96 |
97 | The `prepareValue` function provided can be used to convert nested types to raw data types suitable for the database.
98 |
99 | Otherwise if no `toPostgres` method is defined then `JSON.stringify` is called on the parameterized value.
100 |
101 | **Everything else**
102 |
103 | All other parameterized values will be converted by calling `value.toString` on the value.
104 |
105 | ## Query config object
106 |
107 | `pool.query` and `client.query` both support taking a config object as an argument instead of taking a string and optional array of parameters. The same example above could also be performed like so:
108 |
109 | ```js
110 | const query = {
111 | text: 'INSERT INTO users(name, email) VALUES($1, $2)',
112 | values: ['brianc', 'brian.m.carlson@gmail.com'],
113 | }
114 |
115 | // callback
116 | client.query(query, (err, res) => {
117 | if (err) {
118 | console.log(err.stack)
119 | } else {
120 | console.log(res.rows[0])
121 | }
122 | })
123 |
124 | // promise
125 | client
126 | .query(query)
127 | .then(res => console.log(res.rows[0]))
128 | .catch(e => console.error(e.stack))
129 | ```
130 |
131 | The query config object allows for a few more advanced scenarios:
132 |
133 | ### Prepared statements
134 |
135 | PostgreSQL has the concept of a [prepared statement](https://www.postgresql.org/docs/9.3/static/sql-prepare.html). node-postgres supports this by supplying a `name` parameter to the query config object. If you supply a `name` parameter the query execution plan will be cached on the PostgreSQL server on a **per connection basis**. This means if you use two different connections each will have to parse & plan the query once. node-postgres handles this transparently for you: a client only requests a query to be parsed the first time that particular client has seen that query name:
136 |
137 | ```js
138 | const query = {
139 | // give the query a unique name
140 | name: 'fetch-user',
141 | text: 'SELECT * FROM user WHERE id = $1',
142 | values: [1],
143 | }
144 |
145 | // callback
146 | client.query(query, (err, res) => {
147 | if (err) {
148 | console.log(err.stack)
149 | } else {
150 | console.log(res.rows[0])
151 | }
152 | })
153 |
154 | // promise
155 | client
156 | .query(query)
157 | .then(res => console.log(res.rows[0]))
158 | .catch(e => console.error(e.stack))
159 | ```
160 |
161 | In the above example the first time the client sees a query with the name `'fetch-user'` it will send a 'parse' request to the PostgreSQL server & execute the query as normal. The second time, it will skip the 'parse' request and send the _name_ of the query to the PostgreSQL server.
162 |
163 |
164 |
165 | Be careful not to fall into the trap of premature optimization. Most of your queries will likely not benefit much, if at all, from using prepared statements. This is a somewhat "power user" feature of PostgreSQL that is best used when you know how to use it - namely with very complex queries with lots of joins and advanced operations like union and switch statements. I rarely use this feature in my own apps unless writing complex aggregate queries for reports and I know the reports are going to be executed very frequently.
166 |
167 |
168 |
169 | ### Row mode
170 |
171 | By default node-postgres reads rows and collects them into JavaScript objects with the keys matching the column names and the values matching the corresponding row value for each column. If you do not need or do not want this behavior you can pass `rowMode: 'array'` to a query object. This will inform the result parser to bypass collecting rows into a JavaScript object, and instead will return each row as an array of values.
172 |
173 | ```js
174 | const query = {
175 | text: 'SELECT $1::text as first_name, $2::text as last_name',
176 | values: ['Brian', 'Carlson'],
177 | rowMode: 'array',
178 | }
179 |
180 | // callback
181 | client.query(query, (err, res) => {
182 | if (err) {
183 | console.log(err.stack)
184 | } else {
185 | console.log(res.fields.map(field => field.name)) // ['first_name', 'last_name']
186 | console.log(res.rows[0]) // ['Brian', 'Carlson']
187 | }
188 | })
189 |
190 | // promise
191 | client
192 | .query(query)
193 | .then(res => {
194 | console.log(res.fields.map(field => field.name)) // ['first_name', 'last_name']
195 | console.log(res.rows[0]) // ['Brian', 'Carlson']
196 | })
197 | .catch(e => console.error(e.stack))
198 | ```
199 |
200 | ### Types
201 |
202 | You can pass in a custom set of type parsers to use when parsing the results of a particular query. The `types` property must conform to the [Types](/api/types) API. Here is an example in which every value is returned as a string:
203 |
204 | ```js
205 | const query = {
206 | text: 'SELECT * from some_table',
207 | types: {
208 | getTypeParser: () => val => val,
209 | },
210 | }
211 | ```
212 |
--------------------------------------------------------------------------------
/content/features/3-pooling.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Pooling
3 | slug: /features/pooling
4 | ---
5 |
6 | If you're working on a web application or other software which makes frequent queries you'll want to use a connection pool.
7 |
8 | The easiest and by far most common way to use node-postgres is through a connection pool.
9 |
10 | ## Why?
11 |
12 | - Connecting a new client to the PostgreSQL server requires a handshake which can take 20-30 milliseconds. During this time passwords are negotiated, SSL may be established, and configuration information is shared with the client & server. Incurring this cost _every time_ we want to execute a query would substantially slow down our application.
13 |
14 | - The PostgreSQL server can only handle a [limited number of clients at a time](https://wiki.postgresql.org/wiki/Number_Of_Database_Connections). Depending on the available memory of your PostgreSQL server you may even crash the server if you connect an unbounded number of clients. _note: I have crashed a large production PostgreSQL server instance in RDS by opening new clients and never disconnecting them in a python application long ago. It was not fun._
15 |
16 | - PostgreSQL can only process one query at a time on a single connected client in a first-in first-out manner. If your multi-tenant web application is using only a single connected client all queries among all simultaneous requests will be pipelined and executed serially, one after the other. No good!
17 |
18 | ### Good news
19 |
20 | node-postgres ships with built-in connection pooling via the [pg-pool](/api/pool) module.
21 |
22 | ## Examples
23 |
24 | The client pool allows you to have a reusable pool of clients you can check out, use, and return. You generally want a limited number of these in your application and usually just 1. Creating an unbounded number of pools defeats the purpose of pooling at all.
25 |
26 | ### Checkout, use, and return
27 |
28 | ```js
29 | const { Pool } = require('pg')
30 |
31 | const pool = new Pool()
32 |
33 | // the pool will emit an error on behalf of any idle clients
34 | // it contains if a backend error or network partition happens
35 | pool.on('error', (err, client) => {
36 | console.error('Unexpected error on idle client', err)
37 | process.exit(-1)
38 | })
39 |
40 | // callback - checkout a client
41 | pool.connect((err, client, done) => {
42 | if (err) throw err
43 | client.query('SELECT * FROM users WHERE id = $1', [1], (err, res) => {
44 | done()
45 |
46 | if (err) {
47 | console.log(err.stack)
48 | } else {
49 | console.log(res.rows[0])
50 | }
51 | })
52 | })
53 |
54 | // promise - checkout a client
55 | pool
56 | .connect()
57 | .then(client => {
58 | return client
59 | .query('SELECT * FROM users WHERE id = $1', [1])
60 | .then(res => {
61 | client.release()
62 | console.log(res.rows[0])
63 | })
64 | .catch(err => {
65 | client.release()
66 | console.log(err.stack)
67 | })
68 | })
69 |
70 | // async/await - check out a client
71 | ;(async () => {
72 | const client = await pool.connect()
73 | try {
74 | const res = await client.query('SELECT * FROM users WHERE id = $1', [1])
75 | console.log(res.rows[0])
76 | } catch (err) {
77 | console.log(err.stack)
78 | } finally {
79 | client.release()
80 | }
81 | })()
82 | ```
83 |
84 |
85 |
86 | You must always return the client to the pool if you successfully check it out, regardless of whether or not there was an error with the queries you ran on the client. If you don't check in the client your application will leak them and eventually your pool will be empty forever and all future requests to check out a client from the pool will wait forever.
87 |
88 |
89 |
90 | ### Single query
91 |
92 | If you don't need a transaction or you just need to run a single query, the pool has a convenience method to run a query on any available client in the pool. This is the preferred way to query with node-postgres if you can as it removes the risk of leaking a client.
93 |
94 | ```js
95 | const { Pool } = require('pg')
96 |
97 | const pool = new Pool()
98 |
99 | pool.query('SELECT * FROM users WHERE id = $1', [1], (err, res) => {
100 | if (err) {
101 | throw err
102 | }
103 |
104 | console.log('user:', res.rows[0])
105 | })
106 | ```
107 |
108 | node-postgres also has built-in support for promises throughout all of its async APIs.
109 |
110 | ```js
111 | const { Pool } = require('pg')
112 |
113 | const pool = new Pool()
114 |
115 | pool
116 | .query('SELECT * FROM users WHERE id = $1', [1])
117 | .then(res => console.log('user:', res.rows[0]))
118 | .catch(err =>
119 | setImmediate(() => {
120 | throw err
121 | })
122 | )
123 | ```
124 |
125 | Promises allow us to use `async`/`await` in node v8.0 and above (or earlier if you're using babel).
126 |
127 | ```js
128 | const { Pool } = require('pg')
129 | const pool = new Pool()
130 |
131 | ;(async () => {
132 | const { rows } = await pool.query('SELECT * FROM users WHERE id = $1', [1])
133 | console.log('user:', rows[0])
134 | })().catch(err =>
135 | setImmediate(() => {
136 | throw err
137 | })
138 | )
139 | ```
140 |
141 | ### Shutdown
142 |
143 | To shut down a pool call `pool.end()` on the pool. This will wait for all checked-out clients to be returned and then shut down all the clients and the pool timers.
144 |
145 | ```js
146 | const { Pool } = require('pg')
147 | const pool = new Pool()
148 |
149 | ;(async () => {
150 | console.log('starting async query')
151 | const result = await pool.query('SELECT NOW()')
152 | console.log('async query finished')
153 |
154 | console.log('starting callback query')
155 | pool.query('SELECT NOW()', (err, res) => {
156 | console.log('callback query finished')
157 | })
158 |
159 | console.log('calling end')
160 | await pool.end()
161 | console.log('pool has drained')
162 | })()
163 | ```
164 |
165 | The output of the above will be:
166 |
167 | ```
168 | starting async query
169 | async query finished
170 | starting callback query
171 | calling end
172 | callback query finished
173 | pool has drained
174 | ```
175 |
176 |
177 |
178 | The pool will return errors when attempting to check out a client after you've called `pool.end()` on the pool.
179 |
180 |
181 |
--------------------------------------------------------------------------------
/content/features/4-transactions.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Transactions
3 | slug: /features/transactions
4 | ---
5 |
6 | To execute a transaction with node-postgres you simply execute `BEGIN / COMMIT / ROLLBACK` queries yourself through a client. Because node-postgres strives to be low level and un-opinionated, it doesn't provide any higher level abstractions specifically around transactions.
7 |
8 |
9 | You must use the same client instance for all statements within a transaction. PostgreSQL
10 | isolates a transaction to individual clients. This means if you initialize or use transactions with the{' '}
11 | pool.query method you will have problems. Do not use transactions with
12 | the pool.query method.
13 |
63 | I omitted any additional libraries from the example for clarity, but if you're using callbacks you'd typically be
64 | using a flow control library like async.
65 |
66 |
67 |
68 | ### A pooled client with async/await
69 |
70 | Things are considerably more straightforward if you're using async/await:
71 |
72 | ```js
73 | const { Pool } = require('pg')
74 | const pool = new Pool()
75 | ;(async () => {
76 | // note: we don't try/catch this because if connecting throws an exception
77 | // we don't need to dispose of the client (it will be undefined)
78 | const client = await pool.connect()
79 |
80 | try {
81 | await client.query('BEGIN')
82 | const queryText = 'INSERT INTO users(name) VALUES($1) RETURNING id'
83 | const res = await client.query(queryText, ['brianc'])
84 |
85 | const insertPhotoText = 'INSERT INTO photos(user_id, photo_url) VALUES ($1, $2)'
86 | const insertPhotoValues = [res.rows[0].id, 's3.bucket.foo']
87 | await client.query(insertPhotoText, insertPhotoValues)
88 | await client.query('COMMIT')
89 | } catch (e) {
90 | await client.query('ROLLBACK')
91 | throw e
92 | } finally {
93 | client.release()
94 | }
95 | })().catch(e => console.error(e.stack))
96 | ```
97 |
--------------------------------------------------------------------------------
/content/features/5-types.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Data Types
3 | slug: /features/types
4 | ---
5 |
6 | PostgreSQL has a rich system of supported [data types](https://www.postgresql.org/docs/9.5/static/datatype.html). node-postgres does its best to support the most common data types out of the box and supplies an extensible type parser to allow for custom type serialization and parsing.
7 |
8 | ## strings by default
9 |
10 | node-postgres will convert a database type to a JavaScript string if it doesn't have a registered type parser for the database type. Furthermore, you can send any type to the PostgreSQL server as a string and node-postgres will pass it through without modifying it in any way. To circumvent the type parsing completely do something like the following.
11 |
12 | ```js
13 | const queryText = 'SELECT int_col::text, date_col::text, json_col::text FROM my_table'
14 | const result = await client.query(queryText)
15 |
16 | console.log(result.rows[0]) // will contain the unparsed string value of each column
17 | ```
18 |
19 | ## type parsing examples
20 |
21 | ### uuid + json / jsonb
22 |
23 | There is no data type in JavaScript for a uuid/guid so node-postgres converts a uuid to a string. JavaScript has great support for JSON and node-postgres converts json/jsonb objects directly into their JavaScript object via [`JSON.parse`](https://github.com/brianc/node-pg-types/blob/master/lib/textParsers.js#L193). Likewise sending an object to the PostgreSQL server via a query from node-postgres, node-postgres will call [`JSON.stringify`](https://github.com/brianc/node-postgres/blob/e5f0e5d36a91a72dda93c74388ac890fa42b3be0/lib/utils.js#L47) on your outbound value, automatically converting it to json for the server.
24 |
25 | ```js
26 | const createTableText = `
27 | CREATE EXTENSION IF NOT EXISTS "pgcrypto";
28 |
29 | CREATE TEMP TABLE IF NOT EXISTS users (
30 | id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
31 | data JSONB
32 | );
33 | `
34 | // create our temp table
35 | await client.query(createTableText)
36 |
37 | const newUser = { email: 'brian.m.carlson@gmail.com' }
38 | // create a new user
39 | await client.query('INSERT INTO users(data) VALUES($1)', [newUser])
40 |
41 | const { rows } = await client.query('SELECT * FROM users')
42 |
43 | console.log(rows)
44 | /*
45 | output:
46 | [{
47 | id: 'd70195fd-608e-42dc-b0f5-eee975a621e9',
48 | data: { email: 'brian.m.carlson@gmail.com' }
49 | }]
50 | */
51 | ```
52 |
53 | ### date / timestamp / timestamptz
54 |
55 | node-postgres will convert instances of JavaScript date objects into the expected input value for your PostgreSQL server. Likewise, when reading a `date`, `timestamp`, or `timestamptz` column value back into JavaScript, node-postgres will parse the value into an instance of a JavaScript `Date` object.
56 |
57 | ```js
58 | const createTableText = `
59 | CREATE TEMP TABLE dates(
60 | date_col DATE,
61 | timestamp_col TIMESTAMP,
62 | timestamptz_col TIMESTAMPTZ
63 | );
64 | `
65 | // create our temp table
66 | await client.query(createTableText)
67 |
68 | // insert the current time into it
69 | const now = new Date()
70 | const insertText = 'INSERT INTO dates(date_col, timestamp_col, timestamptz_col) VALUES ($1, $2, $3)'
71 | await client.query(insertText, [now, now, now])
72 |
73 | // read the row back out
74 | const result = await client.query('SELECT * FROM dates')
75 |
76 | console.log(result.rows)
77 | // {
78 | // date_col: 2017-05-29T05:00:00.000Z,
79 | // timestamp_col: 2017-05-29T23:18:13.263Z,
80 | // timestamptz_col: 2017-05-29T23:18:13.263Z
81 | // }
82 | ```
83 |
84 | psql output:
85 |
86 | ```psql
87 | bmc=# select * from dates;
88 | date_col | timestamp_col | timestamptz_col
89 | ------------+-------------------------+----------------------------
90 | 2017-05-29 | 2017-05-29 18:18:13.263 | 2017-05-29 18:18:13.263-05
91 | (1 row)
92 | ```
93 |
94 | node-postgres converts `DATE` and `TIMESTAMP` columns into the **local** time of the node process set at `process.env.TZ`.
95 |
96 | _note: I generally use `TIMESTAMPTZ` when storing dates; otherwise, inserting a time from a process in one timezone and reading it out in a process in another timezone can cause unexpected differences in the time._
97 |
98 |
99 |
100 | Although PostgreSQL supports microseconds in dates, JavaScript only supports dates to the millisecond precision. Keep this in mind when you send dates to and from PostgreSQL from node: your microseconds will be truncated when converting to a JavaScript date object even if they exist in the database. If you need to preserve them, I recommend using a custom type parser.
101 |
102 |
103 |
--------------------------------------------------------------------------------
/content/features/6-ssl.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: SSL
3 | slug: /features/ssl
4 | ---
5 |
6 | node-postgres supports TLS/SSL connections to your PostgreSQL server as long as the server is configured to support it. When instantiating a pool or a client you can provide an `ssl` property on the config object and it will be passed to the constructor for the [node TLSSocket](https://nodejs.org/api/tls.html#tls_class_tls_tlssocket).
7 |
8 | ## Self-signed cert
9 |
10 | Here's an example of a configuration you can use to connect a client or a pool to a PostgreSQL server.
11 |
12 | ```js
13 | const config = {
14 | database: 'database-name',
15 | host: 'host-or-ip',
16 | // this object will be passed to the TLSSocket constructor
17 | ssl: {
18 | rejectUnauthorized: false,
19 | ca: fs.readFileSync('/path/to/server-certificates/root.crt').toString(),
20 | key: fs.readFileSync('/path/to/client-key/postgresql.key').toString(),
21 | cert: fs.readFileSync('/path/to/client-certificates/postgresql.crt').toString(),
22 | },
23 | }
24 |
25 | import { Client, Pool } from 'pg'
26 |
27 | const client = new Client(config)
28 | client.connect(err => {
29 | if (err) {
30 | console.error('error connecting', err.stack)
31 | } else {
32 | console.log('connected')
33 | client.end()
34 | }
35 | })
36 |
37 | const pool = new Pool(config)
38 | pool
39 | .connect()
40 | .then(client => {
41 | console.log('connected')
42 | client.release()
43 | })
44 | .catch(err => console.error('error connecting', err.stack))
45 | .then(() => pool.end())
46 | ```
47 |
48 | ## Usage with `connectionString`
49 |
50 | If you plan to use a combination of a database connection string from the environment and SSL settings in the config object directly, then you must avoid including any of `sslcert`, `sslkey`, `sslrootcert`, or `sslmode` in the connection string. If any of these options are used then the `ssl` object is replaced and any additional options provided there will be lost.
51 |
52 | ```js
53 | const config = {
54 | connectionString: 'postgres://user:password@host:port/db?sslmode=require',
55 | // Beware! The ssl object is overwritten when parsing the connectionString
56 | ssl: {
57 | rejectUnauthorized: false,
58 | ca: fs.readFileSync('/path/to/server-certificates/root.crt').toString(),
59 | },
60 | }
61 | ```
62 |
--------------------------------------------------------------------------------
/content/features/7-native.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Native Bindings
3 | slug: /features/native
4 | metaTitle: bar
5 | ---
6 |
7 | Native bindings between node.js & [libpq](https://www.postgresql.org/docs/9.5/static/libpq.html) are provided by the [node-pg-native](https://github.com/brianc/node-pg-native) package. node-postgres can consume this package & use the native bindings to access the PostgreSQL server while giving you the same interface that is used with the JavaScript version of the library.
8 |
9 | To use the native bindings first you'll need to install them:
10 |
11 | ```sh
12 | $ npm install pg pg-native
13 | ```
14 |
15 | Once `pg-native` is installed instead of requiring a `Client` or `Pool` constructor from `pg` you do the following:
16 |
17 | ```js
18 | const { Client, Pool } = require('pg').native
19 | ```
20 |
21 | When you access the `.native` property on `require('pg')` it will automatically require the `pg-native` package and wrap it in the same API.
22 |
23 |
24 | Care has been taken to normalize between the two, but there might still be edge cases where things behave subtly differently due to the nature of using libpq over handling the binary protocol directly in JavaScript, so it's recommended you choose to either use the JavaScript driver or the native bindings both in development and production. For what it's worth: I use the pure JavaScript driver because the JavaScript driver is more portable (doesn't need a compiler), and the pure JavaScript driver is plenty fast.
25 |
26 |
27 | Some of the modules using advanced features of PostgreSQL such as [pg-query-stream](https://github.com/brianc/node-pg-query-stream), [pg-cursor](https://github.com/brianc/node-pg-cursor), and [pg-copy-streams](https://github.com/brianc/node-pg-copy-streams) need to operate directly on the binary stream and therefore are incompatible with the native bindings.
28 |
--------------------------------------------------------------------------------
/content/guides.mdx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/brianc/node-postgres-docs/1e4fed04ff2f864968994fbc999b679b47cdf606/content/guides.mdx
--------------------------------------------------------------------------------
/content/guides/1-project-structure.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Suggested Project Structure
3 | slug: /guides/project-structure
4 | ---
5 |
6 | Whenever I am writing a project & using node-postgres I like to create a file within it and make all interactions with the database go through this file. This serves a few purposes:
7 |
8 | - Allows my project to adjust to any changes to the node-postgres API without having to trace down all the places I directly use node-postgres in my application.
9 | - Allows me to have a single place to put logging and diagnostics around my database.
10 | - Allows me to make custom extensions to my database access code & share it throughout the project.
11 | - Allows a single place to bootstrap & configure the database.
12 |
13 | ## example
14 |
15 | _note: I am using callbacks in this example to introduce as few concepts as possible at a time, but the same is doable with promises or async/await_
16 |
17 | The location doesn't really matter - I've found it usually ends up being somewhat app specific and in line with whatever folder structure conventions you're using. For this example I'll use an express app structured like so:
18 |
19 | ```
20 | - app.js
21 | - index.js
22 | - routes/
23 | - index.js
24 | - photos.js
25 | - user.js
26 | - db/
27 | - index.js <--- this is where I put data access code
28 | ```
29 |
30 | Typically I'll start out my `db/index.js` file like so:
31 |
32 | ```js
33 | const { Pool } = require('pg')
34 |
35 | const pool = new Pool()
36 |
37 | module.exports = {
38 | query: (text, params, callback) => {
39 | return pool.query(text, params, callback)
40 | },
41 | }
42 | ```
43 |
44 | That's it. But now everywhere else in my application instead of requiring `pg` directly, I'll require this file. Here's an example of a route within `routes/user.js`:
45 |
46 | ```js
47 | // notice here I'm requiring my database adapter file
48 | // and not requiring node-postgres directly
49 | const db = require('../db')
50 |
51 | app.get('/:id', (req, res, next) => {
52 | db.query('SELECT * FROM users WHERE id = $1', [req.params.id], (err, result) => {
53 | if (err) {
54 | return next(err)
55 | }
56 | res.send(result.rows[0])
57 | })
58 | })
59 |
60 | // ... many other routes in this file
61 | ```
62 |
63 | Imagine we have lots of routes scattered throughout many files under our `routes/` directory. We now want to go back and log every single query that's executed, how long it took, and the number of rows it returned. If we had required node-postgres directly in every route file we'd have to go edit every single route - that would take forever & be really error prone! But thankfully we put our data access into `db/index.js`. Let's go add some logging:
64 |
65 | ```js
66 | const { Pool } = require('pg')
67 |
68 | const pool = new Pool()
69 |
70 | module.exports = {
71 | query: (text, params, callback) => {
72 | const start = Date.now()
73 | return pool.query(text, params, (err, res) => {
74 | const duration = Date.now() - start
75 | console.log('executed query', { text, duration, rows: res.rowCount })
76 | callback(err, res)
77 | })
78 | },
79 | }
80 | ```
81 |
82 | That was pretty quick! And now all of our queries everywhere in our application are being logged.
83 |
84 | _note: I didn't log the query parameters. Depending on your application you might be storing encrypted passwords or other sensitive information in your database. If you log your query parameters you might accidentally log sensitive information. Every app is different though so do what suits you best!_
85 |
86 | Now what if we need to check out a client from the pool to run several queries in a row in a transaction? We can add another method to our `db/index.js` file when we need to do this:
87 |
88 | ```js
89 | const { Pool } = require('pg')
90 |
91 | const pool = new Pool()
92 |
93 | module.exports = {
94 | query: (text, params, callback) => {
95 | const start = Date.now()
96 | return pool.query(text, params, (err, res) => {
97 | const duration = Date.now() - start
98 | console.log('executed query', { text, duration, rows: res.rowCount })
99 | callback(err, res)
100 | })
101 | },
102 | getClient: (callback) => {
103 | pool.connect((err, client, done) => {
104 | callback(err, client, done)
105 | })
106 | }
107 | }
108 | ```
109 |
110 | Okay. Great - the simplest thing that could possibly work. It seems like one of our routes that checks out a client to run a transaction is forgetting to call `done` in some situation! Oh no! We are leaking a client & have hundreds of these routes to go audit. Good thing we have all our client access going through this single file. Let's add some deeper diagnostic information here to help us track down where the client leak is happening.
111 |
112 | ```js
113 | const { Pool } = require('pg')
114 |
115 | const pool = new Pool()
116 |
117 | module.exports = {
118 | query: (text, params, callback) => {
119 | const start = Date.now()
120 | return pool.query(text, params, (err, res) => {
121 | const duration = Date.now() - start
122 | console.log('executed query', { text, duration, rows: res.rowCount })
123 | callback(err, res)
124 | })
125 | },
126 | getClient: (callback) => {
127 | pool.connect((err, client, done) => {
128 | const query = client.query
129 |
130 | // monkey patch the query method to keep track of the last query executed
131 | client.query = (...args) => {
132 | client.lastQuery = args
133 | return query.apply(client, args)
134 | }
135 |
136 | // set a timeout of 5 seconds, after which we will log this client's last query
137 | const timeout = setTimeout(() => {
138 | console.error('A client has been checked out for more than 5 seconds!')
139 | console.error(`The last executed query on this client was: ${client.lastQuery}`)
140 | }, 5000)
141 |
142 | const release = (err) => {
143 | // call the actual 'done' method, returning this client to the pool
144 | done(err)
145 |
146 | // clear our timeout
147 | clearTimeout(timeout)
148 |
149 | // set the query method back to its old un-monkey-patched version
150 | client.query = query
151 | }
152 |
153 | callback(err, client, release)
154 | })
155 | }
156 | }
157 | ```
158 |
159 | Using async/await:
160 |
161 | ```
162 | module.exports = {
163 | async query(text, params) {
164 | const start = Date.now()
165 | const res = await pool.query(text, params)
166 | const duration = Date.now() - start
167 | console.log('executed query', { text, duration, rows: res.rowCount })
168 | return res
169 | },
170 |
171 | async getClient() {
172 | const client = await pool.connect()
173 | const query = client.query
174 | const release = client.release
175 | // set a timeout of 5 seconds, after which we will log this client's last query
176 | const timeout = setTimeout(() => {
177 | console.error('A client has been checked out for more than 5 seconds!')
178 | console.error(`The last executed query on this client was: ${client.lastQuery}`)
179 | }, 5000)
180 | // monkey patch the query method to keep track of the last query executed
181 | client.query = (...args) => {
182 | client.lastQuery = args
183 | return query.apply(client, args)
184 | }
185 | client.release = () => {
186 | // clear our timeout
187 | clearTimeout(timeout)
188 | // set the methods back to their old un-monkey-patched version
189 | client.query = query
190 | client.release = release
191 | return release.apply(client)
192 | }
193 | return client
194 | }
195 | }
196 | ```
197 | That should hopefully give us enough diagnostic information to track down any leaks.
198 |
--------------------------------------------------------------------------------
/content/guides/2-async-express.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Express with async/await
3 | slug: /guides/async-express
4 | ---
5 |
6 | My preferred way to use node-postgres (and all async code in node.js) is with `async/await`. I find it makes reasoning about control-flow easier and allows me to write more concise and maintainable code.
7 |
8 | This is how I typically structure express web-applications with node-postgres to use `async/await`:
9 |
10 | ```
11 | - app.js
12 | - index.js
13 | - routes/
14 | - index.js
15 | - photos.js
16 | - user.js
17 | - db/
18 | - index.js <--- this is where I put data access code
19 | ```
20 |
21 | That's the same structure I used in the [project structure](/guides/project-structure) example.
22 |
23 | My `db/index.js` file usually starts out like this:
24 |
25 | ```js
26 | const { Pool } = require('pg')
27 |
28 | const pool = new Pool()
29 |
30 | module.exports = {
31 | query: (text, params) => pool.query(text, params),
32 | }
33 | ```
34 |
35 | Then I will install [express-promise-router](https://www.npmjs.com/package/express-promise-router) and use it to define my routes. Here is my `routes/user.js` file:
36 |
37 | ```js
38 | const Router = require('express-promise-router')
39 |
40 | const db = require('../db')
41 |
42 | // create a new express-promise-router
43 | // this has the same API as the normal express router except
44 | // it allows you to use async functions as route handlers
45 | const router = new Router()
46 |
47 | // export our router to be mounted by the parent application
48 | module.exports = router
49 |
50 | router.get('/:id', async (req, res) => {
51 | const { id } = req.params
52 | const { rows } = await db.query('SELECT * FROM users WHERE id = $1', [id])
53 | res.send(rows[0])
54 | })
55 | ```
56 |
57 | Then in my `routes/index.js` file I'll have something like this which mounts each individual router into the main application:
58 |
59 | ```js
60 | // ./routes/index.js
61 | const users = require('./user')
62 | const photos = require('./photos')
63 |
64 | module.exports = app => {
65 | app.use('/users', users)
66 | app.use('/photos', photos)
67 | // etc..
68 | }
69 | ```
70 |
71 | And finally in my `app.js` file where I bootstrap express I will have my `routes/index.js` file mount all my routes. The routes know they're using async functions but because of express-promise-router the main express app doesn't know and doesn't care!
72 |
73 | ```js
74 | // ./app.js
75 | const express = require('express')
76 | const mountRoutes = require('./routes')
77 |
78 | const app = express()
79 | mountRoutes(app)
80 |
81 | // ... more express setup stuff can follow
82 | ```
83 |
84 | Now you've got `async/await`, node-postgres, and express all working together!
85 |
--------------------------------------------------------------------------------
/content/guides/3-upgrading.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Upgrading
3 | slug: /guides/upgrading
4 | ---
5 |
6 | # Upgrading to 8.0
7 |
8 | node-postgres at 8.0 introduces a breaking change to ssl-verified connections. If you connect with ssl and use
9 |
10 | ```
11 | const client = new Client({ ssl: true })
12 | ```
13 |
14 | and the server's SSL certificate is self-signed, connections will fail as of node-postgres 8.0. To keep the existing behavior, modify the invocation to
15 |
16 | ```
17 | const client = new Client({ ssl: { rejectUnauthorized: false } })
18 | ```
19 |
20 | The rest of the changes are relatively minor and unlikely to cause issues; see [the announcement](/announcements#2020-02-25) for full details.
21 |
22 | # Upgrading to 7.0
23 |
24 | node-postgres at 7.0 introduces somewhat significant breaking changes to the public API.
25 |
26 | ## node version support
27 |
28 | Starting with `pg@7.0` the earliest version of node supported will be `node@4.x LTS`. Support for `node@0.12.x` and `node@0.10.x` is dropped, and the module won't work as it relies on new es6 features not available in older versions of node.
29 |
30 | ## pg singleton
31 |
32 | In the past there was a singleton pool manager attached to the root `pg` object in the package. This singleton could be used to provision connection pools automatically by calling `pg.connect`. This API caused a lot of confusion for users. It also introduced an opaque module-managed singleton which was difficult to reason about, debug, error-prone, and inflexible. Starting in pg@6.0 the methods' documentation was removed, and starting in pg@6.3 the methods were deprecated with a warning message.
33 |
34 | If your application still relies on these they will be _gone_ in `pg@7.0`. In order to migrate you can do the following:
35 |
36 | ```js
37 | // old way, deprecated in 6.3.0:
38 |
39 | // connection using global singleton
40 | pg.connect(function(err, client, done) {
41 | client.query(/* etc, etc */)
42 | done()
43 | })
44 |
45 | // singleton pool shutdown
46 | pg.end()
47 |
48 | // ------------------
49 |
50 | // new way, available since 6.0.0:
51 |
52 | // create a pool
53 | var pool = new pg.Pool()
54 |
55 | // connection using created pool
56 | pool.connect(function(err, client, done) {
57 | client.query(/* etc, etc */)
58 | done()
59 | })
60 |
61 | // pool shutdown
62 | pool.end()
63 | ```
64 |
65 | node-postgres ships with a built-in pool object provided by [pg-pool](https://github.com/brianc/node-pg-pool) which is already used internally by the `pg.connect` and `pg.end` methods. Migrating to a user-managed pool (or set of pools) allows you to more directly control their setup and life-cycle.
66 |
67 | ## client.query(...).on
68 |
69 | Before `pg@7.0` the `client.query` method would _always_ return an instance of a query. The query instance was an event emitter, accepted a callback, and was also a promise. A few problems...
70 |
71 | - too many flow control options on a single object was confusing
72 | - event emitter `.on('error')` does not mix well with promise `.catch`
73 | - the `row` event was a common source of errors: it looks like a stream but has no support for back-pressure, misleading users into trying to pipe results or handling them in the event emitter for a desired performance gain.
74 | - error handling with a `.done` and `.error` emitter pair for every query is cumbersome and returning the emitter from `client.query` indicated this sort of pattern may be encouraged: it is not.
75 |
76 | Starting with `pg@7.0` the return value `client.query` will be dependent on what you pass to the method: I think this aligns more with how most node libraries handle the callback/promise combo, and I hope it will make the "just works" :tm: feeling better while reducing surface area and surprises around event emitter / callback combos.
77 |
78 | ### client.query with a callback
79 |
80 | ```js
81 | const query = client.query('SELECT NOW()', (err, res) => {
82 | /* etc, etc */
83 | })
84 | assert(query === undefined) // true
85 | ```
86 |
87 | If you pass a callback to the method `client.query` will return `undefined`. This limits flow control to the callback which is in-line with almost all of node's core APIs.
88 |
89 | ### client.query without a callback
90 |
91 | ```js
92 | const query = client.query('SELECT NOW()')
93 | assert(query instanceof Promise) // true
94 | assert(query.on === undefined) // true
95 | query.then((res) => /* etc, etc */)
96 | ```
97 |
98 | If you do **not** pass a callback `client.query` will return an instance of a `Promise`. This will **not** be a query instance and will not be an event emitter. This is in line with how most promise-based APIs work in node.
99 |
100 | ### client.query(Submittable)
101 |
102 | `client.query` has always accepted any object that has a `.submit` method on it. In this scenario the client calls `.submit` on the object, delegating execution responsibility to it. In this situation the client also **returns the instance it was passed**. This is how [pg-cursor](https://github.com/brianc/node-pg-cursor) and [pg-query-stream](https://github.com/brianc/node-pg-query-stream) work. So, if you need the event emitter functionality on your queries for some reason, it is still possible because `Query` is an instance of `Submittable`:
103 |
104 | ```js
105 | const { Client, Query } = require('pg')
106 | const query = client.query(new Query('SELECT NOW()'))
107 | query.on('row', row => {})
108 | query.on('end', res => {})
109 | query.on('error', res => {})
110 | ```
111 |
112 | `Query` is considered a public, documented part of the API of node-postgres and this form will be supported indefinitely.
113 |
114 | _note: I have been building apps with node-postgres for almost 7 years. In that time I have never used the event emitter API as the primary way to execute queries. I used to use callbacks and now I use async/await. If you need to stream results I highly recommend you use [pg-cursor](https://github.com/brianc/node-pg-cursor) or [pg-query-stream](https://github.com/brianc/node-pg-query-stream) and **not** the query object as an event emitter._
115 |
--------------------------------------------------------------------------------
/content/welcome.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Welcome
3 | slug: /
4 | ---
5 |
6 | node-postgres is a collection of node.js modules for interfacing with your PostgreSQL database. It has support for callbacks, promises, async/await, connection pooling, prepared statements, cursors, streaming results, C/C++ bindings, rich type parsing, and more! Just like PostgreSQL itself there are a lot of features: this documentation aims to get you up and running quickly and in the right direction. It also tries to provide guides for more advanced & edge-case topics allowing you to tap into the full power of PostgreSQL from node.js.
7 |
8 | ## Install
9 |
10 | ```bash
11 | $ npm install pg
12 | ```
13 |
14 | ## Supporters
15 |
16 | node-postgres continued development and support is made possible by the many [supporters](https://github.com/brianc/node-postgres/blob/master/SPONSORS.md) with a special thanks to our featured supporters:
17 |
18 |