├── .gitignore ├── .npmignore ├── .travis.yml ├── Makefile ├── README.md ├── bench ├── copy-from-memory.js ├── copy-from.js └── copy-to.js ├── copy-both.js ├── copy-from.js ├── copy-to.js ├── eslint.config.js ├── index.js ├── message-formats.js ├── obuf.js ├── package-lock.json ├── package.json └── test ├── binary.js ├── copy-both.js ├── copy-from.js ├── copy-to.js └── obuf.js /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | .gitignore 2 | .travis.yml 3 | bench/ 4 | test/ 5 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: node_js 2 | 3 | jobs: 4 | include: 5 | - node_js: '10' 6 | addons: 7 | postgresql: '9.6' 8 | env: 9 | - PGVERSION=9.6 PGUSER=postgres PGDATABASE=postgres 10 | - node_js: '12' 11 | addons: 12 | postgresql: '9.6' 13 | env: 14 | - PGVERSION=9.6 PGUSER=postgres PGDATABASE=postgres 15 | - node_js: '14' 16 | addons: 17 | postgresql: '9.6' 18 | env: 19 | - PGVERSION=9.6 PGUSER=postgres PGDATABASE=postgres 20 | - node_js: '16' 21 | addons: 22 | postgresql: '9.6' 23 | env: 24 | - PGVERSION=9.6 PGUSER=postgres PGDATABASE=postgres 25 | - node_js: '10' 26 | addons: 27 | postgresql: '11' 28 | apt: 29 | packages: 30 | - postgresql-11 31 | env: 32 | - PGVERSION=11 PGUSER=postgres PGDATABASE=postgres 33 | - node_js: '10' 34 | addons: 35 | postgresql: '12' 36 | apt: 37 | packages: 38 | - postgresql-12 39 | env: 40 | - PGVERSION=12 PGUSER=postgres PGDATABASE=postgres 41 | 42 | services: 43 | - postgresql 44 | 45 | before_install: 46 | - npm install npm --global 47 | 48 | env: 49 | - PGUSER=postgres PGDATABASE=postgres 50 | 51 | before_script: 52 | - sudo bash -c "sed -i 's/^/#/' /etc/postgresql/$PGVERSION/main/pg_hba.conf" 53 | - sudo bash -c "echo 'host all all 127.0.0.1/32 trust' >> /etc/postgresql/$PGVERSION/main/pg_hba.conf" 54 | - sudo bash -c "echo 'host replication all 127.0.0.1/32 trust' >> /etc/postgresql/$PGVERSION/main/pg_hba.conf" 55 | - sudo bash -c "sed -i 's/port = 5433/port = 5432/g' /etc/postgresql/$PGVERSION/main/postgresql.conf" 56 | - sudo bash -c "sed -i 's/#wal_level = minimal/wal_level = logical/g' /etc/postgresql/$PGVERSION/main/postgresql.conf" 57 | - sudo bash -c "sed -i 's/#wal_level = replica/wal_level = logical/g' /etc/postgresql/$PGVERSION/main/postgresql.conf" 58 | - sudo bash -c "sed -i 's/#max_wal_senders = 0/max_wal_senders = 10/g' /etc/postgresql/$PGVERSION/main/postgresql.conf" 59 | - sudo bash -c "sed -i 's/#max_replication_slots = 0/max_replication_slots = 10/g' /etc/postgresql/$PGVERSION/main/postgresql.conf" 60 | - sudo bash -c "cat /etc/postgresql/$PGVERSION/main/pg_hba.conf" 61 | - sudo bash -c "cat /etc/postgresql/$PGVERSION/main/postgresql.conf" 62 | - sudo systemctl restart postgresql@$PGVERSION-main 63 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: publish-patch test 2 | 3 | test: 4 | npm test 5 | 6 | patch: test 7 | npm version patch -m "Bump version" 8 | git push origin master --tags 9 | npm publish 10 | 11 | minor: test 12 | npm version minor -m "Bump version" 13 | git push origin master --tags 14 | npm publish 15 | 16 | 
major: test 17 | npm version major -m "Bump version" 18 | git push origin master --tags 19 | npm publish 20 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## pg-copy-streams 2 | 3 | [![Build Status](https://travis-ci.org/brianc/node-pg-copy-streams.svg)](https://travis-ci.org/brianc/node-pg-copy-streams) 4 | 5 | COPY FROM / COPY TO for node-postgres. Stream from one database to another, and stuff. 6 | 7 | ## how? what? huh? 8 | 9 | Did you know that PostgreSQL supports streaming data directly into and out of a table? 10 | This means you can take your favorite CSV or TSV file and pipe it directly into an existing PostgreSQL table. 11 | 12 | PostgreSQL supports text, csv/tsv and binary data. If you have data in another format (say for example JSON), convert it to one of the supported formats and pipe it directly into an existing PostgreSQL table! 13 | 14 | You can also take a table and pipe it directly to a file, another database, stdout, even to `/dev/null` if you're crazy! 15 | 16 | What this module gives you is a [Readable](http://nodejs.org/api/stream.html#stream_class_stream_readable) or [Writable](http://nodejs.org/api/stream.html#stream_class_stream_writable) stream directly into/out of a table in your database. 17 | This mode of interfacing with your table is _very fast_ and _very brittle_. You are responsible for properly encoding and ordering all your columns. If anything is out of place PostgreSQL will send you back an error. The stream works within a transaction so you won't leave things in a 1/2 borked state, but it's still good to be aware of. 18 | 19 | If you're not familiar with the feature (I wasn't either) you can read this for some good help: https://www.postgresql.org/docs/current/sql-copy.html 20 | 21 | ## examples 22 | 23 | ### pipe from a table to stdout (copyOut - copy-to) 24 | 25 | ```js 26 | var { Pool } = require('pg') 27 | var { to: copyTo } = require('pg-copy-streams') 28 | 29 | var pool = new Pool() 30 | 31 | pool.connect(function (err, client, done) { 32 | var stream = client.query(copyTo('COPY my_table TO STDOUT')) 33 | stream.pipe(process.stdout) 34 | stream.on('end', done) 35 | stream.on('error', done) 36 | }) 37 | 38 | 39 | // async/await 40 | import { pipeline } from 'node:stream/promises' 41 | import pg from 'pg' 42 | import { to as copyTo } from 'pg-copy-streams' 43 | 44 | const pool = new pg.Pool() 45 | const client = await pool.connect() 46 | try { 47 | const stream = client.query(copyTo('COPY my_table TO STDOUT')) 48 | await pipeline(stream, process.stdout) 49 | } finally { 50 | client.release() 51 | } 52 | await pool.end() 53 | ``` 54 | 55 | _Important_: When copying data out of postgresql, postgresql will chunk the data on 64kB boundaries. You should expect rows to be cut across the boundaries of these chunks (the end of a chunk will not always match the end of a row). If you are piping the csv output of postgres into a file, this might not be a problem. But if you are trying to analyse the csv output on-the-fly, you need to make sure that you correctly reassemble the lines of the csv output across the chunk boundaries. We are not recommending any specific streaming csv parser but `csv-parser` and `csv-parse` seem to correctly handle this.
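Below is a minimal sketch of this pattern (not taken from the module's test suite): it pipes a CSV-formatted COPY TO stream into `csv-parser`, which buffers partial lines internally so that rows split across chunk boundaries come out whole. The table name `my_table` and the `WITH (FORMAT csv, HEADER true)` options are assumptions made for the example.

```js
// Minimal sketch, assuming a table named my_table and the csv-parser package.
// csv-parser reassembles csv records that were split across the 64kB chunks
// pushed by copy-to, so the consumer only ever sees complete rows.
import { pipeline } from 'node:stream/promises'
import csv from 'csv-parser'
import pg from 'pg'
import { to as copyTo } from 'pg-copy-streams'

const pool = new pg.Pool()
const client = await pool.connect()
try {
  const stream = client.query(copyTo('COPY my_table TO STDOUT WITH (FORMAT csv, HEADER true)'))
  await pipeline(stream, csv(), async function (rows) {
    for await (const row of rows) {
      console.log(row) // one object per csv record, keyed by the header line
    }
  })
} finally {
  client.release()
}
await pool.end()
```

`csv-parse` can be dropped into the same position; the important point is that the parser, not your own code, is the one buffering partial lines across chunk boundaries.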
56 | 57 | ### pipe from a file to table (copyIn - copy-from) 58 | 59 | ```js 60 | var fs = require('node:fs') 61 | var { Pool } = require('pg') 62 | var { from: copyFrom } = require('pg-copy-streams') 63 | 64 | var pool = new Pool() 65 | 66 | pool.connect(function (err, client, done) { 67 | var stream = client.query(copyFrom('COPY my_table FROM STDIN')) 68 | var fileStream = fs.createReadStream('some_file.tsv') 69 | fileStream.on('error', done) 70 | stream.on('error', done) 71 | stream.on('finish', done) 72 | fileStream.pipe(stream) 73 | }) 74 | 75 | 76 | // async/await 77 | import { pipeline } from 'node:stream/promises' 78 | import fs from 'node:fs' 79 | import pg from 'pg' 80 | import { from as copyFrom } from 'pg-copy-streams' 81 | 82 | const pool = new pg.Pool() 83 | const client = await pool.connect() 84 | try { 85 | const ingestStream = client.query(copyFrom('COPY my_table FROM STDIN')) 86 | const sourceStream = fs.createReadStream('some_file.tsv') 87 | await pipeline(sourceStream, ingestStream) 88 | } finally { 89 | client.release() 90 | } 91 | await pool.end() 92 | ``` 93 | 94 | _Note_: In versions prior to 4.0.0, when copying data into postgresql, it was necessary to wait for the 'end' event of `pg-copy-streams.from` to correctly detect the end of the COPY operation. This was necessary due to the internals of the module, but it was non-standard. This is not true for versions including and after 4.0.0. The end of the COPY operation must now be detected via the standard 'finish' event. **Users of 4.0.0+ should not wait for the 'end' event because it is not fired anymore.** 95 | 96 | In versions 6.0.0+, if you have not yet finished ingesting data into a copyFrom stream and you want to ask postgresql to abort the process, you can call `destroy()` on the stream (or let `pipeline` do it for you if it detects an error in the pipeline). This will send a CopyFail message to the backend that will roll back the operation. Please take into account that this will not revert the operation if the CopyDone message has already been sent and is being processed by the backend. 97 | 98 | ### duplex stream for replication / logical decoding scenarios (copyBoth - copy-both) 99 | 100 | This is a more advanced topic. 101 | Check the test/copy-both.js file for an example of how this can be used. 102 | 103 | _Note regarding logical decoding_: Parsers for logical decoding scenarios are easier to write when copy-both.js pushes chunks that are aligned on the copyData protocol frames. This is not the default mode of operation of copy-both.js, in order to increase streaming performance. If you need the pushed chunks to be aligned on copyData frames, use the `alignOnCopyDataFrame: true` option. 104 | 105 | 106 | ## install 107 | 108 | ```sh 109 | $ npm install pg-copy-streams 110 | ``` 111 | 112 | ## notice 113 | 114 | This module **only** works with the pure JavaScript bindings. If you're using `require('pg').native` please make sure to use normal `require('pg')` or `require('pg.js')` when you're using copy streams. 115 | 116 | Before you set out on this magical piping journey, you _really_ should read this: http://www.postgresql.org/docs/current/static/sql-copy.html, and you might want to take a look at the [tests](https://github.com/brianc/node-pg-copy-streams/tree/master/test) to get an idea of how things work. 117 | 118 | Take note of the following warning in the PostgreSQL documentation: 119 | 120 | > COPY stops operation at the first error.
This should not lead to problems in the event of a COPY TO, but the target table will already have received earlier rows in a COPY FROM. These rows will not be visible or accessible, but they still occupy disk space. This might amount to a considerable amount of wasted disk space if the failure happened well into a large copy operation. You might wish to invoke VACUUM to recover the wasted space. 121 | 122 | ## benchmarks 123 | 124 | The COPY command is commonly used to move huge sets of data. This can put some pressure on the node.js event loop, on CPU usage or on memory usage. 125 | There is a bench/ directory in the repository where benchmark scripts are stored. If you have performance issues with `pg-copy-streams`, do not hesitate to write a new benchmark that highlights your issue. Please avoid committing huge files (such PRs won't be accepted) and find other ways to generate huge datasets. 126 | 127 | If you have a local instance of postgres on your machine, you can start a benchmark for example with 128 | 129 | ```sh 130 | $ cd bench 131 | $ PGPORT=5432 PGDATABASE=postgres node copy-from.js 132 | ``` 133 | 134 | ## tests 135 | 136 | In order to launch the test suite, you need to have a local instance of postgres running on your machine. 137 | 138 | Since version 5.1.0 and the implementation of copy-both.js for logical decoding scenarios, your local postgres instance will need to be configured to accept replication scenarios: 139 | 140 | ``` 141 | postgresql.conf 142 | wal_level = logical 143 | max_wal_senders > 0 144 | max_replication_slots > 0 145 | 146 | pg_hba.conf 147 | make sure your user can connect using the replication mode 148 | ``` 149 | 150 | ```sh 151 | $ PGPORT=5432 PGDATABASE=postgres make test 152 | ``` 153 | 154 | ## contributing 155 | 156 | Instead of adding a bunch more code to the already bloated [node-postgres](https://github.com/brianc/node-postgres) I am trying to make the internals extensible and work on adding edge-case features as 3rd party modules. 157 | This is one of those. 158 | 159 | Please, if you have any issues with this, open an issue. 160 | 161 | Better yet, submit a pull request. I _love_ pull requests. 162 | 163 | Generally how I work is if you submit a few pull requests and you're interested I'll make you a contributor and give you full access to everything. 164 | 165 | Since this isn't a module with tons of installs and dependent modules I hope we can work together on this to iterate faster here and make something really useful. 166 | 167 | ## changelog 168 | 169 | ### version 7.0.0 - published 2025-05-27 170 | 171 | No new features. This is a maintenance release.
172 | The major version bump was decided because the 'obuf' dependency was internalized and slightly modified to pass prettier, pass eslint and replace deprecated `new Buffer(size)` calls with `Buffer.alloc(size)`. While this is not a revolution, it is worth a major version (the previous version was published 2 years ago). 173 | 174 | - "Forked" the obuf project into the repo in order to keep maintaining it internally after no response from the maintainer 175 | https://github.com/indutny/offset-buffer - License MIT - By Fedor Indutny, 2015 176 | open issue asking for transfer of npm ownership of the original obuf project - https://github.com/indutny/offset-buffer/issues/8#issuecomment-2817245503 177 | This library has an API that is elegant for protocol/buffer handling, which is why it was chosen originally 178 | Original tests were also added to the test suite of `pg-copy-streams` 179 | - updated dev dependencies: mocha, eslint, csv-parse, csv-parser, pg 180 | 181 | ### version 6.0.6 - published 2023-07-17 182 | 183 | - copy-to: fix rowCount in BINARY mode. The file trailer was incorrectly counted as a row 184 | 185 | ### version 6.0.5 - published 2023-03-07 186 | 187 | - improve esm/cjs named exports compatibility for easier async/await usage 188 | 189 | ### version 6.0.4 - published 2022-09-05 190 | ### version 6.0.3 - published 2022-09-05 191 | 192 | - copy-from: fix issue #136 when the _writev mechanism was triggered with a very large number of chunks 193 | 194 | ### version 6.0.2 - published 2021-09-13 195 | 196 | - copy-from: fix interaction with `pg` optional timeout mechanism 197 | 198 | ### version 6.0.1 - published 2021-08-23 199 | 200 | - Bugfix for node 14+. The order of _destroy / _final calls is different before and after node 14, which caused an issue with the COPY FROM _destroy implementation that appeared in version 6.0.0. 201 | 202 | ### version 6.0.0 - published 2021-08-20 203 | 204 | - Implement _destroy in COPY FROM operations. `pipeline` will automatically send a CopyFail message to the backend if a source triggers an error. cf #115 205 | 206 | This version is a major change because some users of the library may have been using other techniques in order to ask the backend to roll back the current operation. 207 | 208 | ### version 5.1.1 - published 2020-07-21 209 | 210 | Bugfix release handling a corner case when an empty stream is piped into copy-from 211 | 212 | - fix copy-from.js handling of an empty source 213 | 214 | ### version 5.1.0 - published 2020-06-07 215 | 216 | This version adds a Duplex stream implementation of the PostgreSQL copyBoth mode described on https://www.postgresql.org/docs/9.6/protocol-flow.html. This mode opens the possibility of dealing with replication and logical decoding scenarios. 217 | 218 | - implement copy-both.js 219 | 220 | ### version 5.0.0 - published 2020-05-14 221 | 222 | This version's major change is a modification in the COPY TO implementation. The new implementation now extends `Readable` while previous versions were extending `Transform`. This should not have an effect on how users use the module, but it was considered to justify a major version number because, even if the test suite coverage is wide, it could have an impact on the streaming dynamics in certain edge cases that are not yet captured by the tests.
223 | 224 | - Rewrite copy-to in order to have it extend `Readable` instead of `Transform` 225 | 226 | 227 | ### version 4.0.0 - published 2020-05-11 228 | 229 | This version's major change is a modification in the COPY FROM implementation. In previous versions, copy-from was internally designed as a `Transform` duplex stream. The user-facing API was writable, and the readable side of the `Transform` was piped into the postgres connection stream to copy the data inside the database. 230 | This led to an issue because `Transform` was emitting its 'finish' event too early, as soon as the writable side was ended. Postgres had not yet read all the data on the readable side and had not confirmed that the COPY operation was finished. The recommendation was to wait for the 'end' event on the readable side, which correctly detected the end of the COPY operation and the fact that the pg connection was ready for new queries. 231 | This recommendation worked ok, but this way of detecting the end of a writable is not standard and was leading to various issues (interaction with the `finished` and `pipeline` APIs, for example). 232 | The new copy-from implementation extends `Writable` and now emits 'finish' with the correct timing: after the COPY operation and after the postgres connection has reached the readyForQuery state. 233 | Another big change in this version is that copy-to now shortcuts the core `pg` parsing during the COPY operation. This avoids double parsing and avoids having `pg` buffer whole postgres protocol messages. 234 | 235 | - Rewrite copy-from in order to have it extend `Writable` instead of `Transform` 236 | - Modify copy-to to shortcut the pg protocol parser during the COPY operation 237 | - Add Stream compliance tests for copy-to and copy-from 238 | 239 | 240 | ### version 3.0.0 - published 2020-05-02 241 | 242 | This version's major change is a modification in the COPY TO implementation. In the previous versions, a row could be pushed downstream only after the full row was gathered in memory. In many cases, rows are small and this is not an issue. But there are some use cases where rows can grow bigger (think of a row containing a 1MB raw image in a BYTEA field. cf issue #91). In these cases, the library was constantly trying to allocate very big buffers and this could lead to severe performance issues. 243 | In the new implementation, all the data payload received from a postgres chunk is sent downstream without waiting for full row boundaries. 244 | 245 | Some users may in the past have relied on the fact that the copy-to chunk boundaries exactly matched row boundaries. A major difference in the 3.x version is that the module does not offer any guarantee that its chunk boundaries match row boundaries. A row's data could (and you have to realize that this will happen) be split across 2 or more chunks depending on the size of the rows and on postgres's own chunking decisions. 246 | 247 | As a consequence, when the copy-to stream is piped into a pipeline that does row/CSV parsing, you need to make sure that this pipeline correctly handles rows that span across chunk boundaries.
For its tests, this module uses the [csv-parser](https://github.com/mafintosh/csv-parser) module. 248 | 249 | - Add `prettier` configuration following discussion on brianc/node-postgres#2172 250 | - Rewrite the copy-to implementation in order to avoid fetching whole rows in memory 251 | - Use mocha for tests 252 | - Add new tests for copy-to.js focusing on chunk boundaries 253 | - Add integration tests for two streaming csv parsers: csv-parser and csv-parse 254 | - Add eslint 255 | - Add test for quick&dirty bytea binary extraction 256 | - Add benchmark for copy-to in bench/copy-to.js 257 | 258 | ### version 2.2.2 - published 2019-07-22 259 | 260 | - Bugfix copy-to could pause the client connection, preventing re-use 261 | 262 | ### version 2.2.1 - published 2019-07-22 263 | 264 | - Bugfix copy-from was not correctly unpiped from the connection stream 265 | 266 | ### version 2.2.0 - published 2019-03-21 267 | 268 | - Small refactor in copy-from passing from 3 pushes to 2 pushes in every chunk transform loop 269 | - Add bench/ directory for benchmarks 270 | - Add benchmark to compare performance of pg-copy-streams wrt psql during copy-from 271 | - Add benchmark to measure memory usage of copy-from 272 | 273 | ### version 2.1.0 - published 2019-03-19 274 | 275 | - Change README to stop using the pg pool singleton (removed after pg 7.0) 276 | - Do not register copy-to.pushBufferIfNeeded on the instance itself (avoid dangling method on the object) 277 | - Fix copy-to test wrt intermittent unhandled promise bug 278 | - Add tests regarding client re-use 279 | 280 | ### version 2.0.0 - published 2019-03-14 281 | 282 | This version's major change is a modification in the COPY TO implementation. In the previous version, when a chunk was received from the database, it was analyzed and every row contained within that chunk was pushed individually down the stream pipeline. Small rows could lead to a "one chunk" / "thousands of rows pushed" performance issue in node. Thanks to @rafatower & CartoDB for the patch. 283 | This is considered to be a major change since some people could be relying on the fact that each outgoing chunk is an individual row. 284 | 285 | Other changes in this version: 286 | 287 | - Use Strict 288 | - Travis deprecation of old node version (0.12, 0.4). Support LTS 6, 8, 10 and Current 11 289 | - Update dev dependencies (pg, lodash) 290 | - Stop using deprecated Buffer constructor 291 | - Add package-lock.json 292 | 293 | ## license 294 | 295 | The MIT License (MIT) 296 | 297 | Copyright (c) 2013 Brian M. Carlson 298 | 299 | Permission is hereby granted, free of charge, to any person obtaining a copy 300 | of this software and associated documentation files (the "Software"), to deal 301 | in the Software without restriction, including without limitation the rights 302 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 303 | copies of the Software, and to permit persons to whom the Software is 304 | furnished to do so, subject to the following conditions: 305 | 306 | The above copyright notice and this permission notice shall be included in 307 | all copies or substantial portions of the Software. 308 | 309 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 310 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 311 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE 312 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 313 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 314 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 315 | THE SOFTWARE. 316 | -------------------------------------------------------------------------------- /bench/copy-from-memory.js: -------------------------------------------------------------------------------- 1 | const cp = require('duplex-child-process') 2 | const pg = require('pg') 3 | 4 | const copy = require('../').from 5 | 6 | const client = function () { 7 | const client = new pg.Client() 8 | client.connect() 9 | return client 10 | } 11 | 12 | const inStream = function () { 13 | return cp.spawn('seq', ['0', '29999999']) 14 | } 15 | 16 | let running = true 17 | 18 | const c = client() 19 | c.query('DROP TABLE IF EXISTS plugnumber', function () { 20 | c.query('CREATE TABLE plugnumber (num int)', function () { 21 | const seq = inStream() 22 | const from = c.query(copy('COPY plugnumber FROM STDIN')) 23 | seq.pipe(from) 24 | from.on('finish', function () { // copy-from extends Writable and emits 'finish' ('end' is not fired since 4.0.0) 25 | running = false 26 | c.end() 27 | }) 28 | }) 29 | }) 30 | 31 | let rssMin = process.memoryUsage().rss / 1024 / 1024 32 | let rssMax = rssMin 33 | 34 | const memlog = function () { 35 | const rss = process.memoryUsage().rss / 1024 / 1024 36 | rssMin = Math.min(rss, rssMin) 37 | rssMax = Math.max(rss, rssMax) 38 | console.log( 39 | 'rss:' + 40 | Math.round(rss * 100) / 100 + 41 | 'MB rssMin:' + 42 | Math.round(rssMin * 100) / 100 + 43 | 'MB rssMax:' + 44 | Math.round(rssMax * 100) / 100 + 45 | 'MB' 46 | ) 47 | if (running) { 48 | setTimeout(memlog, 1000) 49 | } 50 | } 51 | 52 | memlog() 53 | -------------------------------------------------------------------------------- /bench/copy-from.js: -------------------------------------------------------------------------------- 1 | const Benchmark = require('benchmark') 2 | const cp = require('duplex-child-process') 3 | const pg = require('pg') 4 | 5 | const copy = require('../').from 6 | 7 | const client = function () { 8 | const client = new pg.Client() 9 | client.connect() 10 | return client 11 | } 12 | 13 | const psql = '/opt/postgresql-9.6.1/bin/psql' 14 | const limit = 999999 15 | const inStream = function () { 16 | return cp.spawn('seq', ['0', '' + limit]) 17 | } 18 | const suite = new Benchmark.Suite() 19 | suite 20 | .add({ 21 | name: 'unix pipe into psql COPY', 22 | defer: true, 23 | fn: function (d) { 24 | const c = client() 25 | c.query('DROP TABLE IF EXISTS plugnumber', function () { 26 | c.query('CREATE TABLE plugnumber (num int)', function () { 27 | c.end() 28 | const from = cp.spawn('sh', [ 29 | '-c', 30 | 'seq 0 ' + limit + ' | ' + psql + " postgres -c 'COPY plugnumber FROM STDIN'", 31 | ]) 32 | from.on('close', function () { 33 | d.resolve() 34 | }) 35 | }) 36 | }) 37 | }, 38 | }) 39 | .add({ 40 | name: 'pipe into psql COPY', 41 | defer: true, 42 | fn: function (d) { 43 | const c = client() 44 | c.query('DROP TABLE IF EXISTS plugnumber', function () { 45 | c.query('CREATE TABLE plugnumber (num int)', function () { 46 | c.end() 47 | const seq = inStream() 48 | const from = cp.spawn(psql, ['postgres', '-c', 'COPY plugnumber FROM STDIN']) 49 | seq.pipe(from) 50 | from.on('close', function () { 51 | d.resolve() 52 | }) 53 | }) 54 | }) 55 | }, 56 | }) 57 | .add({ 58 | name: 'pipe into pg-copy-stream COPY', 59 | defer: true, 60 | fn: function (d) { 61 | const c = client() 62 | c.query('DROP TABLE IF EXISTS 
plugnumber', function () { 63 | c.query('CREATE TABLE plugnumber (num int)', function () { 64 | const seq = inStream() 65 | const from = c.query(copy('COPY plugnumber FROM STDIN')) 66 | seq.pipe(from) 67 | from.on('finish', function () { 68 | c.end() 69 | d.resolve() 70 | }) 71 | }) 72 | }) 73 | }, 74 | }) 75 | 76 | .on('cycle', function (event) { 77 | console.log(String(event.target)) 78 | }) 79 | .on('complete', function () { 80 | console.log('Fastest is ' + this.filter('fastest').map('name')) 81 | }) 82 | 83 | const c = client() 84 | c.query('DROP TABLE IF EXISTS plugnumber', function () { 85 | c.end() 86 | suite.run() 87 | }) 88 | -------------------------------------------------------------------------------- /bench/copy-to.js: -------------------------------------------------------------------------------- 1 | const Benchmark = require('benchmark') 2 | const cp = require('duplex-child-process') 3 | const pg = require('pg') 4 | 5 | const copy = require('../').to 6 | 7 | const client = function () { 8 | const client = new pg.Client() 9 | client.connect() 10 | return client 11 | } 12 | 13 | const psql = '/opt/postgresql-9.6.1/bin/psql' 14 | const suite = new Benchmark.Suite() 15 | suite 16 | .add({ 17 | name: 'psql COPY out of postgres', 18 | defer: true, 19 | fn: function (d) { 20 | const from = cp.spawn(psql, ['postgres', '-c', 'COPY plug TO STDOUT']) 21 | from.resume() 22 | from.on('close', function () { 23 | d.resolve() 24 | }) 25 | }, 26 | }) 27 | .add({ 28 | name: 'pg-copy-stream COPY out of postgres', 29 | defer: true, 30 | fn: function (d) { 31 | const c = client() 32 | const copyOut = c.query(copy('COPY plug TO STDOUT')) 33 | copyOut.resume() 34 | copyOut.on('end', function () { 35 | c.end() 36 | d.resolve() 37 | }) 38 | }, 39 | }) 40 | .on('cycle', function (event) { 41 | console.log(String(event.target)) 42 | }) 43 | .on('complete', function () { 44 | console.log('Fastest is ' + this.filter('fastest').map('name')) 45 | }) 46 | 47 | const c = client() 48 | c.query('DROP TABLE IF EXISTS plug', function () { 49 | c.query('CREATE TABLE plug (field text)', function () { 50 | c.query("INSERT INTO plug(field) SELECT (repeat('-', CAST(2^17 AS int))) FROM generate_series(1, 10)", function () { 51 | c.end() 52 | suite.run() 53 | }) 54 | }) 55 | }) 56 | -------------------------------------------------------------------------------- /copy-both.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | module.exports = function (txt, options) { 4 | return new CopyStreamQuery(txt, options) 5 | } 6 | 7 | const { Duplex } = require('stream') 8 | const assert = require('assert') 9 | const BufferList = require('./obuf') 10 | const code = require('./message-formats') 11 | 12 | // Readable decoder states 13 | const PG_CODE = 0 14 | const PG_LENGTH = 1 15 | const PG_MESSAGE = 2 16 | 17 | class CopyStreamQuery extends Duplex { 18 | constructor(text, options) { 19 | super(options) 20 | this.text = text 21 | 22 | // Readable side 23 | this._state = PG_CODE 24 | this._buffer = new BufferList() 25 | this._unreadMessageContentLength = 0 26 | this._copyDataChunks = new BufferList() 27 | this._pgDataHandler = null 28 | this._drained = false 29 | this._forwarding = false 30 | this._onReadableEvent = this._onReadable.bind(this) 31 | this.alignOnCopyDataFrame = options ? 
options.alignOnCopyDataFrame === true : false 32 | 33 | // Writable side 34 | this._gotCopyInResponse = false 35 | this.chunks = [] 36 | this.cb = null 37 | this.cork() 38 | } 39 | 40 | submit(connection) { 41 | this.connection = connection 42 | this._attach() 43 | connection.query(this.text) 44 | } 45 | 46 | /* Readable implementation */ 47 | _attach() { 48 | const connectionStream = this.connection.stream 49 | const pgDataListeners = connectionStream.listeners('data') 50 | assert(pgDataListeners.length == 1) 51 | this._pgDataHandler = pgDataListeners.pop() 52 | connectionStream.removeListener('data', this._pgDataHandler) 53 | connectionStream.pause() 54 | this._forward() 55 | connectionStream.on('readable', this._onReadableEvent) 56 | } 57 | 58 | _detach() { 59 | const connectionStream = this.connection.stream 60 | const unreadBuffer = this._buffer.take(this._buffer.size) 61 | connectionStream.removeListener('readable', this._onReadableEvent) 62 | connectionStream.addListener('data', this._pgDataHandler) 63 | this._pgDataHandler(unreadBuffer) 64 | 65 | // unpipe can pause the stream but also underlying onData event can potentially pause the stream because of hitting 66 | // the highWaterMark and pausing the stream, so we resume the stream in the next tick after the underlying onData 67 | // event has finished 68 | process.nextTick(function () { 69 | connectionStream.resume() 70 | }) 71 | } 72 | 73 | _cleanup() { 74 | this._buffer = null 75 | this._copyDataChunks = null 76 | this._pgDataHandler = null 77 | this._onReadableEvent = null 78 | } 79 | 80 | _onReadable() { 81 | this._forward() 82 | } 83 | 84 | _read() { 85 | this._drained = true 86 | this._forward() 87 | } 88 | 89 | _forward() { 90 | if (this._forwarding || !this._drained || !this.connection) return 91 | this._forwarding = true 92 | const connectionStream = this.connection.stream 93 | let chunk 94 | while (this._drained && (chunk = connectionStream.read()) !== null) { 95 | this._drained = this._parse(chunk) 96 | } 97 | this._forwarding = false 98 | } 99 | 100 | _parse(chunk) { 101 | let done = false 102 | let drained = true 103 | this._buffer.push(chunk) 104 | 105 | while (!done && this._buffer.size > 0) { 106 | if (PG_CODE === this._state) { 107 | if (!this._buffer.has(1)) break 108 | this._code = this._buffer.peekUInt8() 109 | if (this._code === code.ErrorResponse) { 110 | // ErrorResponse Interception 111 | // We must let pg parse future messages and handle their consequences on 112 | // the ActiveQuery 113 | this._detach() 114 | return 115 | } 116 | this._buffer.readUInt8() 117 | this._state = PG_LENGTH 118 | } 119 | 120 | if (PG_LENGTH === this._state) { 121 | if (!this._buffer.has(4)) break 122 | this._unreadMessageContentLength = this._buffer.readUInt32BE() - 4 123 | this._state = PG_MESSAGE 124 | } 125 | 126 | if (PG_MESSAGE === this._state) { 127 | if (this._unreadMessageContentLength > 0 && this._buffer.size > 0) { 128 | const n = Math.min(this._buffer.size, this._unreadMessageContentLength) 129 | const messageContentChunk = this._buffer.take(n) 130 | this._unreadMessageContentLength -= n 131 | if (this._code === code.CopyData) { 132 | this._copyDataChunks.push(messageContentChunk) 133 | } 134 | } 135 | 136 | if (this._unreadMessageContentLength === 0) { 137 | // a full message has been captured 138 | switch (this._code) { 139 | case code.CopyBothResponse: 140 | this._startCopyIn() 141 | break 142 | case code.CopyData: 143 | if (this.alignOnCopyDataFrame) { 144 | drained = this._flushCopyData() 145 | } 146 | break 147 
| // standard interspersed messages. 148 | // see https://www.postgresql.org/docs/9.6/protocol-flow.html#PROTOCOL-COPY 149 | case code.ParameterStatus: 150 | case code.NoticeResponse: 151 | case code.NotificationResponse: 152 | break 153 | case code.CopyDone: 154 | default: 155 | done = true 156 | break 157 | } 158 | this._state = PG_CODE 159 | } 160 | } 161 | } 162 | 163 | // When we are not in alignOnCopyDataFrame, copyData payload is not buffered 164 | // Forward payload bytes as they arrive 165 | if (!this.alignOnCopyDataFrame) { 166 | drained = this._flushCopyData() 167 | } 168 | 169 | if (done) { 170 | this._detach() 171 | this.push(null) 172 | this._cleanup() 173 | } 174 | 175 | return drained 176 | } 177 | 178 | _flushCopyData() { 179 | let drained = true 180 | const len = this._copyDataChunks.size 181 | if (len > 0) { 182 | drained = this.push(this._copyDataChunks.take(len)) 183 | } 184 | return drained 185 | } 186 | 187 | /* Writable implementation */ 188 | _write(chunk, enc, cb) { 189 | this.chunks.push({ chunk: chunk, encoding: enc }) 190 | if (this._gotCopyInResponse) { 191 | return this.flush(cb) 192 | } 193 | this.cb = cb 194 | } 195 | 196 | _writev(chunks, cb) { 197 | this.chunks.push(...chunks) 198 | if (this._gotCopyInResponse) { 199 | return this.flush(cb) 200 | } 201 | this.cb = cb 202 | } 203 | 204 | _final(cb) { 205 | this.flush() 206 | const Int32Len = 4 207 | const finBuffer = Buffer.from([code.CopyDone, 0, 0, 0, Int32Len]) 208 | this.connection.stream.write(finBuffer) 209 | this.cb_flush = cb 210 | } 211 | 212 | flush(callback) { 213 | let chunk 214 | let ok = true 215 | while (ok && (chunk = this.chunks.shift())) { 216 | ok = this.flushChunk(chunk.chunk) 217 | } 218 | if (callback) { 219 | if (ok) { 220 | callback() 221 | } else { 222 | if (this.chunks.length) { 223 | this.connection.stream.once('drain', this.flush.bind(this, callback)) 224 | } else { 225 | this.connection.stream.once('drain', callback) 226 | } 227 | } 228 | } 229 | } 230 | 231 | flushChunk(chunk) { 232 | const Int32Len = 4 233 | const lenBuffer = Buffer.from([code.CopyData, 0, 0, 0, 0]) 234 | lenBuffer.writeUInt32BE(chunk.length + Int32Len, 1) 235 | this.connection.stream.write(lenBuffer) 236 | return this.connection.stream.write(chunk) 237 | } 238 | 239 | _startCopyIn() { 240 | this._gotCopyInResponse = true 241 | this.uncork() 242 | this.flush() 243 | if (this.cb) { 244 | const { cb } = this 245 | this.cb = null 246 | cb() 247 | } 248 | } 249 | 250 | handleError(err) { 251 | this.emit('error', err) 252 | this._cleanup() 253 | } 254 | 255 | handleCopyData(chunk) { 256 | // an out of band copyData message 257 | // is received after copyDone 258 | // this is currently discarded 259 | } 260 | 261 | handleCommandComplete() {} 262 | 263 | handleReadyForQuery() { 264 | this.connection = null 265 | this.cb_flush() 266 | } 267 | } 268 | -------------------------------------------------------------------------------- /copy-from.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | module.exports = function (txt, options) { 4 | return new CopyStreamQuery(txt, options) 5 | } 6 | 7 | const { Writable } = require('stream') 8 | const code = require('./message-formats') 9 | 10 | class CopyStreamQuery extends Writable { 11 | constructor(text, options) { 12 | super(options) 13 | this.text = text 14 | this.rowCount = 0 15 | this._gotCopyInResponse = false 16 | this.chunks = [] 17 | this.cb_CopyInResponse = null 18 | this.cb_ReadyForQuery = null 19 | 
this.cb_destroy = null 20 | this.cork() 21 | } 22 | 23 | submit(connection) { 24 | this.connection = connection 25 | connection.query(this.text) 26 | } 27 | 28 | callback() { 29 | // this callback is empty but defining it allows 30 | // `pg` to discover it and overwrite it 31 | // with its timeout mechanism when query_timeout config is set 32 | } 33 | 34 | _write(chunk, enc, cb) { 35 | this.chunks.push({ chunk: chunk, encoding: enc }) 36 | if (this._gotCopyInResponse) { 37 | return this.flush(cb) 38 | } 39 | this.cb_CopyInResponse = cb 40 | } 41 | 42 | _writev(chunks, cb) { 43 | // this.chunks.push(...chunks) 44 | // => issue #136, RangeError: Maximum call stack size exceeded 45 | // Using hybrid approach as advised on https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/apply 46 | if (this.chunks.length == 0) { 47 | this.chunks = chunks 48 | } else { 49 | // https://stackoverflow.com/questions/22747068/is-there-a-max-number-of-arguments-javascript-functions-can-accept 50 | // 100K seems to be a reasonable size for v8 51 | const QUANTUM = 125000 52 | for (let i = 0; i < chunks.length; i += QUANTUM) { 53 | this.chunks.push(...chunks.slice(i, Math.min(i + QUANTUM, chunks.length))) 54 | } 55 | } 56 | if (this._gotCopyInResponse) { 57 | return this.flush(cb) 58 | } 59 | this.cb_CopyInResponse = cb 60 | } 61 | 62 | _destroy(err, cb) { 63 | // writable.destroy([error]) was called. 64 | // send a CopyFail message that will rollback the COPY operation. 65 | // the cb will be called only after the ErrorResponse message is received 66 | // from the backend 67 | if (this.cb_ReadyForQuery) return cb(err) 68 | this.cb_destroy = cb 69 | const msg = err ? err.message : 'NODE-PG-COPY-STREAMS destroy() was called' 70 | const self = this 71 | const done = function () { 72 | self.connection.sendCopyFail(msg) 73 | } 74 | 75 | this.chunks = [] 76 | if (this._gotCopyInResponse) { 77 | return this.flush(done) 78 | } 79 | this.cb_CopyInResponse = done 80 | } 81 | 82 | _final(cb) { 83 | this.cb_ReadyForQuery = cb 84 | const self = this 85 | const done = function () { 86 | const Int32Len = 4 87 | const finBuffer = Buffer.from([code.CopyDone, 0, 0, 0, Int32Len]) 88 | self.connection.stream.write(finBuffer) 89 | } 90 | 91 | if (this._gotCopyInResponse) { 92 | return this.flush(done) 93 | } 94 | this.cb_CopyInResponse = done 95 | } 96 | 97 | flush(callback) { 98 | let chunk 99 | let ok = true 100 | while (ok && (chunk = this.chunks.shift())) { 101 | ok = this.flushChunk(chunk.chunk) 102 | } 103 | if (callback) { 104 | if (ok) { 105 | callback() 106 | } else { 107 | if (this.chunks.length) { 108 | this.connection.stream.once('drain', this.flush.bind(this, callback)) 109 | } else { 110 | this.connection.stream.once('drain', callback) 111 | } 112 | } 113 | } 114 | } 115 | 116 | flushChunk(chunk) { 117 | const Int32Len = 4 118 | const lenBuffer = Buffer.from([code.CopyData, 0, 0, 0, 0]) 119 | lenBuffer.writeUInt32BE(chunk.length + Int32Len, 1) 120 | this.connection.stream.write(lenBuffer) 121 | return this.connection.stream.write(chunk) 122 | } 123 | 124 | handleError(e) { 125 | // clear `pg` timeout mechanism 126 | this.callback() 127 | 128 | if (this.cb_destroy) { 129 | const cb = this.cb_destroy 130 | this.cb_destroy = null 131 | cb(e) 132 | } else { 133 | this.emit('error', e) 134 | } 135 | this.connection = null 136 | } 137 | 138 | handleCopyInResponse(connection) { 139 | this._gotCopyInResponse = true 140 | if (!this.destroyed) { 141 | this.uncork() 142 | } 143 | const cb = 
this.cb_CopyInResponse || function () {} 144 | this.cb_CopyInResponse = null 145 | this.flush(cb) 146 | } 147 | 148 | handleCommandComplete(msg) { 149 | // Parse affected row count as in 150 | // https://github.com/brianc/node-postgres/blob/35e5567f86774f808c2a8518dd312b8aa3586693/lib/result.js#L37 151 | const match = /COPY (\d+)/.exec((msg || {}).text) 152 | if (match) { 153 | this.rowCount = parseInt(match[1], 10) 154 | } 155 | } 156 | 157 | handleReadyForQuery() { 158 | // triggered after ReadyForQuery 159 | // we delay the _final callback so that the 'finish' event is 160 | // sent only when the ingested data is visible inside postgres and 161 | // after the postgres connection is ready for a new query 162 | 163 | // Note: `pg` currently does not call this callback when the backend 164 | // sends an ErrorResponse message during the query (for example during 165 | // a CopyFail) 166 | 167 | // clear `pg` timeout mechanism 168 | this.callback() 169 | 170 | this.cb_ReadyForQuery() 171 | this.connection = null 172 | } 173 | } 174 | -------------------------------------------------------------------------------- /copy-to.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | module.exports = function (txt, options) { 4 | return new CopyStreamQuery(txt, options) 5 | } 6 | 7 | const { Readable } = require('stream') 8 | const assert = require('assert') 9 | const BufferList = require('./obuf') 10 | const code = require('./message-formats') 11 | 12 | // decoder states 13 | const PG_CODE = 0 14 | const PG_LENGTH = 1 15 | const PG_MESSAGE = 2 16 | 17 | class CopyStreamQuery extends Readable { 18 | constructor(text, options) { 19 | super(options) 20 | this.text = text 21 | this.rowCount = 0 22 | this._state = PG_CODE 23 | this._buffer = new BufferList() 24 | this._unreadMessageContentLength = 0 25 | this._copyDataChunks = new BufferList() 26 | this._pgDataHandler = null 27 | this._drained = false 28 | this._forwarding = false 29 | this._onReadableEvent = this._onReadable.bind(this) 30 | } 31 | 32 | submit(connection) { 33 | this.connection = connection 34 | this._attach() 35 | connection.query(this.text) 36 | } 37 | 38 | _attach() { 39 | const connectionStream = this.connection.stream 40 | const pgDataListeners = connectionStream.listeners('data') 41 | assert(pgDataListeners.length == 1) 42 | this._pgDataHandler = pgDataListeners.pop() 43 | connectionStream.removeListener('data', this._pgDataHandler) 44 | connectionStream.pause() 45 | this._forward() 46 | connectionStream.on('readable', this._onReadableEvent) 47 | } 48 | 49 | _detach() { 50 | const connectionStream = this.connection.stream 51 | const unreadBuffer = this._buffer.take(this._buffer.size) 52 | //connectionStream.unpipe(this) 53 | connectionStream.removeListener('readable', this._onReadableEvent) 54 | connectionStream.addListener('data', this._pgDataHandler) 55 | this._pgDataHandler(unreadBuffer) 56 | 57 | // unpipe can pause the stream but also underlying onData event can potentially pause the stream because of hitting 58 | // the highWaterMark and pausing the stream, so we resume the stream in the next tick after the underlying onData 59 | // event has finished 60 | process.nextTick(function () { 61 | connectionStream.resume() 62 | }) 63 | } 64 | 65 | _cleanup() { 66 | this._buffer = null 67 | this._copyDataChunks = null 68 | this._pgDataHandler = null 69 | this._onReadableEvent = null 70 | } 71 | 72 | _onReadable() { 73 | this._forward() 74 | } 75 | 76 | _read() { 77 | 
this._drained = true 78 | this._forward() 79 | } 80 | 81 | _forward() { 82 | if (this._forwarding || !this._drained || !this.connection) return 83 | this._forwarding = true 84 | const connectionStream = this.connection.stream 85 | let chunk 86 | while (this._drained && (chunk = connectionStream.read()) !== null) { 87 | this._drained = this._parse(chunk) 88 | } 89 | this._forwarding = false 90 | } 91 | 92 | _parse(chunk) { 93 | let done = false 94 | let drained = true 95 | this._buffer.push(chunk) 96 | 97 | while (!done && this._buffer.size > 0) { 98 | if (PG_CODE === this._state) { 99 | if (!this._buffer.has(1)) break 100 | this._code = this._buffer.peekUInt8() 101 | if (this._code === code.ErrorResponse) { 102 | // ErrorResponse Interception 103 | // We must let pg parse future messages and handle their consequences on 104 | // the ActiveQuery 105 | this._detach() 106 | return 107 | } 108 | this._buffer.readUInt8() 109 | this._state = PG_LENGTH 110 | } 111 | 112 | if (PG_LENGTH === this._state) { 113 | if (!this._buffer.has(4)) break 114 | this._unreadMessageContentLength = this._buffer.readUInt32BE() - 4 115 | this._state = PG_MESSAGE 116 | } 117 | 118 | if (PG_MESSAGE === this._state) { 119 | if (this._unreadMessageContentLength > 0 && this._buffer.size > 0) { 120 | const n = Math.min(this._buffer.size, this._unreadMessageContentLength) 121 | const messageContentChunk = this._buffer.take(n) 122 | this._unreadMessageContentLength -= n 123 | if (this._code === code.CopyData) { 124 | this._copyDataChunks.push(messageContentChunk) 125 | } 126 | } 127 | 128 | if (this._unreadMessageContentLength === 0) { 129 | // a full message has been captured 130 | switch (this._code) { 131 | case code.CopyOutResponse: 132 | case code.CopyData: 133 | // standard interspersed messages. 
134 | // see https://www.postgresql.org/docs/9.6/protocol-flow.html#PROTOCOL-COPY 135 | case code.ParameterStatus: 136 | case code.NoticeResponse: 137 | case code.NotificationResponse: 138 | break 139 | case code.CopyDone: 140 | default: 141 | done = true 142 | break 143 | } 144 | this._state = PG_CODE 145 | } 146 | } 147 | } 148 | 149 | // flush data if any data has been captured 150 | const len = this._copyDataChunks.size 151 | if (len > 0) { 152 | drained = this.push(this._copyDataChunks.take(len)) 153 | } 154 | 155 | if (done) { 156 | this._detach() 157 | this.push(null) 158 | this._cleanup() 159 | } 160 | 161 | return drained 162 | } 163 | 164 | handleError(err) { 165 | this.emit('error', err) 166 | this._cleanup() 167 | } 168 | 169 | handleCommandComplete(msg) { 170 | const match = /COPY (\d+)/.exec((msg || {}).text) 171 | if (match) { 172 | this.rowCount = parseInt(match[1], 10) 173 | } 174 | } 175 | 176 | handleReadyForQuery() {} 177 | } 178 | -------------------------------------------------------------------------------- /eslint.config.js: -------------------------------------------------------------------------------- 1 | const { defineConfig } = require('eslint/config') 2 | 3 | const prettier = require('eslint-plugin-prettier') 4 | const globals = require('globals') 5 | const js = require('@eslint/js') 6 | 7 | const { FlatCompat } = require('@eslint/eslintrc') 8 | 9 | const compat = new FlatCompat({ 10 | baseDirectory: __dirname, 11 | recommendedConfig: js.configs.recommended, 12 | allConfig: js.configs.all, 13 | }) 14 | 15 | module.exports = defineConfig([ 16 | { 17 | plugins: { 18 | prettier, 19 | }, 20 | 21 | extends: compat.extends('plugin:prettier/recommended'), 22 | 23 | languageOptions: { 24 | ecmaVersion: 2018, 25 | sourceType: 'module', 26 | parserOptions: {}, 27 | 28 | globals: { 29 | ...globals.node, 30 | ...globals.mocha, 31 | }, 32 | }, 33 | 34 | rules: { 35 | 'prefer-const': ['error'], 36 | 'no-var': ['error'], 37 | 38 | 'no-unused-vars': [ 39 | 'error', 40 | { 41 | args: 'none', 42 | }, 43 | ], 44 | 45 | 'prefer-destructuring': [ 46 | 'error', 47 | { 48 | array: false, 49 | }, 50 | ], 51 | 52 | 'no-useless-rename': ['error'], 53 | }, 54 | }, 55 | ]) 56 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const CopyToQueryStream = require('./copy-to') 4 | const CopyFromQueryStream = require('./copy-from') 5 | const CopyBothQueryStream = require('./copy-both') 6 | 7 | exports.to = function copyTo(txt, options) { 8 | return new CopyToQueryStream(txt, options) 9 | } 10 | 11 | exports.from = function copyFrom(txt, options) { 12 | return new CopyFromQueryStream(txt, options) 13 | } 14 | 15 | exports.both = function copyBoth(txt, options) { 16 | return new CopyBothQueryStream(txt, options) 17 | } 18 | -------------------------------------------------------------------------------- /message-formats.js: -------------------------------------------------------------------------------- 1 | /** 2 | * The COPY feature uses the following protocol codes. 
3 | * The codes for the most recent protocol version are documented on 4 | * https://www.postgresql.org/docs/current/static/protocol-message-formats.html 5 | * 6 | * The protocol flow itself is described on 7 | * https://www.postgresql.org/docs/current/static/protocol-flow.html 8 | */ 9 | module.exports = { 10 | ErrorResponse: 0x45, // E 11 | CopyInResponse: 0x47, // G 12 | CopyOutResponse: 0x48, // H 13 | CopyBothResponse: 0x57, // W 14 | CopyDone: 0x63, // c 15 | CopyData: 0x64, // d 16 | CopyFail: 0x66, // f 17 | CommandComplete: 0x43, // C 18 | ReadyForQuery: 0x5a, // Z 19 | 20 | // It is possible for NoticeResponse and ParameterStatus messages to be interspersed between CopyData messages; 21 | // frontends must handle these cases, and should be prepared for other asynchronous message types as well 22 | // (see Section 50.2.6). 23 | // Otherwise, any message type other than CopyData or CopyDone may be treated as terminating copy-out mode. 24 | NotificationResponse: 0x41, // A 25 | NoticeResponse: 0x4e, // N 26 | ParameterStatus: 0x53, // S 27 | } 28 | -------------------------------------------------------------------------------- /obuf.js: -------------------------------------------------------------------------------- 1 | const Buffer = require('buffer').Buffer 2 | 3 | function OffsetBuffer() { 4 | this.offset = 0 5 | this.size = 0 6 | this.buffers = [] 7 | } 8 | module.exports = OffsetBuffer 9 | 10 | OffsetBuffer.prototype.isEmpty = function isEmpty() { 11 | return this.size === 0 12 | } 13 | 14 | OffsetBuffer.prototype.clone = function clone(size) { 15 | const r = new OffsetBuffer() 16 | r.offset = this.offset 17 | r.size = size 18 | r.buffers = this.buffers.slice() 19 | return r 20 | } 21 | 22 | OffsetBuffer.prototype.toChunks = function toChunks() { 23 | if (this.size === 0) return [] 24 | 25 | // We are going to slice it anyway 26 | if (this.offset !== 0) { 27 | this.buffers[0] = this.buffers[0].slice(this.offset) 28 | this.offset = 0 29 | } 30 | 31 | const chunks = [] 32 | let off = 0 33 | let i 34 | for (i = 0; off <= this.size && i < this.buffers.length; i++) { 35 | let buf = this.buffers[i] 36 | off += buf.length 37 | 38 | // Slice off last buffer 39 | if (off > this.size) { 40 | buf = buf.slice(0, buf.length - (off - this.size)) 41 | this.buffers[i] = buf 42 | } 43 | 44 | chunks.push(buf) 45 | } 46 | 47 | // If some buffers were skipped - trim length 48 | if (i < this.buffers.length) this.buffers.length = i 49 | 50 | return chunks 51 | } 52 | 53 | OffsetBuffer.prototype.toString = function toString(enc) { 54 | return this.toChunks() 55 | .map(function (c) { 56 | return c.toString(enc) 57 | }) 58 | .join('') 59 | } 60 | 61 | OffsetBuffer.prototype.use = function use(buf, off, n) { 62 | this.buffers = [buf] 63 | this.offset = off 64 | this.size = n 65 | } 66 | 67 | OffsetBuffer.prototype.push = function push(data) { 68 | // Ignore empty writes 69 | if (data.length === 0) return 70 | 71 | this.size += data.length 72 | this.buffers.push(data) 73 | } 74 | 75 | OffsetBuffer.prototype.has = function has(n) { 76 | return this.size >= n 77 | } 78 | 79 | OffsetBuffer.prototype.skip = function skip(n) { 80 | if (this.size === 0) return 81 | 82 | this.size -= n 83 | 84 | // Fast case, skip bytes in a first buffer 85 | if (this.offset + n < this.buffers[0].length) { 86 | this.offset += n 87 | return 88 | } 89 | 90 | let left = n - (this.buffers[0].length - this.offset) 91 | this.offset = 0 92 | let shift 93 | for (shift = 1; left > 0 && shift < this.buffers.length; shift++) { 94 | const 
buf = this.buffers[shift] 95 | if (buf.length > left) { 96 | this.offset = left 97 | break 98 | } 99 | left -= buf.length 100 | } 101 | this.buffers = this.buffers.slice(shift) 102 | } 103 | 104 | OffsetBuffer.prototype.copy = function copy(target, targetOff, off, n) { 105 | if (this.size === 0) return 106 | if (off !== 0) throw new Error('Unsupported offset in .copy()') 107 | 108 | let toff = targetOff 109 | const first = this.buffers[0] 110 | const toCopy = Math.min(n, first.length - this.offset) 111 | first.copy(target, toff, this.offset, this.offset + toCopy) 112 | 113 | toff += toCopy 114 | let left = n - toCopy 115 | for (let i = 1; left > 0 && i < this.buffers.length; i++) { 116 | const buf = this.buffers[i] 117 | const toCopy = Math.min(left, buf.length) 118 | 119 | buf.copy(target, toff, 0, toCopy) 120 | 121 | toff += toCopy 122 | left -= toCopy 123 | } 124 | } 125 | 126 | OffsetBuffer.prototype.take = function take(n) { 127 | if (n === 0) return Buffer.alloc(0) 128 | 129 | this.size -= n 130 | 131 | // Fast cases 132 | const first = this.buffers[0].length - this.offset 133 | if (first === n) { 134 | let r = this.buffers.shift() 135 | if (this.offset !== 0) { 136 | r = r.slice(this.offset) 137 | this.offset = 0 138 | } 139 | return r 140 | } else if (first > n) { 141 | const r = this.buffers[0].slice(this.offset, this.offset + n) 142 | this.offset += n 143 | return r 144 | } 145 | 146 | // Allocate and fill buffer 147 | const out = Buffer.alloc(n) 148 | let toOff = 0 149 | let startOff = this.offset 150 | let i 151 | for (i = 0; toOff !== n && i < this.buffers.length; i++) { 152 | const buf = this.buffers[i] 153 | const toCopy = Math.min(buf.length - startOff, n - toOff) 154 | 155 | buf.copy(out, toOff, startOff, startOff + toCopy) 156 | if (startOff + toCopy < buf.length) { 157 | this.offset = startOff + toCopy 158 | break 159 | } else { 160 | toOff += toCopy 161 | startOff = 0 162 | } 163 | } 164 | 165 | this.buffers = this.buffers.slice(i) 166 | if (this.buffers.length === 0) this.offset = 0 167 | 168 | return out 169 | } 170 | 171 | OffsetBuffer.prototype.peekUInt8 = function peekUInt8() { 172 | return this.buffers[0][this.offset] 173 | } 174 | 175 | OffsetBuffer.prototype.readUInt8 = function readUInt8() { 176 | this.size -= 1 177 | const first = this.buffers[0] 178 | const r = first[this.offset] 179 | if (++this.offset === first.length) { 180 | this.offset = 0 181 | this.buffers.shift() 182 | } 183 | 184 | return r 185 | } 186 | 187 | OffsetBuffer.prototype.readUInt16LE = function readUInt16LE() { 188 | const first = this.buffers[0] 189 | this.size -= 2 190 | 191 | let r 192 | let shift 193 | 194 | // Fast case - first buffer has all bytes 195 | if (first.length - this.offset >= 2) { 196 | r = first.readUInt16LE(this.offset) 197 | shift = 0 198 | this.offset += 2 199 | 200 | // One byte here - one byte there 201 | } else { 202 | r = first[this.offset] | (this.buffers[1][0] << 8) 203 | shift = 1 204 | this.offset = 1 205 | } 206 | 207 | if (this.offset === this.buffers[shift].length) { 208 | this.offset = 0 209 | shift++ 210 | } 211 | if (shift !== 0) this.buffers = this.buffers.slice(shift) 212 | 213 | return r 214 | } 215 | 216 | OffsetBuffer.prototype.readUInt24LE = function readUInt24LE() { 217 | const first = this.buffers[0] 218 | 219 | let r 220 | let shift 221 | const firstHas = first.length - this.offset 222 | 223 | // Fast case - first buffer has all bytes 224 | if (firstHas >= 3) { 225 | r = first.readUInt16LE(this.offset) | (first[this.offset + 2] << 16) 226 | 
shift = 0 227 | this.offset += 3 228 | 229 | // First buffer has 2 of 3 bytes 230 | } else if (firstHas >= 2) { 231 | r = first.readUInt16LE(this.offset) | (this.buffers[1][0] << 16) 232 | shift = 1 233 | this.offset = 1 234 | 235 | // Slow case: First buffer has 1 of 3 bytes 236 | } else { 237 | r = first[this.offset] 238 | this.offset = 0 239 | this.buffers.shift() 240 | this.size -= 1 241 | 242 | r |= this.readUInt16LE() << 8 243 | return r 244 | } 245 | 246 | this.size -= 3 247 | if (this.offset === this.buffers[shift].length) { 248 | this.offset = 0 249 | shift++ 250 | } 251 | if (shift !== 0) this.buffers = this.buffers.slice(shift) 252 | 253 | return r 254 | } 255 | 256 | OffsetBuffer.prototype.readUInt32LE = function readUInt32LE() { 257 | const first = this.buffers[0] 258 | 259 | let r 260 | let shift 261 | const firstHas = first.length - this.offset 262 | 263 | // Fast case - first buffer has all bytes 264 | if (firstHas >= 4) { 265 | r = first.readUInt32LE(this.offset) 266 | shift = 0 267 | this.offset += 4 268 | 269 | // First buffer has 3 of 4 bytes 270 | } else if (firstHas >= 3) { 271 | r = (first.readUInt16LE(this.offset) | (first[this.offset + 2] << 16)) + this.buffers[1][0] * 0x1000000 272 | shift = 1 273 | this.offset = 1 274 | 275 | // Slow case: First buffer has 2 of 4 bytes 276 | } else if (firstHas >= 2) { 277 | r = first.readUInt16LE(this.offset) 278 | this.offset = 0 279 | this.buffers.shift() 280 | this.size -= 2 281 | 282 | r += this.readUInt16LE() * 0x10000 283 | return r 284 | 285 | // Slow case: First buffer has 1 of 4 bytes 286 | } else { 287 | r = first[this.offset] 288 | this.offset = 0 289 | this.buffers.shift() 290 | this.size -= 1 291 | 292 | r += this.readUInt24LE() * 0x100 293 | return r 294 | } 295 | 296 | this.size -= 4 297 | if (this.offset === this.buffers[shift].length) { 298 | this.offset = 0 299 | shift++ 300 | } 301 | if (shift !== 0) this.buffers = this.buffers.slice(shift) 302 | 303 | return r 304 | } 305 | 306 | OffsetBuffer.prototype.readUInt16BE = function readUInt16BE() { 307 | const r = this.readUInt16LE() 308 | 309 | return ((r & 0xff) << 8) | (r >> 8) 310 | } 311 | 312 | OffsetBuffer.prototype.readUInt24BE = function readUInt24BE() { 313 | const r = this.readUInt24LE() 314 | 315 | return ((r & 0xff) << 16) | (((r >> 8) & 0xff) << 8) | (r >> 16) 316 | } 317 | 318 | OffsetBuffer.prototype.readUInt32BE = function readUInt32BE() { 319 | const r = this.readUInt32LE() 320 | 321 | return (((r & 0xff) << 24) | (((r >>> 8) & 0xff) << 16) | (((r >>> 16) & 0xff) << 8) | (r >>> 24)) >>> 0 322 | } 323 | 324 | // Signed number APIs 325 | 326 | function signedInt8(num) { 327 | if (num >= 0x80) return -(0xff ^ num) - 1 328 | else return num 329 | } 330 | 331 | OffsetBuffer.prototype.peekInt8 = function peekInt8() { 332 | return signedInt8(this.peekUInt8()) 333 | } 334 | 335 | OffsetBuffer.prototype.readInt8 = function readInt8() { 336 | return signedInt8(this.readUInt8()) 337 | } 338 | 339 | function signedInt16(num) { 340 | if (num >= 0x8000) return -(0xffff ^ num) - 1 341 | else return num 342 | } 343 | 344 | OffsetBuffer.prototype.readInt16BE = function readInt16BE() { 345 | return signedInt16(this.readUInt16BE()) 346 | } 347 | 348 | OffsetBuffer.prototype.readInt16LE = function readInt16LE() { 349 | return signedInt16(this.readUInt16LE()) 350 | } 351 | 352 | function signedInt24(num) { 353 | if (num >= 0x800000) return -(0xffffff ^ num) - 1 354 | else return num 355 | } 356 | 357 | OffsetBuffer.prototype.readInt24BE = function readInt24BE() { 358 
| return signedInt24(this.readUInt24BE()) 359 | } 360 | 361 | OffsetBuffer.prototype.readInt24LE = function readInt24LE() { 362 | return signedInt24(this.readUInt24LE()) 363 | } 364 | 365 | function signedInt32(num) { 366 | if (num >= 0x80000000) return -(0xffffffff ^ num) - 1 367 | else return num 368 | } 369 | 370 | OffsetBuffer.prototype.readInt32BE = function readInt32BE() { 371 | return signedInt32(this.readUInt32BE()) 372 | } 373 | 374 | OffsetBuffer.prototype.readInt32LE = function readInt32LE() { 375 | return signedInt32(this.readUInt32LE()) 376 | } 377 | -------------------------------------------------------------------------------- /package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "pg-copy-streams", 3 | "version": "7.0.0", 4 | "lockfileVersion": 3, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "pg-copy-streams", 9 | "version": "7.0.0", 10 | "license": "MIT", 11 | "devDependencies": { 12 | "@eslint/eslintrc": "^3.3.1", 13 | "@eslint/js": "^9.27.0", 14 | "async": "~0.2.10", 15 | "benchmark": "^2.1.4", 16 | "concat-stream": "^2.0.0", 17 | "csv-parse": "^5.6.0", 18 | "csv-parser": "^3.2.0", 19 | "duplex-child-process": "^1.0.0", 20 | "eslint": "^9.27.0", 21 | "eslint-config-prettier": "^10.1.5", 22 | "eslint-plugin-prettier": "^5.4.0", 23 | "globals": "^16.2.0", 24 | "lodash": "^4.17.21", 25 | "mocha": "^11.5.0", 26 | "pg": "^8.16.0", 27 | "prettier": "^3.5.3" 28 | } 29 | }, 30 | "node_modules/@eslint-community/eslint-utils": { 31 | "version": "4.7.0", 32 | "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.7.0.tgz", 33 | "integrity": "sha512-dyybb3AcajC7uha6CvhdVRJqaKyn7w2YKqKyAN37NKYgZT36w+iRb0Dymmc5qEJ549c/S31cMMSFd75bteCpCw==", 34 | "dev": true, 35 | "license": "MIT", 36 | "dependencies": { 37 | "eslint-visitor-keys": "^3.4.3" 38 | }, 39 | "engines": { 40 | "node": "^12.22.0 || ^14.17.0 || >=16.0.0" 41 | }, 42 | "funding": { 43 | "url": "https://opencollective.com/eslint" 44 | }, 45 | "peerDependencies": { 46 | "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" 47 | } 48 | }, 49 | "node_modules/@eslint-community/eslint-utils/node_modules/eslint-visitor-keys": { 50 | "version": "3.4.3", 51 | "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", 52 | "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", 53 | "dev": true, 54 | "license": "Apache-2.0", 55 | "engines": { 56 | "node": "^12.22.0 || ^14.17.0 || >=16.0.0" 57 | }, 58 | "funding": { 59 | "url": "https://opencollective.com/eslint" 60 | } 61 | }, 62 | "node_modules/@eslint-community/regexpp": { 63 | "version": "4.12.1", 64 | "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz", 65 | "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==", 66 | "dev": true, 67 | "license": "MIT", 68 | "engines": { 69 | "node": "^12.0.0 || ^14.0.0 || >=16.0.0" 70 | } 71 | }, 72 | "node_modules/@eslint/config-array": { 73 | "version": "0.20.0", 74 | "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.20.0.tgz", 75 | "integrity": "sha512-fxlS1kkIjx8+vy2SjuCB94q3htSNrufYTXubwiBFeaQHbH6Ipi43gFJq2zCMt6PHhImH3Xmr0NksKDvchWlpQQ==", 76 | "dev": true, 77 | "license": "Apache-2.0", 78 | "dependencies": { 79 | "@eslint/object-schema": "^2.1.6", 80 | "debug": "^4.3.1", 81 | "minimatch": "^3.1.2" 82 | }, 83 | "engines": { 84 | "node": 
"^18.18.0 || ^20.9.0 || >=21.1.0" 85 | } 86 | }, 87 | "node_modules/@eslint/config-helpers": { 88 | "version": "0.2.2", 89 | "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.2.2.tgz", 90 | "integrity": "sha512-+GPzk8PlG0sPpzdU5ZvIRMPidzAnZDl/s9L+y13iodqvb8leL53bTannOrQ/Im7UkpsmFU5Ily5U60LWixnmLg==", 91 | "dev": true, 92 | "license": "Apache-2.0", 93 | "engines": { 94 | "node": "^18.18.0 || ^20.9.0 || >=21.1.0" 95 | } 96 | }, 97 | "node_modules/@eslint/core": { 98 | "version": "0.14.0", 99 | "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.14.0.tgz", 100 | "integrity": "sha512-qIbV0/JZr7iSDjqAc60IqbLdsj9GDt16xQtWD+B78d/HAlvysGdZZ6rpJHGAc2T0FQx1X6thsSPdnoiGKdNtdg==", 101 | "dev": true, 102 | "license": "Apache-2.0", 103 | "dependencies": { 104 | "@types/json-schema": "^7.0.15" 105 | }, 106 | "engines": { 107 | "node": "^18.18.0 || ^20.9.0 || >=21.1.0" 108 | } 109 | }, 110 | "node_modules/@eslint/eslintrc": { 111 | "version": "3.3.1", 112 | "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.1.tgz", 113 | "integrity": "sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ==", 114 | "dev": true, 115 | "license": "MIT", 116 | "dependencies": { 117 | "ajv": "^6.12.4", 118 | "debug": "^4.3.2", 119 | "espree": "^10.0.1", 120 | "globals": "^14.0.0", 121 | "ignore": "^5.2.0", 122 | "import-fresh": "^3.2.1", 123 | "js-yaml": "^4.1.0", 124 | "minimatch": "^3.1.2", 125 | "strip-json-comments": "^3.1.1" 126 | }, 127 | "engines": { 128 | "node": "^18.18.0 || ^20.9.0 || >=21.1.0" 129 | }, 130 | "funding": { 131 | "url": "https://opencollective.com/eslint" 132 | } 133 | }, 134 | "node_modules/@eslint/eslintrc/node_modules/globals": { 135 | "version": "14.0.0", 136 | "resolved": "https://registry.npmjs.org/globals/-/globals-14.0.0.tgz", 137 | "integrity": "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==", 138 | "dev": true, 139 | "license": "MIT", 140 | "engines": { 141 | "node": ">=18" 142 | }, 143 | "funding": { 144 | "url": "https://github.com/sponsors/sindresorhus" 145 | } 146 | }, 147 | "node_modules/@eslint/js": { 148 | "version": "9.27.0", 149 | "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.27.0.tgz", 150 | "integrity": "sha512-G5JD9Tu5HJEu4z2Uo4aHY2sLV64B7CDMXxFzqzjl3NKd6RVzSXNoE80jk7Y0lJkTTkjiIhBAqmlYwjuBY3tvpA==", 151 | "dev": true, 152 | "license": "MIT", 153 | "engines": { 154 | "node": "^18.18.0 || ^20.9.0 || >=21.1.0" 155 | }, 156 | "funding": { 157 | "url": "https://eslint.org/donate" 158 | } 159 | }, 160 | "node_modules/@eslint/object-schema": { 161 | "version": "2.1.6", 162 | "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.6.tgz", 163 | "integrity": "sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA==", 164 | "dev": true, 165 | "license": "Apache-2.0", 166 | "engines": { 167 | "node": "^18.18.0 || ^20.9.0 || >=21.1.0" 168 | } 169 | }, 170 | "node_modules/@eslint/plugin-kit": { 171 | "version": "0.3.1", 172 | "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.3.1.tgz", 173 | "integrity": "sha512-0J+zgWxHN+xXONWIyPWKFMgVuJoZuGiIFu8yxk7RJjxkzpGmyja5wRFqZIVtjDVOQpV+Rw0iOAjYPE2eQyjr0w==", 174 | "dev": true, 175 | "license": "Apache-2.0", 176 | "dependencies": { 177 | "@eslint/core": "^0.14.0", 178 | "levn": "^0.4.1" 179 | }, 180 | "engines": { 181 | "node": "^18.18.0 || ^20.9.0 || >=21.1.0" 182 | } 183 | }, 184 | 
"node_modules/@humanfs/core": { 185 | "version": "0.19.1", 186 | "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz", 187 | "integrity": "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==", 188 | "dev": true, 189 | "license": "Apache-2.0", 190 | "engines": { 191 | "node": ">=18.18.0" 192 | } 193 | }, 194 | "node_modules/@humanfs/node": { 195 | "version": "0.16.6", 196 | "resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.6.tgz", 197 | "integrity": "sha512-YuI2ZHQL78Q5HbhDiBA1X4LmYdXCKCMQIfw0pw7piHJwyREFebJUvrQN4cMssyES6x+vfUbx1CIpaQUKYdQZOw==", 198 | "dev": true, 199 | "license": "Apache-2.0", 200 | "dependencies": { 201 | "@humanfs/core": "^0.19.1", 202 | "@humanwhocodes/retry": "^0.3.0" 203 | }, 204 | "engines": { 205 | "node": ">=18.18.0" 206 | } 207 | }, 208 | "node_modules/@humanfs/node/node_modules/@humanwhocodes/retry": { 209 | "version": "0.3.1", 210 | "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.3.1.tgz", 211 | "integrity": "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==", 212 | "dev": true, 213 | "license": "Apache-2.0", 214 | "engines": { 215 | "node": ">=18.18" 216 | }, 217 | "funding": { 218 | "type": "github", 219 | "url": "https://github.com/sponsors/nzakas" 220 | } 221 | }, 222 | "node_modules/@humanwhocodes/module-importer": { 223 | "version": "1.0.1", 224 | "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", 225 | "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", 226 | "dev": true, 227 | "license": "Apache-2.0", 228 | "engines": { 229 | "node": ">=12.22" 230 | }, 231 | "funding": { 232 | "type": "github", 233 | "url": "https://github.com/sponsors/nzakas" 234 | } 235 | }, 236 | "node_modules/@humanwhocodes/retry": { 237 | "version": "0.4.3", 238 | "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.3.tgz", 239 | "integrity": "sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==", 240 | "dev": true, 241 | "license": "Apache-2.0", 242 | "engines": { 243 | "node": ">=18.18" 244 | }, 245 | "funding": { 246 | "type": "github", 247 | "url": "https://github.com/sponsors/nzakas" 248 | } 249 | }, 250 | "node_modules/@isaacs/cliui": { 251 | "version": "8.0.2", 252 | "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", 253 | "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", 254 | "dev": true, 255 | "license": "ISC", 256 | "dependencies": { 257 | "string-width": "^5.1.2", 258 | "string-width-cjs": "npm:string-width@^4.2.0", 259 | "strip-ansi": "^7.0.1", 260 | "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", 261 | "wrap-ansi": "^8.1.0", 262 | "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" 263 | }, 264 | "engines": { 265 | "node": ">=12" 266 | } 267 | }, 268 | "node_modules/@pkgjs/parseargs": { 269 | "version": "0.11.0", 270 | "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", 271 | "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", 272 | "dev": true, 273 | "license": "MIT", 274 | "optional": true, 275 | "engines": { 276 | "node": ">=14" 277 | } 278 | }, 279 | "node_modules/@pkgr/core": { 280 | "version": "0.2.4", 281 | "resolved": "https://registry.npmjs.org/@pkgr/core/-/core-0.2.4.tgz", 282 | 
"integrity": "sha512-ROFF39F6ZrnzSUEmQQZUar0Jt4xVoP9WnDRdWwF4NNcXs3xBTLgBUDoOwW141y1jP+S8nahIbdxbFC7IShw9Iw==", 283 | "dev": true, 284 | "license": "MIT", 285 | "engines": { 286 | "node": "^12.20.0 || ^14.18.0 || >=16.0.0" 287 | }, 288 | "funding": { 289 | "url": "https://opencollective.com/pkgr" 290 | } 291 | }, 292 | "node_modules/@types/estree": { 293 | "version": "1.0.7", 294 | "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.7.tgz", 295 | "integrity": "sha512-w28IoSUCJpidD/TGviZwwMJckNESJZXFu7NBZ5YJ4mEUnNraUn9Pm8HSZm/jDF1pDWYKspWE7oVphigUPRakIQ==", 296 | "dev": true, 297 | "license": "MIT" 298 | }, 299 | "node_modules/@types/json-schema": { 300 | "version": "7.0.15", 301 | "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", 302 | "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", 303 | "dev": true, 304 | "license": "MIT" 305 | }, 306 | "node_modules/acorn": { 307 | "version": "8.14.1", 308 | "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.1.tgz", 309 | "integrity": "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg==", 310 | "dev": true, 311 | "license": "MIT", 312 | "bin": { 313 | "acorn": "bin/acorn" 314 | }, 315 | "engines": { 316 | "node": ">=0.4.0" 317 | } 318 | }, 319 | "node_modules/acorn-jsx": { 320 | "version": "5.3.2", 321 | "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", 322 | "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", 323 | "dev": true, 324 | "license": "MIT", 325 | "peerDependencies": { 326 | "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" 327 | } 328 | }, 329 | "node_modules/ajv": { 330 | "version": "6.12.6", 331 | "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", 332 | "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", 333 | "dev": true, 334 | "license": "MIT", 335 | "dependencies": { 336 | "fast-deep-equal": "^3.1.1", 337 | "fast-json-stable-stringify": "^2.0.0", 338 | "json-schema-traverse": "^0.4.1", 339 | "uri-js": "^4.2.2" 340 | }, 341 | "funding": { 342 | "type": "github", 343 | "url": "https://github.com/sponsors/epoberezkin" 344 | } 345 | }, 346 | "node_modules/ansi-regex": { 347 | "version": "6.1.0", 348 | "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", 349 | "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", 350 | "dev": true, 351 | "license": "MIT", 352 | "engines": { 353 | "node": ">=12" 354 | }, 355 | "funding": { 356 | "url": "https://github.com/chalk/ansi-regex?sponsor=1" 357 | } 358 | }, 359 | "node_modules/ansi-styles": { 360 | "version": "4.3.0", 361 | "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", 362 | "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", 363 | "dev": true, 364 | "license": "MIT", 365 | "dependencies": { 366 | "color-convert": "^2.0.1" 367 | }, 368 | "engines": { 369 | "node": ">=8" 370 | }, 371 | "funding": { 372 | "url": "https://github.com/chalk/ansi-styles?sponsor=1" 373 | } 374 | }, 375 | "node_modules/argparse": { 376 | "version": "2.0.1", 377 | "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", 378 | "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", 
379 | "dev": true, 380 | "license": "Python-2.0" 381 | }, 382 | "node_modules/async": { 383 | "version": "0.2.10", 384 | "resolved": "https://registry.npmjs.org/async/-/async-0.2.10.tgz", 385 | "integrity": "sha512-eAkdoKxU6/LkKDBzLpT+t6Ff5EtfSF4wx1WfJiPEEV7WNLnDaRXk0oVysiEPm262roaachGexwUv94WhSgN5TQ==", 386 | "dev": true 387 | }, 388 | "node_modules/balanced-match": { 389 | "version": "1.0.2", 390 | "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", 391 | "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", 392 | "dev": true, 393 | "license": "MIT" 394 | }, 395 | "node_modules/benchmark": { 396 | "version": "2.1.4", 397 | "resolved": "https://registry.npmjs.org/benchmark/-/benchmark-2.1.4.tgz", 398 | "integrity": "sha512-l9MlfN4M1K/H2fbhfMy3B7vJd6AGKJVQn2h6Sg/Yx+KckoUA7ewS5Vv6TjSq18ooE1kS9hhAlQRH3AkXIh/aOQ==", 399 | "dev": true, 400 | "license": "MIT", 401 | "dependencies": { 402 | "lodash": "^4.17.4", 403 | "platform": "^1.3.3" 404 | } 405 | }, 406 | "node_modules/brace-expansion": { 407 | "version": "1.1.11", 408 | "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", 409 | "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", 410 | "dev": true, 411 | "license": "MIT", 412 | "dependencies": { 413 | "balanced-match": "^1.0.0", 414 | "concat-map": "0.0.1" 415 | } 416 | }, 417 | "node_modules/browser-stdout": { 418 | "version": "1.3.1", 419 | "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", 420 | "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", 421 | "dev": true, 422 | "license": "ISC" 423 | }, 424 | "node_modules/buffer-from": { 425 | "version": "1.1.2", 426 | "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", 427 | "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", 428 | "dev": true, 429 | "license": "MIT" 430 | }, 431 | "node_modules/callsites": { 432 | "version": "3.1.0", 433 | "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", 434 | "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", 435 | "dev": true, 436 | "license": "MIT", 437 | "engines": { 438 | "node": ">=6" 439 | } 440 | }, 441 | "node_modules/camelcase": { 442 | "version": "6.3.0", 443 | "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", 444 | "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", 445 | "dev": true, 446 | "license": "MIT", 447 | "engines": { 448 | "node": ">=10" 449 | }, 450 | "funding": { 451 | "url": "https://github.com/sponsors/sindresorhus" 452 | } 453 | }, 454 | "node_modules/chalk": { 455 | "version": "4.1.2", 456 | "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", 457 | "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", 458 | "dev": true, 459 | "license": "MIT", 460 | "dependencies": { 461 | "ansi-styles": "^4.1.0", 462 | "supports-color": "^7.1.0" 463 | }, 464 | "engines": { 465 | "node": ">=10" 466 | }, 467 | "funding": { 468 | "url": "https://github.com/chalk/chalk?sponsor=1" 469 | } 470 | }, 471 | "node_modules/chokidar": { 472 | "version": "4.0.3", 473 | "resolved": 
"https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz", 474 | "integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==", 475 | "dev": true, 476 | "license": "MIT", 477 | "dependencies": { 478 | "readdirp": "^4.0.1" 479 | }, 480 | "engines": { 481 | "node": ">= 14.16.0" 482 | }, 483 | "funding": { 484 | "url": "https://paulmillr.com/funding/" 485 | } 486 | }, 487 | "node_modules/cliui": { 488 | "version": "8.0.1", 489 | "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", 490 | "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", 491 | "dev": true, 492 | "license": "ISC", 493 | "dependencies": { 494 | "string-width": "^4.2.0", 495 | "strip-ansi": "^6.0.1", 496 | "wrap-ansi": "^7.0.0" 497 | }, 498 | "engines": { 499 | "node": ">=12" 500 | } 501 | }, 502 | "node_modules/cliui/node_modules/ansi-regex": { 503 | "version": "5.0.1", 504 | "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", 505 | "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", 506 | "dev": true, 507 | "license": "MIT", 508 | "engines": { 509 | "node": ">=8" 510 | } 511 | }, 512 | "node_modules/cliui/node_modules/emoji-regex": { 513 | "version": "8.0.0", 514 | "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", 515 | "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", 516 | "dev": true, 517 | "license": "MIT" 518 | }, 519 | "node_modules/cliui/node_modules/string-width": { 520 | "version": "4.2.3", 521 | "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", 522 | "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", 523 | "dev": true, 524 | "license": "MIT", 525 | "dependencies": { 526 | "emoji-regex": "^8.0.0", 527 | "is-fullwidth-code-point": "^3.0.0", 528 | "strip-ansi": "^6.0.1" 529 | }, 530 | "engines": { 531 | "node": ">=8" 532 | } 533 | }, 534 | "node_modules/cliui/node_modules/strip-ansi": { 535 | "version": "6.0.1", 536 | "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", 537 | "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", 538 | "dev": true, 539 | "license": "MIT", 540 | "dependencies": { 541 | "ansi-regex": "^5.0.1" 542 | }, 543 | "engines": { 544 | "node": ">=8" 545 | } 546 | }, 547 | "node_modules/cliui/node_modules/wrap-ansi": { 548 | "version": "7.0.0", 549 | "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", 550 | "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", 551 | "dev": true, 552 | "license": "MIT", 553 | "dependencies": { 554 | "ansi-styles": "^4.0.0", 555 | "string-width": "^4.1.0", 556 | "strip-ansi": "^6.0.0" 557 | }, 558 | "engines": { 559 | "node": ">=10" 560 | }, 561 | "funding": { 562 | "url": "https://github.com/chalk/wrap-ansi?sponsor=1" 563 | } 564 | }, 565 | "node_modules/color-convert": { 566 | "version": "2.0.1", 567 | "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", 568 | "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", 569 | "dev": true, 570 | "license": "MIT", 571 | "dependencies": { 572 | "color-name": "~1.1.4" 573 | }, 574 | "engines": { 575 | 
"node": ">=7.0.0" 576 | } 577 | }, 578 | "node_modules/color-name": { 579 | "version": "1.1.4", 580 | "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", 581 | "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", 582 | "dev": true, 583 | "license": "MIT" 584 | }, 585 | "node_modules/concat-map": { 586 | "version": "0.0.1", 587 | "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", 588 | "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", 589 | "dev": true, 590 | "license": "MIT" 591 | }, 592 | "node_modules/concat-stream": { 593 | "version": "2.0.0", 594 | "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-2.0.0.tgz", 595 | "integrity": "sha512-MWufYdFw53ccGjCA+Ol7XJYpAlW6/prSMzuPOTRnJGcGzuhLn4Scrz7qf6o8bROZ514ltazcIFJZevcfbo0x7A==", 596 | "dev": true, 597 | "engines": [ 598 | "node >= 6.0" 599 | ], 600 | "license": "MIT", 601 | "dependencies": { 602 | "buffer-from": "^1.0.0", 603 | "inherits": "^2.0.3", 604 | "readable-stream": "^3.0.2", 605 | "typedarray": "^0.0.6" 606 | } 607 | }, 608 | "node_modules/cross-spawn": { 609 | "version": "7.0.6", 610 | "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", 611 | "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", 612 | "dev": true, 613 | "license": "MIT", 614 | "dependencies": { 615 | "path-key": "^3.1.0", 616 | "shebang-command": "^2.0.0", 617 | "which": "^2.0.1" 618 | }, 619 | "engines": { 620 | "node": ">= 8" 621 | } 622 | }, 623 | "node_modules/csv-parse": { 624 | "version": "5.6.0", 625 | "resolved": "https://registry.npmjs.org/csv-parse/-/csv-parse-5.6.0.tgz", 626 | "integrity": "sha512-l3nz3euub2QMg5ouu5U09Ew9Wf6/wQ8I++ch1loQ0ljmzhmfZYrH9fflS22i/PQEvsPvxCwxgz5q7UB8K1JO4Q==", 627 | "dev": true, 628 | "license": "MIT" 629 | }, 630 | "node_modules/csv-parser": { 631 | "version": "3.2.0", 632 | "resolved": "https://registry.npmjs.org/csv-parser/-/csv-parser-3.2.0.tgz", 633 | "integrity": "sha512-fgKbp+AJbn1h2dcAHKIdKNSSjfp43BZZykXsCjzALjKy80VXQNHPFJ6T9Afwdzoj24aMkq8GwDS7KGcDPpejrA==", 634 | "dev": true, 635 | "license": "MIT", 636 | "bin": { 637 | "csv-parser": "bin/csv-parser" 638 | }, 639 | "engines": { 640 | "node": ">= 10" 641 | } 642 | }, 643 | "node_modules/debug": { 644 | "version": "4.4.1", 645 | "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", 646 | "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", 647 | "dev": true, 648 | "license": "MIT", 649 | "dependencies": { 650 | "ms": "^2.1.3" 651 | }, 652 | "engines": { 653 | "node": ">=6.0" 654 | }, 655 | "peerDependenciesMeta": { 656 | "supports-color": { 657 | "optional": true 658 | } 659 | } 660 | }, 661 | "node_modules/decamelize": { 662 | "version": "4.0.0", 663 | "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz", 664 | "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==", 665 | "dev": true, 666 | "license": "MIT", 667 | "engines": { 668 | "node": ">=10" 669 | }, 670 | "funding": { 671 | "url": "https://github.com/sponsors/sindresorhus" 672 | } 673 | }, 674 | "node_modules/deep-is": { 675 | "version": "0.1.4", 676 | "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", 677 | "integrity": 
"sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", 678 | "dev": true, 679 | "license": "MIT" 680 | }, 681 | "node_modules/diff": { 682 | "version": "7.0.0", 683 | "resolved": "https://registry.npmjs.org/diff/-/diff-7.0.0.tgz", 684 | "integrity": "sha512-PJWHUb1RFevKCwaFA9RlG5tCd+FO5iRh9A8HEtkmBH2Li03iJriB6m6JIN4rGz3K3JLawI7/veA1xzRKP6ISBw==", 685 | "dev": true, 686 | "license": "BSD-3-Clause", 687 | "engines": { 688 | "node": ">=0.3.1" 689 | } 690 | }, 691 | "node_modules/duplex-child-process": { 692 | "version": "1.0.1", 693 | "resolved": "https://registry.npmjs.org/duplex-child-process/-/duplex-child-process-1.0.1.tgz", 694 | "integrity": "sha512-tWbt4tyioDjyK5nh+qicbdvBvNjSXsTUF5zKUwSauuKPg1mokjwn/HezwfvWhh6hXoLdgetY+ZlzU/sMwUMJkg==", 695 | "dev": true, 696 | "license": "MIT" 697 | }, 698 | "node_modules/eastasianwidth": { 699 | "version": "0.2.0", 700 | "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", 701 | "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", 702 | "dev": true, 703 | "license": "MIT" 704 | }, 705 | "node_modules/emoji-regex": { 706 | "version": "9.2.2", 707 | "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", 708 | "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", 709 | "dev": true, 710 | "license": "MIT" 711 | }, 712 | "node_modules/escalade": { 713 | "version": "3.2.0", 714 | "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", 715 | "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", 716 | "dev": true, 717 | "license": "MIT", 718 | "engines": { 719 | "node": ">=6" 720 | } 721 | }, 722 | "node_modules/escape-string-regexp": { 723 | "version": "4.0.0", 724 | "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", 725 | "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", 726 | "dev": true, 727 | "license": "MIT", 728 | "engines": { 729 | "node": ">=10" 730 | }, 731 | "funding": { 732 | "url": "https://github.com/sponsors/sindresorhus" 733 | } 734 | }, 735 | "node_modules/eslint": { 736 | "version": "9.27.0", 737 | "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.27.0.tgz", 738 | "integrity": "sha512-ixRawFQuMB9DZ7fjU3iGGganFDp3+45bPOdaRurcFHSXO1e/sYwUX/FtQZpLZJR6SjMoJH8hR2pPEAfDyCoU2Q==", 739 | "dev": true, 740 | "license": "MIT", 741 | "dependencies": { 742 | "@eslint-community/eslint-utils": "^4.2.0", 743 | "@eslint-community/regexpp": "^4.12.1", 744 | "@eslint/config-array": "^0.20.0", 745 | "@eslint/config-helpers": "^0.2.1", 746 | "@eslint/core": "^0.14.0", 747 | "@eslint/eslintrc": "^3.3.1", 748 | "@eslint/js": "9.27.0", 749 | "@eslint/plugin-kit": "^0.3.1", 750 | "@humanfs/node": "^0.16.6", 751 | "@humanwhocodes/module-importer": "^1.0.1", 752 | "@humanwhocodes/retry": "^0.4.2", 753 | "@types/estree": "^1.0.6", 754 | "@types/json-schema": "^7.0.15", 755 | "ajv": "^6.12.4", 756 | "chalk": "^4.0.0", 757 | "cross-spawn": "^7.0.6", 758 | "debug": "^4.3.2", 759 | "escape-string-regexp": "^4.0.0", 760 | "eslint-scope": "^8.3.0", 761 | "eslint-visitor-keys": "^4.2.0", 762 | "espree": "^10.3.0", 763 | "esquery": "^1.5.0", 764 | "esutils": "^2.0.2", 765 | "fast-deep-equal": "^3.1.3", 766 | "file-entry-cache": "^8.0.0", 767 | "find-up": "^5.0.0", 768 | 
"glob-parent": "^6.0.2", 769 | "ignore": "^5.2.0", 770 | "imurmurhash": "^0.1.4", 771 | "is-glob": "^4.0.0", 772 | "json-stable-stringify-without-jsonify": "^1.0.1", 773 | "lodash.merge": "^4.6.2", 774 | "minimatch": "^3.1.2", 775 | "natural-compare": "^1.4.0", 776 | "optionator": "^0.9.3" 777 | }, 778 | "bin": { 779 | "eslint": "bin/eslint.js" 780 | }, 781 | "engines": { 782 | "node": "^18.18.0 || ^20.9.0 || >=21.1.0" 783 | }, 784 | "funding": { 785 | "url": "https://eslint.org/donate" 786 | }, 787 | "peerDependencies": { 788 | "jiti": "*" 789 | }, 790 | "peerDependenciesMeta": { 791 | "jiti": { 792 | "optional": true 793 | } 794 | } 795 | }, 796 | "node_modules/eslint-config-prettier": { 797 | "version": "10.1.5", 798 | "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-10.1.5.tgz", 799 | "integrity": "sha512-zc1UmCpNltmVY34vuLRV61r1K27sWuX39E+uyUnY8xS2Bex88VV9cugG+UZbRSRGtGyFboj+D8JODyme1plMpw==", 800 | "dev": true, 801 | "license": "MIT", 802 | "bin": { 803 | "eslint-config-prettier": "bin/cli.js" 804 | }, 805 | "funding": { 806 | "url": "https://opencollective.com/eslint-config-prettier" 807 | }, 808 | "peerDependencies": { 809 | "eslint": ">=7.0.0" 810 | } 811 | }, 812 | "node_modules/eslint-plugin-prettier": { 813 | "version": "5.4.0", 814 | "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-5.4.0.tgz", 815 | "integrity": "sha512-BvQOvUhkVQM1i63iMETK9Hjud9QhqBnbtT1Zc642p9ynzBuCe5pybkOnvqZIBypXmMlsGcnU4HZ8sCTPfpAexA==", 816 | "dev": true, 817 | "license": "MIT", 818 | "dependencies": { 819 | "prettier-linter-helpers": "^1.0.0", 820 | "synckit": "^0.11.0" 821 | }, 822 | "engines": { 823 | "node": "^14.18.0 || >=16.0.0" 824 | }, 825 | "funding": { 826 | "url": "https://opencollective.com/eslint-plugin-prettier" 827 | }, 828 | "peerDependencies": { 829 | "@types/eslint": ">=8.0.0", 830 | "eslint": ">=8.0.0", 831 | "eslint-config-prettier": ">= 7.0.0 <10.0.0 || >=10.1.0", 832 | "prettier": ">=3.0.0" 833 | }, 834 | "peerDependenciesMeta": { 835 | "@types/eslint": { 836 | "optional": true 837 | }, 838 | "eslint-config-prettier": { 839 | "optional": true 840 | } 841 | } 842 | }, 843 | "node_modules/eslint-scope": { 844 | "version": "8.3.0", 845 | "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.3.0.tgz", 846 | "integrity": "sha512-pUNxi75F8MJ/GdeKtVLSbYg4ZI34J6C0C7sbL4YOp2exGwen7ZsuBqKzUhXd0qMQ362yET3z+uPwKeg/0C2XCQ==", 847 | "dev": true, 848 | "license": "BSD-2-Clause", 849 | "dependencies": { 850 | "esrecurse": "^4.3.0", 851 | "estraverse": "^5.2.0" 852 | }, 853 | "engines": { 854 | "node": "^18.18.0 || ^20.9.0 || >=21.1.0" 855 | }, 856 | "funding": { 857 | "url": "https://opencollective.com/eslint" 858 | } 859 | }, 860 | "node_modules/eslint-visitor-keys": { 861 | "version": "4.2.0", 862 | "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz", 863 | "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==", 864 | "dev": true, 865 | "license": "Apache-2.0", 866 | "engines": { 867 | "node": "^18.18.0 || ^20.9.0 || >=21.1.0" 868 | }, 869 | "funding": { 870 | "url": "https://opencollective.com/eslint" 871 | } 872 | }, 873 | "node_modules/espree": { 874 | "version": "10.3.0", 875 | "resolved": "https://registry.npmjs.org/espree/-/espree-10.3.0.tgz", 876 | "integrity": "sha512-0QYC8b24HWY8zjRnDTL6RiHfDbAWn63qb4LMj1Z4b076A4une81+z03Kg7l7mn/48PUTqoLptSXez8oknU8Clg==", 877 | "dev": true, 878 | "license": 
"BSD-2-Clause", 879 | "dependencies": { 880 | "acorn": "^8.14.0", 881 | "acorn-jsx": "^5.3.2", 882 | "eslint-visitor-keys": "^4.2.0" 883 | }, 884 | "engines": { 885 | "node": "^18.18.0 || ^20.9.0 || >=21.1.0" 886 | }, 887 | "funding": { 888 | "url": "https://opencollective.com/eslint" 889 | } 890 | }, 891 | "node_modules/esquery": { 892 | "version": "1.6.0", 893 | "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", 894 | "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", 895 | "dev": true, 896 | "license": "BSD-3-Clause", 897 | "dependencies": { 898 | "estraverse": "^5.1.0" 899 | }, 900 | "engines": { 901 | "node": ">=0.10" 902 | } 903 | }, 904 | "node_modules/esrecurse": { 905 | "version": "4.3.0", 906 | "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", 907 | "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", 908 | "dev": true, 909 | "license": "BSD-2-Clause", 910 | "dependencies": { 911 | "estraverse": "^5.2.0" 912 | }, 913 | "engines": { 914 | "node": ">=4.0" 915 | } 916 | }, 917 | "node_modules/estraverse": { 918 | "version": "5.3.0", 919 | "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", 920 | "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", 921 | "dev": true, 922 | "license": "BSD-2-Clause", 923 | "engines": { 924 | "node": ">=4.0" 925 | } 926 | }, 927 | "node_modules/esutils": { 928 | "version": "2.0.3", 929 | "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", 930 | "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", 931 | "dev": true, 932 | "license": "BSD-2-Clause", 933 | "engines": { 934 | "node": ">=0.10.0" 935 | } 936 | }, 937 | "node_modules/fast-deep-equal": { 938 | "version": "3.1.3", 939 | "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", 940 | "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", 941 | "dev": true, 942 | "license": "MIT" 943 | }, 944 | "node_modules/fast-diff": { 945 | "version": "1.3.0", 946 | "resolved": "https://registry.npmjs.org/fast-diff/-/fast-diff-1.3.0.tgz", 947 | "integrity": "sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw==", 948 | "dev": true, 949 | "license": "Apache-2.0" 950 | }, 951 | "node_modules/fast-json-stable-stringify": { 952 | "version": "2.1.0", 953 | "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", 954 | "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", 955 | "dev": true, 956 | "license": "MIT" 957 | }, 958 | "node_modules/fast-levenshtein": { 959 | "version": "2.0.6", 960 | "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", 961 | "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", 962 | "dev": true, 963 | "license": "MIT" 964 | }, 965 | "node_modules/file-entry-cache": { 966 | "version": "8.0.0", 967 | "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", 968 | "integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==", 969 | "dev": true, 970 | "license": "MIT", 971 | 
"dependencies": { 972 | "flat-cache": "^4.0.0" 973 | }, 974 | "engines": { 975 | "node": ">=16.0.0" 976 | } 977 | }, 978 | "node_modules/find-up": { 979 | "version": "5.0.0", 980 | "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", 981 | "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", 982 | "dev": true, 983 | "license": "MIT", 984 | "dependencies": { 985 | "locate-path": "^6.0.0", 986 | "path-exists": "^4.0.0" 987 | }, 988 | "engines": { 989 | "node": ">=10" 990 | }, 991 | "funding": { 992 | "url": "https://github.com/sponsors/sindresorhus" 993 | } 994 | }, 995 | "node_modules/flat": { 996 | "version": "5.0.2", 997 | "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", 998 | "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", 999 | "dev": true, 1000 | "license": "BSD-3-Clause", 1001 | "bin": { 1002 | "flat": "cli.js" 1003 | } 1004 | }, 1005 | "node_modules/flat-cache": { 1006 | "version": "4.0.1", 1007 | "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-4.0.1.tgz", 1008 | "integrity": "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==", 1009 | "dev": true, 1010 | "license": "MIT", 1011 | "dependencies": { 1012 | "flatted": "^3.2.9", 1013 | "keyv": "^4.5.4" 1014 | }, 1015 | "engines": { 1016 | "node": ">=16" 1017 | } 1018 | }, 1019 | "node_modules/flatted": { 1020 | "version": "3.3.3", 1021 | "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", 1022 | "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", 1023 | "dev": true, 1024 | "license": "ISC" 1025 | }, 1026 | "node_modules/foreground-child": { 1027 | "version": "3.3.1", 1028 | "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz", 1029 | "integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==", 1030 | "dev": true, 1031 | "license": "ISC", 1032 | "dependencies": { 1033 | "cross-spawn": "^7.0.6", 1034 | "signal-exit": "^4.0.1" 1035 | }, 1036 | "engines": { 1037 | "node": ">=14" 1038 | }, 1039 | "funding": { 1040 | "url": "https://github.com/sponsors/isaacs" 1041 | } 1042 | }, 1043 | "node_modules/get-caller-file": { 1044 | "version": "2.0.5", 1045 | "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", 1046 | "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", 1047 | "dev": true, 1048 | "license": "ISC", 1049 | "engines": { 1050 | "node": "6.* || 8.* || >= 10.*" 1051 | } 1052 | }, 1053 | "node_modules/glob": { 1054 | "version": "10.4.5", 1055 | "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", 1056 | "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", 1057 | "dev": true, 1058 | "license": "ISC", 1059 | "dependencies": { 1060 | "foreground-child": "^3.1.0", 1061 | "jackspeak": "^3.1.2", 1062 | "minimatch": "^9.0.4", 1063 | "minipass": "^7.1.2", 1064 | "package-json-from-dist": "^1.0.0", 1065 | "path-scurry": "^1.11.1" 1066 | }, 1067 | "bin": { 1068 | "glob": "dist/esm/bin.mjs" 1069 | }, 1070 | "funding": { 1071 | "url": "https://github.com/sponsors/isaacs" 1072 | } 1073 | }, 1074 | "node_modules/glob-parent": { 1075 | "version": "6.0.2", 1076 | "resolved": 
"https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", 1077 | "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", 1078 | "dev": true, 1079 | "license": "ISC", 1080 | "dependencies": { 1081 | "is-glob": "^4.0.3" 1082 | }, 1083 | "engines": { 1084 | "node": ">=10.13.0" 1085 | } 1086 | }, 1087 | "node_modules/glob/node_modules/brace-expansion": { 1088 | "version": "2.0.1", 1089 | "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", 1090 | "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", 1091 | "dev": true, 1092 | "license": "MIT", 1093 | "dependencies": { 1094 | "balanced-match": "^1.0.0" 1095 | } 1096 | }, 1097 | "node_modules/glob/node_modules/minimatch": { 1098 | "version": "9.0.5", 1099 | "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", 1100 | "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", 1101 | "dev": true, 1102 | "license": "ISC", 1103 | "dependencies": { 1104 | "brace-expansion": "^2.0.1" 1105 | }, 1106 | "engines": { 1107 | "node": ">=16 || 14 >=14.17" 1108 | }, 1109 | "funding": { 1110 | "url": "https://github.com/sponsors/isaacs" 1111 | } 1112 | }, 1113 | "node_modules/globals": { 1114 | "version": "16.2.0", 1115 | "resolved": "https://registry.npmjs.org/globals/-/globals-16.2.0.tgz", 1116 | "integrity": "sha512-O+7l9tPdHCU320IigZZPj5zmRCFG9xHmx9cU8FqU2Rp+JN714seHV+2S9+JslCpY4gJwU2vOGox0wzgae/MCEg==", 1117 | "dev": true, 1118 | "license": "MIT", 1119 | "engines": { 1120 | "node": ">=18" 1121 | }, 1122 | "funding": { 1123 | "url": "https://github.com/sponsors/sindresorhus" 1124 | } 1125 | }, 1126 | "node_modules/has-flag": { 1127 | "version": "4.0.0", 1128 | "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", 1129 | "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", 1130 | "dev": true, 1131 | "license": "MIT", 1132 | "engines": { 1133 | "node": ">=8" 1134 | } 1135 | }, 1136 | "node_modules/he": { 1137 | "version": "1.2.0", 1138 | "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", 1139 | "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", 1140 | "dev": true, 1141 | "license": "MIT", 1142 | "bin": { 1143 | "he": "bin/he" 1144 | } 1145 | }, 1146 | "node_modules/ignore": { 1147 | "version": "5.3.2", 1148 | "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", 1149 | "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", 1150 | "dev": true, 1151 | "license": "MIT", 1152 | "engines": { 1153 | "node": ">= 4" 1154 | } 1155 | }, 1156 | "node_modules/import-fresh": { 1157 | "version": "3.3.1", 1158 | "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz", 1159 | "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==", 1160 | "dev": true, 1161 | "license": "MIT", 1162 | "dependencies": { 1163 | "parent-module": "^1.0.0", 1164 | "resolve-from": "^4.0.0" 1165 | }, 1166 | "engines": { 1167 | "node": ">=6" 1168 | }, 1169 | "funding": { 1170 | "url": "https://github.com/sponsors/sindresorhus" 1171 | } 1172 | }, 1173 | "node_modules/imurmurhash": { 1174 | "version": "0.1.4", 1175 | "resolved": 
"https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", 1176 | "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", 1177 | "dev": true, 1178 | "license": "MIT", 1179 | "engines": { 1180 | "node": ">=0.8.19" 1181 | } 1182 | }, 1183 | "node_modules/inherits": { 1184 | "version": "2.0.4", 1185 | "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", 1186 | "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", 1187 | "dev": true, 1188 | "license": "ISC" 1189 | }, 1190 | "node_modules/is-extglob": { 1191 | "version": "2.1.1", 1192 | "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", 1193 | "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", 1194 | "dev": true, 1195 | "license": "MIT", 1196 | "engines": { 1197 | "node": ">=0.10.0" 1198 | } 1199 | }, 1200 | "node_modules/is-fullwidth-code-point": { 1201 | "version": "3.0.0", 1202 | "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", 1203 | "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", 1204 | "dev": true, 1205 | "license": "MIT", 1206 | "engines": { 1207 | "node": ">=8" 1208 | } 1209 | }, 1210 | "node_modules/is-glob": { 1211 | "version": "4.0.3", 1212 | "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", 1213 | "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", 1214 | "dev": true, 1215 | "license": "MIT", 1216 | "dependencies": { 1217 | "is-extglob": "^2.1.1" 1218 | }, 1219 | "engines": { 1220 | "node": ">=0.10.0" 1221 | } 1222 | }, 1223 | "node_modules/is-plain-obj": { 1224 | "version": "2.1.0", 1225 | "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", 1226 | "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", 1227 | "dev": true, 1228 | "license": "MIT", 1229 | "engines": { 1230 | "node": ">=8" 1231 | } 1232 | }, 1233 | "node_modules/is-unicode-supported": { 1234 | "version": "0.1.0", 1235 | "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", 1236 | "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", 1237 | "dev": true, 1238 | "license": "MIT", 1239 | "engines": { 1240 | "node": ">=10" 1241 | }, 1242 | "funding": { 1243 | "url": "https://github.com/sponsors/sindresorhus" 1244 | } 1245 | }, 1246 | "node_modules/isexe": { 1247 | "version": "2.0.0", 1248 | "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", 1249 | "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", 1250 | "dev": true, 1251 | "license": "ISC" 1252 | }, 1253 | "node_modules/jackspeak": { 1254 | "version": "3.4.3", 1255 | "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz", 1256 | "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==", 1257 | "dev": true, 1258 | "license": "BlueOak-1.0.0", 1259 | "dependencies": { 1260 | "@isaacs/cliui": "^8.0.2" 1261 | }, 1262 | "funding": { 1263 | "url": "https://github.com/sponsors/isaacs" 1264 | }, 1265 | "optionalDependencies": { 1266 | "@pkgjs/parseargs": "^0.11.0" 1267 | } 1268 | }, 1269 | 
"node_modules/js-yaml": { 1270 | "version": "4.1.0", 1271 | "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", 1272 | "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", 1273 | "dev": true, 1274 | "license": "MIT", 1275 | "dependencies": { 1276 | "argparse": "^2.0.1" 1277 | }, 1278 | "bin": { 1279 | "js-yaml": "bin/js-yaml.js" 1280 | } 1281 | }, 1282 | "node_modules/json-buffer": { 1283 | "version": "3.0.1", 1284 | "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", 1285 | "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", 1286 | "dev": true, 1287 | "license": "MIT" 1288 | }, 1289 | "node_modules/json-schema-traverse": { 1290 | "version": "0.4.1", 1291 | "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", 1292 | "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", 1293 | "dev": true, 1294 | "license": "MIT" 1295 | }, 1296 | "node_modules/json-stable-stringify-without-jsonify": { 1297 | "version": "1.0.1", 1298 | "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", 1299 | "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", 1300 | "dev": true, 1301 | "license": "MIT" 1302 | }, 1303 | "node_modules/keyv": { 1304 | "version": "4.5.4", 1305 | "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", 1306 | "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", 1307 | "dev": true, 1308 | "license": "MIT", 1309 | "dependencies": { 1310 | "json-buffer": "3.0.1" 1311 | } 1312 | }, 1313 | "node_modules/levn": { 1314 | "version": "0.4.1", 1315 | "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", 1316 | "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", 1317 | "dev": true, 1318 | "license": "MIT", 1319 | "dependencies": { 1320 | "prelude-ls": "^1.2.1", 1321 | "type-check": "~0.4.0" 1322 | }, 1323 | "engines": { 1324 | "node": ">= 0.8.0" 1325 | } 1326 | }, 1327 | "node_modules/locate-path": { 1328 | "version": "6.0.0", 1329 | "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", 1330 | "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", 1331 | "dev": true, 1332 | "license": "MIT", 1333 | "dependencies": { 1334 | "p-locate": "^5.0.0" 1335 | }, 1336 | "engines": { 1337 | "node": ">=10" 1338 | }, 1339 | "funding": { 1340 | "url": "https://github.com/sponsors/sindresorhus" 1341 | } 1342 | }, 1343 | "node_modules/lodash": { 1344 | "version": "4.17.21", 1345 | "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", 1346 | "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", 1347 | "dev": true, 1348 | "license": "MIT" 1349 | }, 1350 | "node_modules/lodash.merge": { 1351 | "version": "4.6.2", 1352 | "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", 1353 | "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", 1354 | "dev": true, 1355 | "license": "MIT" 1356 | }, 1357 | "node_modules/log-symbols": { 1358 | "version": "4.1.0", 1359 | 
"resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", 1360 | "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", 1361 | "dev": true, 1362 | "license": "MIT", 1363 | "dependencies": { 1364 | "chalk": "^4.1.0", 1365 | "is-unicode-supported": "^0.1.0" 1366 | }, 1367 | "engines": { 1368 | "node": ">=10" 1369 | }, 1370 | "funding": { 1371 | "url": "https://github.com/sponsors/sindresorhus" 1372 | } 1373 | }, 1374 | "node_modules/lru-cache": { 1375 | "version": "10.4.3", 1376 | "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", 1377 | "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", 1378 | "dev": true, 1379 | "license": "ISC" 1380 | }, 1381 | "node_modules/minimatch": { 1382 | "version": "3.1.2", 1383 | "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", 1384 | "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", 1385 | "dev": true, 1386 | "license": "ISC", 1387 | "dependencies": { 1388 | "brace-expansion": "^1.1.7" 1389 | }, 1390 | "engines": { 1391 | "node": "*" 1392 | } 1393 | }, 1394 | "node_modules/minipass": { 1395 | "version": "7.1.2", 1396 | "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", 1397 | "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", 1398 | "dev": true, 1399 | "license": "ISC", 1400 | "engines": { 1401 | "node": ">=16 || 14 >=14.17" 1402 | } 1403 | }, 1404 | "node_modules/mocha": { 1405 | "version": "11.5.0", 1406 | "resolved": "https://registry.npmjs.org/mocha/-/mocha-11.5.0.tgz", 1407 | "integrity": "sha512-VKDjhy6LMTKm0WgNEdlY77YVsD49LZnPSXJAaPNL9NRYQADxvORsyG1DIQY6v53BKTnlNbEE2MbVCDbnxr4K3w==", 1408 | "dev": true, 1409 | "license": "MIT", 1410 | "dependencies": { 1411 | "browser-stdout": "^1.3.1", 1412 | "chokidar": "^4.0.1", 1413 | "debug": "^4.3.5", 1414 | "diff": "^7.0.0", 1415 | "escape-string-regexp": "^4.0.0", 1416 | "find-up": "^5.0.0", 1417 | "glob": "^10.4.5", 1418 | "he": "^1.2.0", 1419 | "js-yaml": "^4.1.0", 1420 | "log-symbols": "^4.1.0", 1421 | "minimatch": "^9.0.5", 1422 | "ms": "^2.1.3", 1423 | "picocolors": "^1.1.1", 1424 | "serialize-javascript": "^6.0.2", 1425 | "strip-json-comments": "^3.1.1", 1426 | "supports-color": "^8.1.1", 1427 | "workerpool": "^6.5.1", 1428 | "yargs": "^17.7.2", 1429 | "yargs-parser": "^21.1.1", 1430 | "yargs-unparser": "^2.0.0" 1431 | }, 1432 | "bin": { 1433 | "_mocha": "bin/_mocha", 1434 | "mocha": "bin/mocha.js" 1435 | }, 1436 | "engines": { 1437 | "node": "^18.18.0 || ^20.9.0 || >=21.1.0" 1438 | } 1439 | }, 1440 | "node_modules/mocha/node_modules/brace-expansion": { 1441 | "version": "2.0.1", 1442 | "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", 1443 | "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", 1444 | "dev": true, 1445 | "license": "MIT", 1446 | "dependencies": { 1447 | "balanced-match": "^1.0.0" 1448 | } 1449 | }, 1450 | "node_modules/mocha/node_modules/minimatch": { 1451 | "version": "9.0.5", 1452 | "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", 1453 | "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", 1454 | "dev": true, 1455 | "license": "ISC", 1456 | "dependencies": { 1457 | "brace-expansion": 
"^2.0.1" 1458 | }, 1459 | "engines": { 1460 | "node": ">=16 || 14 >=14.17" 1461 | }, 1462 | "funding": { 1463 | "url": "https://github.com/sponsors/isaacs" 1464 | } 1465 | }, 1466 | "node_modules/mocha/node_modules/supports-color": { 1467 | "version": "8.1.1", 1468 | "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", 1469 | "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", 1470 | "dev": true, 1471 | "license": "MIT", 1472 | "dependencies": { 1473 | "has-flag": "^4.0.0" 1474 | }, 1475 | "engines": { 1476 | "node": ">=10" 1477 | }, 1478 | "funding": { 1479 | "url": "https://github.com/chalk/supports-color?sponsor=1" 1480 | } 1481 | }, 1482 | "node_modules/ms": { 1483 | "version": "2.1.3", 1484 | "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", 1485 | "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", 1486 | "dev": true, 1487 | "license": "MIT" 1488 | }, 1489 | "node_modules/natural-compare": { 1490 | "version": "1.4.0", 1491 | "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", 1492 | "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", 1493 | "dev": true, 1494 | "license": "MIT" 1495 | }, 1496 | "node_modules/optionator": { 1497 | "version": "0.9.4", 1498 | "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", 1499 | "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", 1500 | "dev": true, 1501 | "license": "MIT", 1502 | "dependencies": { 1503 | "deep-is": "^0.1.3", 1504 | "fast-levenshtein": "^2.0.6", 1505 | "levn": "^0.4.1", 1506 | "prelude-ls": "^1.2.1", 1507 | "type-check": "^0.4.0", 1508 | "word-wrap": "^1.2.5" 1509 | }, 1510 | "engines": { 1511 | "node": ">= 0.8.0" 1512 | } 1513 | }, 1514 | "node_modules/p-limit": { 1515 | "version": "3.1.0", 1516 | "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", 1517 | "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", 1518 | "dev": true, 1519 | "license": "MIT", 1520 | "dependencies": { 1521 | "yocto-queue": "^0.1.0" 1522 | }, 1523 | "engines": { 1524 | "node": ">=10" 1525 | }, 1526 | "funding": { 1527 | "url": "https://github.com/sponsors/sindresorhus" 1528 | } 1529 | }, 1530 | "node_modules/p-locate": { 1531 | "version": "5.0.0", 1532 | "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", 1533 | "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", 1534 | "dev": true, 1535 | "license": "MIT", 1536 | "dependencies": { 1537 | "p-limit": "^3.0.2" 1538 | }, 1539 | "engines": { 1540 | "node": ">=10" 1541 | }, 1542 | "funding": { 1543 | "url": "https://github.com/sponsors/sindresorhus" 1544 | } 1545 | }, 1546 | "node_modules/package-json-from-dist": { 1547 | "version": "1.0.1", 1548 | "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", 1549 | "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==", 1550 | "dev": true, 1551 | "license": "BlueOak-1.0.0" 1552 | }, 1553 | "node_modules/parent-module": { 1554 | "version": "1.0.1", 1555 | "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", 1556 | "integrity": 
"sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", 1557 | "dev": true, 1558 | "license": "MIT", 1559 | "dependencies": { 1560 | "callsites": "^3.0.0" 1561 | }, 1562 | "engines": { 1563 | "node": ">=6" 1564 | } 1565 | }, 1566 | "node_modules/path-exists": { 1567 | "version": "4.0.0", 1568 | "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", 1569 | "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", 1570 | "dev": true, 1571 | "license": "MIT", 1572 | "engines": { 1573 | "node": ">=8" 1574 | } 1575 | }, 1576 | "node_modules/path-key": { 1577 | "version": "3.1.1", 1578 | "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", 1579 | "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", 1580 | "dev": true, 1581 | "license": "MIT", 1582 | "engines": { 1583 | "node": ">=8" 1584 | } 1585 | }, 1586 | "node_modules/path-scurry": { 1587 | "version": "1.11.1", 1588 | "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", 1589 | "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", 1590 | "dev": true, 1591 | "license": "BlueOak-1.0.0", 1592 | "dependencies": { 1593 | "lru-cache": "^10.2.0", 1594 | "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" 1595 | }, 1596 | "engines": { 1597 | "node": ">=16 || 14 >=14.18" 1598 | }, 1599 | "funding": { 1600 | "url": "https://github.com/sponsors/isaacs" 1601 | } 1602 | }, 1603 | "node_modules/pg": { 1604 | "version": "8.16.0", 1605 | "resolved": "https://registry.npmjs.org/pg/-/pg-8.16.0.tgz", 1606 | "integrity": "sha512-7SKfdvP8CTNXjMUzfcVTaI+TDzBEeaUnVwiVGZQD1Hh33Kpev7liQba9uLd4CfN8r9mCVsD0JIpq03+Unpz+kg==", 1607 | "dev": true, 1608 | "license": "MIT", 1609 | "dependencies": { 1610 | "pg-connection-string": "^2.9.0", 1611 | "pg-pool": "^3.10.0", 1612 | "pg-protocol": "^1.10.0", 1613 | "pg-types": "2.2.0", 1614 | "pgpass": "1.0.5" 1615 | }, 1616 | "engines": { 1617 | "node": ">= 8.0.0" 1618 | }, 1619 | "optionalDependencies": { 1620 | "pg-cloudflare": "^1.2.5" 1621 | }, 1622 | "peerDependencies": { 1623 | "pg-native": ">=3.0.1" 1624 | }, 1625 | "peerDependenciesMeta": { 1626 | "pg-native": { 1627 | "optional": true 1628 | } 1629 | } 1630 | }, 1631 | "node_modules/pg-cloudflare": { 1632 | "version": "1.2.5", 1633 | "resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.2.5.tgz", 1634 | "integrity": "sha512-OOX22Vt0vOSRrdoUPKJ8Wi2OpE/o/h9T8X1s4qSkCedbNah9ei2W2765be8iMVxQUsvgT7zIAT2eIa9fs5+vtg==", 1635 | "dev": true, 1636 | "license": "MIT", 1637 | "optional": true 1638 | }, 1639 | "node_modules/pg-connection-string": { 1640 | "version": "2.9.0", 1641 | "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.9.0.tgz", 1642 | "integrity": "sha512-P2DEBKuvh5RClafLngkAuGe9OUlFV7ebu8w1kmaaOgPcpJd1RIFh7otETfI6hAR8YupOLFTY7nuvvIn7PLciUQ==", 1643 | "dev": true, 1644 | "license": "MIT" 1645 | }, 1646 | "node_modules/pg-int8": { 1647 | "version": "1.0.1", 1648 | "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", 1649 | "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", 1650 | "dev": true, 1651 | "license": "ISC", 1652 | "engines": { 1653 | "node": ">=4.0.0" 1654 | } 1655 | }, 1656 | "node_modules/pg-pool": { 1657 | "version": "3.10.0", 1658 | "resolved": 
"https://registry.npmjs.org/pg-pool/-/pg-pool-3.10.0.tgz", 1659 | "integrity": "sha512-DzZ26On4sQ0KmqnO34muPcmKbhrjmyiO4lCCR0VwEd7MjmiKf5NTg/6+apUEu0NF7ESa37CGzFxH513CoUmWnA==", 1660 | "dev": true, 1661 | "license": "MIT", 1662 | "peerDependencies": { 1663 | "pg": ">=8.0" 1664 | } 1665 | }, 1666 | "node_modules/pg-protocol": { 1667 | "version": "1.10.0", 1668 | "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.10.0.tgz", 1669 | "integrity": "sha512-IpdytjudNuLv8nhlHs/UrVBhU0e78J0oIS/0AVdTbWxSOkFUVdsHC/NrorO6nXsQNDTT1kzDSOMJubBQviX18Q==", 1670 | "dev": true, 1671 | "license": "MIT" 1672 | }, 1673 | "node_modules/pg-types": { 1674 | "version": "2.2.0", 1675 | "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", 1676 | "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", 1677 | "dev": true, 1678 | "license": "MIT", 1679 | "dependencies": { 1680 | "pg-int8": "1.0.1", 1681 | "postgres-array": "~2.0.0", 1682 | "postgres-bytea": "~1.0.0", 1683 | "postgres-date": "~1.0.4", 1684 | "postgres-interval": "^1.1.0" 1685 | }, 1686 | "engines": { 1687 | "node": ">=4" 1688 | } 1689 | }, 1690 | "node_modules/pgpass": { 1691 | "version": "1.0.5", 1692 | "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz", 1693 | "integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==", 1694 | "dev": true, 1695 | "license": "MIT", 1696 | "dependencies": { 1697 | "split2": "^4.1.0" 1698 | } 1699 | }, 1700 | "node_modules/picocolors": { 1701 | "version": "1.1.1", 1702 | "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", 1703 | "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", 1704 | "dev": true, 1705 | "license": "ISC" 1706 | }, 1707 | "node_modules/platform": { 1708 | "version": "1.3.6", 1709 | "resolved": "https://registry.npmjs.org/platform/-/platform-1.3.6.tgz", 1710 | "integrity": "sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg==", 1711 | "dev": true, 1712 | "license": "MIT" 1713 | }, 1714 | "node_modules/postgres-array": { 1715 | "version": "2.0.0", 1716 | "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", 1717 | "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", 1718 | "dev": true, 1719 | "license": "MIT", 1720 | "engines": { 1721 | "node": ">=4" 1722 | } 1723 | }, 1724 | "node_modules/postgres-bytea": { 1725 | "version": "1.0.0", 1726 | "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", 1727 | "integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==", 1728 | "dev": true, 1729 | "license": "MIT", 1730 | "engines": { 1731 | "node": ">=0.10.0" 1732 | } 1733 | }, 1734 | "node_modules/postgres-date": { 1735 | "version": "1.0.7", 1736 | "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", 1737 | "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", 1738 | "dev": true, 1739 | "license": "MIT", 1740 | "engines": { 1741 | "node": ">=0.10.0" 1742 | } 1743 | }, 1744 | "node_modules/postgres-interval": { 1745 | "version": "1.2.0", 1746 | "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", 1747 | "integrity": 
"sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", 1748 | "dev": true, 1749 | "license": "MIT", 1750 | "dependencies": { 1751 | "xtend": "^4.0.0" 1752 | }, 1753 | "engines": { 1754 | "node": ">=0.10.0" 1755 | } 1756 | }, 1757 | "node_modules/prelude-ls": { 1758 | "version": "1.2.1", 1759 | "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", 1760 | "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", 1761 | "dev": true, 1762 | "license": "MIT", 1763 | "engines": { 1764 | "node": ">= 0.8.0" 1765 | } 1766 | }, 1767 | "node_modules/prettier": { 1768 | "version": "3.5.3", 1769 | "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.5.3.tgz", 1770 | "integrity": "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw==", 1771 | "dev": true, 1772 | "license": "MIT", 1773 | "bin": { 1774 | "prettier": "bin/prettier.cjs" 1775 | }, 1776 | "engines": { 1777 | "node": ">=14" 1778 | }, 1779 | "funding": { 1780 | "url": "https://github.com/prettier/prettier?sponsor=1" 1781 | } 1782 | }, 1783 | "node_modules/prettier-linter-helpers": { 1784 | "version": "1.0.0", 1785 | "resolved": "https://registry.npmjs.org/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz", 1786 | "integrity": "sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==", 1787 | "dev": true, 1788 | "license": "MIT", 1789 | "dependencies": { 1790 | "fast-diff": "^1.1.2" 1791 | }, 1792 | "engines": { 1793 | "node": ">=6.0.0" 1794 | } 1795 | }, 1796 | "node_modules/punycode": { 1797 | "version": "2.3.1", 1798 | "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", 1799 | "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", 1800 | "dev": true, 1801 | "license": "MIT", 1802 | "engines": { 1803 | "node": ">=6" 1804 | } 1805 | }, 1806 | "node_modules/randombytes": { 1807 | "version": "2.1.0", 1808 | "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", 1809 | "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", 1810 | "dev": true, 1811 | "license": "MIT", 1812 | "dependencies": { 1813 | "safe-buffer": "^5.1.0" 1814 | } 1815 | }, 1816 | "node_modules/readable-stream": { 1817 | "version": "3.6.2", 1818 | "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", 1819 | "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", 1820 | "dev": true, 1821 | "license": "MIT", 1822 | "dependencies": { 1823 | "inherits": "^2.0.3", 1824 | "string_decoder": "^1.1.1", 1825 | "util-deprecate": "^1.0.1" 1826 | }, 1827 | "engines": { 1828 | "node": ">= 6" 1829 | } 1830 | }, 1831 | "node_modules/readdirp": { 1832 | "version": "4.1.2", 1833 | "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz", 1834 | "integrity": "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==", 1835 | "dev": true, 1836 | "license": "MIT", 1837 | "engines": { 1838 | "node": ">= 14.18.0" 1839 | }, 1840 | "funding": { 1841 | "type": "individual", 1842 | "url": "https://paulmillr.com/funding/" 1843 | } 1844 | }, 1845 | "node_modules/require-directory": { 1846 | "version": "2.1.1", 1847 | "resolved": 
"https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", 1848 | "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", 1849 | "dev": true, 1850 | "license": "MIT", 1851 | "engines": { 1852 | "node": ">=0.10.0" 1853 | } 1854 | }, 1855 | "node_modules/resolve-from": { 1856 | "version": "4.0.0", 1857 | "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", 1858 | "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", 1859 | "dev": true, 1860 | "license": "MIT", 1861 | "engines": { 1862 | "node": ">=4" 1863 | } 1864 | }, 1865 | "node_modules/safe-buffer": { 1866 | "version": "5.2.1", 1867 | "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", 1868 | "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", 1869 | "dev": true, 1870 | "funding": [ 1871 | { 1872 | "type": "github", 1873 | "url": "https://github.com/sponsors/feross" 1874 | }, 1875 | { 1876 | "type": "patreon", 1877 | "url": "https://www.patreon.com/feross" 1878 | }, 1879 | { 1880 | "type": "consulting", 1881 | "url": "https://feross.org/support" 1882 | } 1883 | ], 1884 | "license": "MIT" 1885 | }, 1886 | "node_modules/serialize-javascript": { 1887 | "version": "6.0.2", 1888 | "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.2.tgz", 1889 | "integrity": "sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==", 1890 | "dev": true, 1891 | "license": "BSD-3-Clause", 1892 | "dependencies": { 1893 | "randombytes": "^2.1.0" 1894 | } 1895 | }, 1896 | "node_modules/shebang-command": { 1897 | "version": "2.0.0", 1898 | "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", 1899 | "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", 1900 | "dev": true, 1901 | "license": "MIT", 1902 | "dependencies": { 1903 | "shebang-regex": "^3.0.0" 1904 | }, 1905 | "engines": { 1906 | "node": ">=8" 1907 | } 1908 | }, 1909 | "node_modules/shebang-regex": { 1910 | "version": "3.0.0", 1911 | "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", 1912 | "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", 1913 | "dev": true, 1914 | "license": "MIT", 1915 | "engines": { 1916 | "node": ">=8" 1917 | } 1918 | }, 1919 | "node_modules/signal-exit": { 1920 | "version": "4.1.0", 1921 | "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", 1922 | "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", 1923 | "dev": true, 1924 | "license": "ISC", 1925 | "engines": { 1926 | "node": ">=14" 1927 | }, 1928 | "funding": { 1929 | "url": "https://github.com/sponsors/isaacs" 1930 | } 1931 | }, 1932 | "node_modules/split2": { 1933 | "version": "4.2.0", 1934 | "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz", 1935 | "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==", 1936 | "dev": true, 1937 | "license": "ISC", 1938 | "engines": { 1939 | "node": ">= 10.x" 1940 | } 1941 | }, 1942 | "node_modules/string_decoder": { 1943 | "version": "1.3.0", 1944 | "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", 1945 | 
"integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", 1946 | "dev": true, 1947 | "license": "MIT", 1948 | "dependencies": { 1949 | "safe-buffer": "~5.2.0" 1950 | } 1951 | }, 1952 | "node_modules/string-width": { 1953 | "version": "5.1.2", 1954 | "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", 1955 | "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", 1956 | "dev": true, 1957 | "license": "MIT", 1958 | "dependencies": { 1959 | "eastasianwidth": "^0.2.0", 1960 | "emoji-regex": "^9.2.2", 1961 | "strip-ansi": "^7.0.1" 1962 | }, 1963 | "engines": { 1964 | "node": ">=12" 1965 | }, 1966 | "funding": { 1967 | "url": "https://github.com/sponsors/sindresorhus" 1968 | } 1969 | }, 1970 | "node_modules/string-width-cjs": { 1971 | "name": "string-width", 1972 | "version": "4.2.3", 1973 | "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", 1974 | "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", 1975 | "dev": true, 1976 | "license": "MIT", 1977 | "dependencies": { 1978 | "emoji-regex": "^8.0.0", 1979 | "is-fullwidth-code-point": "^3.0.0", 1980 | "strip-ansi": "^6.0.1" 1981 | }, 1982 | "engines": { 1983 | "node": ">=8" 1984 | } 1985 | }, 1986 | "node_modules/string-width-cjs/node_modules/ansi-regex": { 1987 | "version": "5.0.1", 1988 | "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", 1989 | "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", 1990 | "dev": true, 1991 | "license": "MIT", 1992 | "engines": { 1993 | "node": ">=8" 1994 | } 1995 | }, 1996 | "node_modules/string-width-cjs/node_modules/emoji-regex": { 1997 | "version": "8.0.0", 1998 | "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", 1999 | "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", 2000 | "dev": true, 2001 | "license": "MIT" 2002 | }, 2003 | "node_modules/string-width-cjs/node_modules/strip-ansi": { 2004 | "version": "6.0.1", 2005 | "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", 2006 | "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", 2007 | "dev": true, 2008 | "license": "MIT", 2009 | "dependencies": { 2010 | "ansi-regex": "^5.0.1" 2011 | }, 2012 | "engines": { 2013 | "node": ">=8" 2014 | } 2015 | }, 2016 | "node_modules/strip-ansi": { 2017 | "version": "7.1.0", 2018 | "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", 2019 | "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", 2020 | "dev": true, 2021 | "license": "MIT", 2022 | "dependencies": { 2023 | "ansi-regex": "^6.0.1" 2024 | }, 2025 | "engines": { 2026 | "node": ">=12" 2027 | }, 2028 | "funding": { 2029 | "url": "https://github.com/chalk/strip-ansi?sponsor=1" 2030 | } 2031 | }, 2032 | "node_modules/strip-ansi-cjs": { 2033 | "name": "strip-ansi", 2034 | "version": "6.0.1", 2035 | "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", 2036 | "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", 2037 | "dev": true, 2038 | "license": "MIT", 2039 | "dependencies": { 2040 | "ansi-regex": "^5.0.1" 2041 | }, 2042 | "engines": { 
2043 | "node": ">=8" 2044 | } 2045 | }, 2046 | "node_modules/strip-ansi-cjs/node_modules/ansi-regex": { 2047 | "version": "5.0.1", 2048 | "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", 2049 | "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", 2050 | "dev": true, 2051 | "license": "MIT", 2052 | "engines": { 2053 | "node": ">=8" 2054 | } 2055 | }, 2056 | "node_modules/strip-json-comments": { 2057 | "version": "3.1.1", 2058 | "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", 2059 | "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", 2060 | "dev": true, 2061 | "license": "MIT", 2062 | "engines": { 2063 | "node": ">=8" 2064 | }, 2065 | "funding": { 2066 | "url": "https://github.com/sponsors/sindresorhus" 2067 | } 2068 | }, 2069 | "node_modules/supports-color": { 2070 | "version": "7.2.0", 2071 | "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", 2072 | "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", 2073 | "dev": true, 2074 | "license": "MIT", 2075 | "dependencies": { 2076 | "has-flag": "^4.0.0" 2077 | }, 2078 | "engines": { 2079 | "node": ">=8" 2080 | } 2081 | }, 2082 | "node_modules/synckit": { 2083 | "version": "0.11.6", 2084 | "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.11.6.tgz", 2085 | "integrity": "sha512-2pR2ubZSV64f/vqm9eLPz/KOvR9Dm+Co/5ChLgeHl0yEDRc6h5hXHoxEQH8Y5Ljycozd3p1k5TTSVdzYGkPvLw==", 2086 | "dev": true, 2087 | "license": "MIT", 2088 | "dependencies": { 2089 | "@pkgr/core": "^0.2.4" 2090 | }, 2091 | "engines": { 2092 | "node": "^14.18.0 || >=16.0.0" 2093 | }, 2094 | "funding": { 2095 | "url": "https://opencollective.com/synckit" 2096 | } 2097 | }, 2098 | "node_modules/type-check": { 2099 | "version": "0.4.0", 2100 | "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", 2101 | "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", 2102 | "dev": true, 2103 | "license": "MIT", 2104 | "dependencies": { 2105 | "prelude-ls": "^1.2.1" 2106 | }, 2107 | "engines": { 2108 | "node": ">= 0.8.0" 2109 | } 2110 | }, 2111 | "node_modules/typedarray": { 2112 | "version": "0.0.6", 2113 | "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", 2114 | "integrity": "sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA==", 2115 | "dev": true, 2116 | "license": "MIT" 2117 | }, 2118 | "node_modules/uri-js": { 2119 | "version": "4.4.1", 2120 | "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", 2121 | "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", 2122 | "dev": true, 2123 | "license": "BSD-2-Clause", 2124 | "dependencies": { 2125 | "punycode": "^2.1.0" 2126 | } 2127 | }, 2128 | "node_modules/util-deprecate": { 2129 | "version": "1.0.2", 2130 | "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", 2131 | "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", 2132 | "dev": true, 2133 | "license": "MIT" 2134 | }, 2135 | "node_modules/which": { 2136 | "version": "2.0.2", 2137 | "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", 2138 | "integrity": 
"sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", 2139 | "dev": true, 2140 | "license": "ISC", 2141 | "dependencies": { 2142 | "isexe": "^2.0.0" 2143 | }, 2144 | "bin": { 2145 | "node-which": "bin/node-which" 2146 | }, 2147 | "engines": { 2148 | "node": ">= 8" 2149 | } 2150 | }, 2151 | "node_modules/word-wrap": { 2152 | "version": "1.2.5", 2153 | "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", 2154 | "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", 2155 | "dev": true, 2156 | "license": "MIT", 2157 | "engines": { 2158 | "node": ">=0.10.0" 2159 | } 2160 | }, 2161 | "node_modules/workerpool": { 2162 | "version": "6.5.1", 2163 | "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.5.1.tgz", 2164 | "integrity": "sha512-Fs4dNYcsdpYSAfVxhnl1L5zTksjvOJxtC5hzMNl+1t9B8hTJTdKDyZ5ju7ztgPy+ft9tBFXoOlDNiOT9WUXZlA==", 2165 | "dev": true, 2166 | "license": "Apache-2.0" 2167 | }, 2168 | "node_modules/wrap-ansi": { 2169 | "version": "8.1.0", 2170 | "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", 2171 | "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", 2172 | "dev": true, 2173 | "license": "MIT", 2174 | "dependencies": { 2175 | "ansi-styles": "^6.1.0", 2176 | "string-width": "^5.0.1", 2177 | "strip-ansi": "^7.0.1" 2178 | }, 2179 | "engines": { 2180 | "node": ">=12" 2181 | }, 2182 | "funding": { 2183 | "url": "https://github.com/chalk/wrap-ansi?sponsor=1" 2184 | } 2185 | }, 2186 | "node_modules/wrap-ansi-cjs": { 2187 | "name": "wrap-ansi", 2188 | "version": "7.0.0", 2189 | "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", 2190 | "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", 2191 | "dev": true, 2192 | "license": "MIT", 2193 | "dependencies": { 2194 | "ansi-styles": "^4.0.0", 2195 | "string-width": "^4.1.0", 2196 | "strip-ansi": "^6.0.0" 2197 | }, 2198 | "engines": { 2199 | "node": ">=10" 2200 | }, 2201 | "funding": { 2202 | "url": "https://github.com/chalk/wrap-ansi?sponsor=1" 2203 | } 2204 | }, 2205 | "node_modules/wrap-ansi-cjs/node_modules/ansi-regex": { 2206 | "version": "5.0.1", 2207 | "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", 2208 | "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", 2209 | "dev": true, 2210 | "license": "MIT", 2211 | "engines": { 2212 | "node": ">=8" 2213 | } 2214 | }, 2215 | "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { 2216 | "version": "8.0.0", 2217 | "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", 2218 | "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", 2219 | "dev": true, 2220 | "license": "MIT" 2221 | }, 2222 | "node_modules/wrap-ansi-cjs/node_modules/string-width": { 2223 | "version": "4.2.3", 2224 | "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", 2225 | "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", 2226 | "dev": true, 2227 | "license": "MIT", 2228 | "dependencies": { 2229 | "emoji-regex": "^8.0.0", 2230 | "is-fullwidth-code-point": "^3.0.0", 2231 | "strip-ansi": "^6.0.1" 2232 | }, 2233 | "engines": { 2234 | "node": ">=8" 2235 | } 2236 | }, 2237 | 
"node_modules/wrap-ansi-cjs/node_modules/strip-ansi": { 2238 | "version": "6.0.1", 2239 | "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", 2240 | "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", 2241 | "dev": true, 2242 | "license": "MIT", 2243 | "dependencies": { 2244 | "ansi-regex": "^5.0.1" 2245 | }, 2246 | "engines": { 2247 | "node": ">=8" 2248 | } 2249 | }, 2250 | "node_modules/wrap-ansi/node_modules/ansi-styles": { 2251 | "version": "6.2.1", 2252 | "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", 2253 | "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", 2254 | "dev": true, 2255 | "license": "MIT", 2256 | "engines": { 2257 | "node": ">=12" 2258 | }, 2259 | "funding": { 2260 | "url": "https://github.com/chalk/ansi-styles?sponsor=1" 2261 | } 2262 | }, 2263 | "node_modules/xtend": { 2264 | "version": "4.0.2", 2265 | "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", 2266 | "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", 2267 | "dev": true, 2268 | "license": "MIT", 2269 | "engines": { 2270 | "node": ">=0.4" 2271 | } 2272 | }, 2273 | "node_modules/y18n": { 2274 | "version": "5.0.8", 2275 | "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", 2276 | "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", 2277 | "dev": true, 2278 | "license": "ISC", 2279 | "engines": { 2280 | "node": ">=10" 2281 | } 2282 | }, 2283 | "node_modules/yargs": { 2284 | "version": "17.7.2", 2285 | "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", 2286 | "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", 2287 | "dev": true, 2288 | "license": "MIT", 2289 | "dependencies": { 2290 | "cliui": "^8.0.1", 2291 | "escalade": "^3.1.1", 2292 | "get-caller-file": "^2.0.5", 2293 | "require-directory": "^2.1.1", 2294 | "string-width": "^4.2.3", 2295 | "y18n": "^5.0.5", 2296 | "yargs-parser": "^21.1.1" 2297 | }, 2298 | "engines": { 2299 | "node": ">=12" 2300 | } 2301 | }, 2302 | "node_modules/yargs-parser": { 2303 | "version": "21.1.1", 2304 | "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", 2305 | "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", 2306 | "dev": true, 2307 | "license": "ISC", 2308 | "engines": { 2309 | "node": ">=12" 2310 | } 2311 | }, 2312 | "node_modules/yargs-unparser": { 2313 | "version": "2.0.0", 2314 | "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz", 2315 | "integrity": "sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==", 2316 | "dev": true, 2317 | "license": "MIT", 2318 | "dependencies": { 2319 | "camelcase": "^6.0.0", 2320 | "decamelize": "^4.0.0", 2321 | "flat": "^5.0.2", 2322 | "is-plain-obj": "^2.1.0" 2323 | }, 2324 | "engines": { 2325 | "node": ">=10" 2326 | } 2327 | }, 2328 | "node_modules/yargs/node_modules/ansi-regex": { 2329 | "version": "5.0.1", 2330 | "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", 2331 | "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", 2332 | "dev": true, 2333 | "license": "MIT", 2334 | "engines": { 2335 | "node": ">=8" 
2336 | } 2337 | }, 2338 | "node_modules/yargs/node_modules/emoji-regex": { 2339 | "version": "8.0.0", 2340 | "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", 2341 | "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", 2342 | "dev": true, 2343 | "license": "MIT" 2344 | }, 2345 | "node_modules/yargs/node_modules/string-width": { 2346 | "version": "4.2.3", 2347 | "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", 2348 | "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", 2349 | "dev": true, 2350 | "license": "MIT", 2351 | "dependencies": { 2352 | "emoji-regex": "^8.0.0", 2353 | "is-fullwidth-code-point": "^3.0.0", 2354 | "strip-ansi": "^6.0.1" 2355 | }, 2356 | "engines": { 2357 | "node": ">=8" 2358 | } 2359 | }, 2360 | "node_modules/yargs/node_modules/strip-ansi": { 2361 | "version": "6.0.1", 2362 | "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", 2363 | "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", 2364 | "dev": true, 2365 | "license": "MIT", 2366 | "dependencies": { 2367 | "ansi-regex": "^5.0.1" 2368 | }, 2369 | "engines": { 2370 | "node": ">=8" 2371 | } 2372 | }, 2373 | "node_modules/yocto-queue": { 2374 | "version": "0.1.0", 2375 | "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", 2376 | "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", 2377 | "dev": true, 2378 | "license": "MIT", 2379 | "engines": { 2380 | "node": ">=10" 2381 | }, 2382 | "funding": { 2383 | "url": "https://github.com/sponsors/sindresorhus" 2384 | } 2385 | } 2386 | } 2387 | } 2388 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "pg-copy-streams", 3 | "version": "7.0.0", 4 | "description": "Low-Level COPY TO and COPY FROM streams for PostgreSQL in JavaScript using", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "npm run lint && mocha --v8-expose-gc", 8 | "lint": "[ \"$(node -v | cut -d. -f1)\" = \"v8\" ] || eslint '**/*.{js,ts}'" 9 | }, 10 | "repository": { 11 | "type": "git", 12 | "url": "git://github.com/brianc/node-pg-copy-streams.git" 13 | }, 14 | "keywords": [ 15 | "postgres", 16 | "copy", 17 | "in", 18 | "out", 19 | "stream" 20 | ], 21 | "author": "Brian M. 
Carlson", 22 | "license": "MIT", 23 | "bugs": { 24 | "url": "https://github.com/brianc/node-pg-copy-streams/issues" 25 | }, 26 | "devDependencies": { 27 | "@eslint/eslintrc": "^3.3.1", 28 | "@eslint/js": "^9.27.0", 29 | "async": "~0.2.10", 30 | "benchmark": "^2.1.4", 31 | "concat-stream": "^2.0.0", 32 | "csv-parse": "^5.6.0", 33 | "csv-parser": "^3.2.0", 34 | "duplex-child-process": "^1.0.0", 35 | "eslint": "^9.27.0", 36 | "eslint-config-prettier": "^10.1.5", 37 | "eslint-plugin-prettier": "^5.4.0", 38 | "globals": "^16.2.0", 39 | "lodash": "^4.17.21", 40 | "mocha": "^11.5.0", 41 | "pg": "^8.16.0", 42 | "prettier": "^3.5.3" 43 | }, 44 | "prettier": { 45 | "semi": false, 46 | "printWidth": 120, 47 | "arrowParens": "always", 48 | "trailingComma": "es5", 49 | "singleQuote": true 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /test/binary.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const assert = require('assert') 4 | 5 | const async = require('async') 6 | const _ = require('lodash') 7 | const pg = require('pg') 8 | const concat = require('concat-stream') 9 | const { Transform, Writable } = require('stream') 10 | 11 | const { from, to } = require('../') 12 | 13 | describe('binary', () => { 14 | const getClient = function () { 15 | const client = new pg.Client() 16 | client.connect() 17 | return client 18 | } 19 | 20 | const LastFieldStream = function () { 21 | let firstChunk = true 22 | let byteaLength = 0 23 | const Streamer = new Transform({ 24 | transform: function (chunk, enc, cb) { 25 | if (firstChunk) { 26 | // cf binary protocol description on https://www.postgresql.org/docs/10/sql-copy.html 27 | try { 28 | assert(chunk.length >= 25) 29 | assert.deepEqual( 30 | chunk.slice(0, 11), 31 | Buffer.from([0x50, 0x47, 0x43, 0x4f, 0x50, 0x59, 0x0a, 0xff, 0x0d, 0x0a, 0x00]), 32 | 'COPY Signature should match' 33 | ) 34 | assert.equal(chunk.readUInt32BE(11), 0, 'Flags should match') 35 | assert.equal(chunk.readUInt32BE(11 + 4), 0, 'Header Extension area length should match') 36 | assert.equal(chunk.readUInt16BE(15 + 4), 1, 'Number of fields in tuple should be 1') 37 | } catch (err) { 38 | return cb(err) 39 | } 40 | byteaLength = chunk.readUInt32BE(19 + 2) 41 | chunk = chunk.slice(21 + 4) 42 | firstChunk = false 43 | } 44 | if (byteaLength) { 45 | chunk = chunk.slice(0, byteaLength) 46 | byteaLength -= chunk.length 47 | this.push(chunk) 48 | } 49 | cb() 50 | }, 51 | }) 52 | return Streamer 53 | } 54 | 55 | const snapshotBufferMemoryUsage = function () { 56 | const mu = process.memoryUsage() 57 | const mem = mu.arrayBuffers || mu.external 58 | return mem / 1024 / 1024 59 | } 60 | 61 | it('low copy-to memory usage during large bytea streaming', (done) => { 62 | const power = 26 63 | const sql = "COPY (select (repeat('-', CAST(2^" + power + ' AS int)))::bytea) TO STDOUT BINARY' 64 | const client = getClient() 65 | 66 | const query = to(sql) 67 | const lfs = LastFieldStream() 68 | const noop = new Writable({ 69 | write(chunk, enc, cb) { 70 | if (Math.random() < 0.02) { 71 | global.gc() 72 | const memNow = snapshotBufferMemoryUsage() 73 | try { 74 | const memLimit = 32 /*MB*/ 75 | const memDiff = Math.abs(memNow - memStart) 76 | if (memDiff > memLimit) { 77 | global.gc() 78 | } 79 | assert( 80 | memDiff < memLimit, 81 | 'copy of ' + 82 | Math.pow(2, power - 20) + 83 | 'MB should need less than ' + 84 | memLimit + 85 | 'MB of memoryUsage().external (' + 86 | Math.round(memDiff) + 87 | 
'MB observed)' 88 | ) 89 | } catch (err) { 90 | cb(err) 91 | } 92 | } 93 | setImmediate(cb) 94 | }, 95 | }) 96 | 97 | global.gc(true) 98 | const memStart = snapshotBufferMemoryUsage() 99 | const stream = client.query(query).pipe(lfs).pipe(noop) 100 | 101 | stream.on('error', (err) => { 102 | client.end() 103 | done(err) 104 | }) 105 | 106 | stream.on('finish', () => { 107 | client.end() 108 | done() 109 | }) 110 | }) 111 | 112 | it('extract bytea field', (done) => { 113 | const power = 25 114 | const sql = "COPY (select (repeat('-', CAST(2^" + power + ' AS int)))::bytea) TO STDOUT BINARY' 115 | const client = getClient() 116 | const copyToStream = client.query(to(sql)) 117 | const assertResult = (buf) => { 118 | client.end() 119 | assert.deepEqual(buf, Buffer.alloc(Math.pow(2, power), '-')) 120 | done() 121 | } 122 | const ContentFilter = LastFieldStream() 123 | ContentFilter.on('error', (err) => { 124 | client.end() 125 | done(err) 126 | }) 127 | 128 | copyToStream.pipe(ContentFilter).pipe(concat({ encoding: 'buffer' }, assertResult)) 129 | }) 130 | 131 | it('table-2-table binary copy should work', (done) => { 132 | const fromClient = getClient() 133 | const toClient = getClient() 134 | 135 | let queries = [ 136 | 'DROP TABLE IF EXISTS data', 137 | 'CREATE TABLE IF NOT EXISTS data (num BIGINT, word TEXT)', 138 | "INSERT INTO data (num, word) VALUES (1, 'hello'), (2, 'other thing'), (3, 'goodbye')", 139 | 'DROP TABLE IF EXISTS data_copy', 140 | 'CREATE TABLE IF NOT EXISTS data_copy (LIKE data INCLUDING ALL)', 141 | ] 142 | 143 | async.eachSeries(queries, _.bind(fromClient.query, fromClient), function (err) { 144 | assert.ifError(err) 145 | 146 | const fromStream = fromClient.query(to('COPY (SELECT * FROM data) TO STDOUT BINARY')) 147 | const toStream = toClient.query(from('COPY data_copy FROM STDIN BINARY')) 148 | 149 | const runStream = function (callback) { 150 | fromStream.on('error', callback) 151 | toStream.on('error', callback) 152 | toStream.on('finish', callback) 153 | fromStream.pipe(toStream) 154 | } 155 | runStream(function (err) { 156 | assert.ifError(err) 157 | 158 | toClient.query('SELECT * FROM data_copy ORDER BY num', function (err, res) { 159 | assert.equal(res.rowCount, 3, 'expected 3 rows but got ' + res.rowCount) 160 | assert.equal(fromStream.rowCount, 3, 'expected 3 rows observed by fromStream but got ' + fromStream.rowCount) 161 | assert.equal(toStream.rowCount, 3, 'expected 3 rows observed by toStream but got ' + toStream.rowCount) 162 | assert.equal(res.rows[0].num, 1) 163 | assert.equal(res.rows[0].word, 'hello') 164 | assert.equal(res.rows[1].num, 2) 165 | assert.equal(res.rows[1].word, 'other thing') 166 | assert.equal(res.rows[2].num, 3) 167 | assert.equal(res.rows[2].word, 'goodbye') 168 | queries = ['DROP TABLE data', 'DROP TABLE data_copy'] 169 | async.each(queries, _.bind(fromClient.query, fromClient), function (err) { 170 | assert.ifError(err) 171 | fromClient.end() 172 | toClient.end() 173 | done() 174 | }) 175 | }) 176 | }) 177 | }) 178 | }) 179 | }) 180 | -------------------------------------------------------------------------------- /test/copy-both.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const assert = require('assert') 4 | const pg = require('pg') 5 | const async = require('async') 6 | const copyBoth = require('../copy-both.js') 7 | const { Transform, pipeline, finished } = require('stream') 8 | const BufferList = require('../obuf') 9 | 10 | before(async () => { 11 | const client = 
new pg.Client() 12 | await client.connect() 13 | await client.query(`CREATE TABLE IF NOT EXISTS plug (col1 text)`) 14 | client.end() 15 | }) 16 | 17 | after(async () => { 18 | // avoid leaving a dangling replication slot on the test database 19 | // that could cause wal inflation if unattended 20 | 21 | let client = new pg.Client({ replication: 'database' }) 22 | await client.connect() 23 | try { 24 | await client.query(`DROP_REPLICATION_SLOT slotplug`) 25 | } catch (err) { 26 | console.log(err) 27 | } 28 | await client.end() 29 | 30 | client = new pg.Client() 31 | await client.connect() 32 | await client.query(`DROP TABLE IF EXISTS plug`) 33 | client.end() 34 | }) 35 | 36 | describe('copy-both', () => { 37 | describe('integration tests (postgres)', () => { 38 | function getClient(opts, cb) { 39 | const client = new pg.Client(opts) 40 | client.connect(cb) 41 | return client 42 | } 43 | 44 | function getCopyDataHandlerStream(hook) { 45 | const buf = new BufferList() 46 | const PG_CODE = 1 47 | const PG_MESSAGE = 2 48 | let state = PG_CODE 49 | let code = null 50 | 51 | const parser = new Transform({ 52 | transform(chunk, encoding, callback) { 53 | buf.push(chunk) 54 | while (buf.size > 0) { 55 | if (state === PG_CODE) { 56 | if (!buf.has(1)) break 57 | code = buf.readUInt8() 58 | state = PG_MESSAGE 59 | } 60 | if (state === PG_MESSAGE) { 61 | if (code === 0x6b /*k*/) { 62 | buf.take(8 + 8 + 1) 63 | state = PG_CODE 64 | } else if (code === 0x77 /*w*/) { 65 | buf.take(8 + 8 + 8) 66 | buf.take(buf.size) /* plugin data */ 67 | state = PG_CODE 68 | this.XLogDataCount++ 69 | } else { 70 | return callback(new Error('wrong message code inside')) 71 | } 72 | } 73 | break 74 | } 75 | hook && hook.call(this) 76 | callback() 77 | }, 78 | flush() {}, 79 | }) 80 | parser.XLogDataCount = 0 81 | return parser 82 | } 83 | 84 | it('check testing wal_level configuration in postgresql.conf', async () => { 85 | const client = new pg.Client() 86 | await client.connect() 87 | const key = 'wal_level' 88 | const value = (await client.query(`SHOW ${key}`)).rows[0][key] 89 | await client.end() 90 | assert.equal( 91 | value, 92 | 'logical', 93 | `you must set ${key} = logical in postgresql.conf (+restart) to test replication features, found ${key} = ${value}` 94 | ) 95 | }) 96 | 97 | it('check testing max_wal_senders configuration in postgresql.conf', async () => { 98 | const client = new pg.Client() 99 | await client.connect() 100 | const key = 'max_wal_senders' 101 | const value = (await client.query(`SHOW ${key}`)).rows[0][key] 102 | await client.end() 103 | assert.ok(value > 0, `you must set ${key} in postgresql.conf (+restart) to a value > 0, ${value} found`) 104 | }) 105 | 106 | it('check testing max_replication_slots configuration in postgresql.conf', async () => { 107 | const client = new pg.Client() 108 | await client.connect() 109 | const key = 'max_replication_slots' 110 | const value = (await client.query(`SHOW ${key}`)).rows[0][key] 111 | await client.end() 112 | assert.ok(value > 0, `you must set ${key} in postgresql.conf (+restart) to a value > 0, ${value} found`) 113 | }) 114 | 115 | /* 116 | it('check testing wal_sender_timeout configuration in postgresql.conf', async () => { 117 | const client = new pg.Client() 118 | await client.connect() 119 | const key = 'wal_sender_timeout' 120 | const expected = 1000 121 | const value = (await client.query(`SELECT setting::int FROM pg_catalog.pg_settings WHERE name = '${key}'`)).rows[0]['setting'] 122 | await client.end() 123 | assert.equal(value, expected, `you 
must set ${key} = ${expected} in postgresql.conf (+restart) to test replication features, found ${key} = ${value}`) 124 | }) 125 | 126 | it('should properly handle terminating walsender process due to replication timeout', (done) => { 127 | const client = new pg.Client({ replication: 'database' }) 128 | client.connect((err) => { 129 | if (err) return done(err) 130 | client.on('error', ()=>{}) 131 | const sql = [`CREATE_REPLICATION_SLOT slotplug LOGICAL test_decoding`] 132 | async.eachSeries(sql, client.query.bind(client), function (err) { 133 | if (err) { 134 | client.end() 135 | return done(err) 136 | } 137 | const copyBothStream = copyBoth(`START_REPLICATION SLOT slotplug LOGICAL 0/0`, { alignOnCopyDataFrame: true }) 138 | client.query(copyBothStream) 139 | pipeline(copyBothStream, getCopyDataHandlerStream(), (err)=>{}) 140 | finished(copyBothStream, (err) => { 141 | try { 142 | assert.equal(err.toString(), 'Error: Connection terminated unexpectedly') 143 | }catch(err) { 144 | return done(err) 145 | } 146 | done() 147 | }) 148 | }) 149 | }) 150 | }).timeout(5000) 151 | */ 152 | it('should receive messages on the copyOut channel', (done) => { 153 | if (!pipeline) return done() /* do not test under node 8 */ 154 | if (!finished) return done() /* do not test under node 8 */ 155 | 156 | const client = getClient({ replication: 'database' }, (err) => { 157 | if (err) return done(err) 158 | 159 | client.on('error', () => {}) 160 | const sql = [`CREATE_REPLICATION_SLOT slotplug LOGICAL test_decoding`] 161 | 162 | async.eachSeries(sql, client.query.bind(client), function (err) { 163 | if (err) { 164 | client.end() 165 | return done(err) 166 | } 167 | const copyBothStream = copyBoth(`START_REPLICATION SLOT slotplug LOGICAL 0/0`, { alignOnCopyDataFrame: true }) 168 | client.query(copyBothStream) 169 | const copyDataHandler = getCopyDataHandlerStream(function () { 170 | if (this.XLogDataCount >= 3) { 171 | // close the replication 172 | // this can be slow as per https://commitfest.postgresql.org/11/621/ 173 | copyBothStream.end() 174 | } 175 | }) 176 | pipeline(copyBothStream, copyDataHandler, (err) => {}) 177 | finished(copyBothStream, (err) => { 178 | client.end() 179 | done(err) 180 | }) 181 | const fieldSize = 64 * 1024 182 | 183 | const c2 = getClient({}, (err) => { 184 | const sql = [`INSERT INTO plug (col1) values (repeat('-', ${fieldSize}))`] 185 | async.eachSeries(sql, c2.query.bind(c2), function (err) { 186 | if (err) return done(err) 187 | c2.end() 188 | }) 189 | }) 190 | }) 191 | }) 192 | }).timeout(60000) 193 | }) 194 | }) 195 | -------------------------------------------------------------------------------- /test/copy-from.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const assert = require('assert') 4 | 5 | const _ = require('lodash') 6 | const pg = require('pg') 7 | const { finished, pipeline, PassThrough } = require('stream') 8 | const { promisify } = require('util') 9 | const { spawn } = require('child_process') 10 | 11 | const copy = require('../').from 12 | 13 | describe('copy-from', () => { 14 | function getClient(config) { 15 | const client = new pg.Client(config) 16 | client.connect() 17 | return client 18 | } 19 | 20 | function createCopyFromQuery(table, fields, callback) { 21 | const client = getClient() 22 | client.query(`CREATE TEMP TABLE ${table}${fields}`, () => { 23 | let sql = `COPY ${table} FROM STDIN` 24 | if (table === 'syntaxError') { 25 | sql = `COPY (SELECT INVALID SYNTAX) FROM STDIN` 26 | } 27 | 
const copyFromStream = client.query(copy(sql)) 28 | callback(client, copyFromStream) 29 | }) 30 | } 31 | 32 | function spyOnEmitCalls(stream) { 33 | stream.emits = {} 34 | const realEmit = stream.emit 35 | stream.emit = function () { 36 | const [eventName, ...args] = arguments 37 | if (!stream.emits[eventName]) { 38 | stream.emits[eventName] = [] 39 | } 40 | stream.emits[eventName].push(args) 41 | realEmit.apply(this, arguments) 42 | } 43 | } 44 | 45 | function processCopyFromStreamForAssertFn(table, chunks, client, copyFromStream, assertFn) { 46 | spyOnEmitCalls(copyFromStream) 47 | function complete(err, rows, stream) { 48 | client.end() 49 | assertFn(err, rows, stream) 50 | } 51 | copyFromStream.on('error', (err) => { 52 | complete(err, null, copyFromStream) 53 | }) 54 | copyFromStream.on('finish', () => { 55 | client.query({ text: `SELECT * FROM ${table}`, rowMode: 'array' }, (err, res) => { 56 | complete(err, res.rows, copyFromStream) 57 | }) 58 | }) 59 | for (const chunk of chunks) { 60 | copyFromStream.write(chunk) 61 | } 62 | copyFromStream.end() 63 | } 64 | 65 | function assertCopyFromResult(table, fields, chunks, assertFn) { 66 | createCopyFromQuery(table, fields, (client, copyFromStream) => { 67 | processCopyFromStreamForAssertFn(table, chunks, client, copyFromStream, assertFn) 68 | }) 69 | } 70 | 71 | it('correctly handles simple case', (done) => { 72 | assertCopyFromResult('numbers', '(num int)', [Buffer.from('1\n')], (err, rows, stream) => { 73 | assert.deepEqual(rows, [[1]]) 74 | done(err) 75 | }) 76 | }) 77 | 78 | it('detect error when field mismatch', (done) => { 79 | assertCopyFromResult('numbers', '(num int)', [Buffer.from('1,2\n')], (err, rows, stream) => { 80 | assert.notEqual(err, null) 81 | const expectedMessage = /invalid input syntax for (type )?integer/ 82 | // assert.match( 83 | // err.toString(), 84 | // expectedMessage, 85 | // 'Error message should mention reason for query failure.' 86 | // ) 87 | assert.notEqual( 88 | err.toString().search(expectedMessage), 89 | -1, 90 | 'Error message should mention reason for query failure.' 
91 | ) 92 | done() 93 | }) 94 | }) 95 | 96 | it('should respect highWaterMark backpressure', () => { 97 | const highWaterMark = 10 98 | const stream = copy('COPY numbers FROM STDIN', { highWaterMark: 10, objectMode: true }) 99 | for (let i = 0; i < highWaterMark * 1.5; i++) { 100 | stream.write('1\t2\n') 101 | } 102 | assert(!stream.write('1\t2\n'), 'Should correctly set highWaterMark.') 103 | }) 104 | 105 | it('correctly handle more heavy scenario', (done) => { 106 | const top = 130000 107 | const chunks = [] 108 | const expected = [] 109 | for (let i = 0; i < top; i++) { 110 | chunks.push(Buffer.from('' + i + '\t' + i * 10 + '\n')) 111 | expected.push([i, i * 10]) 112 | } 113 | 114 | assertCopyFromResult('numbers', '(num1 int, num2 int)', chunks, (err, rows, stream) => { 115 | assert.deepStrictEqual(rows, expected, 'not matched') 116 | assert.equal(stream.rowCount, top, 'should have rowCount ' + top + ' ') 117 | done() 118 | }) 119 | }).timeout(120000) 120 | 121 | it('test client reuse', (done) => { 122 | const fromClient = getClient() 123 | fromClient.query('CREATE TEMP TABLE numbers(num int)') 124 | const txt = 'COPY numbers FROM STDIN' 125 | let count = 0 126 | const countMax = 2 127 | const card = 100000 128 | const runStream = function () { 129 | const stream = fromClient.query(copy(txt)) 130 | stream.on('finish', function () { 131 | count++ 132 | if (count < countMax) { 133 | runStream() 134 | } else { 135 | fromClient.query('SELECT sum(num) AS s FROM numbers', function (err, res) { 136 | const total = countMax * card * (card + 1) 137 | assert.equal(res.rows[0].s, total, 'copy-from.ClientReuse wrong total') 138 | fromClient.end() 139 | done() 140 | }) 141 | } 142 | }) 143 | stream.write(Buffer.from(_.range(0, card + 1).join('\n') + '\n')) 144 | stream.end(Buffer.from(_.range(0, card + 1).join('\n') + '\n')) 145 | } 146 | runStream() 147 | }) 148 | 149 | it('test empty source - issue #112', (done) => { 150 | const fromClient = getClient() 151 | fromClient.query('CREATE TEMP TABLE numbers(num int)') 152 | const txt = 'COPY numbers FROM STDIN' 153 | const query = copy(txt) 154 | query.on('finish', function () { 155 | fromClient.end() 156 | done() 157 | }) 158 | fromClient.query(query) 159 | query.end() 160 | }) 161 | 162 | it('`pg` query_timeout should be properly canceled upon error - issue #125', (done) => { 163 | const fromClient = getClient({ query_timeout: 500 }) 164 | fromClient.query('CREATE TEMP TABLE numbers(num int)') 165 | const txt = 'COPY numbers FROM STDIN' 166 | const query = copy(txt) 167 | query.on('error', function (err) { 168 | fromClient.end() 169 | done() 170 | }) 171 | fromClient.query(query) 172 | query.write('A') 173 | query.end() 174 | }) 175 | 176 | it('`pg` query_timeout should be properly canceled upon success - issue #125', (done) => { 177 | const fromClient = getClient({ query_timeout: 1000 }) 178 | fromClient.query('CREATE TEMP TABLE numbers(num int)') 179 | const txt = 'COPY numbers FROM STDIN' 180 | const query = copy(txt) 181 | query.on('finish', function (err) { 182 | fromClient.end() 183 | done() 184 | }) 185 | fromClient.query(query) 186 | query.write('1') 187 | query.end() 188 | }) 189 | 190 | describe('stream compliance', () => { 191 | describe('successful stream', () => { 192 | it("emits 1 'finish' (writable stream)", (done) => { 193 | assertCopyFromResult('tablename', '(field1 int)', [Buffer.from('1\n')], (err, rows, stream) => { 194 | assert.ifError(err) 195 | assert.equal(stream.emits['finish'].length, 1) 196 | done() 197 | }) 198 | }) 199 | 
200 | it("emits 0 'end' (writable stream)", (done) => { 201 | assertCopyFromResult('tablename', '(field1 int)', [Buffer.from('1\n')], (err, rows, stream) => { 202 | assert.ifError(err) 203 | assert.equal(stream.emits['end'], undefined) 204 | done() 205 | }) 206 | }) 207 | 208 | it('works with finished()', (done) => { 209 | if (!finished) return done() 210 | createCopyFromQuery('tablename', '(field1 int)', (client, copyFromStream) => { 211 | finished(copyFromStream, (err) => { 212 | assert.ifError(err) 213 | client.end() 214 | done() 215 | }) 216 | copyFromStream.end(Buffer.from('1\n')) 217 | }) 218 | }) 219 | 220 | it('works with pipeline()', (done) => { 221 | if (!pipeline) return done() 222 | createCopyFromQuery('tablename', '(field1 int)', (client, copyFromStream) => { 223 | const pt = new PassThrough() 224 | pipeline(pt, copyFromStream, (err) => { 225 | assert.ifError(err) 226 | client.end() 227 | done() 228 | }) 229 | pt.end(Buffer.from('1\n')) 230 | }) 231 | }) 232 | }) 233 | 234 | it('works with await pipeline()', async () => { 235 | if (!pipeline) return 236 | const client = new pg.Client() 237 | await client.connect() 238 | await client.query('CREATE TEMP TABLE numbers (num1 int)') 239 | try { 240 | const total = 1000 241 | const seq = spawn('seq', [1, total]).stdout 242 | const copyFromStream = client.query(copy(`COPY numbers FROM STDIN`)) 243 | await promisify(pipeline)(seq, copyFromStream) 244 | const res = await client.query('SELECT count(*) FROM numbers') 245 | assert.equal(res.rows[0].count, total) 246 | } finally { 247 | await client.end() 248 | } 249 | }) 250 | 251 | describe('erroneous stream (syntax error)', () => { 252 | it("emits 0 'finish'", (done) => { 253 | assertCopyFromResult('syntaxError', '(field1 int)', [Buffer.from('1\n')], (err, rows, stream) => { 254 | assert.ok(err) 255 | assert.equal(stream.emits['finish'], undefined) 256 | done() 257 | }) 258 | }) 259 | 260 | it("emits 0 'end'", (done) => { 261 | assertCopyFromResult('syntaxError', '(field1 int)', [Buffer.from('1\n')], (err, rows, stream) => { 262 | assert.ok(err) 263 | assert.equal(stream.emits['end'], undefined) 264 | done() 265 | }) 266 | }) 267 | 268 | it("emits 1 'error'", (done) => { 269 | assertCopyFromResult('syntaxError', '(field1 int)', [Buffer.from('1\n')], (err, rows, stream) => { 270 | assert.ok(err) 271 | assert.equal(stream.emits['error'].length, 1) 272 | done() 273 | }) 274 | }) 275 | 276 | it('works with finished()', (done) => { 277 | if (!finished) return done() 278 | createCopyFromQuery('syntaxError', '(field1 int)', (client, copyFromStream) => { 279 | finished(copyFromStream, (err) => { 280 | assert.ok(err) 281 | client.end() 282 | done() 283 | }) 284 | copyFromStream.end(Buffer.from('1\n')) 285 | }) 286 | }) 287 | 288 | it('works with pipeline()', (done) => { 289 | if (!pipeline) return done() 290 | createCopyFromQuery('syntaxError', '(field1 int)', (client, copyFromStream) => { 291 | const pt = new PassThrough() 292 | pipeline(pt, copyFromStream, (err) => { 293 | assert.ok(err) 294 | client.end() 295 | done() 296 | }) 297 | pt.end(Buffer.from('1\n')) 298 | }) 299 | }) 300 | }) 301 | 302 | describe('erroneous stream (internal error)', () => { 303 | it("emits 0 'finish'", (done) => { 304 | assertCopyFromResult('tablename', '(field1 int)', [Buffer.from('1,2\n')], (err, rows, stream) => { 305 | assert.ok(err) 306 | assert.equal(stream.emits['finish'], undefined) 307 | done() 308 | }) 309 | }) 310 | 311 | it("emits 0 'end'", (done) => { 312 | assertCopyFromResult('tablename', '(field1 
int)', [Buffer.from('1,2\n')], (err, rows, stream) => { 313 | assert.ok(err) 314 | assert.equal(stream.emits['end'], undefined) 315 | done() 316 | }) 317 | }) 318 | 319 | it("emits 1 'error'", (done) => { 320 | assertCopyFromResult('tablename', '(field1 int)', [Buffer.from('1,2\n')], (err, rows, stream) => { 321 | assert.ok(err) 322 | assert.equal(stream.emits['error'].length, 1) 323 | done() 324 | }) 325 | }) 326 | 327 | it('works with finished()', (done) => { 328 | if (!finished) return done() 329 | createCopyFromQuery('tablename', '(field1 int)', (client, copyFromStream) => { 330 | finished(copyFromStream, (err) => { 331 | assert.ok(err) 332 | client.end() 333 | done() 334 | }) 335 | copyFromStream.end(Buffer.from('1,2\n')) 336 | }) 337 | }) 338 | 339 | it('works with pipeline()', (done) => { 340 | if (!pipeline) return done() 341 | createCopyFromQuery('tablename', '(field1 int)', (client, copyFromStream) => { 342 | const pt = new PassThrough() 343 | pipeline(pt, copyFromStream, (err) => { 344 | assert.ok(err) 345 | client.end() 346 | done() 347 | }) 348 | pt.end(Buffer.from('1,2\n')) 349 | }) 350 | }) 351 | }) 352 | 353 | describe('using destroy() should send copyFail', () => { 354 | it('works when destroy() is called via pipeline() before copyInResponse has been received', (done) => { 355 | if (!pipeline) return done() 356 | createCopyFromQuery('tablename', '(field1 int)', (client, copyFromStream) => { 357 | spyOnEmitCalls(copyFromStream) 358 | const pt = new PassThrough() 359 | copyFromStream.on('error', (err) => { 360 | assert.equal(copyFromStream.emits['error'].length, 1) 361 | const expectedMessage = /COPY from stdin failed/ 362 | assert.notEqual( 363 | copyFromStream.emits['error'][0].toString().search(expectedMessage), 364 | -1, 365 | 'Error message should mention that COPY failed' 366 | ) 367 | client.end() 368 | done() 369 | }) 370 | pipeline(pt, copyFromStream, (err) => { 371 | assert.ok(err) 372 | }) 373 | pt.emit('error', new Error('pipelineError')) 374 | }) 375 | }) 376 | it('works when destroy() is called via pipeline() after copyInResponse has been received', (done) => { 377 | if (!pipeline) return done() 378 | createCopyFromQuery('tablename', '(field1 int)', (client, copyFromStream) => { 379 | spyOnEmitCalls(copyFromStream) 380 | const pt = new PassThrough() 381 | copyFromStream.on('error', (err) => { 382 | assert.equal(copyFromStream.emits['error'].length, 1) 383 | const expectedMessage = /COPY from stdin failed/ 384 | assert.notEqual( 385 | copyFromStream.emits['error'][0].toString().search(expectedMessage), 386 | -1, 387 | 'Error message should mention that COPY failed' 388 | ) 389 | client.end() 390 | done() 391 | }) 392 | pipeline(pt, copyFromStream, (err) => { 393 | assert.ok(err) 394 | }) 395 | client.connection.once('copyInResponse', () => { 396 | pt.emit('error', new Error('pipelineError')) 397 | }) 398 | }) 399 | }) 400 | it('works when destroy() is called before copyInResponse has been received', (done) => { 401 | if (!pipeline) return done() 402 | createCopyFromQuery('tablename', '(field1 int)', (client, copyFromStream) => { 403 | spyOnEmitCalls(copyFromStream) 404 | copyFromStream.on('error', (err) => { 405 | assert.equal(copyFromStream.emits['error'].length, 1) 406 | const expectedMessage = /COPY from stdin failed/ 407 | assert.notEqual( 408 | copyFromStream.emits['error'][0].toString().search(expectedMessage), 409 | -1, 410 | 'Error message should mention that COPY failed' 411 | ) 412 | assert.ok(err) 413 | client.end() 414 | done() 415 | }) 416 | 
copyFromStream.destroy(new Error('myError')) 417 | }) 418 | }) 419 | it('works when destroy() is called after copyInResponse has been received', (done) => { 420 | if (!pipeline) return done() 421 | createCopyFromQuery('tablename', '(field1 int)', (client, copyFromStream) => { 422 | spyOnEmitCalls(copyFromStream) 423 | copyFromStream.on('error', (err) => { 424 | assert.equal(copyFromStream.emits['error'].length, 1) 425 | const expectedMessage = /COPY from stdin failed/ 426 | assert.notEqual( 427 | copyFromStream.emits['error'][0].toString().search(expectedMessage), 428 | -1, 429 | 'Error message should mention that COPY failed' 430 | ) 431 | assert.ok(err) 432 | client.end() 433 | done() 434 | }) 435 | client.connection.once('copyInResponse', () => { 436 | copyFromStream.destroy(new Error('myError')) 437 | }) 438 | }) 439 | }) 440 | }) 441 | }) 442 | }) 443 | -------------------------------------------------------------------------------- /test/copy-to.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const assert = require('assert') 4 | 5 | const _ = require('lodash') 6 | const concat = require('concat-stream') 7 | const { Writable, finished, pipeline } = require('stream') 8 | const pg = require('pg') 9 | const { PassThrough } = require('stream') 10 | const { Transform } = require('stream') 11 | const { promisify } = require('util') 12 | 13 | const csvParser = require('csv-parser') 14 | const csvParse = require('csv-parse').parse 15 | 16 | const copy = require('../').to 17 | const code = require('../message-formats') 18 | 19 | describe('copy-to', () => { 20 | describe('integration tests (postgres)', () => { 21 | function getClient() { 22 | const client = new pg.Client() 23 | client.connect() 24 | return client 25 | } 26 | 27 | function executeSql(sql) { 28 | const client = getClient() 29 | client.query(sql, () => { 30 | client.end() 31 | }) 32 | } 33 | 34 | function createCopyToQuery(sql, callback) { 35 | const client = getClient() 36 | const copyToStream = client.query(copy(sql)) 37 | callback(client, copyToStream) 38 | } 39 | 40 | function spyOnEmitCalls(copyToStream) { 41 | copyToStream.emits = {} 42 | const realEmit = copyToStream.emit 43 | copyToStream.emit = function () { 44 | const [eventName, ...args] = arguments 45 | if (!copyToStream.emits[eventName]) { 46 | copyToStream.emits[eventName] = [] 47 | } 48 | copyToStream.emits[eventName].push(args) 49 | realEmit.apply(this, arguments) 50 | } 51 | } 52 | 53 | function processCopyToStreamForAssertFn(client, copyToStream, assertFn) { 54 | const chunks = [] 55 | spyOnEmitCalls(copyToStream) 56 | 57 | function complete(err, chunks, result, stream) { 58 | client.end() 59 | assertFn(err, chunks, result, stream) 60 | } 61 | 62 | copyToStream.on('error', (err) => { 63 | complete(err, chunks, null, copyToStream) 64 | }) 65 | copyToStream.on('end', () => { 66 | const result = Buffer.concat(chunks).toString() 67 | complete(null, chunks, result, copyToStream) 68 | }) 69 | copyToStream.pipe( 70 | new Transform({ 71 | transform: (chunk, enc, cb) => { 72 | chunks.push(chunk) 73 | cb() 74 | }, 75 | }) 76 | ) 77 | } 78 | 79 | function assertCopyToResult(sql, assertFn) { 80 | createCopyToQuery(sql, (client, copyToStream) => { 81 | processCopyToStreamForAssertFn(client, copyToStream, assertFn) 82 | }) 83 | } 84 | 85 | it('provides row count', (done) => { 86 | const top = 100 87 | const sql = 'COPY (SELECT * from generate_series(0, ' + (top - 1) + ')) TO STDOUT' 88 | assertCopyToResult(sql, 
(err, chunks, result, stream) => { 89 | assert.ifError(err) 90 | assert.equal(stream.rowCount, top, 'should have rowCount ' + top + ' but got ' + stream.rowCount) 91 | done() 92 | }) 93 | }) 94 | 95 | it('correctly handle error in sql request', (done) => { 96 | assertCopyToResult('COPY --wrong-- TO STDOUT', (err, chunks, result, stream) => { 97 | assert.notEqual(err, null) 98 | const expectedMessage = 'syntax error at end of input' 99 | assert.notEqual( 100 | err.toString().indexOf(expectedMessage), 101 | -1, 102 | 'Error message should mention reason for query failure.' 103 | ) 104 | done() 105 | }) 106 | }) 107 | 108 | it('correctly handle BEGIN/COMMIT transaction #113', async () => { 109 | if (!pipeline) return /* node 8 is not tested */ 110 | const client = new pg.Client() 111 | await client.connect() 112 | await client.query('BEGIN') 113 | try { 114 | const outStream = client.query(copy(`COPY (SELECT INVALID SYNTAX) TO STDOUT`)) 115 | await promisify(pipeline)( 116 | outStream, 117 | concat(() => {}) 118 | ) 119 | } catch (err) { 120 | const expectedMessage = 'column "invalid" does not exist' 121 | assert.notEqual( 122 | err.toString().indexOf(expectedMessage), 123 | -1, 124 | 'Error message should mention reason for query failure.' 125 | ) 126 | } finally { 127 | await client.query('COMMIT') 128 | await client.end() 129 | } 130 | }) 131 | 132 | it('internal postgres error ends copy and emits error', (done) => { 133 | assertCopyToResult('COPY (SELECT pg_sleep(10)) TO STDOUT', (err, chunks, result, stream) => { 134 | assert.notEqual(err, null) 135 | const expectedMessage = 'canceling statement due to user request' 136 | assert.notEqual( 137 | err.toString().indexOf(expectedMessage), 138 | -1, 139 | 'Error message should mention reason for query failure.' 140 | ) 141 | done() 142 | }) 143 | 144 | setTimeout(() => { 145 | executeSql(`SELECT pg_cancel_backend(pid) 146 | FROM pg_stat_activity 147 | WHERE query ~ 'pg_sleep' 148 | AND NOT query ~ 'pg_cancel_backend'`) 149 | }, 20) 150 | }) 151 | 152 | it('interspersed NoticeResponse message is ignored', (done) => { 153 | // on the copy stream. 
154 | const client = getClient() 155 | let set = '' 156 | set += 'SET SESSION client_min_messages = WARNING;' 157 | set += 'SET SESSION standard_conforming_strings = off;' 158 | set += 'SET SESSION escape_string_warning = on;' 159 | client.query(set, function (err, res) { 160 | assert.equal(err, null, 'testNoticeResponse - could not SET parameters') 161 | const runStream = function (callback) { 162 | const sql = "COPY (SELECT '\\\n') TO STDOUT" 163 | const stream = client.query(copy(sql)) 164 | stream.on('error', callback) 165 | 166 | // make sure stream is pulled from 167 | stream.pipe(concat(callback.bind(null, null))) 168 | } 169 | 170 | runStream(function (err) { 171 | assert.ifError(err) 172 | client.end() 173 | done() 174 | }) 175 | }) 176 | }) 177 | 178 | it('client can be reused for another COPY TO query', (done) => { 179 | const client = getClient() 180 | const generateRows = 100 181 | const totalRuns = 5 182 | let runsLeftToStart = totalRuns 183 | let currentRunNumber = 0 184 | 185 | function runStream(num, callback) { 186 | const sql = 'COPY (SELECT * FROM generate_series(0,' + generateRows + ')) TO STDOUT' 187 | const stream = client.query(copy(sql)) 188 | stream.on('error', callback) 189 | stream.pipe( 190 | concat(function (buf) { 191 | const res = buf.toString('utf8') 192 | const exp = _.range(0, generateRows + 1).join('\n') + '\n' 193 | assert.equal(res, exp, 'clientReuse: sent & received buffer should be equal') 194 | currentRunNumber++ 195 | callback() 196 | }) 197 | ) 198 | } 199 | 200 | function processResult(err) { 201 | assert.ifError(err) 202 | runsLeftToStart-- 203 | if (runsLeftToStart) { 204 | runStream(currentRunNumber, processResult) 205 | } else { 206 | assert.equal( 207 | currentRunNumber, 208 | totalRuns, 209 | 'clientReuse: there should be equal amount of queries on the same client' 210 | ) 211 | client.end() 212 | done() 213 | } 214 | } 215 | 216 | runStream(currentRunNumber, processResult) 217 | }) 218 | 219 | it('client can be reused for another query', (done) => { 220 | const client = getClient() 221 | 222 | // uncomment the code to see pausing and resuming of the connection stream 223 | 224 | //const orig_resume = client.connection.stream.resume; 225 | //const orig_pause = client.connection.stream.pause; 226 | // 227 | //client.connection.stream.resume = function () { 228 | // console.log('resume', new Error().stack); 229 | // orig_resume.apply(this, arguments) 230 | //} 231 | // 232 | //client.connection.stream.pause = function () { 233 | // console.log('pause', new Error().stack); 234 | // orig_pause.apply(this, arguments) 235 | //} 236 | 237 | function testConnection() { 238 | client.query('SELECT 1', function () { 239 | client.end() 240 | done() 241 | }) 242 | } 243 | 244 | const writable = new Writable({ 245 | write: function (chunk, encoding, cb) { 246 | cb() 247 | }, 248 | }) 249 | writable.on('finish', () => { 250 | setTimeout(testConnection, 30) // test if the connection didn't drop flowing state 251 | }) 252 | 253 | const sql = 'COPY (SELECT 1) TO STDOUT' 254 | const stream = client.query(copy(sql, { highWaterMark: 1 })) 255 | stream.pipe(writable) 256 | }) 257 | 258 | it('two small rows are combined into single chunk', (done) => { 259 | const sql = 'COPY (SELECT * FROM generate_series(1, 2)) TO STDOUT' 260 | assertCopyToResult(sql, (err, chunks, result, stream) => { 261 | assert.ifError(err) 262 | assert.equal(chunks.length, 1) 263 | assert.deepEqual(chunks[0], Buffer.from('1\n2\n')) 264 | done() 265 | }) 266 | }) 267 | 268 | it('one large 
row emits multiple chunks', (done) => { 269 | const fieldSize = 64 * 1024 270 | const sql = `COPY (SELECT repeat('-', ${fieldSize})) TO STDOUT` 271 | assertCopyToResult(sql, (err, chunks, result, stream) => { 272 | assert.ifError(err) 273 | assert(chunks.length > 1) 274 | assert.equal(result, `${'-'.repeat(fieldSize)}\n`) 275 | done() 276 | }) 277 | }) 278 | 279 | describe('stream compliance', () => { 280 | describe('successful stream', () => { 281 | const successfulSql = `COPY (SELECT 1) TO STDOUT` 282 | 283 | it("emits 0 'finish' (readable stream)", (done) => { 284 | assertCopyToResult(successfulSql, (err, chunks, result, stream) => { 285 | assert.ifError(err) 286 | assert.equal(stream.emits['finish'], undefined) 287 | done() 288 | }) 289 | }) 290 | 291 | it("emits 1 'end' (readable stream)", (done) => { 292 | assertCopyToResult(successfulSql, (err, chunks, result, stream) => { 293 | assert.ifError(err) 294 | assert.equal(stream.emits['end'].length, 1) 295 | done() 296 | }) 297 | }) 298 | 299 | it('works with finished()', (done) => { 300 | if (!finished) return done() 301 | createCopyToQuery(successfulSql, (client, copyToStream) => { 302 | copyToStream.resume() 303 | finished(copyToStream, (err) => { 304 | assert.ifError(err) 305 | client.end() 306 | done() 307 | }) 308 | }) 309 | }) 310 | 311 | it('works with pipeline()', (done) => { 312 | if (!pipeline) return done() 313 | createCopyToQuery(successfulSql, (client, copyToStream) => { 314 | const pt = new PassThrough() 315 | pipeline(copyToStream, pt, (err) => { 316 | assert.ifError(err) 317 | client.end() 318 | done() 319 | }) 320 | }) 321 | }) 322 | }) 323 | 324 | describe('erroneous stream (syntax error)', () => { 325 | const syntaxErrorSql = `COPY (SELECT INVALID SYNTAX) TO STDOUT` 326 | 327 | it("emits 0 'finish'", (done) => { 328 | assertCopyToResult(syntaxErrorSql, (err, chunks, result, stream) => { 329 | assert.ok(err) 330 | assert.equal(stream.emits['finish'], undefined) 331 | done() 332 | }) 333 | }) 334 | 335 | it("emits 0 'end'", (done) => { 336 | assertCopyToResult(syntaxErrorSql, (err, chunks, result, stream) => { 337 | assert.ok(err) 338 | assert.equal(stream.emits['end'], undefined) 339 | done() 340 | }) 341 | }) 342 | 343 | it("emits 1 'error'", (done) => { 344 | assertCopyToResult(syntaxErrorSql, (err, chunks, result, stream) => { 345 | assert.ok(err) 346 | assert.equal(stream.emits['error'].length, 1) 347 | done() 348 | }) 349 | }) 350 | 351 | it('works with finished()', (done) => { 352 | if (!finished) return done() 353 | createCopyToQuery(syntaxErrorSql, (client, copyToStream) => { 354 | copyToStream.resume() 355 | finished(copyToStream, (err) => { 356 | assert.ok(err) 357 | client.end() 358 | done() 359 | }) 360 | }) 361 | }) 362 | 363 | it('works with pipeline()', (done) => { 364 | if (!pipeline) return done() 365 | createCopyToQuery(syntaxErrorSql, (client, copyToStream) => { 366 | pipeline(copyToStream, new PassThrough(), (err) => { 367 | assert.ok(err) 368 | client.end() 369 | done() 370 | }) 371 | }) 372 | }) 373 | }) 374 | 375 | describe('erroneous stream (internal error)', () => { 376 | function createInternalErrorCopyToQuery(callback) { 377 | createCopyToQuery('COPY (SELECT pg_sleep(10)) TO STDOUT', callback) 378 | 379 | setTimeout(() => { 380 | executeSql(`SELECT pg_cancel_backend(pid) 381 | FROM pg_stat_activity 382 | WHERE query ~ 'pg_sleep' 383 | AND NOT query ~ 'pg_cancel_backend'`) 384 | }, 20) 385 | } 386 | 387 | function assertInternalErrorCopyToResult(assertFn) { 388 | 
createInternalErrorCopyToQuery((client, copyToStream) => { 389 | processCopyToStreamForAssertFn(client, copyToStream, assertFn) 390 | }) 391 | } 392 | 393 | it("emits 0 'finish'", (done) => { 394 | assertInternalErrorCopyToResult((err, chunks, result, stream) => { 395 | assert.ok(err) 396 | assert.equal(stream.emits['finish'], undefined) 397 | done() 398 | }) 399 | }) 400 | 401 | it("emits 0 'end'", (done) => { 402 | assertInternalErrorCopyToResult((err, chunks, result, stream) => { 403 | assert.ok(err) 404 | assert.equal(stream.emits['end'], undefined) 405 | done() 406 | }) 407 | }) 408 | 409 | it("emits 1 'error'", (done) => { 410 | assertInternalErrorCopyToResult((err, chunks, result, stream) => { 411 | assert.ok(err) 412 | assert.equal(stream.emits['error'].length, 1) 413 | done() 414 | }) 415 | }) 416 | 417 | it('works with finished()', (done) => { 418 | if (!finished) return done() 419 | createInternalErrorCopyToQuery((client, copyToStream) => { 420 | copyToStream.resume() 421 | finished(copyToStream, (err) => { 422 | assert.ok(err) 423 | client.end() 424 | done() 425 | }) 426 | }) 427 | }) 428 | 429 | it('works with pipeline()', (done) => { 430 | if (!pipeline) return done() 431 | createInternalErrorCopyToQuery((client, copyToStream) => { 432 | pipeline(copyToStream, new PassThrough(), (err) => { 433 | assert.ok(err) 434 | client.end() 435 | done() 436 | }) 437 | }) 438 | }) 439 | }) 440 | }) 441 | }) 442 | 443 | describe('integration tests (csv parsers)', () => { 444 | function readParserResult(csvModule, csvModuleOpts, inputByteArrays) { 445 | return new Promise((resolve, reject) => { 446 | const parser = csvModule(csvModuleOpts) 447 | parser.on('error', reject) 448 | parser.pipe(concat({ encoding: 'object' }, resolve)) 449 | 450 | for (const inputByteArray of inputByteArrays) { 451 | const inputBuffer = Buffer.from(inputByteArray) 452 | parser.write(inputBuffer) 453 | } 454 | parser.end() 455 | }) 456 | } 457 | 458 | async function assertResult(csvModule, csvModuleOpts, inputByteArrays, expectedContent) { 459 | const actualContent = await readParserResult(csvModule, csvModuleOpts, inputByteArrays) 460 | assert.deepEqual(actualContent, expectedContent) 461 | } 462 | 463 | it('module csv-parser handles cross boundary lines', async () => { 464 | const input = Buffer.from('hello,world\ncrossing,boundaries') 465 | 466 | for (let splitAt = 1; splitAt < input.length; splitAt++) { 467 | const inputPart1 = input.slice(0, splitAt) 468 | const inputPart2 = input.slice(splitAt) 469 | 470 | assert(inputPart1.length > 0) 471 | assert(inputPart2.length > 0) 472 | 473 | await assertResult( 474 | csvParser, 475 | { headers: false }, 476 | [inputPart1, inputPart2], 477 | [ 478 | { 0: 'hello', 1: 'world' }, 479 | { 0: 'crossing', 1: 'boundaries' }, 480 | ] 481 | ) 482 | } 483 | }) 484 | 485 | it('module csv-parse handles cross boundary lines', async () => { 486 | const input = Buffer.from('hello,world\ncrossing,boundaries') 487 | 488 | for (let splitAt = 1; splitAt < input.length; splitAt++) { 489 | const inputPart1 = input.slice(0, splitAt) 490 | const inputPart2 = input.slice(splitAt) 491 | 492 | assert(inputPart1.length > 0) 493 | assert(inputPart2.length > 0) 494 | 495 | await assertResult( 496 | csvParse, 497 | {}, 498 | [inputPart1, inputPart2], 499 | [ 500 | ['hello', 'world'], 501 | ['crossing', 'boundaries'], 502 | ] 503 | ) 504 | } 505 | }) 506 | }) 507 | 508 | describe('unit tests', () => { 509 | function readCopyToResult(inputByteArrays) { 510 | return new Promise((resolve, reject) => { 
511 | // mock a pg client/server 512 | const pgStream = new PassThrough() 513 | pgStream.on('data', () => {}) 514 | const pgConnection = { 515 | stream: pgStream, 516 | query: () => {}, 517 | removeAllListeners: () => {}, 518 | } 519 | const pgClient = { 520 | connection: pgConnection, 521 | query: function (submittable) { 522 | submittable.submit(this.connection) 523 | }, 524 | } 525 | 526 | const copyToStream = copy(/*sql*/) 527 | pgClient.query(copyToStream) 528 | 529 | for (const inputByteArray of inputByteArrays) { 530 | const inputBuffer = Buffer.from(inputByteArray) 531 | pgStream.write(inputBuffer) 532 | } 533 | 534 | copyToStream.on('error', reject) 535 | copyToStream.pipe(concat({ encoding: 'string' }, resolve)) 536 | }) 537 | } 538 | 539 | async function assertResult(inputByteArrays, expectedContent) { 540 | const actualContent = await readCopyToResult(inputByteArrays) 541 | assert.deepEqual(actualContent, expectedContent) 542 | } 543 | 544 | it('forwards passed options to parent Transform stream', () => { 545 | const sql = 'COPY (SELECT * FROM generate_series(0, 10)) TO STDOUT' 546 | const stream = copy(sql, { highWaterMark: 10 }) 547 | assert.equal(stream._readableState.highWaterMark, 10, 'Client should have been set with a correct highWaterMark.') 548 | }) 549 | 550 | it('input without row data gives empty result', async () => { 551 | await assertResult([[code.CopyOutResponse, 0x0, 0x0, 0x0, 0x4, code.CopyDone, 0x0, 0x0, 0x0, 0x4]], '') 552 | }) 553 | 554 | it('complex input cut at chunk boundary every possible way gives correct result', async () => { 555 | const input = [] 556 | input.push(code.CopyOutResponse, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x01, 0x00, 0x00) 557 | input.push(code.CopyData, 0x00, 0x00, 0x00, 0x07, 0x78, 0x79, 0x0a) 558 | input.push(code.CopyDone, 0x00, 0x00, 0x00, 0x04) 559 | input.push(code.CommandComplete, 0x00, 0x00, 0x00, 0x0b, 0x43, 0x4f, 0x50, 0x59, 0x20, 0x31, 0x00) 560 | input.push(code.ReadyForQuery, 0x00, 0x00, 0x00, 0x05, 0x4) 561 | 562 | for (let splitAt = 1; splitAt < input.length; splitAt++) { 563 | const inputPart1 = input.slice(0, splitAt) 564 | const inputPart2 = input.slice(splitAt) 565 | 566 | assert(inputPart1.length > 0) 567 | assert(inputPart2.length > 0) 568 | 569 | await assertResult([inputPart1, inputPart2], 'xy\n') 570 | } 571 | }) 572 | }) 573 | }) 574 | -------------------------------------------------------------------------------- /test/obuf.js: -------------------------------------------------------------------------------- 1 | const assert = require('assert') 2 | const OffsetBuffer = require('../obuf.js') 3 | 4 | describe('OffsetBuffer', function () { 5 | let o 6 | beforeEach(function () { 7 | o = new OffsetBuffer() 8 | }) 9 | 10 | describe('.take()', function () { 11 | it('should return empty buffer', function () { 12 | const b = new Buffer('hello world') 13 | o.push(b) 14 | const r = o.take(0) 15 | assert.equal(r.length, 0) 16 | assert.equal(o.size, b.length) 17 | }) 18 | 19 | it('should return the first buffer itself', function () { 20 | const b = new Buffer('hello world') 21 | o.push(b) 22 | const r = o.take(b.length) 23 | assert(r === b) 24 | assert(o.isEmpty()) 25 | }) 26 | 27 | it('should return the slice of the buffer ', function () { 28 | const b = new Buffer('hello world') 29 | o.push(b) 30 | assert.equal(o.take(5).toString(), 'hello') 31 | assert.equal(o.take(1).toString(), ' ') 32 | assert.equal(o.take(5).toString(), 'world') 33 | assert(o.isEmpty()) 34 | }) 35 | 36 | it('should concat buffers', function () { 
37 | o.push(new Buffer('hello')) 38 | o.push(new Buffer(' ')) 39 | o.push(new Buffer('world!')) 40 | assert.equal(o.take(11).toString(), 'hello world') 41 | assert.equal(o.take(1).toString(), '!') 42 | assert(o.isEmpty()) 43 | }) 44 | }) 45 | 46 | describe('.skip', function () { 47 | it('should skip bytes', function () { 48 | o.push(new Buffer('hello ')) 49 | o.push(new Buffer('world')) 50 | o.push(new Buffer(' oh gosh')) 51 | 52 | assert.equal(o.take(2).toString(), 'he') 53 | o.skip(1) 54 | assert.equal(o.take(2).toString(), 'lo') 55 | o.skip(1) 56 | assert.equal(o.take(2).toString(), 'wo') 57 | o.skip(4) 58 | assert.equal(o.take(7).toString(), 'oh gosh') 59 | 60 | assert(o.isEmpty()) 61 | }) 62 | }) 63 | 64 | describe('.peekUInt8', function () { 65 | it('should return and not move by one byte', function () { 66 | o.push(new Buffer([0x1, 0x2])) 67 | assert.equal(o.peekUInt8(), 1) 68 | assert.equal(o.readUInt8(), 1) 69 | assert.equal(o.peekUInt8(), 2) 70 | assert.equal(o.readUInt8(), 2) 71 | assert(o.isEmpty()) 72 | }) 73 | }) 74 | 75 | describe('.peekInt8', function () { 76 | it('should return signed number', function () { 77 | o.push(new Buffer([0x80])) 78 | assert.equal(o.peekInt8(), -128) 79 | assert.equal(o.readInt8(), -128) 80 | assert(o.isEmpty()) 81 | }) 82 | }) 83 | 84 | describe('.readUInt8', function () { 85 | it('should return and move by one byte', function () { 86 | o.push(new Buffer([0x1, 0x2])) 87 | o.push(new Buffer([0x3, 0x4])) 88 | assert.equal(o.readUInt8(), 1) 89 | assert.equal(o.readUInt8(), 2) 90 | assert.equal(o.readUInt8(), 3) 91 | assert.equal(o.readUInt8(), 4) 92 | assert(o.isEmpty()) 93 | }) 94 | }) 95 | 96 | describe('.readInt8', function () { 97 | it('should return signed number', function () { 98 | o.push(new Buffer([0x8f, 0x7f])) 99 | assert.equal(o.readInt8(), -113) 100 | assert.equal(o.readInt8(), 127) 101 | assert(o.isEmpty()) 102 | }) 103 | }) 104 | 105 | describe('.readUInt16LE', function () { 106 | it('should return and move by two bytes', function () { 107 | o.push(new Buffer([0x1, 0x2, 0x3])) 108 | o.push(new Buffer([0x4, 0x5, 0x6])) 109 | assert.equal(o.readUInt16LE(), 0x0201) 110 | assert.equal(o.readUInt16LE(), 0x0403) 111 | assert.equal(o.readUInt16LE(), 0x0605) 112 | assert(o.isEmpty()) 113 | }) 114 | 115 | it('should return and move by two bytes (regression #1)', function () { 116 | o.push(new Buffer([0x1])) 117 | o.push(new Buffer([0x2, 0x3, 0x4])) 118 | assert.equal(o.readUInt16LE(), 0x0201) 119 | assert.equal(o.readUInt16LE(), 0x0403) 120 | assert(o.isEmpty()) 121 | }) 122 | }) 123 | 124 | describe('.readInt16LE', function () { 125 | it('should return signed number', function () { 126 | o.push(new Buffer([0x23, 0x81])) 127 | assert.equal(o.readInt16LE(), -32477) 128 | assert(o.isEmpty()) 129 | }) 130 | }) 131 | 132 | describe('.readUInt24LE', function () { 133 | it('should return and move by three bytes', function () { 134 | o.push(new Buffer([0x1, 0x2, 0x3, 0x4, 0x5])) 135 | o.push(new Buffer([0x6, 0x7])) 136 | o.push(new Buffer([0x8, 0x9])) 137 | assert.equal(o.readUInt24LE(), 0x030201) 138 | assert.equal(o.readUInt24LE(), 0x060504) 139 | assert.equal(o.readUInt24LE(), 0x090807) 140 | assert(o.isEmpty()) 141 | }) 142 | 143 | it('should return and move by three bytes (regression #1)', function () { 144 | o.push(new Buffer([0x1, 0x2])) 145 | o.push(new Buffer([0x3])) 146 | assert.equal(o.readUInt24LE(), 0x030201) 147 | assert.equal(o.buffers.length, 0) 148 | assert(o.isEmpty()) 149 | }) 150 | }) 151 | 152 | describe('.readInt24LE', function 
() { 153 | it('should return signed number', function () { 154 | o.push(new Buffer([0x23, 0x45, 0x81])) 155 | assert.equal(o.readInt24LE(), -8305373) 156 | assert(o.isEmpty()) 157 | }) 158 | }) 159 | 160 | describe('.readUInt32LE', function () { 161 | it('should return and move by four bytes', function () { 162 | o.push(new Buffer([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7])) 163 | o.push(new Buffer([0x8, 0x9, 0xa])) 164 | o.push(new Buffer([0xb, 0xc, 0xd])) 165 | o.push(new Buffer([0xe, 0xf, 0x10])) 166 | assert.equal(o.readUInt32LE(), 0x04030201) 167 | assert.equal(o.readUInt32LE(), 0x08070605) 168 | assert.equal(o.readUInt32LE(), 0x0c0b0a09) 169 | assert.equal(o.readUInt32LE(), 0x100f0e0d) 170 | assert(o.isEmpty()) 171 | }) 172 | 173 | it('should return and move by four bytes (regression #1)', function () { 174 | o.push(new Buffer([0x1, 0x2, 0x3])) 175 | o.push(new Buffer([0x4])) 176 | assert.equal(o.readUInt32LE(), 0x04030201) 177 | assert.equal(o.buffers.length, 0) 178 | assert(o.isEmpty()) 179 | }) 180 | }) 181 | 182 | describe('.readInt32LE', function () { 183 | it('should return signed number', function () { 184 | o.push(new Buffer([0xff, 0xff, 0xff, 0xff])) 185 | assert.equal(o.readInt32LE(), -1) 186 | assert(o.isEmpty()) 187 | }) 188 | }) 189 | 190 | describe('.readUInt16BE', function () { 191 | it('should return and move by two bytes', function () { 192 | o.push(new Buffer([0x1, 0x2, 0x3])) 193 | o.push(new Buffer([0x4, 0x5, 0x6])) 194 | assert.equal(o.readUInt16BE(), 0x0102) 195 | assert.equal(o.readUInt16BE(), 0x0304) 196 | assert.equal(o.readUInt16BE(), 0x0506) 197 | assert(o.isEmpty()) 198 | }) 199 | }) 200 | 201 | describe('.readInt16BE', function () { 202 | it('should return signed number', function () { 203 | o.push(new Buffer([0x81, 0x23])) 204 | assert.equal(o.readInt16BE(), -32477) 205 | assert(o.isEmpty()) 206 | }) 207 | }) 208 | 209 | describe('.readUInt24BE', function () { 210 | it('should return and move by three bytes', function () { 211 | o.push(new Buffer([0x1, 0x2, 0x3, 0x4, 0x5])) 212 | o.push(new Buffer([0x6, 0x7])) 213 | o.push(new Buffer([0x8, 0x9])) 214 | assert.equal(o.readUInt24BE(), 0x010203) 215 | assert.equal(o.readUInt24BE(), 0x040506) 216 | assert.equal(o.readUInt24BE(), 0x070809) 217 | assert(o.isEmpty()) 218 | }) 219 | }) 220 | 221 | describe('.readInt24BE', function () { 222 | it('should return signed number', function () { 223 | o.push(new Buffer([0x81, 0x45, 0x23])) 224 | assert.equal(o.readInt24BE(), -8305373) 225 | assert(o.isEmpty()) 226 | }) 227 | }) 228 | 229 | describe('.readUInt32BE', function () { 230 | it('should return and move by four bytes', function () { 231 | o.push(new Buffer([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7])) 232 | o.push(new Buffer([0x8, 0x9, 0xa])) 233 | o.push(new Buffer([0xb, 0xc, 0xd])) 234 | o.push(new Buffer([0xe, 0xf, 0x10])) 235 | assert.equal(o.readUInt32BE(), 0x01020304) 236 | assert.equal(o.readUInt32BE(), 0x05060708) 237 | assert.equal(o.readUInt32BE(), 0x090a0b0c) 238 | assert.equal(o.readUInt32BE(), 0x0d0e0f10) 239 | assert(o.isEmpty()) 240 | }) 241 | 242 | it('should return positive values', function () { 243 | o.push(new Buffer([0xff, 0xff, 0xff, 0xff])) 244 | assert.equal(o.readUInt32BE(), 0xffffffff) 245 | assert(o.isEmpty()) 246 | }) 247 | }) 248 | 249 | describe('.readInt32BE', function () { 250 | it('should return signed number', function () { 251 | o.push(new Buffer([0xff, 0xff, 0xff, 0xff])) 252 | assert.equal(o.readInt32BE(), -1) 253 | assert(o.isEmpty()) 254 | }) 255 | }) 256 | 257 | describe('.has', 
function () { 258 | it('should properly check the amount of the remaining bytes', function () { 259 | o.push(new Buffer([1, 2, 3])) 260 | assert(o.has(3)) 261 | assert.equal(o.readUInt8(), 0x01) 262 | assert(!o.has(3)) 263 | assert(o.has(2)) 264 | assert.equal(o.readUInt16BE(), 0x0203) 265 | assert(!o.has(1)) 266 | }) 267 | }) 268 | }) 269 | --------------------------------------------------------------------------------
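
The obuf tests above exercise a small chunk-aware buffer (`push`, `take`, `skip`, `has`, and the `read*`/`peek*` accessors) that the copy streams rely on to reassemble protocol messages split across socket chunks. As a point of reference, here is a minimal usage sketch combining those same methods to parse one length-prefixed frame; the require path and the frame layout are illustrative assumptions, not code taken from the files above.

```js
// Minimal sketch (assumed usage, not part of the repo): parse one
// PostgreSQL-style frame with OffsetBuffer — 1-byte message code, then a
// 4-byte big-endian length (counting itself but not the code byte), then payload.
const OffsetBuffer = require('./obuf.js') // path assumed relative to the package root

const o = new OffsetBuffer()

// Feed the frame in two arbitrary chunks, the way a socket would deliver it.
o.push(Buffer.from([0x64, 0x00, 0x00])) // 'd' (CopyData) + first two length bytes
o.push(Buffer.from([0x00, 0x07, 0x78, 0x79, 0x0a])) // rest of the length + payload 'xy\n'

if (o.has(5)) {
  const code = o.readUInt8() // 0x64, i.e. 'd'
  const length = o.readUInt32BE() // 7 = 4 length bytes + 3 payload bytes
  const payload = o.take(length - 4) // <Buffer 78 79 0a>
  console.log(String.fromCharCode(code), length, payload.toString()) // d 7 xy\n
}
console.log(o.isEmpty()) // true: the whole frame was consumed
```

Note that the length field deliberately spans the two `push()` calls; reading cleanly across chunk boundaries is the behaviour the regression cases above assert.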