├── .babelrc
├── .gitignore
├── .npmrc
├── .travis.yml
├── LICENSE
├── README.md
├── package.json
├── pg-chained-batch.js
├── pg-iterator.js
├── pgdown.js
├── test
│   ├── _common.js
│   ├── abstract-leveldown
│   │   ├── approximate-size-test.js
│   │   ├── index.js
│   │   └── iterator-test.js
│   ├── encoding.js
│   ├── index.js
│   └── pgdown.js
└── util.js

/.babelrc:
--------------------------------------------------------------------------------
1 | {
2 |   "presets": [ "es2015" ]
3 | }
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules/
2 | npm-debug.log
3 | package-lock.json
4 |
--------------------------------------------------------------------------------
/.npmrc:
--------------------------------------------------------------------------------
1 | package-lock=false
2 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: node_js
2 |
3 | node_js:
4 |   - 10
5 |   - 12
6 |   - 14
7 |
8 | services:
9 |   - docker
10 |
11 | before_install:
12 |   - sudo /etc/init.d/postgresql stop
13 |   - docker run -d -p 5432:5432 clkao/postgres-plv8:9.5
14 |
15 | script:
16 |   - npm test
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2018 Lars-Magnus Skog
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in
13 | all copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
21 | THE SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # pgdown
2 |
3 | > [`PostgreSQL`](http://www.postgresql.org/) backed [`abstract-leveldown`](https://github.com/Level/abstract-leveldown).
4 |
5 | [![npm](https://img.shields.io/npm/v/pgdown.svg)](https://www.npmjs.com/package/pgdown)
6 | ![Node version](https://img.shields.io/node/v/pgdown.svg)
7 | [![Build Status](https://travis-ci.org/ralphtheninja/pgdown.svg?branch=master)](https://travis-ci.org/ralphtheninja/pgdown)
8 | [![JavaScript Style Guide](https://img.shields.io/badge/code_style-standard-brightgreen.svg)](https://standardjs.com)
9 |
10 | **Experimental and WIP. Do not use.**
11 |
12 | ## Usage
13 |
14 | Use together with [`levelup`](https://github.com/Level/levelup) to get a `PostgreSQL` backed store.
15 |
16 | ```js
17 | const levelup = require('levelup')
18 | const PgDOWN = require('pgdown')
19 |
20 | const uri = 'postgres://postgres:@localhost:5432/postgres'
21 | const db = levelup(uri, {
22 |   db: PgDOWN,
23 |   keyEncoding: 'utf8',
24 |   valueEncoding: 'json'
25 | })
26 |
27 | db.put('foo', { bar: 'baz' }, (err) => {
28 |   db.get('foo', (err, result) => {
29 |     console.log('result %j', result)
30 |   })
31 | })
32 | ```
33 |
34 | ## API
35 |
36 | #### `const PgDOWN = require('pgdown')`
37 |
38 | Constructor.
39 |
40 | #### `const down = PgDOWN(location)`
41 |
42 | Creates a `PgDOWN` instance with a `location`, which can take either of the following forms:
43 |
44 | * `postgres://<user>:<password>@<host>:<port>/<database>/<table>`
45 | * `/<database>/<table>`
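For example, here is a sketch of how `util.parseLocation` maps a location onto naming config (the `_config` fields are internal, shown only for illustration; identifiers are escaped with [`pg-format`](https://www.npmjs.com/package/pg-format)):

```js
const PgDOWN = require('pgdown')

// a leading slash selects the database; the remaining path segments form the table name
const down = PgDOWN('/postgres/my_app/things')

down._config.database // 'postgres' (the first path segment)
down._config._table // escaped identifier for 'my_app/things'
down._config._relation // '<schema>.<table>', where the schema defaults to 'pgdown'
```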
46 |
47 | An `options` object is created based on the location and passed to [`pg.Client`](https://github.com/brianc/node-postgres/wiki/Client#new-clientobject-config--client). Note that `pgdown` gives the _standard_ [`PostgreSQL` environment variables](http://www.postgresql.org/docs/9.5/static/libpq-envars.html) precedence over the defaults shipped with `pg`.
48 |
49 | To summarize, these are the resulting properties and their default values:
50 |
51 | * `database` from `location` _or_ `$PGDATABASE` _or_ `'postgres'`
52 | * `host` from `location` _or_ `$PGHOSTADDR` _or_ `'localhost'`
53 | * `port` from `location` _or_ `$PGPORT` _or_ `5432`
54 | * `user` from `location` _or_ `$PGUSER` _or_ `$USERNAME` (win32) _or_ `$USER`
55 | * `password` from `location` _or_ `$PGPASSWORD` _or_ `null`
56 |
57 | ## ES6
58 |
59 | `pgdown` mainly uses arrow functions and template strings from ES6, which are very useful when constructing SQL query strings. It targets node `10+` (see the `engines` field in `package.json`); the [`babel-register`](https://www.npmjs.com/package/babel-register) and [`babel-preset-es2015`](https://www.npmjs.com/package/babel-preset-es2015) development dependencies remain for transpiling the test suite on older runtimes.
60 |
61 | ## PostgreSQL
62 |
63 | **Note** `pgdown` requires `PostgreSQL` version `9.5` or later, since `put` is implemented with `INSERT ... ON CONFLICT` upserts.
64 |
65 | If you're hacking on `pgdown` or just want to set up `PostgreSQL` locally, the easiest way is probably to use docker. We highly recommend [`clkao/postgres-plv8`](https://hub.docker.com/r/clkao/postgres-plv8/), which is based on the official `PostgreSQL` docker image but adds support for [`plv8`](https://github.com/plv8/plv8).
66 |
67 | ```
68 | $ docker run -d -p 5432:5432 clkao/postgres-plv8:9.5
69 | ```
70 |
71 | Check out the [wiki](https://github.com/ralphtheninja/pgdown/wiki/PostgreSQL-and-Docker) for more information.
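## Iterators

Iterators are implemented in `pg-iterator.js` on top of [`pg-cursor`](https://github.com/brianc/node-pg-cursor), fetching rows in batches of 100 and translating range options (`gt`, `gte`, `lt`, `lte`, `reverse`, `limit`) into `WHERE`, `ORDER BY` and `LIMIT` clauses. A minimal sketch, reusing the `levelup` `db` from the usage example above:

```js
db.createReadStream({ gte: 'a', lte: 'z', limit: 10 })
  .on('data', (entry) => console.log('%s=%j', entry.key, entry.value))
  .on('error', (err) => console.error(err))
  .on('end', () => console.log('done'))
```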
72 | 73 | ## License 74 | 75 | MIT 76 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "pgdown", 3 | "version": "0.0.0", 4 | "description": "PostgreSQL backed abstract-leveldown", 5 | "main": "pgdown.js", 6 | "scripts": { 7 | "test": "standard && PGDATABASE=${PGDATABASE:-postgres} PGUSER=${PGUSER:-postgres} tape test | faucet" 8 | }, 9 | "keywords": [ 10 | "level", 11 | "leveldown", 12 | "postgres", 13 | "db" 14 | ], 15 | "contributors": [ 16 | "Dean Landolt ", 17 | "Lars-Magnus Skog " 18 | ], 19 | "license": "MIT", 20 | "repository": "ralphtheninja/pgdown", 21 | "bugs": { 22 | "url": "https://github.com/ralphtheninja/pgdown/issues" 23 | }, 24 | "homepage": "https://github.com/ralphtheninja/pgdown#readme", 25 | "dependencies": { 26 | "abstract-leveldown": "^4.0.3", 27 | "any-db": "^2.1.0", 28 | "any-db-pool": "^2.1.0", 29 | "any-db-postgres": "^2.1.4", 30 | "any-db-transaction": "^2.2.2", 31 | "inherits": "^2.0.1", 32 | "level-errors": "^1.0.4", 33 | "monotonic-timestamp": "0.0.9", 34 | "parse-db-url": "0.0.0", 35 | "pg": "^7.1.2", 36 | "pg-cursor": "^1.0.0", 37 | "pg-format": "^1.0.2" 38 | }, 39 | "engines": { 40 | "node": ">=10.0.0" 41 | }, 42 | "devDependencies": { 43 | "after": "^0.8.1", 44 | "babel-preset-es2015": "^6.6.0", 45 | "babel-register": "^6.6.5", 46 | "faucet": "0.0.1", 47 | "levelup": "^2.0.2", 48 | "standard": "^14.3.4", 49 | "tape": "^5.0.1" 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /pg-chained-batch.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const inherits = require('inherits') 4 | const AbstractChainedBatch = require('abstract-leveldown/abstract-chained-batch') 5 | const util = require('./util') 6 | 7 | module.exports = PgChainedBatch 8 | 9 | inherits(PgChainedBatch, AbstractChainedBatch) 10 | function PgChainedBatch (db) { 11 | AbstractChainedBatch.call(this, db) 12 | 13 | // TODO: once queued batch exceeds some threshold create a temp table 14 | // then flush batch ops to temp table periodically and clear ops 15 | } 16 | 17 | // PgChainedBatch.prototype._put = function (key, value) { 18 | // TODO: send ops to temp table if passed buffer threshold 19 | // } 20 | 21 | // PgChainedBatch.prototype._del = function (key) { 22 | // TODO: send ops to temp table if passed buffer threshold 23 | // } 24 | 25 | // PgChainedBatch.prototype._clear = function () { 26 | // TODO: drop temp table, if any 27 | // } 28 | 29 | PgChainedBatch.prototype._write = function (cb) { 30 | const tx = util.createTransaction(this._db._pool, cb) 31 | 32 | this._operations.forEach((op) => { 33 | if (op.type === 'put') { 34 | tx.query(this._db._sql_put(), [op.key, op.value]) 35 | } else if (op.type === 'del') { 36 | tx.query(this._db._sql_del(), [op.key]) 37 | } 38 | }) 39 | 40 | tx.commit() 41 | } 42 | -------------------------------------------------------------------------------- /pg-iterator.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const inherits = require('inherits') 4 | const AbstractIterator = require('abstract-leveldown/abstract-iterator') 5 | const util = require('./util') 6 | 7 | module.exports = PgIterator 8 | 9 | inherits(PgIterator, AbstractIterator) 10 | function PgIterator (db, options) { 11 | AbstractIterator.call(this, db) 12 | 13 | this._keyAsBuffer 
= options.keyAsBuffer 14 | this._valueAsBuffer = options.valueAsBuffer 15 | 16 | const statement = PgIterator._parseOptions(db, options) 17 | const relation = db._config._relation 18 | const head = ` 19 | SELECT key::${db._keyColumnType}, value::${db._valueColumnType} FROM ${relation} 20 | ` 21 | 22 | statement.clauses.unshift(head) 23 | statement.text = statement.clauses.join(' ') 24 | 25 | this._cursor = util.createCursor(db, statement) 26 | } 27 | 28 | PgIterator._comparators = util.comparators 29 | 30 | PgIterator._parseOptions = function (db, options) { 31 | const context = {} 32 | const clauses = context.clauses = context.clauses || [] 33 | const values = context.values = context.values || [] 34 | PgIterator._parseRange(db, options, context) 35 | 36 | if (options.reverse != null) { 37 | clauses.push('ORDER BY key ' + (options.reverse ? 'DESC' : 'ASC')) 38 | } 39 | 40 | if (options.limit != null && options.limit >= 0) { 41 | values.push(options.limit) 42 | clauses.push('LIMIT $' + values.length) 43 | } 44 | 45 | if (options.offset > 0) { 46 | values.push(options.offset) 47 | clauses.push('OFFSET $' + values.length) 48 | } 49 | 50 | return context 51 | } 52 | 53 | PgIterator._parseRange = function (db, range, context) { 54 | const clauses = context.clauses 55 | const values = context.values 56 | 57 | clauses.push('WHERE') 58 | 59 | for (var k in range) { 60 | const v = range[k] 61 | const comp = PgIterator._comparators[k] 62 | const op = comp && comp(range) 63 | if (op && v) { 64 | values.push(db._serializeKey(v)) 65 | clauses.push(`(key) ${op} ($${values.length})`) 66 | clauses.push('AND') 67 | } else { 68 | // throw on unknown? 69 | } 70 | } 71 | 72 | // drop the trailing clause 73 | clauses.pop() 74 | 75 | return context 76 | } 77 | 78 | PgIterator.prototype._batchSize = 100 79 | 80 | PgIterator.prototype._next = function (cb) { 81 | const nextRow = this._rows && this._rows.shift() 82 | if (nextRow) return this._send(nextRow, cb) 83 | 84 | this._cursor.read(this._batchSize, (err, rows) => { 85 | if (err) return cb(err) 86 | 87 | this._rows = rows 88 | this._send(rows.shift(), cb) 89 | }) 90 | } 91 | 92 | PgIterator.prototype._end = function (cb) { 93 | this._cursor.close(cb) 94 | } 95 | 96 | PgIterator.prototype._send = function (row, cb) { 97 | if (!row) return process.nextTick(cb) 98 | 99 | const db = this.db 100 | const key = db._deserializeKey(row.key, this._keyAsBuffer) 101 | const value = db._deserializeValue(row.value, this._valueAsBuffer) 102 | 103 | cb(null, key, value) 104 | } 105 | -------------------------------------------------------------------------------- /pgdown.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const inherits = require('inherits') 4 | const AbstractLevelDOWN = require('abstract-leveldown/abstract-leveldown') 5 | const util = require('./util') 6 | const PgIterator = require('./pg-iterator') 7 | const PgChainedBatch = require('./pg-chained-batch') 8 | 9 | module.exports = PgDOWN 10 | 11 | inherits(PgDOWN, AbstractLevelDOWN) 12 | function PgDOWN (location) { 13 | if (!(this instanceof PgDOWN)) { 14 | return new PgDOWN(location) 15 | } 16 | 17 | AbstractLevelDOWN.call(this, location) 18 | this._config = util.parseLocation(location) 19 | } 20 | 21 | const proto = PgDOWN.prototype 22 | 23 | // NB: keys should *always* be stored using 'bytea' 24 | proto._keyColumnType = 'bytea' 25 | proto._valueColumnType = 'bytea' 26 | 27 | proto._serializeKey = function (key) { 28 | return 
util.serialize(this._keyColumnType, key) 29 | } 30 | 31 | proto._serializeValue = function (value) { 32 | return util.serialize(this._valueColumnType, value) 33 | } 34 | 35 | proto._deserializeKey = function (key, asBuffer) { 36 | return util.deserialize(this._keyColumnType, key, asBuffer) 37 | } 38 | 39 | proto._deserializeValue = function (value, asBuffer) { 40 | return util.deserialize(this._valueColumnType, value, asBuffer) 41 | } 42 | 43 | // TODO: memoized getters 44 | 45 | proto._sql_get = function (key) { 46 | return ` 47 | SELECT value FROM ${this._config._relation} WHERE (key)=$1 48 | ` 49 | } 50 | 51 | proto._sql_del = function (key) { 52 | return ` 53 | DELETE FROM ${this._config._relation} WHERE (key)=$1 54 | ` 55 | } 56 | 57 | proto._sql_insert = function () { 58 | return ` 59 | INSERT INTO ${this._config._relation} (key, value) VALUES($1,$2) 60 | ` 61 | } 62 | 63 | proto._sql_update = function () { 64 | return ` 65 | UPDATE ${this._config._relation} SET value=($2) WHERE key=($1) 66 | ` 67 | } 68 | 69 | proto._sql_put = function (key, value) { 70 | return this._sql_insert() + ' ON CONFLICT (key) DO UPDATE SET value=excluded.value' 71 | } 72 | 73 | proto._open = function (options, cb) { 74 | const config = this._config 75 | const pool = this._pool = util.createPool(config) 76 | // TODO: make pool init async, do create schema if not exists dance just once 77 | 78 | const createIfMissing = options.createIfMissing 79 | const errorIfExists = options.errorIfExists 80 | const IF_NOT_EXISTS = errorIfExists ? '' : 'IF NOT EXISTS' 81 | 82 | const schema = config._schema 83 | const table = config._table 84 | const relation = config._relation 85 | 86 | // always create pgdown schema 87 | pool.query(` 88 | CREATE SCHEMA IF NOT EXISTS ${schema} 89 | `, (err) => err ? fail(err) : info()) 90 | 91 | const info = () => { 92 | pool.query(` 93 | SELECT tablename FROM pg_catalog.pg_tables WHERE schemaname=$1 AND tablename=$2 94 | `, [schema, table], (err, result) => { 95 | const exists = result && result.rowCount === 1 96 | 97 | if (errorIfExists && exists) { 98 | err = new Error('table already exists: ' + table) 99 | } else if (!createIfMissing && !exists) { 100 | err = new Error('table does not exist: ' + table) 101 | } 102 | 103 | if (err) { 104 | fail(err) 105 | } else if (createIfMissing) { 106 | create() 107 | } else { 108 | cb() 109 | } 110 | }) 111 | } 112 | 113 | const create = () => { 114 | // TODO: use separate column names for different value types? 115 | pool.query(` 116 | CREATE TABLE ${IF_NOT_EXISTS} ${relation} ( 117 | key ${this._keyColumnType} PRIMARY KEY, 118 | value ${this._valueColumnType} 119 | ) 120 | `, (err) => { 121 | err ? 
fail(err) : cb() 122 | }) 123 | } 124 | 125 | const fail = (err) => { 126 | this._pool = null 127 | util.destroyPool(pool, (err_) => { 128 | cb(err) 129 | }) 130 | } 131 | } 132 | 133 | proto._close = function (cb) { 134 | const pool = this._pool 135 | if (pool) { 136 | this._pool = null 137 | util.destroyPool(pool, cb) 138 | } else { 139 | process.nextTick(cb) 140 | } 141 | } 142 | 143 | proto._get = function (key, options, cb) { 144 | this._pool.query(this._sql_get(), [key], (err, result) => { 145 | if (err) { 146 | cb(err) 147 | } else if (result.rowCount === 1) { 148 | cb(null, this._deserializeValue(result.rows[0].value, options.asBuffer)) 149 | } else if (result.rowCount === 0) { 150 | cb(new util.NotFoundError('not found: ' + key)) 151 | } else { 152 | cb(new Error('unexpected result for key: ' + key)) 153 | } 154 | }) 155 | } 156 | 157 | proto._put = function (key, value, options, cb) { 158 | const batch = [{ type: 'put', key: key, value: value }] 159 | this._batch(batch, options, (err) => cb(err || null)) 160 | } 161 | 162 | proto._del = function (key, options, cb) { 163 | const batch = [{ type: 'del', key: key }] 164 | this._batch(batch, options, (err) => cb(err || null)) 165 | } 166 | 167 | proto._batch = function (ops, options, cb) { 168 | const tx = util.createTransaction(this._pool, cb) 169 | 170 | ops.forEach((op) => { 171 | // TODO: merge op.options with batch options? 172 | if (op.type === 'put') { 173 | tx.query(this._sql_put(), [op.key, op.value]) 174 | } else if (op.type === 'del') { 175 | tx.query(this._sql_del(), [op.key]) 176 | } 177 | }) 178 | 179 | tx.commit() 180 | } 181 | 182 | proto._chainedBatch = function () { 183 | return new PgChainedBatch(this) 184 | } 185 | 186 | proto._iterator = function (options) { 187 | return new PgIterator(this, options) 188 | } 189 | 190 | // NB: represents exact compressed size? 
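// pg_column_size() reports each row's stored size, after any TOAST
// compression, so the sum computed below is closer to actual on-disk bytes
// for the matched range than leveldb's rough estimate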
191 | proto._approximateSize = function (start, end, cb) { 192 | const options = { start: start, end: end } 193 | // generate standard iterator sql and replace head clause 194 | const context = PgIterator._parseOptions(this, options) 195 | 196 | const relation = this._config._relation 197 | const head = `SELECT sum(pg_column_size(tbl)) as size FROM ${relation} as tbl` 198 | context.clauses.unshift(head) 199 | const text = context.clauses.join(' ') 200 | 201 | this._pool.query(text, context.values, (err, result) => { 202 | if (err) return cb(err) 203 | 204 | const size = result.rowCount && Number(result.rows[0].size) 205 | if (result.rowCount === 1 && !isNaN(size)) { 206 | cb(null, size) 207 | } else { 208 | cb(new Error('failed to calculate approximate size')) 209 | } 210 | }) 211 | } 212 | 213 | PgDOWN.destroy = util.dropTable 214 | -------------------------------------------------------------------------------- /test/_common.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const common = exports 4 | 5 | const after = require('after') 6 | const inherits = require('inherits') 7 | const util = require('../util') 8 | const PgDOWN = require('../') 9 | 10 | common.escape = util.escape 11 | 12 | common.PG_DEFAULTS = util.PG_DEFAULTS 13 | common.PG_DEFAULTS.idleTimeout = Number(process.env.PGDOWN_TEST_IDLE_TIMEOUT) || 5000 14 | common.PG_DEFAULTS.schema = process.env.PGDOWN_TEST_SCHEMA || 'pgdown_test' 15 | 16 | var _count = 0 17 | var _last 18 | 19 | common.lastLocation = () => _last 20 | 21 | common.location = (loc) => (_last = loc || ('test_' + (++_count))) 22 | 23 | common.cleanup = (cb) => { 24 | const len = common.OPENED.length 25 | const done = after(len, cb) 26 | 27 | for (var i = 0; i < len; i++) { 28 | const db = common.OPENED[i] 29 | const pool = db && db._pool 30 | if (pool) pool.close(done) 31 | else done() 32 | } 33 | 34 | common.OPENED.length = 0 35 | } 36 | 37 | common.setUp = (t) => { 38 | common.cleanup(t.end) 39 | } 40 | 41 | common.tearDown = (t) => { 42 | t.timeoutAfter(2000) 43 | common.setUp(t) 44 | } 45 | 46 | common.collectEntries = function (iterator, cb) { 47 | const data = [] 48 | function next () { 49 | iterator.next(function (err, key, value) { 50 | if (err) return cb(err) 51 | if (!arguments.length) { 52 | return iterator.end(function (err) { 53 | cb(err, data) 54 | }) 55 | } 56 | data.push({ key: key, value: value }) 57 | setTimeout(next, 0) 58 | }) 59 | } 60 | next() 61 | } 62 | 63 | common.maxCompressionFactor = 0.01 64 | 65 | common.checkBatchSize = function (batch, size) { 66 | // very specific to leveldb, accounts for snappy compression 67 | const total = batch.reduce((n, op) => n + (op.key + op.value).length, 0) 68 | return size > total * common.maxCompressionFactor 69 | } 70 | 71 | // hack db class to drop tables on first open, track open pools to close on end 72 | common.OPENED = [] 73 | common.DROPPED = {} 74 | common.db = TestPgDOWN 75 | 76 | inherits(TestPgDOWN, PgDOWN) 77 | function TestPgDOWN (location) { 78 | if (!(this instanceof TestPgDOWN)) { 79 | return new TestPgDOWN(location) 80 | } 81 | PgDOWN.call(this, location) 82 | } 83 | 84 | const __PgDOWN_open = PgDOWN.prototype._open 85 | TestPgDOWN.prototype._open = function (options, cb) { 86 | const location = this.location 87 | 88 | if (location !== _last || common.DROPPED[location]) { 89 | return __PgDOWN_open.call(this, options, cb) 90 | } 91 | 92 | util.dropTable(location, (err) => { 93 | if (err) return cb(err) 94 | 95 | 
common.DROPPED[location] = true
96 |     __PgDOWN_open.call(this, options, (err) => {
97 |       common.OPENED.push(this)
98 |       cb(err)
99 |     })
100 |   })
101 | }
102 |
--------------------------------------------------------------------------------
/test/abstract-leveldown/approximate-size-test.js:
--------------------------------------------------------------------------------
1 | var db, leveldown, testCommon
2 |
3 | module.exports.setUp = function (_leveldown, test, _testCommon) {
4 |   test('setUp common', _testCommon.setUp)
5 |   test('setUp db', function (t) {
6 |     leveldown = _leveldown
7 |     testCommon = _testCommon
8 |     db = leveldown(testCommon.location())
9 |     db.open(t.end.bind(t))
10 |   })
11 | }
12 |
13 | module.exports.args = function (test) {
14 |   test('test argument-less approximateSize() throws', function (t) {
15 |     t.throws(
16 |       db.approximateSize.bind(db),
17 |       {
18 |         name: 'Error',
19 |         message: 'approximateSize() requires valid `start`, `end` and `callback` arguments'
20 |       },
21 |       'no-arg approximateSize() throws'
22 |     )
23 |     t.end()
24 |   })
25 |
26 |   test('test callback-less, 1-arg, approximateSize() throws', function (t) {
27 |     t.throws(
28 |       db.approximateSize.bind(db, 'foo'),
29 |       {
30 |         name: 'Error',
31 |         message: 'approximateSize() requires valid `start`, `end` and `callback` arguments'
32 |       },
33 |       'callback-less, 1-arg approximateSize() throws'
34 |     )
35 |     t.end()
36 |   })
37 |
38 |   test('test callback-less, 2-arg, approximateSize() throws', function (t) {
39 |     t.throws(
40 |       db.approximateSize.bind(db, 'foo', 'bar'),
41 |       {
42 |         name: 'Error',
43 |         message: 'approximateSize() requires a callback argument'
44 |       },
45 |       'callback-less, 2-arg approximateSize() throws'
46 |     )
47 |     t.end()
48 |   })
49 |
50 |   test('test callback-less, 3-arg, approximateSize() throws', function (t) {
51 |     t.throws(
52 |       db.approximateSize.bind(db, 'foo', 'bar', {}),
53 |       {
54 |         name: 'Error',
55 |         message: 'approximateSize() requires a callback argument'
56 |       },
57 |       'callback-less, 3-arg approximateSize() throws'
58 |     )
59 |     t.end()
60 |   })
61 |
62 |   test('test callback-only approximateSize() throws', function (t) {
63 |     t.throws(
64 |       db.approximateSize.bind(db, function () {}),
65 |       {
66 |         name: 'Error',
67 |         message: 'approximateSize() requires valid `start`, `end` and `callback` arguments'
68 |       },
69 |       'callback-only approximateSize() throws'
70 |     )
71 |     t.end()
72 |   })
73 |
74 |   test('test 1-arg + callback approximateSize() throws', function (t) {
75 |     t.throws(
76 |       db.approximateSize.bind(db, 'foo', function () {}),
77 |       {
78 |         name: 'Error',
79 |         message: 'approximateSize() requires valid `start`, `end` and `callback` arguments'
80 |       },
81 |       '1-arg + callback approximateSize() throws'
82 |     )
83 |     t.end()
84 |   })
85 |
86 |   test('test _serialize object', function (t) {
87 |     t.plan(3)
88 |     var db = leveldown(testCommon.location())
89 |     db._approximateSize = function (start, end, callback) {
90 |       t.equal(Buffer.isBuffer(start) ? String(start) : start, '[object Object]')
91 |       t.equal(Buffer.isBuffer(end) ?
String(end) : end, '[object Object]') 92 | callback() 93 | } 94 | db.approximateSize({}, {}, function (err, val) { 95 | t.error(err) 96 | }) 97 | }) 98 | 99 | test('test _serialize buffer', function (t) { 100 | t.plan(3) 101 | var db = leveldown(testCommon.location()) 102 | db._approximateSize = function (start, end, callback) { 103 | t.same(start, Buffer('start')) 104 | t.same(end, Buffer('end')) 105 | callback() 106 | } 107 | db.approximateSize(Buffer('start'), Buffer('end'), function (err, val) { 108 | t.error(err) 109 | }) 110 | }) 111 | 112 | test('test custom _serialize*', function (t) { 113 | t.plan(3) 114 | var db = leveldown(testCommon.location()) 115 | db._serializeKey = function (data) { return data } 116 | db._approximateSize = function (start, end, callback) { 117 | t.deepEqual(start, { foo: 'bar' }) 118 | t.deepEqual(end, { beep: 'boop' }) 119 | callback() 120 | } 121 | db.open(function () { 122 | db.approximateSize({ foo: 'bar' }, { beep: 'boop' }, function (err) { 123 | t.error(err) 124 | }) 125 | }) 126 | }) 127 | } 128 | 129 | module.exports.approximateSize = function (test) { 130 | test('test approximateSize()', function (t) { 131 | var data = Array.apply(null, Array(10000)).map(function () { 132 | return 'aaaaaaaaaa' 133 | }).join('') 134 | 135 | var items = Array.apply(null, Array(10)).map(function (x, i) { 136 | return { type: 'put', key: 'foo' + i, value: data } 137 | }) 138 | 139 | db.batch(items, function (err) { 140 | t.error(err) 141 | 142 | // cycle open/close to ensure a pack to .sst 143 | 144 | db.close(function (err) { 145 | t.error(err) 146 | 147 | db.open(function (err) { 148 | t.error(err) 149 | 150 | db.approximateSize('!', '~', function (err, size) { 151 | t.error(err) 152 | 153 | t.equal(typeof size, 'number') 154 | t.ok( 155 | testCommon.checkBatchSize(items, size), 156 | 'size reports a reasonable amount (' + size + ')' 157 | ) 158 | 159 | db.close(function (err) { 160 | t.error(err) 161 | t.end() 162 | }) 163 | }) 164 | }) 165 | }) 166 | }) 167 | }) 168 | } 169 | 170 | module.exports.tearDown = function (test, testCommon) { 171 | test('tearDown', function (t) { 172 | db.close(testCommon.tearDown.bind(null, t)) 173 | }) 174 | } 175 | 176 | module.exports.all = function (leveldown, test, testCommon) { 177 | module.exports.setUp(leveldown, test, testCommon) 178 | module.exports.args(test) 179 | module.exports.approximateSize(test) 180 | module.exports.tearDown(test, testCommon) 181 | } 182 | -------------------------------------------------------------------------------- /test/abstract-leveldown/index.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const test = require('tape') 4 | const common = require('../_common') 5 | 6 | // TODO: use a larger buffer 7 | const buffer = new Buffer('00ff61626301feffff00000000ffff', 'hex') 8 | 9 | // verify compatibility w/ leveldown api 10 | const suites = { 11 | leveldown: require('abstract-leveldown/abstract/leveldown-test').args, 12 | open: require('abstract-leveldown/abstract/open-test').all, 13 | close: require('abstract-leveldown/abstract/close-test').close, 14 | put: require('abstract-leveldown/abstract/put-test').all, 15 | del: require('abstract-leveldown/abstract/del-test').all, 16 | get: require('abstract-leveldown/abstract/get-test').all, 17 | put_get_del: require('abstract-leveldown/abstract/put-get-del-test').all, 18 | iterator: require('./iterator-test').all, 19 | ranges: require('abstract-leveldown/abstract/ranges-test').all, 20 | batch: 
require('abstract-leveldown/abstract/batch-test').all, 21 | chainedBatch: require('abstract-leveldown/abstract/chained-batch-test').all, 22 | approximateSize: require('./approximate-size-test').all 23 | } 24 | 25 | Object.keys(suites).forEach((name) => { 26 | suites[name](common.db, test, common, buffer) 27 | }) 28 | -------------------------------------------------------------------------------- /test/abstract-leveldown/iterator-test.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | module.exports = require('abstract-leveldown/abstract/iterator-test') 4 | 5 | // NB: updates snapshot test to wait for snapshot acquisition before write 6 | module.exports.snapshot = function (leveldown, test, testCommon) { 7 | var db 8 | 9 | test('setUp #3', function (t) { 10 | db = leveldown(testCommon.location()) 11 | db.open(function () { 12 | db.put('foobatch1', 'bar1', t.end.bind(t)) 13 | }) 14 | }) 15 | 16 | test('iterator create snapshot correctly', function (t) { 17 | var iterator = db.iterator() 18 | 19 | // NB: this is pretty shite -- for now just a hack for testing 20 | setTimeout(function () { 21 | db.del('foobatch1', function () { 22 | iterator.next(function (err, key, value) { 23 | t.error(err) 24 | t.ok(key, 'got a key') 25 | t.equal(key.toString(), 'foobatch1', 'correct key') 26 | t.equal(value.toString(), 'bar1', 'correct value') 27 | iterator.end(function (err) { 28 | if (err) return t.end(err) 29 | db.close(t.end) 30 | }) 31 | }) 32 | }) 33 | }, 100) 34 | }) 35 | } 36 | -------------------------------------------------------------------------------- /test/encoding.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const after = require('after') 4 | const levelup = require('levelup') 5 | const test = require('tape') 6 | const common = require('./_common') 7 | const destroy = require('../').destroy 8 | 9 | test('utf8 keyEncoding, json valueEncoding', (t) => { 10 | t.test('bytea keys, bytea values', (t) => { 11 | const db = levelup(common.location(), { 12 | db: common.db, 13 | keyEncoding: 'utf8', 14 | valueEncoding: 'json' 15 | }) 16 | db._keyColumnType = 'text' 17 | testEncodings(db, t) 18 | }) 19 | 20 | t.test('text keys, jsonb values', (t) => { 21 | const db = levelup(common.location(), { 22 | db: common.db, 23 | keyEncoding: 'utf8', 24 | valueEncoding: 'json' 25 | }) 26 | db._keyColumnType = 'text' 27 | db._valueColumnType = 'jsonb' 28 | testEncodings(db, t) 29 | }) 30 | 31 | t.test('jsonb keys, json values', (t) => { 32 | const db = levelup(common.location(), { 33 | db: common.db, 34 | keyEncoding: 'utf8', 35 | valueEncoding: 'json' 36 | }) 37 | db._keyColumnType = 'jsonb' 38 | db._valueColumnType = 'json' 39 | testEncodings(db, t) 40 | }) 41 | }) 42 | 43 | const testEncodings = (db, t) => { 44 | t.test('initialize', (t) => { 45 | destroy(db.location, (err) => { 46 | if (err) return t.end(err) 47 | db.open(t.end) 48 | }) 49 | }) 50 | 51 | t.test('open', (t) => { 52 | db.open(t.end) 53 | }) 54 | 55 | t.test('put', (t) => { 56 | db.put('a', { str: 'foo', int: 123 }, function (err, result) { 57 | if (err) return t.end(err) 58 | t.ok(result == null, 'empty response') 59 | t.end() 60 | }) 61 | }) 62 | 63 | t.test('get', (t) => { 64 | db.get('a', function (err, result) { 65 | if (err) return t.end(err) 66 | t.deepEqual(result, { str: 'foo', int: 123 }) 67 | t.end() 68 | }) 69 | }) 70 | 71 | t.test('del', (t) => { 72 | db.del('a', function (err, result) { 73 | if (err) return 
t.end(err) 74 | db.get('a', function (err, result) { 75 | t.ok(err && err.notFound, 'not found') 76 | t.ok(result == null, 'empty response') 77 | t.end() 78 | }) 79 | }) 80 | }) 81 | 82 | const batch = [ 83 | { 84 | type: 'put', 85 | key: 'aa', 86 | value: { k: 'aa' } 87 | }, 88 | { 89 | type: 'put', 90 | key: 'ac', 91 | value: { k: 'ac' } 92 | }, 93 | { 94 | type: 'put', 95 | key: 'ab', 96 | value: { k: 'ab' } 97 | } 98 | ] 99 | 100 | const sorted = batch.slice().sort((a, b) => a.key < b.key ? -1 : (a.key > b.key ? 1 : 0)) 101 | 102 | t.test('array batch', (t) => { 103 | db.batch(batch, (err) => { 104 | if (err) return t.end(err) 105 | 106 | const done = after(batch.length, t.end) 107 | 108 | db.get('aa', (err, value) => { 109 | t.deepEqual(value, sorted[0].value, 'aa') 110 | done(err) 111 | }) 112 | db.get('ab', (err, value) => { 113 | t.deepEqual(value, sorted[1].value, 'ab') 114 | done(err) 115 | }) 116 | db.get('ac', (err, value) => { 117 | t.deepEqual(value, sorted[2].value, 'ac') 118 | done(err) 119 | }) 120 | }) 121 | }) 122 | 123 | t.test('read stream', (t) => { 124 | const data = [] 125 | db.createReadStream() 126 | .on('error', t.end) 127 | .on('data', (d) => data.push(d)) 128 | .on('end', () => { 129 | // add put op type to compare to sorted batch 130 | data.forEach((d) => { d.type = 'put' }) 131 | t.deepEqual(data, sorted, 'all records in order') 132 | t.end() 133 | }) 134 | }) 135 | 136 | t.test('non-object values', (t) => { 137 | t.test('nullish values', (t) => { 138 | t.test('null value', (t) => { 139 | const k = 'null' 140 | db.put(k, null, (err) => { 141 | if (err) return t.end(err) 142 | db.get(k, (err, value) => { 143 | if (err) return t.end(err) 144 | t.equal(value, null, 'correct value') 145 | t.end() 146 | }) 147 | }) 148 | }) 149 | 150 | t.test('NaN value', (t) => { 151 | const k = 'NaN' 152 | db.put(k, NaN, (err) => { 153 | if (err) return t.end(err) 154 | db.get(k, (err, value) => { 155 | if (err) return t.end(err) 156 | t.equal(value, null, 'correct value') 157 | t.end() 158 | }) 159 | }) 160 | }) 161 | 162 | t.test('Invalid Date value', (t) => { 163 | const k = 'Invalid Date' 164 | db.put(k, new Date('xxxx'), (err) => { 165 | if (err) return t.end(err) 166 | db.get(k, (err, value) => { 167 | if (err) return t.end(err) 168 | t.equal(value, null, 'correct value') 169 | t.end() 170 | }) 171 | }) 172 | }) 173 | }) 174 | 175 | t.test('boolean values', (t) => { 176 | t.test('false value', (t) => { 177 | const k = 'false' 178 | db.put(k, false, (err) => { 179 | if (err) return t.end(err) 180 | db.get(k, (err, value) => { 181 | if (err) return t.end(err) 182 | t.equal(value, false, 'correct value') 183 | t.end() 184 | }) 185 | }) 186 | }) 187 | 188 | t.test('true value', (t) => { 189 | const k = 'true' 190 | db.put(k, true, (err) => { 191 | if (err) return t.end(err) 192 | db.get(k, (err, value) => { 193 | if (err) return t.end(err) 194 | t.equal(value, true, 'correct value') 195 | t.end() 196 | }) 197 | }) 198 | }) 199 | }) 200 | 201 | t.test('numeric values', (t) => { 202 | t.test('negative zero', (t) => { 203 | const k = 'zero' 204 | db.put(k, -0, (err) => { 205 | if (err) return t.end(err) 206 | db.get(k, (err, value) => { 207 | if (err) return t.end(err) 208 | t.equal(value, 0, 'correct value') 209 | t.end() 210 | }) 211 | }) 212 | }) 213 | 214 | t.test('integer', (t) => { 215 | const k = 'integer' 216 | const v = 21 217 | db.put(k, v, (err) => { 218 | if (err) return t.end(err) 219 | db.get(k, (err, value) => { 220 | if (err) return t.end(err) 221 | t.equal(value, 
v, 'correct value') 222 | t.end() 223 | }) 224 | }) 225 | }) 226 | 227 | t.test('float', (t) => { 228 | const k = 'float' 229 | const v = -29.3123433726 230 | db.put(k, v, (err) => { 231 | if (err) return t.end(err) 232 | db.get(k, (err, value) => { 233 | if (err) return t.end(err) 234 | t.equal(value, v, 'correct value') 235 | t.end() 236 | }) 237 | }) 238 | }) 239 | 240 | t.test('exponential', (t) => { 241 | const k = 'exponential' 242 | const v = 4.56e-123 243 | db.put(k, v, (err) => { 244 | if (err) return t.end(err) 245 | db.get(k, (err, value) => { 246 | if (err) return t.end(err) 247 | t.equal(value, v, 'correct value') 248 | t.end() 249 | }) 250 | }) 251 | }) 252 | }) 253 | 254 | t.test('date values', (t) => { 255 | t.test('y2k', (t) => { 256 | const k = 'y2k' 257 | const v = new Date('2000-01-01Z') 258 | db.put(k, v, (err) => { 259 | if (err) return t.end(err) 260 | db.get(k, (err, value) => { 261 | if (err) return t.end(err) 262 | t.equal(value, '2000-01-01T00:00:00.000Z', 'correct value') 263 | t.end() 264 | }) 265 | }) 266 | }) 267 | }) 268 | 269 | t.test('string values', (t) => { 270 | t.test('empty string', (t) => { 271 | const k = 'empty string' 272 | db.put(k, '', (err) => { 273 | if (err) return t.end(err) 274 | db.get(k, (err, value) => { 275 | if (err) return t.end(err) 276 | t.equal(value, '', 'correct value') 277 | t.end() 278 | }) 279 | }) 280 | }) 281 | 282 | t.test('long string', (t) => { 283 | const k = 'long string' 284 | const v = Array.apply(null, Array(100000)).map(() => 'Hello "there".\r\n').join('') 285 | db.put(k, v, (err) => { 286 | if (err) return t.end(err) 287 | db.get(k, (err, value) => { 288 | if (err) return t.end(err) 289 | t.equal(value, v, 'correct value') 290 | t.end() 291 | }) 292 | }) 293 | }) 294 | 295 | t.test('surrogate pair', (t) => { 296 | const k = 'surrogate pair' 297 | const v = 'pair \xc0\x80' 298 | db.put(k, v, (err) => { 299 | if (err) return t.end(err) 300 | db.get(k, (err, value) => { 301 | if (err) return t.end(err) 302 | t.equal(value, v, 'correct value') 303 | t.end() 304 | }) 305 | }) 306 | }) 307 | 308 | t.test('string with control chars', (t) => { 309 | const k = 'null char' 310 | const v = 'chars: \x01\x02\xff\xfe\x80\x7f\uffff\ufffe' 311 | db.put(k, v, (err) => { 312 | if (err) return t.end(err) 313 | db.get(k, (err, value) => { 314 | if (err) return t.end(err) 315 | t.equal(value, v, 'correct value') 316 | t.end() 317 | }) 318 | }) 319 | }) 320 | 321 | t.test('string with null chars', (t) => { 322 | const k = 'null char' 323 | const v = 'chars: \x01\x00\x02\x00\x01\x0101\x00\u0000\x7e\x01\x7d\xfe' 324 | db.put(k, v, (err) => { 325 | if (err) return t.end(err) 326 | db.get(k, (err, value) => { 327 | if (err) return t.end(err) 328 | t.equal(value, v, 'correct value') 329 | t.end() 330 | }) 331 | }) 332 | }) 333 | }) 334 | 335 | t.test('array values', (t) => { 336 | t.test('empty array', (t) => { 337 | const k = 'empty array' 338 | db.put(k, [], (err) => { 339 | if (err) return t.end(err) 340 | db.get(k, (err, value) => { 341 | if (err) return t.end(err) 342 | t.deepEqual(value, [], 'correct value') 343 | t.end() 344 | }) 345 | }) 346 | }) 347 | 348 | t.test('long array', (t) => { 349 | const k = 'long array' 350 | const v = Array.apply(null, Array(100000)).map(() => 'Hello there.\r\n') 351 | db.put(k, v, (err) => { 352 | if (err) return t.end(err) 353 | db.get(k, (err, value) => { 354 | if (err) return t.end(err) 355 | t.deepEqual(value, v, 'correct value') 356 | t.end() 357 | }) 358 | }) 359 | }) 360 | 361 | t.test('mixed 
array', (t) => { 362 | const k = 'mixed array' 363 | const v = ['foo', 123, [{}, { foo: { 0: null } }]] 364 | db.put(k, v, (err) => { 365 | if (err) return t.end(err) 366 | db.get(k, (err, value) => { 367 | if (err) return t.end(err) 368 | t.deepEqual(value, v, 'correct value') 369 | t.end() 370 | }) 371 | }) 372 | }) 373 | }) 374 | 375 | t.test('encoding failures', (t) => { 376 | t.skip('undefined value', (t) => { 377 | const k = 'undefined' 378 | db.put(k, undefined, (err, result) => { 379 | t.ok(err, 'fails to save') 380 | db.get(k, (err, record) => { 381 | t.ok(err && err.notFound, 'not found') 382 | t.ok(record == null, 'no value returned') 383 | t.end() 384 | }) 385 | }) 386 | }) 387 | 388 | t.test('circular reference', (t) => { 389 | const k = 'circular' 390 | const v = { child: {} } 391 | v.child.parent = v 392 | 393 | t.throws(() => db.put(k, v, () => {})) 394 | t.end() 395 | }) 396 | }) 397 | }) 398 | 399 | t.test('abnormal keys', (t) => { 400 | t.test('null byte', (t) => { 401 | const k = 'null\x00key' 402 | db.put(k, 'val', (err) => { 403 | if (err) return t.end(err) 404 | db.get(k, (err, value) => { 405 | if (err) return t.end(err) 406 | t.equal(value, 'val', 'correct value') 407 | t.end() 408 | }) 409 | }) 410 | }) 411 | 412 | t.test('control char (\\x01)', (t) => { 413 | const k = 'weird\x01key' 414 | db.put(k, 'val', (err) => { 415 | if (err) return t.end(err) 416 | db.get(k, (err, value) => { 417 | if (err) return t.end(err) 418 | t.equal(value, 'val', 'correct value') 419 | t.end() 420 | }) 421 | }) 422 | }) 423 | 424 | t.test('control char (\\xff)', (t) => { 425 | const k = 'weird\xffkey' 426 | db.put(k, 'val', (err) => { 427 | if (err) return t.end(err) 428 | db.get(k, (err, value) => { 429 | if (err) return t.end(err) 430 | t.equal(value, 'val', 'correct value') 431 | t.end() 432 | }) 433 | }) 434 | }) 435 | 436 | t.test('control char (\\uffff)', (t) => { 437 | const k = 'weird\uffffkey' 438 | db.put(k, 'val', (err) => { 439 | if (err) return t.end(err) 440 | db.get(k, (err, value) => { 441 | if (err) return t.end(err) 442 | t.equal(value, 'val', 'correct value') 443 | t.end() 444 | }) 445 | }) 446 | }) 447 | }) 448 | 449 | t.test('approximate size', (t) => { 450 | db.db.approximateSize('a', 'ac', (err, size1) => { 451 | if (err) return t.end(err) 452 | 453 | t.ok(size1 > 0, 'positive') 454 | t.equal(parseInt(size1), size1, 'integer') 455 | 456 | db.db.approximateSize('a', 'ab', (err, size2) => { 457 | if (err) return t.end(err) 458 | 459 | t.ok(size2 < size1, 'smaller than superset size') 460 | t.ok(size2 > 0, 'positive') 461 | t.equal(parseInt(size2), size2, 'integer') 462 | t.ok(size1 > size2) 463 | t.end() 464 | }) 465 | }) 466 | }) 467 | 468 | t.test('idempotent close', (t) => { 469 | db.close((err) => { 470 | if (err) return t.end(err) 471 | db.close(t.end) 472 | }) 473 | }) 474 | 475 | t.end() 476 | } 477 | -------------------------------------------------------------------------------- /test/index.js: -------------------------------------------------------------------------------- 1 | var major = process.version.slice(1).split('.').shift() 2 | 3 | // transpile with babel to tests in older node versions 4 | if (major < 4) require('babel-register') 5 | 6 | require('./pgdown') 7 | require('./encoding') 8 | require('./abstract-leveldown') 9 | -------------------------------------------------------------------------------- /test/pgdown.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const test = 
require('tape') 4 | const common = require('./_common') 5 | const PgDOWN = common.db 6 | 7 | test('constructor', (t) => { 8 | t.test('defaults', (t) => { 9 | const db = PgDOWN(common.location()) 10 | const config = db._config 11 | t.equal(config.database, common.PG_DEFAULTS.database, 'uses default database') 12 | t.equal(config._table.indexOf(common.escape.ident(common.lastLocation())), 0, 'uses test location') 13 | t.equal(config._schema, common.escape.ident(common.PG_DEFAULTS.schema), 0, 'uses default schema') 14 | t.equal(config._relation.indexOf(config._schema), 0, 'rel name begins with schema') 15 | t.ok(config._relation.indexOf(config._table) >= 0, 'rel name includes table') 16 | t.end() 17 | }) 18 | }) 19 | 20 | test('open', (t) => { 21 | t.test('empty location', (t) => { 22 | t.throws(() => PgDOWN(), 'location required') 23 | t.throws(() => new PgDOWN(), 'location required') 24 | t.throws(() => new PgDOWN(''), 'location required') 25 | t.end() 26 | }) 27 | 28 | t.test('throw on malformed db name', (t) => { 29 | const database = 'pg_invalid_db__' 30 | const loc = '/' + database + '/invalid_db_table' 31 | const db = PgDOWN(loc) 32 | t.equal(db._config.database, database, 'db name set') 33 | t.equal(db.location.indexOf(loc), 0, 'location set') 34 | 35 | db.open((err) => { 36 | t.ok(err, 'error on open') 37 | db.close(t.end) 38 | }) 39 | }) 40 | 41 | t.test('error on illegal table name (null byte)', (t) => { 42 | const db = PgDOWN('illegal_\x00_table') 43 | db.open((err) => { 44 | t.ok(err, 'error on open') 45 | db.close(t.end) 46 | }) 47 | }) 48 | 49 | t.test('table path', (t) => { 50 | const db = PgDOWN(common.location('foo/bar/baz')) 51 | db.open((err) => { 52 | if (err) return t.end(err) 53 | db.close(t.end) 54 | }) 55 | }) 56 | 57 | t.test('weird table name (0x01 byte)', (t) => { 58 | const db = PgDOWN(common.location('weird_\x01_table')) 59 | db.open((err) => { 60 | if (err) return t.end(err) 61 | db.close(t.end) 62 | }) 63 | }) 64 | 65 | t.test('weird table name (0xff byte)', (t) => { 66 | const db = PgDOWN(common.location('weird_\xff_table')) 67 | db.open((err) => { 68 | if (err) return t.end(err) 69 | db.close(t.end) 70 | }) 71 | }) 72 | 73 | t.test('weird table name (empty quoted string)', (t) => { 74 | const db = PgDOWN(common.location('""')) 75 | db.open((err) => { 76 | if (err) return t.end(err) 77 | db.close(t.end) 78 | }) 79 | }) 80 | 81 | t.test('error for create if missing', (t) => { 82 | const loc = common.location() 83 | const opts = { createIfMissing: false } 84 | 85 | const db1 = PgDOWN(loc) 86 | db1.open(opts, (err) => { 87 | t.equal(db1.location, loc, 'location set') 88 | t.ok(err, 'error on open') 89 | 90 | const db2 = PgDOWN(loc) 91 | db2.open(opts, (err) => { 92 | t.equal(db2.location, loc, 'location set') 93 | t.ok(err, 'error on open') 94 | 95 | db1.close((err1) => { 96 | db2.close((err2) => { 97 | t.end(err1 || err2) 98 | }) 99 | }) 100 | }) 101 | }) 102 | }) 103 | 104 | t.test('idempotent close', (t) => { 105 | const db = PgDOWN(common.location()) 106 | db.open((err) => { 107 | if (err) return t.end(err) 108 | db.close((err) => { 109 | if (err) return t.end(err) 110 | db.close(t.end) 111 | }) 112 | }) 113 | }) 114 | }) 115 | -------------------------------------------------------------------------------- /util.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const util = exports 4 | 5 | const AbstractLevelDOWN = require('abstract-leveldown/abstract-leveldown') 6 | const mts = 
require('monotonic-timestamp') 7 | const pg = require('pg') 8 | const Cursor = require('pg-cursor') 9 | const Postgres = require('any-db-postgres') 10 | const ConnectionPool = require('any-db-pool') 11 | const beginTransaction = require('any-db-transaction') 12 | const errors = require('level-errors') 13 | 14 | util.escape = require('pg-format') 15 | 16 | // // override pg serialization 17 | // const pgUtils = require('pg/lib/utils') 18 | // const __prepareValue = pgUtils.prepareValue 19 | // pgUtils.prepareValue = function (value) { 20 | // const result = __prepareValue(value) 21 | // console.warn('PREPARED VALUE', value, result) 22 | // return result 23 | // } 24 | 25 | util.isBuffer = AbstractLevelDOWN.prototype._isBuffer 26 | 27 | // encode null bytes for text/jsonb types 28 | // TODO: this is absolute shit... but ought to get the job done for now 29 | // hard to believe postgres doesn't have *any* legit workaround for null bytes 30 | 31 | util.encodeText = (text) => ( 32 | text.replace(/\x01/g, '\x01\x7e').replace(/\x00/g, '\x01\x7d') 33 | ) 34 | 35 | util.encodeJson = (text) => ( 36 | text.replace(/\\u0001/g, '\\u0001\x7e').replace(/\\u0000/g, '\\u0001\x7d') 37 | ) 38 | 39 | util.decodeText = (text) => ( 40 | text.replace(/\x01\x7d/g, '\x00').replace(/\x01\x7e/g, '\x01') 41 | ) 42 | 43 | util.decodeJson = (text) => ( 44 | text.replace(/\\u0001\x7d/g, '\\u0000').replace(/\\u0001\x7e/g, '\\u0001') 45 | ) 46 | 47 | util.serialize = (type, source) => { 48 | const fn = util.serialize[type] 49 | if (!fn) throw new Error('unable to serialize unknown data type:' + type) 50 | return fn(source) 51 | } 52 | 53 | util.serialize.bytea = (source) => ( 54 | util.isBuffer(source) ? source : new Buffer(source == null ? '' : String(source), 'utf8') 55 | ) 56 | 57 | util.serialize.text = (source) => util.encodeText( 58 | util.isBuffer(source) ? source.toString('utf8') : source == null ? '' : String(source) 59 | ) 60 | 61 | util.serialize.json = util.serialize.jsonb = (source) => util.encodeJson( 62 | util.isBuffer(source) ? source.toString('utf8') : source == null ? 'null' : String(source) 63 | ) 64 | 65 | util.deserialize = (type, source, asBuffer) => { 66 | const fn = util.deserialize[type] 67 | if (!fn) throw new Error('unable to deserialize unknown data type:' + type) 68 | return fn(source, asBuffer) 69 | } 70 | 71 | util.deserialize.bytea = (source, asBuffer) => { 72 | return asBuffer ? source : String(source || '') 73 | } 74 | 75 | util.deserialize.text = (source, asBuffer) => util.decodeText( 76 | asBuffer ? source.toString('utf8') : source == null ? '' : String(source) 77 | ) 78 | 79 | util.deserialize.json = util.deserialize.jsonb = (source, asBuffer) => util.decodeJson( 80 | JSON.stringify(asBuffer ? source.toString('utf8') : source) 81 | ) 82 | 83 | util.comparators = { 84 | eq: () => '=', 85 | ne: () => '<>', 86 | lt: () => '<', 87 | lte: () => '<=', 88 | min: () => '<=', 89 | gt: () => '>', 90 | gte: () => '>=', 91 | max: () => '>=', 92 | start: (range) => range.reverse ? '<=' : '>=', 93 | end: (range) => range.reverse ? 
'>=' : '<='
94 | }
95 |
96 | util.NotFoundError = errors.NotFoundError
97 |
98 | util.createPool = (config) => {
99 |   config.name = mts()
100 |   const pool = new ConnectionPool(Postgres, config, util.POOL_CONFIG)
101 |   return pool
102 | }
103 |
104 | util.destroyPool = (pool, cb) => {
105 |   pool.close(cb)
106 | }
107 |
108 | util.createTransaction = (pool, cb) => {
109 |   const tx = beginTransaction(pool)
110 |   if (cb) tx.once('error', cb).once('commit:complete', cb)
111 |   return tx
112 | }
113 |
114 | util.createCursor = (db, statement) => {
115 |   const client = Postgres.createConnection(db._config)
116 |   const cursor = client.query(new Cursor(statement.text, statement.values))
117 |
118 |   client.once('error', (err) => {
119 |     console.warn('CURSOR ERR:', err)
120 |     client.close()
121 |   })
122 |
123 |   cursor.close = (cb) => {
124 |     // NB: dirty hack to test the pool hanging issues... not working anyway...
125 |     if (cursor.connection) {
126 |       cursor.connection.close({ type: 'P' })
127 |       cursor.connection.sync()
128 |       cursor.state = 'done'
129 |       cursor.connection.once('closeComplete', () => {
130 |         client.end()
131 |         client.removeAllListeners()
132 |         cb && cb()
133 |       })
134 |     } else {
135 |       client.end()
136 |       client.removeAllListeners()
137 |       cb && process.nextTick(cb)
138 |     }
139 |   }
140 |
141 |   return cursor
142 | }
143 |
144 | // set up pg connection defaults with standard PG* env var overrides
145 | const PG_DEFAULTS = util.PG_DEFAULTS = {}
146 |
147 | PG_DEFAULTS.database = process.env.PGDATABASE || 'postgres'
148 | PG_DEFAULTS.host = process.env.PGHOSTADDR || pg.defaults.host
149 | PG_DEFAULTS.port = Number(process.env.PGPORT) || pg.defaults.port
150 | PG_DEFAULTS.user = process.env.PGUSER || pg.defaults.user
151 | PG_DEFAULTS.password = process.env.PGPASSWORD || pg.defaults.password
152 | PG_DEFAULTS.application_name = process.env.PGAPPNAME
153 | PG_DEFAULTS.schema = 'pgdown'
154 | PG_DEFAULTS.idleTimeout = pg.defaults.idleTimeoutMillis
155 | PG_DEFAULTS.reapInterval = pg.defaults.reapIntervalMillis
156 |
157 | util.POOL_CONFIG = {
158 |   min: 0,
159 |   max: 10,
160 |   reset: function (conn, done) {
161 |     conn.query('ROLLBACK', done)
162 |   }
163 | }
164 |
165 | util.parseLocation = (location) => {
166 |   const config = {}
167 |
168 |   // copy over pg defaults
169 |   for (var key in PG_DEFAULTS) {
170 |     if (PG_DEFAULTS[key] !== undefined) config[key] = PG_DEFAULTS[key]
171 |   }
172 |
173 |   // always set fallback application name
174 |   config.fallback_application_name = 'pgdown'
175 |
176 |   // TODO: complete postgres:// uri parsing
177 |   const parts = location.split('/')
178 |
179 |   // location beginning with slash specifies database name
180 |   if (location[0] === '/') {
181 |     parts.shift()
182 |     config.database = parts.shift() || config.database
183 |   }
184 |
185 |   // remaining components of location specify sublevel path/table name
186 |   const tableName = parts.join('/')
187 |   if (!tableName) throw new Error('table name required')
188 |
189 |   // TODO: refactor this crap away
190 |   const table = config._table = util.escape.ident(tableName)
191 |   const schema = config._schema = util.escape.ident(config.schema)
192 |
193 |   // set relation name (combination of schema and table name)
194 |   config._relation = schema + '.' + table
195 |
196 |   return config
197 | }
198 |
199 | // TODO: create/drop database, e.g.:
200 | // https://github.com/olalonde/pgtools/blob/master/index.js
201 |
202 | util.dropTable = (location, cb) => {
203 |   // TODO: should try to use shared pool for location if one exists
204 |   // probably merits a createConnection helper
205 |   const config = util.parseLocation(location)
206 |   const client = Postgres.createConnection(config)
207 |   client.once('error', (err) => destroyClient(err, client, cb))
208 |   client.query(`DROP TABLE IF EXISTS ${config._relation}`, (err) => {
209 |     destroyClient(err, client, cb)
210 |   })
211 | }
212 |
213 | const destroyClient = (err, client, cb) => {
214 |   if (err) return cb(err)
215 |   client && client.end()
216 |   process.nextTick(cb)
217 | }
--------------------------------------------------------------------------------