├── .gitignore ├── .travis.yml ├── LICENSE ├── README.md ├── bin └── converter.js ├── clean.js ├── main.js ├── package-lock.json ├── package.json ├── reader ├── mongo.js ├── postgres.js └── redis.js ├── renovate.json ├── session ├── mongo.js ├── postgres.js └── redis.js └── writer ├── mongo.js ├── postgres.js ├── postgres ├── copy.js ├── session.js ├── transaction.js └── transform.js └── redis.js /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: node_js 2 | sudo: false 3 | dist: trusty 4 | node_js: 5 | - lts/* 6 | cache: 7 | directories: 8 | - node_modules 9 | addons: 10 | postgresql: 9.5 11 | branches: 12 | only: 13 | - master 14 | before_install: 15 | - sh -ec "if [ '$IN' = 'postgres' ]; then psql -c 'create database input;' -U postgres; fi" 16 | - sh -ec "if [ '$OUT' = 'postgres' ]; then psql -c 'create database output;' -U postgres; fi" 17 | - git clone --depth=1 https://github.com/NodeBB/NodeBB.git nodebb 18 | - (cd nodebb && cp install/package.json package.json) 19 | - (cd nodebb && npm install --production) 20 | - sh -ec "if [ '$IN' = 'redis' ]; then cd nodebb && ./nodebb setup '{\"url\":\"http://127.0.0.1:4567\",\"secret\":\"abcdef\",\"database\":\"redis\",\"redis:host\":\"127.0.0.1\",\"redis:port\":6379,\"redis:password\":\"\",\"redis:database\":0,\"admin:username\":\"admin\",\"admin:email\":\"test@example.org\",\"admin:password\":\"abcdef\",\"admin:password:confirm\":\"abcdef\"}'; fi" 21 | - sh -ec "if [ '$IN' = 'mongo' ]; then cd nodebb && ./nodebb setup '{\"url\":\"http://127.0.0.1:4567\",\"secret\":\"abcdef\",\"database\":\"mongo\",\"mongo:host\":\"127.0.0.1\",\"mongo:port\":27017,\"mongo:username\":\"\",\"mongo:password\":\"\",\"mongo:database\":\"input\",\"admin:username\":\"admin\",\"admin:email\":\"test@example.org\",\"admin:password\":\"abcdef\",\"admin:password:confirm\":\"abcdef\"}'; fi" 22 | - sh -ec "if [ '$IN' = 'postgres' ]; then cd nodebb && ./nodebb setup '{\"url\":\"http://127.0.0.1:4567\",\"secret\":\"abcdef\",\"database\":\"postgres\",\"postgres:host\":\"127.0.0.1\",\"postgres:port\":5432,\"postgres:username\":\"postgres\",\"postgres:database\":\"input\",\"admin:username\":\"admin\",\"admin:email\":\"test@example.org\",\"admin:password\":\"abcdef\",\"admin:password:confirm\":\"abcdef\"}'; fi" 23 | - (cd nodebb && ./nodebb start) 24 | script: 25 | - node bin/converter.js --type $IN --outputType $OUT --input $IN_CONN --output $OUT_CONN --sessionType $IN --sessionInput $IN_CONN 26 | matrix: 27 | include: 28 | - env: IN=redis OUT=redis IN_CONN="redis://127.0.0.1:6379/0" OUT_CONN="redis://127.0.0.1:6379/1" 29 | services: 30 | - redis-server 31 | - env: IN=mongo OUT=mongo IN_CONN="mongodb://127.0.0.1:27017/input" OUT_CONN="mongodb://127.0.0.1:27017/output" 32 | services: 33 | - mongodb 34 | - env: IN=postgres OUT=postgres IN_CONN="postgresql://127.0.0.1:5432/input" OUT_CONN="postgresql://127.0.0.1:5432/output" 35 | services: 36 | - postgresql 37 | - env: IN=redis OUT=mongo IN_CONN="redis://127.0.0.1:6379/0" OUT_CONN="mongodb://127.0.0.1:27017/output" 38 | services: 39 | - redis-server 40 | - mongodb 41 | - env: IN=redis OUT=postgres IN_CONN="redis://127.0.0.1:6379/0" OUT_CONN="postgresql://127.0.0.1:5432/output" 42 | services: 43 | - redis-server 44 | - postgresql 45 | - env: IN=mongo OUT=redis IN_CONN="mongodb://127.0.0.1:27017/input" OUT_CONN="redis://127.0.0.1:6379/1" 46 | services: 47 | - mongodb 48 | - redis-server 49 | - env: IN=mongo OUT=postgres IN_CONN="mongodb://127.0.0.1:27017/input" OUT_CONN="postgresql://127.0.0.1:5432/output" 50 | services: 51 | - mongodb 52 | - postgresql 53 | - env: IN=postgres OUT=redis IN_CONN="postgresql://127.0.0.1:5432/input" OUT_CONN="redis://127.0.0.1:6379/1" 54 | services: 55 | - postgresql 56 | - redis-server 57 | - env: IN=postgres OUT=mongo IN_CONN="postgresql://127.0.0.1:5432/input" OUT_CONN="mongodb://127.0.0.1:27017/output" 58 | services: 59 | - postgresql 60 | - mongodb 61 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Ben Lubar 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | nodebb-postgres-converter 2 | ========================= 3 | 4 | Converts an existing NodeBB NoSQL database to a relational PostgreSQL database. 5 | 6 | See also: 7 | 8 | Supported database types 9 | ======================== 10 | 11 | - **Redis** 2.8.9+ 12 | - **MongoDB** 3.2+ 13 | - **PostgreSQL** 9.5+ 14 | 15 | Instructions 16 | ============ 17 | 18 | Download nodebb-postgres-converter, either by cloning this repository and 19 | running `npm install` or by installing globally with the command 20 | `npm install -g @ben_lubar/nodebb-postgres-converter` 21 | 22 | Make a backup of your NodeBB database and decide whether you'd prefer downtime 23 | or the loss of data more recent than your backup. In the future, there will be 24 | a plugin that will record changes made to the live database and then play them 25 | back on the converted database. 26 | 27 | Set up your destination database. This can be any of the supported database 28 | types listed above. If you decided to keep the forum running during this 29 | process, set up another copy of your source database(s) from the backup. 30 | 31 | Database connection URLs 32 | ------------------------ 33 | 34 | Database connection URLs follow the same format no matter which database you're 35 | using: `type://username:password@ip:port/database` 36 | 37 | - **type**: `redis`, `mongodb`, or `postgresql` 38 | - **username:password@**: The username and password for the source/destination 39 | databases. If you're doing this process on a server where the database cannot 40 | be accessed externally and you don't have a username or password set up for 41 | the database, you can skip the `:password` part or the entire authentication 42 | section of the URL. 43 | - **ip**: The IP address or hostname of the database server. 44 | - **:port**: The port number the database server is listening on. This can be 45 | skipped if you are using the default port for that database type: 46 | - **Redis** 6379 47 | - **MongoDB** 27017 48 | - **PostgreSQL** 5432 49 | - **database**: The database name (or number for Redis). Common examples 50 | include `0` for Redis and `nodebb` for MongoDB and PostgreSQL. For a 51 | PostgreSQL destination, the database must already be created. 52 | 53 | PostgreSQL performance tips 54 | --------------------------- 55 | 56 | Disabling certain safety features in a PostgreSQL destination database will 57 | improve performance. Just remember to turn them back on before going live. 58 | 59 | Try running these commands: 60 | 61 | ``` 62 | ALTER SYSTEM SET wal_level = 'minimal'; 63 | ALTER SYSTEM SET archive_mode = 'off'; 64 | ALTER SYSTEM SET max_wal_senders = 0; 65 | ALTER SYSTEM SET autovacuum = 'off'; 66 | ``` 67 | 68 | Followed by restarting the PostgreSQL database before the conversion. After the 69 | conversion, run `ALTER SYSTEM RESET ALL` and restart the database again. 70 | 71 | Disabling these features would normally make your database vulnerable to data 72 | loss during a power failure or a crash, but if the conversion process is 73 | interrupted, you can just start over from the backup. 74 | 75 | Running the command 76 | ------------------- 77 | 78 | Now that you're ready, run the command: 79 | 80 | - The command starts with `bin/converter.js` if you cloned the repository or 81 | `nodebb-postgres-converter` if you installed globally. 82 | - Next, add the databases. You always need an output database (`--outputType`, 83 | `--output`), and you need the object database (`--type`, `--input`) or the 84 | session database (`--sessionType`, `--sessionInput`) or both. 85 | - The type is `redis`, `mongo`, or `postgres`, following NodeBB's database 86 | naming convention. 87 | - The input or output parameter is a database URL as described above. 88 | - If you're converting to PostgreSQL, adding an argument like `--memory 4GB` 89 | will make the conversion go faster by temporarily allowing PostgreSQL to use 90 | up to 4GB of memory for maintenance tasks. PostgreSQL allows 64MB of memory 91 | by default, so for a large forum, creating indexes and clustering the data 92 | will take a long time without increasing the memory limit. 93 | 94 | If you are running the command remotely (such as over SSH), I suggest using a 95 | program like `screen` to separate the conversion process from your SSH session, 96 | so that the conversion process can continue if your SSH connection is lost. 97 | 98 | A complete command looks like this: 99 | 100 | ```bash 101 | nodebb-postgres-converter \ 102 | --type mongo --input 'mongodb://localhost/nodebb-copy' \ 103 | --sessionType redis --input 'redis://localhost/0' \ 104 | --outputType postgres --output 'postgresql://localhost/nodebb' \ 105 | --memory 12GB 106 | ``` 107 | -------------------------------------------------------------------------------- /bin/converter.js: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env node 2 | 3 | 'use strict'; 4 | 5 | const commandLineArgs = require('command-line-args'); 6 | const commandLineUsage = require('command-line-usage'); 7 | const consoleStamp = require('console-stamp'); 8 | const main = require('..'); 9 | 10 | const optionDefinitions = [ 11 | { 12 | name: 'type', 13 | alias: 't', 14 | description: 'input database type (mongo/redis/postgres)', 15 | group: 'input' 16 | }, 17 | { 18 | name: 'input', 19 | alias: 'i', 20 | description: 'input database connection URL', 21 | group: 'input' 22 | }, 23 | { 24 | name: 'sessionType', 25 | alias: 'T', 26 | description: 'database containing sessions (mongo/redis/postgres)', 27 | group: 'input' 28 | }, 29 | { 30 | name: 'sessionInput', 31 | alias: 'I', 32 | description: 'database connection URL for the database containing sessions', 33 | group: 'input' 34 | }, 35 | { 36 | name: 'outputType', 37 | alias: 'O', 38 | description: 'output database type (mongo/redis/postgres) (default: postgres)', 39 | defaultValue: 'postgres', 40 | group: 'output' 41 | }, 42 | { 43 | name: 'output', 44 | alias: 'o', 45 | description: 'output database connection URL', 46 | group: 'output' 47 | }, 48 | { 49 | name: 'concurrency', 50 | alias: 'j', 51 | type: Number, 52 | description: 'number of queries executed at a time (default: 10)', 53 | defaultValue: 10, 54 | group: 'optional' 55 | }, 56 | { 57 | name: 'memory', 58 | alias: 'm', 59 | description: 'amount of memory PostgreSQL should use for maintenance tasks', 60 | defaultValue: '64MB', 61 | group: 'optional' 62 | } 63 | ]; 64 | 65 | const usage = [ 66 | { 67 | header: 'Source Database', 68 | optionList: optionDefinitions, 69 | group: ['input'] 70 | }, 71 | { 72 | header: 'Destination Database', 73 | optionList: optionDefinitions, 74 | group: ['output'] 75 | }, 76 | { 77 | header: 'Additional Parameters', 78 | optionList: optionDefinitions, 79 | group: ['optional'] 80 | } 81 | ]; 82 | 83 | var options; 84 | try { 85 | options = commandLineArgs(optionDefinitions)._all; 86 | } catch (ex) { 87 | if (ex.message !== 'Unknown option: --help') { 88 | console.error(ex.message); 89 | process.exit(1); 90 | return; 91 | } 92 | } 93 | 94 | if (!options || (options.type && !options.input) || (!options.type && options.input) || !options.output || options.concurrency < 1 || options.concurrency !== Math.floor(options.concurrency) || (options.sessionType && !options.sessionInput) || (!options.sessionType && options.sessionInput) || (!options.type && !options.sessionType)) { 95 | console.log(commandLineUsage(usage)); 96 | process.exit(1); 97 | return; 98 | } 99 | 100 | if (options.type && !/^[a-z]+$/.test(options.type)) { 101 | console.error('Invalid input database type.'); 102 | process.exit(1); 103 | return; 104 | } 105 | 106 | if (options.sessionType && !/^[a-z]+$/.test(options.sessionType)) { 107 | console.error('Invalid session database type.'); 108 | process.exit(1); 109 | return; 110 | } 111 | 112 | if (!/^[a-z]+$/.test(options.outputType)) { 113 | console.error('Invalid output database type.'); 114 | process.exit(1); 115 | return; 116 | } 117 | 118 | var reader; 119 | try { 120 | reader = options.type ? require('../reader/' + options.type + '.js') : null; 121 | } catch (ex) { 122 | console.error('Invalid input database type.'); 123 | process.exit(1); 124 | return; 125 | } 126 | 127 | var sessionReader; 128 | try { 129 | sessionReader = options.sessionType ? require('../session/' + options.sessionType + '.js') : null; 130 | } catch (ex) { 131 | console.error('Invalid session database type.'); 132 | process.exit(1); 133 | return; 134 | } 135 | 136 | var writer; 137 | try { 138 | writer = require('../writer/' + options.outputType + '.js'); 139 | } catch (ex) { 140 | console.error('Invalid output database type.'); 141 | process.exit(1); 142 | return; 143 | } 144 | 145 | consoleStamp(console, {pattern: 'yyyy-mm-dd HH:MM:ss.l'}); 146 | 147 | main(reader, options.input, writer, options.output, options.concurrency, options.memory, sessionReader, options.sessionInput).catch(function(ex) { 148 | console.error(ex.stack); 149 | process.exit(2); 150 | }); 151 | -------------------------------------------------------------------------------- /clean.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | /* eslint-disable no-control-regex */ 4 | 5 | module.exports.data = function (obj) { 6 | delete obj._id; 7 | if (!obj.hasOwnProperty('_key')) { 8 | return null; 9 | } 10 | var key = obj._key; 11 | 12 | // clean up importer bugs 13 | delete obj.undefined; 14 | if ((key.startsWith('chat:room:') && key.endsWith('uids') && !key.endsWith(':uids')) || (key.startsWith('uid:') && key.endsWith('sessionUUID:sessionId') && !key.endsWith(':sessionUUID:sessionId'))) { 15 | return null; 16 | } 17 | 18 | // remove importer cache on live objects 19 | if (!key.startsWith('_imported')) { 20 | for (var k of Object.keys(obj)) { 21 | if (k.startsWith('_imported')) { 22 | delete obj[k]; 23 | } 24 | } 25 | } 26 | 27 | return module.exports.value(obj); 28 | } 29 | 30 | module.exports.value = function (obj) { 31 | for (var k in obj) { 32 | if (!Object.prototype.hasOwnProperty.call(obj, k)) { 33 | continue; 34 | } 35 | var v = obj[k]; 36 | if (!v || v === true) { 37 | continue; 38 | } 39 | if (v instanceof Date) { 40 | obj[k] = v.getTime(); 41 | continue; 42 | } 43 | if (typeof v === 'number') { 44 | if (Number.isNaN(v)) { 45 | obj[k] = 'NaN'; 46 | } 47 | continue; 48 | } 49 | if (typeof v === 'string') { 50 | if (v.indexOf('\x00') !== -1) { 51 | obj[k] = v.replace(/\x00/g, 'x00'); 52 | } 53 | continue; 54 | } 55 | if (Array.isArray(v)) { 56 | obj[k] = v.map(function(a) { 57 | return String(a || '').replace(/\x00/g, 'x00'); 58 | }); 59 | continue; 60 | } 61 | 62 | // Object, possibly from a plugin 63 | obj[k] = module.exports.value(v); 64 | } 65 | 66 | return obj; 67 | } 68 | -------------------------------------------------------------------------------- /main.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const cleanData = require('./clean.js').data; 4 | 5 | async function copyDatabase(reader, input, output) { 6 | var total = 0; 7 | var copied = 0; 8 | var skipped = 0; 9 | 10 | await output(async function (write) { 11 | console.time('Copy objects'); 12 | 13 | try { 14 | console.log('Counting objects...'); 15 | 16 | await reader(input, async function(count) { 17 | total = count; 18 | console.log('Attempting to copy ' + total + ' objects...'); 19 | }, async function(data) { 20 | var values = cleanData(data); 21 | if (values) { 22 | await write(values); 23 | copied++; 24 | } else { 25 | skipped++; 26 | } 27 | if ((copied + skipped) % 100000 === 0) { 28 | console.log((' ' + Math.floor(100 * (copied + skipped) / total)).substr(-3) + '% - ' + copied + ' objects copied (' + skipped + ' skipped)'); 29 | } 30 | }); 31 | 32 | if ((copied + skipped) % 100000 !== 0) { 33 | console.log('100% - ' + copied + ' objects copied (' + skipped + ' skipped)'); 34 | } 35 | 36 | if (copied + skipped !== total) { 37 | console.warn('There were ' + (copied + skipped) + ' objects, but ' + total + ' were expected.'); 38 | } 39 | } finally { 40 | console.timeEnd('Copy objects'); 41 | } 42 | }); 43 | } 44 | 45 | async function main(reader, input, writer, output, concurrency, memory, sessionReader, sessionInput) { 46 | console.time('Full conversion'); 47 | try { 48 | await writer(output, concurrency, memory, async function (copyData, copySessions) { 49 | var data = input ? copyDatabase(reader, input, copyData) : Promise.resolve(); 50 | var sessions = sessionInput ? copySessions(sessionReader, sessionInput) : Promise.resolve(); 51 | await Promise.all([data, sessions]); 52 | }); 53 | } finally { 54 | console.timeEnd('Full conversion'); 55 | } 56 | } 57 | 58 | module.exports = main; 59 | -------------------------------------------------------------------------------- /package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@ben_lubar/nodebb-postgres-converter", 3 | "version": "0.1.1", 4 | "lockfileVersion": 1, 5 | "requires": true, 6 | "dependencies": { 7 | "ansi-regex": { 8 | "version": "2.1.1", 9 | "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", 10 | "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=" 11 | }, 12 | "ansi-styles": { 13 | "version": "2.2.1", 14 | "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", 15 | "integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=" 16 | }, 17 | "argv-tools": { 18 | "version": "0.1.1", 19 | "resolved": "https://registry.npmjs.org/argv-tools/-/argv-tools-0.1.1.tgz", 20 | "integrity": "sha512-Cc0dBvx4dvrjjKpyDA6w8RlNAw8Su30NvZbWl/Tv9ZALEVlLVkWQiHMi84Q0xNfpVuSaiQbYkdmWK8g1PLGhKw==", 21 | "requires": { 22 | "array-back": "^2.0.0", 23 | "find-replace": "^2.0.1" 24 | } 25 | }, 26 | "array-back": { 27 | "version": "2.0.0", 28 | "resolved": "https://registry.npmjs.org/array-back/-/array-back-2.0.0.tgz", 29 | "integrity": "sha512-eJv4pLLufP3g5kcZry0j6WXpIbzYw9GUB4mVJZno9wfwiBxbizTnHCw3VJb07cBihbFX48Y7oSrW9y+gt4glyw==", 30 | "requires": { 31 | "typical": "^2.6.1" 32 | } 33 | }, 34 | "array-find-index": { 35 | "version": "1.0.2", 36 | "resolved": "https://registry.npmjs.org/array-find-index/-/array-find-index-1.0.2.tgz", 37 | "integrity": "sha1-3wEKoSh+Fku9pvlyOwqWoexBh6E=" 38 | }, 39 | "bson": { 40 | "version": "1.1.0", 41 | "resolved": "https://registry.npmjs.org/bson/-/bson-1.1.0.tgz", 42 | "integrity": "sha512-9Aeai9TacfNtWXOYarkFJRW2CWo+dRon+fuLZYJmvLV3+MiUp0bEI6IAZfXEIg7/Pl/7IWlLaDnhzTsD81etQA==" 43 | }, 44 | "buffer-writer": { 45 | "version": "1.0.1", 46 | "resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-1.0.1.tgz", 47 | "integrity": "sha1-Iqk2kB4wKa/NdUfrRIfOtpejvwg=" 48 | }, 49 | "builtin-modules": { 50 | "version": "1.1.1", 51 | "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-1.1.1.tgz", 52 | "integrity": "sha1-Jw8HbFpywC9bZaR9+Uxf46J4iS8=" 53 | }, 54 | "camelcase": { 55 | "version": "2.1.1", 56 | "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-2.1.1.tgz", 57 | "integrity": "sha1-fB0W1nmhu+WcoCys7PsBHiAfWh8=" 58 | }, 59 | "camelcase-keys": { 60 | "version": "2.1.0", 61 | "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-2.1.0.tgz", 62 | "integrity": "sha1-MIvur/3ygRkFHvodkyITyRuPkuc=", 63 | "requires": { 64 | "camelcase": "^2.0.0", 65 | "map-obj": "^1.0.0" 66 | } 67 | }, 68 | "chalk": { 69 | "version": "1.1.3", 70 | "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", 71 | "integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=", 72 | "requires": { 73 | "ansi-styles": "^2.2.1", 74 | "escape-string-regexp": "^1.0.2", 75 | "has-ansi": "^2.0.0", 76 | "strip-ansi": "^3.0.0", 77 | "supports-color": "^2.0.0" 78 | } 79 | }, 80 | "color-convert": { 81 | "version": "1.9.3", 82 | "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", 83 | "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", 84 | "requires": { 85 | "color-name": "1.1.3" 86 | } 87 | }, 88 | "color-name": { 89 | "version": "1.1.3", 90 | "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", 91 | "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" 92 | }, 93 | "command-line-args": { 94 | "version": "5.0.2", 95 | "resolved": "https://registry.npmjs.org/command-line-args/-/command-line-args-5.0.2.tgz", 96 | "integrity": "sha512-/qPcbL8zpqg53x4rAaqMFlRV4opN3pbla7I7k9x8kyOBMQoGT6WltjN6sXZuxOXw6DgdK7Ad+ijYS5gjcr7vlA==", 97 | "requires": { 98 | "argv-tools": "^0.1.1", 99 | "array-back": "^2.0.0", 100 | "find-replace": "^2.0.1", 101 | "lodash.camelcase": "^4.3.0", 102 | "typical": "^2.6.1" 103 | } 104 | }, 105 | "command-line-usage": { 106 | "version": "6.0.2", 107 | "resolved": "https://registry.npmjs.org/command-line-usage/-/command-line-usage-6.0.2.tgz", 108 | "integrity": "sha512-Jr9RQM43qWDwpRJOa0lgZw0LhiU8tgOqoR+xxIcb3eT5vFZi69fBWUODMSBtGUYI1qTlElPl3txFQY6rChVuXQ==", 109 | "requires": { 110 | "array-back": "^3.1.0", 111 | "chalk": "^2.4.2", 112 | "table-layout": "^1.0.0", 113 | "typical": "^5.1.0" 114 | }, 115 | "dependencies": { 116 | "ansi-styles": { 117 | "version": "3.2.1", 118 | "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", 119 | "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", 120 | "requires": { 121 | "color-convert": "^1.9.0" 122 | } 123 | }, 124 | "array-back": { 125 | "version": "3.1.0", 126 | "resolved": "https://registry.npmjs.org/array-back/-/array-back-3.1.0.tgz", 127 | "integrity": "sha512-TkuxA4UCOvxuDK6NZYXCalszEzj+TLszyASooky+i742l9TqsOdYCMJJupxRic61hwquNtppB3hgcuq9SVSH1Q==" 128 | }, 129 | "chalk": { 130 | "version": "2.4.2", 131 | "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", 132 | "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", 133 | "requires": { 134 | "ansi-styles": "^3.2.1", 135 | "escape-string-regexp": "^1.0.5", 136 | "supports-color": "^5.3.0" 137 | } 138 | }, 139 | "supports-color": { 140 | "version": "5.5.0", 141 | "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", 142 | "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", 143 | "requires": { 144 | "has-flag": "^3.0.0" 145 | } 146 | }, 147 | "typical": { 148 | "version": "5.1.0", 149 | "resolved": "https://registry.npmjs.org/typical/-/typical-5.1.0.tgz", 150 | "integrity": "sha512-t5Ik8UAwBal1P1XzuVE4dc+RYQZicLUGJdvqr/vdqsED7SQECgsGBylldSsfWZL7RQjxT3xhQcKHWhLaVSR6YQ==" 151 | } 152 | } 153 | }, 154 | "console-stamp": { 155 | "version": "0.2.6", 156 | "resolved": "https://registry.npmjs.org/console-stamp/-/console-stamp-0.2.6.tgz", 157 | "integrity": "sha512-7l2MJ93tXpCWh9n5rRJHu7tNMxa9SPx8pba8kBJDROOfMFVBHPpqv141lkhcnLEYMC5npvrywH7gZ5DzYE+cSg==", 158 | "requires": { 159 | "chalk": "^1.1.1", 160 | "dateformat": "^1.0.11", 161 | "merge": "^1.2.0" 162 | } 163 | }, 164 | "currently-unhandled": { 165 | "version": "0.4.1", 166 | "resolved": "https://registry.npmjs.org/currently-unhandled/-/currently-unhandled-0.4.1.tgz", 167 | "integrity": "sha1-mI3zP+qxke95mmE2nddsF635V+o=", 168 | "requires": { 169 | "array-find-index": "^1.0.1" 170 | } 171 | }, 172 | "dateformat": { 173 | "version": "1.0.12", 174 | "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-1.0.12.tgz", 175 | "integrity": "sha1-nxJLZ1lMk3/3BpMuSmQsyo27/uk=", 176 | "requires": { 177 | "get-stdin": "^4.0.1", 178 | "meow": "^3.3.0" 179 | } 180 | }, 181 | "decamelize": { 182 | "version": "1.2.0", 183 | "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", 184 | "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=" 185 | }, 186 | "deep-extend": { 187 | "version": "0.6.0", 188 | "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", 189 | "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==" 190 | }, 191 | "double-ended-queue": { 192 | "version": "2.1.0-0", 193 | "resolved": "https://registry.npmjs.org/double-ended-queue/-/double-ended-queue-2.1.0-0.tgz", 194 | "integrity": "sha1-ED01J/0xUo9AGIEwyEHv3XgmTlw=" 195 | }, 196 | "error-ex": { 197 | "version": "1.3.1", 198 | "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.1.tgz", 199 | "integrity": "sha1-+FWobOYa3E6GIcPNoh56dhLDqNw=", 200 | "requires": { 201 | "is-arrayish": "^0.2.1" 202 | } 203 | }, 204 | "escape-string-regexp": { 205 | "version": "1.0.5", 206 | "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", 207 | "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=" 208 | }, 209 | "find-replace": { 210 | "version": "2.0.1", 211 | "resolved": "https://registry.npmjs.org/find-replace/-/find-replace-2.0.1.tgz", 212 | "integrity": "sha512-LzDo3Fpa30FLIBsh6DCDnMN1KW2g4QKkqKmejlImgWY67dDFPX/x9Kh/op/GK522DchQXEvDi/wD48HKW49XOQ==", 213 | "requires": { 214 | "array-back": "^2.0.0", 215 | "test-value": "^3.0.0" 216 | } 217 | }, 218 | "find-up": { 219 | "version": "1.1.2", 220 | "resolved": "https://registry.npmjs.org/find-up/-/find-up-1.1.2.tgz", 221 | "integrity": "sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8=", 222 | "requires": { 223 | "path-exists": "^2.0.0", 224 | "pinkie-promise": "^2.0.0" 225 | } 226 | }, 227 | "get-stdin": { 228 | "version": "4.0.1", 229 | "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-4.0.1.tgz", 230 | "integrity": "sha1-uWjGsKBDhDJJAui/Gl3zJXmkUP4=" 231 | }, 232 | "graceful-fs": { 233 | "version": "4.1.11", 234 | "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.11.tgz", 235 | "integrity": "sha1-Dovf5NHduIVNZOBOp8AOKgJuVlg=" 236 | }, 237 | "has-ansi": { 238 | "version": "2.0.0", 239 | "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", 240 | "integrity": "sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE=", 241 | "requires": { 242 | "ansi-regex": "^2.0.0" 243 | } 244 | }, 245 | "has-flag": { 246 | "version": "3.0.0", 247 | "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", 248 | "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=" 249 | }, 250 | "hosted-git-info": { 251 | "version": "2.5.0", 252 | "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.5.0.tgz", 253 | "integrity": "sha512-pNgbURSuab90KbTqvRPsseaTxOJCZBD0a7t+haSN33piP9cCM4l0CqdzAif2hUqm716UovKB2ROmiabGAKVXyg==" 254 | }, 255 | "indent-string": { 256 | "version": "2.1.0", 257 | "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-2.1.0.tgz", 258 | "integrity": "sha1-ji1INIdCEhtKghi3oTfppSBJ3IA=", 259 | "requires": { 260 | "repeating": "^2.0.0" 261 | } 262 | }, 263 | "is-arrayish": { 264 | "version": "0.2.1", 265 | "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", 266 | "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=" 267 | }, 268 | "is-builtin-module": { 269 | "version": "1.0.0", 270 | "resolved": "https://registry.npmjs.org/is-builtin-module/-/is-builtin-module-1.0.0.tgz", 271 | "integrity": "sha1-VAVy0096wxGfj3bDDLwbHgN6/74=", 272 | "requires": { 273 | "builtin-modules": "^1.0.0" 274 | } 275 | }, 276 | "is-finite": { 277 | "version": "1.0.2", 278 | "resolved": "https://registry.npmjs.org/is-finite/-/is-finite-1.0.2.tgz", 279 | "integrity": "sha1-zGZ3aVYCvlUO8R6LSqYwU0K20Ko=", 280 | "requires": { 281 | "number-is-nan": "^1.0.0" 282 | } 283 | }, 284 | "is-utf8": { 285 | "version": "0.2.1", 286 | "resolved": "https://registry.npmjs.org/is-utf8/-/is-utf8-0.2.1.tgz", 287 | "integrity": "sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI=" 288 | }, 289 | "load-json-file": { 290 | "version": "1.1.0", 291 | "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-1.1.0.tgz", 292 | "integrity": "sha1-lWkFcI1YtLq0wiYbBPWfMcmTdMA=", 293 | "requires": { 294 | "graceful-fs": "^4.1.2", 295 | "parse-json": "^2.2.0", 296 | "pify": "^2.0.0", 297 | "pinkie-promise": "^2.0.0", 298 | "strip-bom": "^2.0.0" 299 | } 300 | }, 301 | "lodash.camelcase": { 302 | "version": "4.3.0", 303 | "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", 304 | "integrity": "sha1-soqmKIorn8ZRA1x3EfZathkDMaY=" 305 | }, 306 | "loud-rejection": { 307 | "version": "1.6.0", 308 | "resolved": "https://registry.npmjs.org/loud-rejection/-/loud-rejection-1.6.0.tgz", 309 | "integrity": "sha1-W0b4AUft7leIcPCG0Eghz5mOVR8=", 310 | "requires": { 311 | "currently-unhandled": "^0.4.1", 312 | "signal-exit": "^3.0.0" 313 | } 314 | }, 315 | "map-obj": { 316 | "version": "1.0.1", 317 | "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-1.0.1.tgz", 318 | "integrity": "sha1-2TPOuSBdgr3PSIb2dCvcK03qFG0=" 319 | }, 320 | "meow": { 321 | "version": "3.7.0", 322 | "resolved": "https://registry.npmjs.org/meow/-/meow-3.7.0.tgz", 323 | "integrity": "sha1-cstmi0JSKCkKu/qFaJJYcwioAfs=", 324 | "requires": { 325 | "camelcase-keys": "^2.0.0", 326 | "decamelize": "^1.1.2", 327 | "loud-rejection": "^1.0.0", 328 | "map-obj": "^1.0.1", 329 | "minimist": "^1.1.3", 330 | "normalize-package-data": "^2.3.4", 331 | "object-assign": "^4.0.1", 332 | "read-pkg-up": "^1.0.1", 333 | "redent": "^1.0.0", 334 | "trim-newlines": "^1.0.0" 335 | } 336 | }, 337 | "merge": { 338 | "version": "1.2.1", 339 | "resolved": "https://registry.npmjs.org/merge/-/merge-1.2.1.tgz", 340 | "integrity": "sha512-VjFo4P5Whtj4vsLzsYBu5ayHhoHJ0UqNm7ibvShmbmoz7tGi0vXaoJbGdB+GmDMLUdg8DpQXEIeVDAe8MaABvQ==" 341 | }, 342 | "minimist": { 343 | "version": "1.2.0", 344 | "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", 345 | "integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=" 346 | }, 347 | "mongodb": { 348 | "version": "3.1.3", 349 | "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-3.1.3.tgz", 350 | "integrity": "sha512-hfzI54/fe+604w5gP+i9aJ5GGVxnquxZ09ZN1cyLTbpnBfDRpj78lN59SBdDRkF1VNTzmM2KcgDWhHHDHcsJxw==", 351 | "requires": { 352 | "mongodb-core": "3.1.2" 353 | } 354 | }, 355 | "mongodb-core": { 356 | "version": "3.1.2", 357 | "resolved": "https://registry.npmjs.org/mongodb-core/-/mongodb-core-3.1.2.tgz", 358 | "integrity": "sha512-R2XxGzsmhlUeOK2jKATj1TWn3q3qNcJpKrSh0rhaBSHxJmV7WZ+ikjocdY8VdJxUkKqOxM8rxMqOAEzeJ3p1ww==", 359 | "requires": { 360 | "bson": "^1.1.0", 361 | "require_optional": "^1.0.1", 362 | "saslprep": "^1.0.0" 363 | } 364 | }, 365 | "normalize-package-data": { 366 | "version": "2.4.0", 367 | "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.4.0.tgz", 368 | "integrity": "sha512-9jjUFbTPfEy3R/ad/2oNbKtW9Hgovl5O1FvFWKkKblNXoN/Oou6+9+KKohPK13Yc3/TyunyWhJp6gvRNR/PPAw==", 369 | "requires": { 370 | "hosted-git-info": "^2.1.4", 371 | "is-builtin-module": "^1.0.0", 372 | "semver": "2 || 3 || 4 || 5", 373 | "validate-npm-package-license": "^3.0.1" 374 | } 375 | }, 376 | "number-is-nan": { 377 | "version": "1.0.1", 378 | "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", 379 | "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=" 380 | }, 381 | "object-assign": { 382 | "version": "4.1.1", 383 | "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", 384 | "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=" 385 | }, 386 | "packet-reader": { 387 | "version": "0.3.1", 388 | "resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-0.3.1.tgz", 389 | "integrity": "sha1-zWLmCvjX/qinBexP+ZCHHEaHHyc=" 390 | }, 391 | "parse-json": { 392 | "version": "2.2.0", 393 | "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", 394 | "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=", 395 | "requires": { 396 | "error-ex": "^1.2.0" 397 | } 398 | }, 399 | "path-exists": { 400 | "version": "2.1.0", 401 | "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-2.1.0.tgz", 402 | "integrity": "sha1-D+tsZPD8UY2adU3V77YscCJ2H0s=", 403 | "requires": { 404 | "pinkie-promise": "^2.0.0" 405 | } 406 | }, 407 | "path-type": { 408 | "version": "1.1.0", 409 | "resolved": "https://registry.npmjs.org/path-type/-/path-type-1.1.0.tgz", 410 | "integrity": "sha1-WcRPfuSR2nBNpBXaWkBwuk+P5EE=", 411 | "requires": { 412 | "graceful-fs": "^4.1.2", 413 | "pify": "^2.0.0", 414 | "pinkie-promise": "^2.0.0" 415 | } 416 | }, 417 | "pg": { 418 | "version": "7.4.3", 419 | "resolved": "https://registry.npmjs.org/pg/-/pg-7.4.3.tgz", 420 | "integrity": "sha1-97b5P1NA7MJZavu5ShPj1rYJg0s=", 421 | "requires": { 422 | "buffer-writer": "1.0.1", 423 | "packet-reader": "0.3.1", 424 | "pg-connection-string": "0.1.3", 425 | "pg-pool": "~2.0.3", 426 | "pg-types": "~1.12.1", 427 | "pgpass": "1.x", 428 | "semver": "4.3.2" 429 | }, 430 | "dependencies": { 431 | "semver": { 432 | "version": "4.3.2", 433 | "resolved": "https://registry.npmjs.org/semver/-/semver-4.3.2.tgz", 434 | "integrity": "sha1-x6BxWKgL7dBSNVt3DYLWZA+AO+c=" 435 | } 436 | } 437 | }, 438 | "pg-connection-string": { 439 | "version": "0.1.3", 440 | "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-0.1.3.tgz", 441 | "integrity": "sha1-2hhHsglA5C7hSSvq9l1J2RskXfc=" 442 | }, 443 | "pg-copy-streams": { 444 | "version": "1.2.0", 445 | "resolved": "https://registry.npmjs.org/pg-copy-streams/-/pg-copy-streams-1.2.0.tgz", 446 | "integrity": "sha1-ez+d7gtsX8IGj1nED6IY4MHXQkk=" 447 | }, 448 | "pg-cursor": { 449 | "version": "1.3.0", 450 | "resolved": "https://registry.npmjs.org/pg-cursor/-/pg-cursor-1.3.0.tgz", 451 | "integrity": "sha1-siDxkIl2t7QNqjc8etpfyoI6sNk=" 452 | }, 453 | "pg-pool": { 454 | "version": "2.0.3", 455 | "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-2.0.3.tgz", 456 | "integrity": "sha1-wCIDLIlJ8xKk+R+2QJzgQHa+Mlc=" 457 | }, 458 | "pg-types": { 459 | "version": "1.12.1", 460 | "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-1.12.1.tgz", 461 | "integrity": "sha1-1kCH45A7WP+q0nnnWVxSIIoUw9I=", 462 | "requires": { 463 | "postgres-array": "~1.0.0", 464 | "postgres-bytea": "~1.0.0", 465 | "postgres-date": "~1.0.0", 466 | "postgres-interval": "^1.1.0" 467 | } 468 | }, 469 | "pgpass": { 470 | "version": "1.0.2", 471 | "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.2.tgz", 472 | "integrity": "sha1-Knu0G2BltnkH6R2hsHwYR8h3swY=", 473 | "requires": { 474 | "split": "^1.0.0" 475 | } 476 | }, 477 | "pify": { 478 | "version": "2.3.0", 479 | "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", 480 | "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=" 481 | }, 482 | "pinkie": { 483 | "version": "2.0.4", 484 | "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", 485 | "integrity": "sha1-clVrgM+g1IqXToDnckjoDtT3+HA=" 486 | }, 487 | "pinkie-promise": { 488 | "version": "2.0.1", 489 | "resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", 490 | "integrity": "sha1-ITXW36ejWMBprJsXh3YogihFD/o=", 491 | "requires": { 492 | "pinkie": "^2.0.0" 493 | } 494 | }, 495 | "postgres-array": { 496 | "version": "1.0.2", 497 | "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-1.0.2.tgz", 498 | "integrity": "sha1-jgsy6wO/d6XAp4UeBEHBaaJWojg=" 499 | }, 500 | "postgres-bytea": { 501 | "version": "1.0.0", 502 | "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", 503 | "integrity": "sha1-AntTPAqokOJtFy1Hz5zOzFIazTU=" 504 | }, 505 | "postgres-date": { 506 | "version": "1.0.3", 507 | "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.3.tgz", 508 | "integrity": "sha1-4tiXAu/bJY/52c7g/pG9BpdSV6g=" 509 | }, 510 | "postgres-interval": { 511 | "version": "1.1.1", 512 | "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.1.1.tgz", 513 | "integrity": "sha512-OkuCi9t/3CZmeQreutGgx/OVNv9MKHGIT5jH8KldQ4NLYXkvmT9nDVxEuCENlNwhlGPE374oA/xMqn05G49pHA==", 514 | "requires": { 515 | "xtend": "^4.0.0" 516 | } 517 | }, 518 | "read-pkg": { 519 | "version": "1.1.0", 520 | "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-1.1.0.tgz", 521 | "integrity": "sha1-9f+qXs0pyzHAR0vKfXVra7KePyg=", 522 | "requires": { 523 | "load-json-file": "^1.0.0", 524 | "normalize-package-data": "^2.3.2", 525 | "path-type": "^1.0.0" 526 | } 527 | }, 528 | "read-pkg-up": { 529 | "version": "1.0.1", 530 | "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-1.0.1.tgz", 531 | "integrity": "sha1-nWPBMnbAZZGNV/ACpX9AobZD+wI=", 532 | "requires": { 533 | "find-up": "^1.0.0", 534 | "read-pkg": "^1.0.0" 535 | } 536 | }, 537 | "redent": { 538 | "version": "1.0.0", 539 | "resolved": "https://registry.npmjs.org/redent/-/redent-1.0.0.tgz", 540 | "integrity": "sha1-z5Fqsf1fHxbfsggi3W7H9zDCr94=", 541 | "requires": { 542 | "indent-string": "^2.1.0", 543 | "strip-indent": "^1.0.1" 544 | } 545 | }, 546 | "redis": { 547 | "version": "2.8.0", 548 | "resolved": "https://registry.npmjs.org/redis/-/redis-2.8.0.tgz", 549 | "integrity": "sha512-M1OkonEQwtRmZv4tEWF2VgpG0JWJ8Fv1PhlgT5+B+uNq2cA3Rt1Yt/ryoR+vQNOQcIEgdCdfH0jr3bDpihAw1A==", 550 | "requires": { 551 | "double-ended-queue": "^2.1.0-0", 552 | "redis-commands": "^1.2.0", 553 | "redis-parser": "^2.6.0" 554 | } 555 | }, 556 | "redis-commands": { 557 | "version": "1.3.1", 558 | "resolved": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.3.1.tgz", 559 | "integrity": "sha1-gdgm9F+pyLIBH0zXoP5ZfSQdRCs=" 560 | }, 561 | "redis-parser": { 562 | "version": "2.6.0", 563 | "resolved": "https://registry.npmjs.org/redis-parser/-/redis-parser-2.6.0.tgz", 564 | "integrity": "sha1-Uu0J2srBCPGmMcB+m2mUHnoZUEs=" 565 | }, 566 | "reduce-flatten": { 567 | "version": "2.0.0", 568 | "resolved": "https://registry.npmjs.org/reduce-flatten/-/reduce-flatten-2.0.0.tgz", 569 | "integrity": "sha512-EJ4UNY/U1t2P/2k6oqotuX2Cc3T6nxJwsM0N0asT7dhrtH1ltUxDn4NalSYmPE2rCkVpcf/X6R0wDwcFpzhd4w==" 570 | }, 571 | "repeating": { 572 | "version": "2.0.1", 573 | "resolved": "https://registry.npmjs.org/repeating/-/repeating-2.0.1.tgz", 574 | "integrity": "sha1-UhTFOpJtNVJwdSf7q0FdvAjQbdo=", 575 | "requires": { 576 | "is-finite": "^1.0.0" 577 | } 578 | }, 579 | "require_optional": { 580 | "version": "1.0.1", 581 | "resolved": "https://registry.npmjs.org/require_optional/-/require_optional-1.0.1.tgz", 582 | "integrity": "sha512-qhM/y57enGWHAe3v/NcwML6a3/vfESLe/sGM2dII+gEO0BpKRUkWZow/tyloNqJyN6kXSl3RyyM8Ll5D/sJP8g==", 583 | "requires": { 584 | "resolve-from": "^2.0.0", 585 | "semver": "^5.1.0" 586 | } 587 | }, 588 | "resolve-from": { 589 | "version": "2.0.0", 590 | "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-2.0.0.tgz", 591 | "integrity": "sha1-lICrIOlP+h2egKgEx+oUdhGWa1c=" 592 | }, 593 | "saslprep": { 594 | "version": "1.0.1", 595 | "resolved": "https://registry.npmjs.org/saslprep/-/saslprep-1.0.1.tgz", 596 | "integrity": "sha512-ntN6SbE3hRqd45PKKadRPgA+xHPWg5lPSj2JWJdJvjTwXDDfkPVtXWvP8jJojvnm+rAsZ2b299C5NwZqq818EA==", 597 | "optional": true 598 | }, 599 | "semver": { 600 | "version": "5.5.0", 601 | "resolved": "https://registry.npmjs.org/semver/-/semver-5.5.0.tgz", 602 | "integrity": "sha512-4SJ3dm0WAwWy/NVeioZh5AntkdJoWKxHxcmyP622fOkgHa4z3R0TdBJICINyaSDE6uNwVc8gZr+ZinwZAH4xIA==" 603 | }, 604 | "signal-exit": { 605 | "version": "3.0.2", 606 | "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz", 607 | "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=" 608 | }, 609 | "spdx-correct": { 610 | "version": "1.0.2", 611 | "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-1.0.2.tgz", 612 | "integrity": "sha1-SzBz2TP/UfORLwOsVRlJikFQ20A=", 613 | "requires": { 614 | "spdx-license-ids": "^1.0.2" 615 | } 616 | }, 617 | "spdx-expression-parse": { 618 | "version": "1.0.4", 619 | "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-1.0.4.tgz", 620 | "integrity": "sha1-m98vIOH0DtRH++JzJmGR/O1RYmw=" 621 | }, 622 | "spdx-license-ids": { 623 | "version": "1.2.2", 624 | "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-1.2.2.tgz", 625 | "integrity": "sha1-yd96NCRZSt5r0RkA1ZZpbcBrrFc=" 626 | }, 627 | "split": { 628 | "version": "1.0.1", 629 | "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz", 630 | "integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==", 631 | "requires": { 632 | "through": "2" 633 | } 634 | }, 635 | "strip-ansi": { 636 | "version": "3.0.1", 637 | "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", 638 | "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", 639 | "requires": { 640 | "ansi-regex": "^2.0.0" 641 | } 642 | }, 643 | "strip-bom": { 644 | "version": "2.0.0", 645 | "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-2.0.0.tgz", 646 | "integrity": "sha1-YhmoVhZSBJHzV4i9vxRHqZx+aw4=", 647 | "requires": { 648 | "is-utf8": "^0.2.0" 649 | } 650 | }, 651 | "strip-indent": { 652 | "version": "1.0.1", 653 | "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-1.0.1.tgz", 654 | "integrity": "sha1-DHlipq3vp7vUrDZkYKY4VSrhoKI=", 655 | "requires": { 656 | "get-stdin": "^4.0.1" 657 | } 658 | }, 659 | "supports-color": { 660 | "version": "2.0.0", 661 | "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", 662 | "integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=" 663 | }, 664 | "table-layout": { 665 | "version": "1.0.0", 666 | "resolved": "https://registry.npmjs.org/table-layout/-/table-layout-1.0.0.tgz", 667 | "integrity": "sha512-o8V8u943KXX9gLNK/Ss1n6Nn4YhpyY/RRnp3hKv/zTA+SXYiQnzJQlR8CZQf1RqYqgkiWMJ54Mv+Vq9Kfzxz1A==", 668 | "requires": { 669 | "array-back": "^3.1.0", 670 | "deep-extend": "~0.6.0", 671 | "typical": "^5.0.0", 672 | "wordwrapjs": "^4.0.0" 673 | }, 674 | "dependencies": { 675 | "array-back": { 676 | "version": "3.1.0", 677 | "resolved": "https://registry.npmjs.org/array-back/-/array-back-3.1.0.tgz", 678 | "integrity": "sha512-TkuxA4UCOvxuDK6NZYXCalszEzj+TLszyASooky+i742l9TqsOdYCMJJupxRic61hwquNtppB3hgcuq9SVSH1Q==" 679 | }, 680 | "typical": { 681 | "version": "5.1.0", 682 | "resolved": "https://registry.npmjs.org/typical/-/typical-5.1.0.tgz", 683 | "integrity": "sha512-t5Ik8UAwBal1P1XzuVE4dc+RYQZicLUGJdvqr/vdqsED7SQECgsGBylldSsfWZL7RQjxT3xhQcKHWhLaVSR6YQ==" 684 | } 685 | } 686 | }, 687 | "test-value": { 688 | "version": "3.0.0", 689 | "resolved": "https://registry.npmjs.org/test-value/-/test-value-3.0.0.tgz", 690 | "integrity": "sha512-sVACdAWcZkSU9x7AOmJo5TqE+GyNJknHaHsMrR6ZnhjVlVN9Yx6FjHrsKZ3BjIpPCT68zYesPWkakrNupwfOTQ==", 691 | "requires": { 692 | "array-back": "^2.0.0", 693 | "typical": "^2.6.1" 694 | } 695 | }, 696 | "through": { 697 | "version": "2.3.8", 698 | "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", 699 | "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=" 700 | }, 701 | "trim-newlines": { 702 | "version": "1.0.0", 703 | "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-1.0.0.tgz", 704 | "integrity": "sha1-WIeWa7WCpFA6QetST301ARgVphM=" 705 | }, 706 | "typical": { 707 | "version": "2.6.1", 708 | "resolved": "https://registry.npmjs.org/typical/-/typical-2.6.1.tgz", 709 | "integrity": "sha1-XAgOXWYcu+OCWdLnCjxyU+hziB0=" 710 | }, 711 | "validate-npm-package-license": { 712 | "version": "3.0.1", 713 | "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.1.tgz", 714 | "integrity": "sha1-KAS6vnEq0zeUWaz74kdGqywwP7w=", 715 | "requires": { 716 | "spdx-correct": "~1.0.0", 717 | "spdx-expression-parse": "~1.0.0" 718 | } 719 | }, 720 | "wordwrapjs": { 721 | "version": "4.0.0", 722 | "resolved": "https://registry.npmjs.org/wordwrapjs/-/wordwrapjs-4.0.0.tgz", 723 | "integrity": "sha512-Svqw723a3R34KvsMgpjFBYCgNOSdcW3mQFK4wIfhGQhtaFVOJmdYoXgi63ne3dTlWgatVcUc7t4HtQ/+bUVIzQ==", 724 | "requires": { 725 | "reduce-flatten": "^2.0.0", 726 | "typical": "^5.0.0" 727 | }, 728 | "dependencies": { 729 | "typical": { 730 | "version": "5.1.0", 731 | "resolved": "https://registry.npmjs.org/typical/-/typical-5.1.0.tgz", 732 | "integrity": "sha512-t5Ik8UAwBal1P1XzuVE4dc+RYQZicLUGJdvqr/vdqsED7SQECgsGBylldSsfWZL7RQjxT3xhQcKHWhLaVSR6YQ==" 733 | } 734 | } 735 | }, 736 | "xtend": { 737 | "version": "4.0.1", 738 | "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz", 739 | "integrity": "sha1-pcbVMr5lbiPbgg77lDofBJmNY68=" 740 | } 741 | } 742 | } 743 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@ben_lubar/nodebb-postgres-converter", 3 | "version": "0.1.1", 4 | "description": "Converts an existing NodeBB NoSQL database to a relational PostgreSQL database.", 5 | "main": "main.js", 6 | "bin": { 7 | "nodebb-postgres-converter": "bin/converter.js" 8 | }, 9 | "repository": { 10 | "type": "git", 11 | "url": "git+https://github.com/BenLubar/nodebb-postgres-converter.git" 12 | }, 13 | "author": "Ben Lubar ", 14 | "license": "MIT", 15 | "bugs": { 16 | "url": "https://github.com/BenLubar/nodebb-postgres-converter/issues" 17 | }, 18 | "homepage": "https://github.com/BenLubar/nodebb-postgres-converter#readme", 19 | "dependencies": { 20 | "command-line-args": "^5.0.0", 21 | "command-line-usage": "^6.0.0", 22 | "console-stamp": "^0.2.6", 23 | "mongodb": "^3.0.0", 24 | "pg": "^7.4.3", 25 | "pg-copy-streams": "^1.2.0", 26 | "pg-cursor": "^1.3.0", 27 | "redis": "^2.8.0" 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /reader/mongo.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const { MongoClient } = require('mongodb'); 4 | 5 | module.exports = async function(connection, count, each) { 6 | const client = await MongoClient.connect(connection); 7 | const db = client.db(); 8 | 9 | try { 10 | const objects = db.collection('objects'); 11 | 12 | await count(await objects.count()); 13 | 14 | const cur = objects.find().addCursorFlag('noCursorTimeout', true); 15 | 16 | var data; 17 | while ((data = await cur.next()) !== null) { 18 | await each(data); 19 | } 20 | } finally { 21 | await client.close(); 22 | } 23 | }; 24 | -------------------------------------------------------------------------------- /reader/postgres.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const { Client } = require('pg'); 4 | const Cursor = require('pg-cursor'); 5 | const { promisify } = require('util'); 6 | 7 | module.exports = async function(connection, count, each) { 8 | const client = new Client({ 9 | connectionString: connection 10 | }); 11 | await client.connect(); 12 | 13 | try { 14 | var getAllQuery = `SELECT "data" FROM "objects"`; 15 | var res; 16 | 17 | try { 18 | res = await client.query(`SELECT COUNT(*) c FROM "objects"`); 19 | } catch (ex) { 20 | getAllQuery = `WITH o AS (SELECT CASE 21 | WHEN o."expireAt" IS NULL THEN jsonb_build_object('_key', o."_key") 22 | ELSE jsonb_build_object('_key', o."_key", 'expireAt', EXTRACT(EPOCH FROM o."expireAt") * 1000) 23 | END "data", o."_key", o."type" 24 | FROM "legacy_object" o) 25 | SELECT h."data" || o."data" "data" 26 | FROM "legacy_hash" h 27 | INNER JOIN o 28 | ON o."_key" = h."_key" 29 | AND o."type" = h."type" 30 | UNION ALL 31 | SELECT jsonb_build_object('value', z."value", 'score', z."score") || o."data" "data" 32 | FROM "legacy_zset" z 33 | INNER JOIN o 34 | ON o."_key" = z."_key" 35 | AND o."type" = z."type" 36 | UNION ALL 37 | SELECT jsonb_build_object('members', to_jsonb(array_agg(s."member"))) || o."data" "data" 38 | FROM "legacy_set" s 39 | INNER JOIN o 40 | ON o."_key" = s."_key" 41 | AND o."type" = s."type" 42 | GROUP BY o."data" 43 | UNION ALL 44 | SELECT jsonb_build_object('array', to_jsonb(l."array")) || o."data" "data" 45 | FROM "legacy_list" l 46 | INNER JOIN o 47 | ON o."_key" = l."_key" 48 | AND o."type" = l."type" 49 | UNION ALL 50 | SELECT jsonb_build_object('data', s."data") || o."data" "data" 51 | FROM "legacy_string" s 52 | INNER JOIN o 53 | ON o."_key" = s."_key" 54 | AND o."type" = s."type"`; 55 | try { 56 | res = await client.query(`SELECT (SELECT COUNT(*) FROM "legacy_object" WHERE "type" <> 'zset') + (SELECT COUNT(*) FROM "legacy_zset") + (SELECT COUNT(*) FROM "legacy_imported") c`); 57 | getAllQuery += ` 58 | UNION ALL 59 | SELECT i."data" || jsonb_build_object('_key', '_imported_' || i."type" || ':' || i."id") "data" 60 | FROM "legacy_imported" i`; 61 | } catch (ex) { 62 | res = await client.query(`SELECT (SELECT COUNT(*) FROM "legacy_object" WHERE "type" <> 'zset') + (SELECT COUNT(*) FROM "legacy_zset") c`); 63 | } 64 | } 65 | 66 | await count(parseInt(res.rows[0].c, 10)); 67 | 68 | const cursor = client.query(new Cursor(getAllQuery)); 69 | cursor.readAsync = promisify(cursor.read); 70 | 71 | var queue = await cursor.readAsync(1000); 72 | while (queue.length) { 73 | var next = cursor.readAsync(1000); 74 | for (var row of queue) { 75 | await each(row.data); 76 | } 77 | queue = await next; 78 | } 79 | } finally { 80 | await client.end(); 81 | } 82 | }; 83 | -------------------------------------------------------------------------------- /reader/redis.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const redis = require('redis'); 4 | const { promisify } = require('util'); 5 | 6 | for (var fn of ['dbsize', 'scan', 'type', 'get', 'lrange', 'smembers', 'hgetall', 'zscan', 'pttl', 'zcard']) { 7 | redis.RedisClient.prototype[fn + 'Async'] = promisify(redis.RedisClient.prototype[fn]); 8 | } 9 | 10 | module.exports = async function(connection, count, realEach) { 11 | const client = redis.createClient(connection); 12 | 13 | const each = async function(data) { 14 | var ttl = parseInt(await client.pttlAsync(data._key), 10); 15 | if (ttl >= 0) { 16 | data.expireAt = Date.now() + ttl; 17 | } 18 | await realEach(data); 19 | }; 20 | 21 | try { 22 | var totalKeys = 0; 23 | 24 | var cursor = '0'; 25 | do { 26 | var result = await client.scanAsync(cursor, 'COUNT', '1000'); 27 | cursor = result[0]; 28 | 29 | totalKeys += result[1].length; 30 | 31 | for (var key of result[1]) { 32 | var type = await client.typeAsync(key); 33 | 34 | if (type === 'string' && key.startsWith('sess:')) { 35 | totalKeys--; 36 | } else if (type === 'zset') { 37 | totalKeys += await client.zcardAsync(key) - 1; 38 | } 39 | } 40 | } while (cursor !== '0'); 41 | 42 | await count(totalKeys); 43 | 44 | cursor = '0'; 45 | do { 46 | var result = await client.scanAsync(cursor, 'COUNT', '1000'); 47 | cursor = result[0]; 48 | 49 | for (var key of result[1]) { 50 | var type = await client.typeAsync(key); 51 | switch (type) { 52 | case 'string': 53 | if (key.startsWith('sess:')) { 54 | continue; 55 | } 56 | 57 | await each({ 58 | _key: key, 59 | value: await client.getAsync(key) 60 | }); 61 | break; 62 | case 'list': 63 | await each({ 64 | _key: key, 65 | array: await client.lrangeAsync(key, '0', '-1') 66 | }); 67 | break; 68 | case 'set': 69 | await each({ 70 | _key: key, 71 | members: await client.smembersAsync(key) 72 | }); 73 | break; 74 | case 'zset': 75 | await eachSorted(client, key, each); 76 | break; 77 | case 'hash': 78 | var data = await client.hgetallAsync(key); 79 | data._key = key; 80 | await each(data); 81 | break; 82 | default: 83 | throw new Exception('Unexpected redis type for key "' + key + '": ' + type); 84 | } 85 | } 86 | } while (cursor !== '0'); 87 | } finally { 88 | client.quit(); 89 | } 90 | }; 91 | 92 | async function eachSorted(client, key, each) { 93 | var cursor = '0'; 94 | do { 95 | var result = await client.zscanAsync(key, cursor, 'COUNT', '1000'); 96 | cursor = result[0]; 97 | 98 | for (var i = 0; i < result[1].length; i += 2) { 99 | await each({ 100 | _key: key, 101 | value: result[1][i], 102 | score: parseFloat(result[1][i + 1]) 103 | }); 104 | } 105 | } while (cursor !== '0'); 106 | } 107 | -------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": [ 3 | "config:base" 4 | ] 5 | } 6 | -------------------------------------------------------------------------------- /session/mongo.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const { MongoClient } = require('mongodb'); 4 | 5 | module.exports = async function(connection, each) { 6 | const client = await MongoClient.connect(connection); 7 | const db = client.db(); 8 | 9 | try { 10 | const sessions = db.collection('sessions'); 11 | 12 | const cur = sessions.find().addCursorFlag('noCursorTimeout', true); 13 | 14 | var data; 15 | while ((data = await cur.next()) !== null) { 16 | await each({ 17 | sid: data._id, 18 | sess: data.session, 19 | expire: data.expires.getTime() 20 | }); 21 | } 22 | } finally { 23 | await client.close(); 24 | } 25 | }; 26 | -------------------------------------------------------------------------------- /session/postgres.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const { Client } = require('pg'); 4 | const Cursor = require('pg-cursor'); 5 | const { promisify } = require('util'); 6 | 7 | module.exports = async function(connection, each) { 8 | const client = new Client({ 9 | connectionString: connection 10 | }); 11 | await client.connect(); 12 | 13 | try { 14 | const cursor = client.query(new Cursor(`SELECT "sid", "sess"::TEXT "sess", EXTRACT(EPOCH FROM "expire") * 1000 "expire" FROM "session"`)); 15 | cursor.readAsync = promisify(cursor.read); 16 | 17 | var queue = await cursor.readAsync(1000); 18 | while (queue.length) { 19 | var next = cursor.readAsync(1000); 20 | for (var row of queue) { 21 | await each(row); 22 | } 23 | queue = await next; 24 | } 25 | } finally { 26 | await client.end(); 27 | } 28 | }; 29 | -------------------------------------------------------------------------------- /session/redis.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const redis = require('redis'); 4 | const { promisify } = require('util'); 5 | 6 | for (var fn of ['scan', 'type', 'get', 'pttl']) { 7 | redis.RedisClient.prototype[fn + 'Async'] = promisify(redis.RedisClient.prototype[fn]); 8 | } 9 | 10 | module.exports = async function(connection, each) { 11 | const client = redis.createClient(connection); 12 | 13 | try { 14 | var cursor = '0'; 15 | do { 16 | var result = await client.scanAsync(cursor, 'MATCH', 'sess:*', 'COUNT', '1000'); 17 | cursor = result[0]; 18 | 19 | for (var key of result[1]) { 20 | var type = await client.typeAsync(key); 21 | if (type === 'string') { 22 | var data = await client.getAsync(key); 23 | var ttl = parseInt(await client.pttlAsync(key), 10); 24 | if (ttl < 0) { 25 | continue; 26 | } 27 | 28 | await each({ 29 | sid: key.substring('sess:'.length), 30 | sess: data, 31 | expire: Date.now() + ttl 32 | }); 33 | } 34 | } 35 | } while (cursor !== '0'); 36 | } finally { 37 | client.quit(); 38 | } 39 | }; 40 | -------------------------------------------------------------------------------- /writer/mongo.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const { MongoClient } = require('mongodb'); 4 | 5 | async function bulkInsert(collection, each) { 6 | var op = null; 7 | 8 | await each(async function (doc) { 9 | if (op === null) { 10 | op = collection.initializeUnorderedBulkOp(); 11 | } 12 | 13 | if (doc.hasOwnProperty('expireAt')) { 14 | doc.expireAt = new Date(doc.expireAt); 15 | } 16 | 17 | op.insert(doc); 18 | 19 | if (op.length >= 10000) { 20 | await op.execute(); 21 | op = null; 22 | } 23 | }); 24 | 25 | if (op !== null) { 26 | await op.execute(); 27 | } 28 | } 29 | 30 | async function writer(output, concurrency, memory, callback) { 31 | const client = await MongoClient.connect(output); 32 | const db = client.db(); 33 | 34 | try { 35 | await callback(async function (each) { 36 | const objects = await db.collection('objects'); 37 | 38 | await bulkInsert(objects, each); 39 | 40 | await objects.createIndex({ _key: 1, score: -1 }, { background: true }); 41 | await objects.createIndex({ _key: 1, value: -1 }, { background: true, unique: true, sparse: true }); 42 | await objects.createIndex({ expireAt: 1 }, { expireAfterSeconds: 0, background: true }); 43 | }, async function (reader, input) { 44 | const sessions = await db.collection('sessions'); 45 | 46 | await bulkInsert(sessions, async function (insert) { 47 | await reader(input, async function (data) { 48 | await insert({ 49 | _id: data.sid, 50 | session: data.sess, 51 | expires: new Date(data.expire), 52 | }); 53 | }); 54 | }); 55 | 56 | await sessions.createIndex({ expires: 1 }, { expireAfterSeconds: 0 }); 57 | }); 58 | } finally { 59 | await client.close(); 60 | } 61 | }; 62 | 63 | module.exports = writer; 64 | -------------------------------------------------------------------------------- /writer/postgres.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const { Pool } = require('pg'); 4 | const copyData = require('./postgres/copy.js'); 5 | const transaction = require('./postgres/transaction.js'); 6 | 7 | async function query(label, conn, query) { 8 | var shouldRelease = false; 9 | if (!conn.release) { 10 | shouldRelease = true; 11 | conn = await conn.connect(); 12 | } 13 | 14 | console.time(label); 15 | 16 | try { 17 | await conn.query(query); 18 | } catch (ex) { 19 | console.error('Query failed: ' + label); 20 | throw ex; 21 | } finally { 22 | console.timeEnd(label); 23 | 24 | if (shouldRelease) { 25 | conn.release(); 26 | } 27 | } 28 | } 29 | 30 | async function writer(output, concurrency, memory, callback) { 31 | const pool = new Pool({ 32 | connectionString: output, 33 | max: concurrency, 34 | application_name: 'nodebb-postgres-converter', 35 | verify: async function (client, callback) { 36 | try { 37 | await client.query(`SELECT set_config('maintenance_work_mem', $1::TEXT, false)`, [memory]); 38 | } catch (err) { 39 | client.release(err); 40 | return callback(err); 41 | } 42 | callback(null, client, client.release); 43 | } 44 | }); 45 | 46 | pool.on('connect', function (client) { 47 | client.on('notice', function(notice) { 48 | console.log('[DB] ' + notice.message + (notice.detail ? '\n' + notice.detail : '')); 49 | }); 50 | }); 51 | 52 | try { 53 | await callback(async function (each) { 54 | await transaction('Copy', pool, async function(db) { 55 | await query('Create table objects', db, `CREATE TABLE "objects" ( 56 | "data" JSONB NOT NULL 57 | CHECK (("data" ? '_key')) 58 | )`); 59 | 60 | await copyData(db, each); 61 | }); 62 | 63 | console.time('Index'); 64 | 65 | await Promise.all([ 66 | query('Create index on key__score', pool, `CREATE INDEX IF NOT EXISTS "idx__objects__key__score" ON "objects"(("data"->>'_key') ASC, (("data"->>'score')::numeric) DESC)`), 67 | query('Create unique index on key', pool, `CREATE UNIQUE INDEX IF NOT EXISTS "uniq__objects__key" ON "objects"(("data"->>'_key')) WHERE NOT ("data" ? 'score')`), 68 | query('Create unique index on key__value', pool, `CREATE UNIQUE INDEX IF NOT EXISTS "uniq__objects__key__value" ON "objects"(("data"->>'_key') ASC, ("data"->>'value') DESC)`), 69 | query('Create index on expireAt', pool, `CREATE INDEX IF NOT EXISTS "idx__objects__expireAt" ON "objects"((("data"->>'expireAt')::numeric) ASC) WHERE "data" ? 'expireAt'`) 70 | ]); 71 | 72 | console.timeEnd('Index'); 73 | 74 | console.time('Analyze'); 75 | 76 | await query('Analyze objects', pool, `ANALYZE VERBOSE "objects"`); 77 | 78 | console.timeEnd('Analyze'); 79 | 80 | console.time('Convert'); 81 | 82 | await transaction('Objects', pool, async function(db) { 83 | await query('Create type legacy_object_type', db, `CREATE TYPE LEGACY_OBJECT_TYPE AS ENUM ( 'hash', 'zset', 'set', 'list', 'string' )`); 84 | 85 | await query('Create table legacy_object', db, `CREATE TABLE "legacy_object" ( 86 | "_key" TEXT NOT NULL, 87 | "type" LEGACY_OBJECT_TYPE NOT NULL, 88 | "expireAt" TIMESTAMPTZ 89 | DEFAULT NULL 90 | )`); 91 | 92 | await query('Insert into legacy_object (zset)', db, `INSERT INTO "legacy_object" ("_key", "type", "expireAt") 93 | SELECT "data"->>'_key', 'zset'::LEGACY_OBJECT_TYPE, MIN(CASE 94 | WHEN ("data" ? 'expireAt') THEN to_timestamp(("data"->>'expireAt')::double precision / 1000) 95 | ELSE NULL 96 | END) 97 | FROM "objects" 98 | WHERE ("data" ? 'score') 99 | AND ("data"->>'value' IS NOT NULL) 100 | AND ("data"->>'score' IS NOT NULL) 101 | GROUP BY "data"->>'_key'`); 102 | 103 | await query('Insert into legacy_object (string, set, list, hash)', db, `INSERT INTO "legacy_object" ("_key", "type", "expireAt") 104 | SELECT "data"->>'_key', CASE 105 | WHEN (SELECT COUNT(*) FROM jsonb_object_keys("data" - 'expireAt')) = 2 THEN CASE 106 | WHEN ("data" ? 'value') OR ("data" ? 'data') THEN 'string' 107 | WHEN "data" ? 'array' THEN 'list' 108 | WHEN "data" ? 'members' THEN 'set' 109 | ELSE 'hash' 110 | END 111 | ELSE 'hash' 112 | END::LEGACY_OBJECT_TYPE, CASE 113 | WHEN ("data" ? 'expireAt') THEN to_timestamp(("data"->>'expireAt')::double precision / 1000) 114 | ELSE NULL 115 | END 116 | FROM "objects" 117 | WHERE NOT ("data" ? 'score') 118 | AND ("data"->>'_key') NOT LIKE '_imported_%:%'`); 119 | 120 | await query('Add primary key to legacy_object', db, `ALTER TABLE "legacy_object" 121 | ADD PRIMARY KEY ( "_key" )`); 122 | 123 | await query('Create unique index on key__type', db, `ALTER TABLE "legacy_object" 124 | ADD UNIQUE ( "_key", "type" )`); 125 | 126 | await query('Create index on expireAt', db, `CREATE INDEX "idx__legacy_object__expireAt" ON "legacy_object"("expireAt" ASC)`); 127 | 128 | await query('Create temporary index on type', db, `CREATE INDEX "idx__legacy_object__type" ON "legacy_object"("type")`); 129 | }); 130 | 131 | await query('Create table legacy_hash', pool, `CREATE TABLE "legacy_hash" ( 132 | "_key" TEXT NOT NULL, 133 | "data" JSONB NOT NULL, 134 | "type" LEGACY_OBJECT_TYPE NOT NULL 135 | DEFAULT 'hash'::LEGACY_OBJECT_TYPE 136 | CHECK ( "type" = 'hash' ) 137 | )`); 138 | 139 | await query('Create table legacy_zset', pool, `CREATE TABLE "legacy_zset" ( 140 | "_key" TEXT NOT NULL, 141 | "value" TEXT NOT NULL, 142 | "score" NUMERIC NOT NULL, 143 | "type" LEGACY_OBJECT_TYPE NOT NULL 144 | DEFAULT 'zset'::LEGACY_OBJECT_TYPE 145 | CHECK ( "type" = 'zset' ) 146 | )`); 147 | 148 | await query('Create table legacy_set', pool, `CREATE TABLE "legacy_set" ( 149 | "_key" TEXT NOT NULL, 150 | "member" TEXT NOT NULL, 151 | "type" LEGACY_OBJECT_TYPE NOT NULL 152 | DEFAULT 'set'::LEGACY_OBJECT_TYPE 153 | CHECK ( "type" = 'set' ) 154 | )`); 155 | 156 | await query('Create table legacy_list', pool, `CREATE TABLE "legacy_list" ( 157 | "_key" TEXT NOT NULL, 158 | "array" TEXT[] NOT NULL, 159 | "type" LEGACY_OBJECT_TYPE NOT NULL 160 | DEFAULT 'list'::LEGACY_OBJECT_TYPE 161 | CHECK ( "type" = 'list' ) 162 | )`); 163 | 164 | await query('Create table legacy_string', pool, `CREATE TABLE "legacy_string" ( 165 | "_key" TEXT NOT NULL, 166 | "data" TEXT NOT NULL, 167 | "type" LEGACY_OBJECT_TYPE NOT NULL 168 | DEFAULT 'string'::LEGACY_OBJECT_TYPE 169 | CHECK ( "type" = 'string' ) 170 | )`); 171 | 172 | await query('Create type legacy_imported_type', pool, `CREATE TYPE LEGACY_IMPORTED_TYPE AS ENUM ( 'bookmark', 'category', 'favourite', 'group', 'message', 'post', 'room', 'topic', 'user', 'vote' )`); 173 | 174 | await query('Create table legacy_imported', pool, `CREATE TABLE "legacy_imported" ( 175 | "type" LEGACY_IMPORTED_TYPE NOT NULL, 176 | "id" BIGINT NOT NULL, 177 | "data" JSONB NOT NULL 178 | )`); 179 | 180 | await Promise.all([ 181 | query('Insert into legacy_hash', pool, `INSERT INTO "legacy_hash" ("_key", "data") 182 | SELECT l."_key", o."data" - '_key' - 'expireAt' 183 | FROM "legacy_object" l 184 | INNER JOIN "objects" o 185 | ON l."_key" = o."data"->>'_key' 186 | WHERE l."type" = 'hash'`), 187 | query('Insert into legacy_zset', pool, `INSERT INTO "legacy_zset" ("_key", "value", "score") 188 | SELECT l."_key", o."data"->>'value', (o."data"->>'score')::numeric 189 | FROM "legacy_object" l 190 | INNER JOIN "objects" o 191 | ON l."_key" = o."data"->>'_key' 192 | WHERE l."type" = 'zset' 193 | AND o."data"->>'value' IS NOT NULL 194 | AND o."data"->>'score' IS NOT NULL`), 195 | query('Insert into legacy_set', pool, `INSERT INTO "legacy_set" ("_key", "member") 196 | SELECT l."_key", jsonb_array_elements_text(o."data"->'members') 197 | FROM "legacy_object" l 198 | INNER JOIN "objects" o 199 | ON l."_key" = o."data"->>'_key' 200 | WHERE l."type" = 'set'`), 201 | query('Insert into legacy_list', pool, `INSERT INTO "legacy_list" ("_key", "array") 202 | SELECT l."_key", ARRAY(SELECT a.t FROM jsonb_array_elements_text(o."data"->'list') WITH ORDINALITY a(t, i) ORDER BY a.i ASC) 203 | FROM "legacy_object" l 204 | INNER JOIN "objects" o 205 | ON l."_key" = o."data"->>'_key' 206 | WHERE l."type" = 'list'`), 207 | query('Insert into legacy_string', pool, `INSERT INTO "legacy_string" ("_key", "data") 208 | SELECT l."_key", CASE 209 | WHEN o."data" ? 'value' THEN o."data"->>'value' 210 | ELSE o."data"->>'data' 211 | END 212 | FROM "legacy_object" l 213 | INNER JOIN "objects" o 214 | ON l."_key" = o."data"->>'_key' 215 | WHERE l."type" = 'string'`), 216 | query('Insert into legacy_imported', pool, `INSERT INTO "legacy_imported" ("type", "id", "data") 217 | SELECT (regexp_matches(o."data"->>'_key', '^_imported_(.*):'))[1]::LEGACY_IMPORTED_TYPE, 218 | (regexp_matches(o."data"->>'_key', ':(.*)$'))[1]::BIGINT, 219 | o."data" - '_key' 220 | FROM "objects" o 221 | WHERE (o."data"->>'_key') LIKE '_imported_%:%'`) 222 | ]); 223 | 224 | console.timeEnd('Convert'); 225 | 226 | await query('Create view legacy_object_live', pool, `CREATE VIEW "legacy_object_live" AS 227 | SELECT "_key", "type" 228 | FROM "legacy_object" 229 | WHERE "expireAt" IS NULL 230 | OR "expireAt" > CURRENT_TIMESTAMP`); 231 | 232 | console.time('Constraints'); 233 | 234 | await Promise.all([ 235 | async function() { 236 | await query('Add primary key to legacy_hash', pool, `ALTER TABLE "legacy_hash" 237 | ADD PRIMARY KEY ("_key")`); 238 | 239 | await query('Add foreign key to legacy_hash', pool, `ALTER TABLE "legacy_hash" 240 | ADD CONSTRAINT "fk__legacy_hash__key" 241 | FOREIGN KEY ("_key", "type") 242 | REFERENCES "legacy_object"("_key", "type") 243 | ON UPDATE CASCADE 244 | ON DELETE CASCADE`); 245 | }(), 246 | async function() { 247 | await query('Add primary key to legacy_zset', pool, `ALTER TABLE "legacy_zset" 248 | ADD PRIMARY KEY ("_key", "value")`); 249 | 250 | await query('Add foreign key to legacy_zset', pool, `ALTER TABLE "legacy_zset" 251 | ADD CONSTRAINT "fk__legacy_zset__key" 252 | FOREIGN KEY ("_key", "type") 253 | REFERENCES "legacy_object"("_key", "type") 254 | ON UPDATE CASCADE 255 | ON DELETE CASCADE`); 256 | 257 | await query('Create index on key__score', pool, `CREATE INDEX "idx__legacy_zset__key__score" ON "legacy_zset"("_key" ASC, "score" DESC)`); 258 | }(), 259 | async function() { 260 | await query('Add primary key to legacy_set', pool, `ALTER TABLE "legacy_set" 261 | ADD PRIMARY KEY ("_key", "member")`); 262 | 263 | await query('Add foreign key to legacy_set', pool, `ALTER TABLE "legacy_set" 264 | ADD CONSTRAINT "fk__legacy_set__key" 265 | FOREIGN KEY ("_key", "type") 266 | REFERENCES "legacy_object"("_key", "type") 267 | ON UPDATE CASCADE 268 | ON DELETE CASCADE`); 269 | }(), 270 | async function() { 271 | await query('Add primary key to legacy_list', pool, `ALTER TABLE "legacy_list" 272 | ADD PRIMARY KEY ("_key")`); 273 | 274 | await query('Add foreign key to legacy_list', pool, `ALTER TABLE "legacy_list" 275 | ADD CONSTRAINT "fk__legacy_list__key" 276 | FOREIGN KEY ("_key", "type") 277 | REFERENCES "legacy_object"("_key", "type") 278 | ON UPDATE CASCADE 279 | ON DELETE CASCADE`); 280 | }(), 281 | async function() { 282 | await query('Add primary key to legacy_string', pool, `ALTER TABLE "legacy_string" 283 | ADD PRIMARY KEY ("_key")`); 284 | 285 | await query('Add foreign key to legacy_string', pool, `ALTER TABLE "legacy_string" 286 | ADD CONSTRAINT "fk__legacy_string__key" 287 | FOREIGN KEY ("_key", "type") 288 | REFERENCES "legacy_object"("_key", "type") 289 | ON UPDATE CASCADE 290 | ON DELETE CASCADE`); 291 | }(), 292 | async function() { 293 | await query('Add primary key to legacy_imported', pool, `ALTER TABLE "legacy_imported" 294 | ADD PRIMARY KEY ("type", "id")`); 295 | }() 296 | ]); 297 | 298 | console.timeEnd('Constraints'); 299 | 300 | console.time('Cleanup'); 301 | 302 | await Promise.all([ 303 | query('Drop table objects', pool, `DROP TABLE "objects" CASCADE`), 304 | query('Drop temporary index on legacy_objects', pool, `DROP INDEX "idx__legacy_object__type"`) 305 | ]); 306 | 307 | console.timeEnd('Cleanup'); 308 | 309 | await query('Alter tables cluster on', pool, `ALTER TABLE "legacy_object" CLUSTER ON "legacy_object_pkey"; 310 | ALTER TABLE "legacy_hash" CLUSTER ON "legacy_hash_pkey"; 311 | ALTER TABLE "legacy_zset" CLUSTER ON "legacy_zset_pkey"; 312 | ALTER TABLE "legacy_set" CLUSTER ON "legacy_set_pkey"; 313 | ALTER TABLE "legacy_list" CLUSTER ON "legacy_list_pkey"; 314 | ALTER TABLE "legacy_string" CLUSTER ON "legacy_string_pkey"; 315 | ALTER TABLE "legacy_imported" CLUSTER ON "legacy_imported_pkey"`); 316 | 317 | await query('Cluster all tables', pool, `CLUSTER VERBOSE`); 318 | 319 | console.time('Analyze'); 320 | 321 | await Promise.all([ 322 | query('Analyze legacy_object', pool, `ANALYZE VERBOSE "legacy_object"`), 323 | query('Analyze legacy_hash', pool, `ANALYZE VERBOSE "legacy_hash"`), 324 | query('Analyze legacy_zset', pool, `ANALYZE VERBOSE "legacy_zset"`), 325 | query('Analyze legacy_set', pool, `ANALYZE VERBOSE "legacy_set"`), 326 | query('Analyze legacy_list', pool, `ANALYZE VERBOSE "legacy_list"`), 327 | query('Analyze legacy_string', pool, `ANALYZE VERBOSE "legacy_string"`), 328 | query('Analyze legacy_imported', pool, `ANALYZE VERBOSE "legacy_imported"`) 329 | ]); 330 | 331 | console.timeEnd('Analyze'); 332 | }, function (reader, input) { 333 | return transaction('Sessions', pool, async function(db) { 334 | await query('Create table session', db, `CREATE TABLE IF NOT EXISTS "session" ( 335 | "sid" VARCHAR NOT NULL 336 | COLLATE "default", 337 | "sess" JSON NOT NULL, 338 | "expire" TIMESTAMP(6) NOT NULL 339 | ) WITH (OIDS=FALSE)`); 340 | 341 | await require('./postgres/session.js')(db, reader, input); 342 | 343 | await query('Add primary key to session', db, `ALTER TABLE "session" 344 | ADD CONSTRAINT "session_pkey" 345 | PRIMARY KEY ("sid") 346 | NOT DEFERRABLE 347 | INITIALLY IMMEDIATE`); 348 | 349 | await query('Analyze session', db, `ANALYZE VERBOSE "session"`); 350 | }); 351 | }); 352 | } finally { 353 | await pool.end(); 354 | } 355 | } 356 | 357 | module.exports = writer; 358 | -------------------------------------------------------------------------------- /writer/postgres/copy.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const copyFrom = require('pg-copy-streams').from; 4 | const transformRow = require('./transform.js').row; 5 | 6 | async function copyData(db, each) { 7 | var stream = db.query(copyFrom(`COPY "objects" FROM STDIN`)); 8 | 9 | var promise = new Promise(function(resolve, reject) { 10 | stream.on('error', reject); 11 | stream.on('end', resolve); 12 | }); 13 | 14 | function write(data) { 15 | var values = transformRow([JSON.stringify(data)]); 16 | 17 | return new Promise(function(resolve, reject) { 18 | var ok = stream.write(values, 'utf8'); 19 | if (ok) { 20 | return resolve(); 21 | } 22 | stream.once('drain', resolve); 23 | }); 24 | } 25 | 26 | await each(write); 27 | 28 | stream.end(); 29 | 30 | await promise; 31 | } 32 | 33 | module.exports = copyData; 34 | -------------------------------------------------------------------------------- /writer/postgres/session.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const copyFrom = require('pg-copy-streams').from; 4 | const transformRow = require('./transform.js').row; 5 | 6 | async function copySessions(db, input, connection) { 7 | console.time('Copy sessions'); 8 | 9 | try { 10 | var stream = db.query(copyFrom(`COPY "session" FROM STDIN`)); 11 | 12 | var promise = new Promise(function(resolve, reject) { 13 | stream.on('error', reject); 14 | stream.on('end', resolve); 15 | }); 16 | 17 | function write(values) { 18 | return new Promise(function(resolve, reject) { 19 | var ok = stream.write(values, 'utf8'); 20 | if (ok) { 21 | return resolve(); 22 | } 23 | stream.once('drain', resolve); 24 | }); 25 | } 26 | 27 | await input(connection, async function(data) { 28 | var row = transformRow([data.sid, data.sess, new Date(data.expire).toISOString()]); 29 | await write(row); 30 | }); 31 | 32 | stream.end(); 33 | 34 | await promise; 35 | } finally { 36 | console.timeEnd('Copy sessions'); 37 | } 38 | } 39 | 40 | module.exports = copySessions; 41 | -------------------------------------------------------------------------------- /writer/postgres/transaction.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | async function transaction(label, pool, callback) { 4 | const client = await pool.connect(); 5 | console.time(label); 6 | 7 | try { 8 | await client.query(`START TRANSACTION`); 9 | await callback(client); 10 | await client.query(`COMMIT`); 11 | } catch (ex) { 12 | await client.query(`ROLLBACK`); 13 | console.error('Transaction failed: ' + label); 14 | throw ex; 15 | } finally { 16 | console.timeEnd(label); 17 | client.release(); 18 | } 19 | } 20 | 21 | module.exports = transaction; 22 | -------------------------------------------------------------------------------- /writer/postgres/transform.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports.row = function transformRow(columns) { 4 | return columns.map(function(col) { 5 | if (col === null) { 6 | return '\\N'; 7 | } 8 | 9 | return String(col).replace(/[\\\n\r\t]/g, function(x) { 10 | switch (x) { 11 | case '\\': 12 | return '\\\\'; 13 | case '\n': 14 | return '\\n'; 15 | case '\r': 16 | return '\\r'; 17 | case '\t': 18 | return '\\t'; 19 | } 20 | }); 21 | }).join('\t') + '\n'; 22 | } 23 | 24 | module.exports.array = function transformArray(array) { 25 | if (array.length === 0) { 26 | return '{}'; 27 | } 28 | 29 | return '{"' + array.map(function(v) { 30 | return String(v).replace(/[\\"]/g, function(x) { 31 | return '\\' + x; 32 | }); 33 | }).join('","') + '"}'; 34 | } 35 | -------------------------------------------------------------------------------- /writer/redis.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const redis = require('redis'); 4 | 5 | function waitInsert(client) { 6 | return new Promise(function (resolve) { 7 | if (!client.should_buffer) { 8 | return resolve(); 9 | } 10 | client.stream.once('drain', resolve); 11 | }); 12 | } 13 | 14 | async function insertOne(client, data) { 15 | var expireAt = data.expireAt; 16 | delete data.expireAt; 17 | 18 | var nk = Object.keys(data).length; 19 | 20 | if (nk === 3 && data.hasOwnProperty('score') && data.hasOwnProperty('value')) { 21 | client.zadd(data._key, data.score, data.value); 22 | } else if (nk === 2 && data.hasOwnProperty('members')) { 23 | if (data.members.length > 0) { 24 | client.sadd(data._key, data.members); 25 | } 26 | } else if (nk === 2 && data.hasOwnProperty('array')) { 27 | client.lpush(data._key, data.array); 28 | } else if (nk === 2 && data.hasOwnProperty('data') || data.hasOwnProperty('value')) { 29 | client.set(data._key, data.data || data.value); 30 | } else { 31 | var command = [data._key]; 32 | for (var key of Object.keys(data)) { 33 | if (key !== '_key') { 34 | command.push(key, data[key]); 35 | } 36 | } 37 | if (command[1]) { 38 | client.hmset(command); 39 | } 40 | } 41 | 42 | if (expireAt) { 43 | client.pexpireat(data._key, expireAt); 44 | } 45 | 46 | await waitInsert(client); 47 | } 48 | 49 | async function bulkInsert(client, each) { 50 | await each(async function (data) { 51 | await insertOne(client, data); 52 | }); 53 | } 54 | 55 | async function writer(output, concurrency, memory, callback) { 56 | const client = redis.createClient(output); 57 | 58 | try { 59 | await callback(async function (each) { 60 | await bulkInsert(client, each); 61 | }, async function (reader, input) { 62 | await reader(input, async function (each) { 63 | await bulkInsert(client, async function (insert) { 64 | await each(async function (session) { 65 | await insert({ 66 | _key: 'sess:' + session.sid, 67 | data: session.sess, 68 | expireAt: session.expire, 69 | }); 70 | }); 71 | }); 72 | }); 73 | }); 74 | } finally { 75 | client.quit(); 76 | } 77 | } 78 | 79 | module.exports = writer; 80 | --------------------------------------------------------------------------------