├── .codeclimate.yml ├── .eslintrc ├── .github └── workflows │ └── build.yml ├── .gitignore ├── .npmignore ├── .travis.yml ├── CHANGES.md ├── LICENSE ├── README.md ├── docker-compose.yml ├── docs └── create-a-release.md ├── lib ├── intern.js └── qbuilder.js ├── package.json ├── postgresql-store.js └── test ├── postgres.test.js └── support ├── db └── config.js └── docker ├── Dockerfile └── dbschema.sql /.codeclimate.yml: -------------------------------------------------------------------------------- 1 | --- 2 | engines: 3 | requiresafe: 4 | enabled: true 5 | duplication: 6 | enabled: true 7 | config: 8 | languages: 9 | - javascript 10 | eslint: 11 | enabled: true 12 | fixme: 13 | enabled: true 14 | ratings: 15 | paths: 16 | - "**.js" 17 | exclude_paths: 18 | - test/**/* 19 | -------------------------------------------------------------------------------- /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "seneca" 3 | } -------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | # This workflow will do a clean install of node dependencies, build the source code and run tests across different versions of node 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions 3 | 4 | name: build 5 | 6 | on: 7 | push: 8 | branches: [ master ] 9 | pull_request: 10 | branches: [ master ] 11 | 12 | jobs: 13 | build: 14 | timeout-minutes: 4 15 | 16 | strategy: 17 | fail-fast: false 18 | matrix: 19 | os: [ubuntu-latest] 20 | node-version: [12.x] 21 | 22 | runs-on: ${{ matrix.os }} 23 | 24 | services: 25 | postgres: 26 | image: postgres 27 | env: 28 | POSTGRES_PASSWORD: senecatest_ci_07y71809h1 29 | POSTGRES_DB: senecatest_ci_629vv14 30 | POSTGRES_USER: senecatest 31 | ports: 32 | - 5432:5432 33 | options: >- 34 | --health-cmd pg_isready 35 | --health-interval 10s 36 | --health-timeout 5s 37 | --health-retries 5 38 | 39 | steps: 40 | - name: Check out repository code 41 | uses: actions/checkout@v2 42 | 43 | - name: Connect to PostgreSQL 44 | run: PGPASSWORD="${POSTGRES_PASSWORD}" psql -h localhost -U "${POSTGRES_USER}" -d "${POSTGRES_DB}" -f "./test/support/docker/dbschema.sql" 45 | env: 46 | POSTGRES_USER: "senecatest" 47 | POSTGRES_PASSWORD: senecatest_ci_07y71809h1 48 | POSTGRES_DB: "senecatest_ci_629vv14" 49 | 50 | - name: Install dependencies 51 | run: npm install 52 | 53 | - name: Run the tests 54 | run: npm test 55 | 56 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | lib-cov 2 | *.seed 3 | *.log 4 | *.csv 5 | *.dat 6 | *.out 7 | *.pid 8 | *.gz 9 | 10 | pids 11 | logs 12 | results 13 | node_modules/ 14 | npm-debug.log 15 | .idea/ 16 | .settings/ 17 | .vscode/ 18 | docs/coverage.html 19 | docs/annotated 20 | package-lock.json 21 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | lib-cov 2 | *.seed 3 | *.log 4 | *.csv 5 | *.dat 6 | *.out 7 | *.pid 8 | *.gz 9 | 10 | pids 11 | logs 12 | results 13 | scripts/ 14 | test/ 15 | node_modules/ 16 | npm-debug.log 17 | .idea/ 18 | .settings/ 19 | -------------------------------------------------------------------------------- /.travis.yml: 
-------------------------------------------------------------------------------- 1 | language: node_js 2 | sudo: false 3 | 4 | env: 5 | - SENECA_VER=@1.x.x 6 | - SENECA_VER=@2.x.x 7 | - SENECA_VER=@3.x.x 8 | 9 | node_js: 10 | - '6' 11 | - '4' 12 | 13 | addons: 14 | postgresql: "9.4" 15 | 16 | services: 17 | - postgresql 18 | 19 | before_script: 20 | - npm uninstall seneca 21 | - npm install seneca$SENECA_VER 22 | - psql -U postgres -f docker/dbschema.sql 23 | 24 | after_script: 25 | - npm run coveralls 26 | -------------------------------------------------------------------------------- /CHANGES.md: -------------------------------------------------------------------------------- 1 | ## 2.3.0 26-08-2016 2 | 3 | * Updated dependencies 4 | * Added Seneca 3 and Node 6 support 5 | * Dropped Node 0.10, 0.12, 5 support 6 | 7 | ## 2.2.1 2016-08-02 8 | * Updated dependencies to be like Seneca ones 9 | 10 | ## 2.2.0: 2016-07-27 11 | * Updated dependencies 12 | 13 | ## 2.1.0: 2016-06-08 14 | 15 | Updated dependencies 16 | ! Updated pg from 4.x.x to 5.x.x 17 | 18 | ## 2.0.0: 2016-04-05 19 | 20 | Removed the internal conversion that the plugin was making from CamelCase column names to Snake case. Backward compatibility is ensured by allowing the user to pass into options two functions named toColumnName() and fromColumnName() that make this conversion. These should implement the CamelCase to Snake case conversion. More details are provided in the [README](https://github.com/senecajs/seneca-postgres-store) in the **Column name transformation, backward compatibility** section. 21 | 22 | All query generation code related to basic seneca functionality was moved to [seneca-standard-query](https://github.com/senecajs/seneca-standard-query) and the extended query functionality moved to [seneca-store-query](https://github.com/senecajs/seneca-store-query). This doesn't change functionality but enables functionality reuse into other stores. 23 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License 2 | 3 | Copyright (c) 2012 Marian Radulescu 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![Seneca](http://senecajs.org/files/assets/seneca-logo.png) 2 | > A [Seneca.js](http://senecajs.org) data storage plugin 3 | 4 | # seneca-postgres-store 5 | 6 | [![npm version][npm-badge]][npm-url] 7 | [![Build Status][travis-badge]][travis-url] 8 | [![Dependency Status][david-badge]][david-url] 9 | [![Coveralls][BadgeCoveralls]][Coveralls] 10 | [![Gitter][gitter-badge]][gitter-url] 11 | 12 | | ![Voxgig](https://www.voxgig.com/res/img/vgt01r.png) | This open source module is sponsored and supported by [Voxgig](https://www.voxgig.com). | 13 | |---|---| 14 | 15 | ## Description 16 | 17 | seneca-postgres-store is a [PostgreSQL][postgresqlorg] database plugin for the [Seneca][seneca] MVP toolkit. The plugin is using the [node-postgres][nodepg] driver. 18 | For query generation it uses internally the [seneca-standard-query][standard-query] plugin and the standard functionality can be extended by using the [seneca-store-query][store-query] plugin. 19 | 20 | Usage: 21 | 22 | var Seneca = require('seneca'); 23 | var store = require('seneca-postgres-store'); 24 | 25 | var DBConfig = { 26 | name: 'senecatest', 27 | host: 'localhost', 28 | username: 'senecatest', 29 | password: 'senecatest', 30 | port: 5432 31 | } 32 | ... 33 | 34 | var si = Seneca(DBConfig) 35 | si.use(require('seneca-postgres-store'), DBConfig) 36 | si.ready(function() { 37 | var product = si.make('product') 38 | ... 39 | }) 40 | ... 41 | 42 | ### Seneca compatibility 43 | Supports Seneca versions **1.x** - **3.x** 44 | 45 | ### Supported functionality 46 | All Seneca data store supported functionality is implemented in [seneca-store-test](https://github.com/senecajs/seneca-store-test) as a test suite. The tests represent the store functionality specifications. 47 | 48 | ## Usage 49 | You don't use this module directly. It provides an underlying data storage engine for the Seneca entity API: 50 | 51 | ```js 52 | var entity = seneca.make$('typename') 53 | entity.someproperty = "something" 54 | entity.anotherproperty = 100 55 | 56 | entity.save$(function (err, entity) { ... }) 57 | entity.load$({id: ...}, function (err, entity) { ... }) 58 | entity.list$({property: ...}, function (err, entity) { ... }) 59 | entity.remove$({id: ...}, function (err, entity) { ... }) 60 | ``` 61 | 62 | ### Query Support 63 | 64 | The standard Seneca query format is supported. See the [seneca-standard-query][standard-query] plugin for more details. 65 | 66 | ## Extended Query Support 67 | 68 | By using the [seneca-store-query][store-query] plugin its query capabilities can be extended. See the plugin page for more details. 69 | 70 | ## Column name transformation, backward compatibility 71 | 72 | In seneca-postgres-store 2.0 the internal CamelCase to snake_case column names conversion was removed. 73 | 74 | To update from seneca-postgres-store 1.x to 2.x on systems built with seneca-postgres-store 1.x you must provide to the plugin through its options the functions that do the CamelCase to snake_case conversion and back. Any other name transformations to and from database column name can be also made with these. Example: 75 | 76 | ```js 77 | var DefaultConfig = { 78 | ... 
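// Note (based on lib/intern.js): toColumnName is applied to entity attribute
// names when queries are built (e.g. on insert and list), and fromColumnName
// is applied to the column names of returned rows before they become entity
// fields again.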
79 | fromColumnName: function (attr) {
80 | // apply some conversion on column names
81 | return attr.toUpperCase()
82 | },
83 | toColumnName: function (attr) {
84 | // convert back column names
85 | return attr.toLowerCase()
86 | }
87 | }
88 | seneca.use(require('seneca-postgres-store'), DefaultConfig)
89 | ```
90 | 
91 | For a fully functional CamelCase to snake_case implementation sample, see the 'Column Names conversions' test code in postgres.test.js.
92 | 
93 | ## Limits
94 | 
95 | By default, queries are limited to 20 results. This can be bypassed with the `nolimit` option, which, if set to true, removes the limit from all queries.
96 | 
97 | ## Fields
98 | 
99 | To filter the fields returned from the `list` operation, pass a `fields$` array of the column names to return. If no `fields$` are passed, all fields are returned (i.e. `select *` is used). For example:
100 | 
101 | query.fields$ = ['id', 'name']
102 | 
103 | 
104 | Note: the id generated implicitly on `save$` is a UUID. To override this, set `entity.id$` to the desired value.
105 | 
106 | ### Custom ID generator
107 | 
108 | To generate custom ids, the plugin exposes a Seneca action pattern hook that can be overridden (here `idPrefix` and `Uuid` stand in for your own id-generation logic):
109 | 
110 | 
111 | ```js
112 | seneca.add({role: 'sql', hook: 'generate_id', target: 'postgresql-store'}, function (args, done) {
113 | return done(null, {id: idPrefix + Uuid()})
114 | })
115 | 
116 | ```
117 | 
118 | ### Native Driver
119 | As with all seneca stores, you can access the native driver, in this case a `pg`
120 | client, using `entity.native$(function (err, client, releaseConnection) {...})`.
121 | Please make sure that you release the connection after using it.
122 | 
123 | ```js
124 | entity.native$( function (err, client, releaseConnection){
125 | // ... you can use client
126 | // ... then release connection
127 | releaseConnection()
128 | } )
129 | ```
130 | 
131 | ## Contributing
132 | The [Senecajs org][] encourages open participation. If you feel you
133 | can help in any way, be it with documentation, examples, extra
134 | testing, or new features, please get in touch.
135 | 
136 | ## To run tests with Docker
137 | Build the PostgreSQL Docker image:
138 | 
139 | ```sh
140 | npm run build
141 | 
142 | ```
143 | 
144 | Start the PostgreSQL container:
145 | ```sh
146 | npm run start
147 | ```
148 | 
149 | Stop the PostgreSQL container:
150 | ```sh
151 | npm run stop
152 | ```
153 | 
154 | While the container is running you can run the tests in another terminal:
155 | ```sh
156 | npm run test
157 | ```
158 | 
159 | #### Testing for Mac users
160 | Before the tests can be run, you must run `docker-machine env default` and copy the Docker host address (example: '192.168.99.100').
161 | Insert this address into the test/default_config.json file as the value of the host variable. The tests can now be run.
162 | 
163 | 
164 | ## License
165 | Copyright (c) 2012 - 2016, Marian Radulescu and other contributors.
166 | Licensed under [MIT][].
167 | 168 | [MIT]: ./LICENSE 169 | [npm-badge]: https://img.shields.io/npm/v/seneca-postgres-store.svg 170 | [npm-url]: https://npmjs.com/package/seneca-postgres-store 171 | [travis-badge]: https://api.travis-ci.org/senecajs/seneca-postgres-store.svg 172 | [travis-url]: https://travis-ci.org/senecajs/seneca-postgres-store 173 | [david-badge]: https://david-dm.org/senecajs/seneca-postgres-store.svg 174 | [david-url]: https://david-dm.org/senecajs/seneca-postgres-store 175 | [codeclimate-badge]: https://codeclimate.com/github/senecajs/seneca-postgres-store/badges/gpa.svg 176 | [codeclimate-url]: https://codeclimate.com/github/senecajs/seneca-postgres-store 177 | [gitter-badge]: https://badges.gitter.im/Join%20Chat.svg 178 | [gitter-url]: https://gitter.im/senecajs/seneca 179 | [standard-query]: https://github.com/senecajs/seneca-standard-query 180 | [store-query]: https://github.com/senecajs/seneca-store-query 181 | [postgresqlorg]: http://www.postgresql.org/ 182 | [seneca]: http://senecajs.org/ 183 | [nodepg]: https://github.com/brianc/node-postgres 184 | [Senecajs org]: https://github.com/senecajs/ 185 | [Coveralls]: https://coveralls.io/github/senecajs/seneca-postgres-store?branch=master 186 | [BadgeCoveralls]: https://coveralls.io/repos/github/senecajs/seneca-postgres-store/badge.svg?branch=master 187 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | postgres: 2 | build: ./test/support/docker 3 | ports: 4 | - "5432:5432" 5 | environment: 6 | POSTGRES_PASSWORD: senecatest_2086hab80y 7 | POSTGRES_DB: senecatest_71v94h 8 | POSTGRES_USER: senecatest 9 | 10 | -------------------------------------------------------------------------------- /docs/create-a-release.md: -------------------------------------------------------------------------------- 1 | # Creating a release 2 | 3 | 1. Review github issues, triage, close and merge issues related to the release. 4 | 2. Update CHANGES.md, with date release, notes, and version. 5 | 3. Pull down the repository locally on the master branch. 6 | 4. Ensure there are no outstanding commits and the branch is clean. 7 | 5. Run `npm install` and ensure all dependencies correctly install. 8 | 6. Run `npm run test` and ensure testing and linting passes. 9 | 7. Run `npm version vx.x.x -m "version x.x.x"` where `x.x.x` is the version. 10 | 8. Run `git push upstream master --tags` 11 | 9. Run `npm publish` 12 | 10. Go to the [Github release page][Releases] and hit 'Draft a new release'. 13 | 11. Paste the Changelog content for this release and add additional release notes. 14 | 12. Choose the tag version and a title matching the release and publish. 15 | 13. Notify core maintainers of the release via email. 16 | 17 | [Releases]: https://github.com/senecajs/seneca-postgres-store/releases 18 | -------------------------------------------------------------------------------- /lib/intern.js: -------------------------------------------------------------------------------- 1 | const Assert = require('assert') 2 | const Util = require('util') 3 | const Q = require('./qbuilder') 4 | const Uuid = require('uuid').v4 5 | 6 | 7 | const intern = { 8 | asyncMethod(f) { 9 | return function (msg, done, meta) { 10 | const seneca = this 11 | const p = f.call(seneca, msg, meta) 12 | 13 | Assert('function' === typeof p.then && 14 | 'function' === typeof p.catch, 15 | 'The function must be async, i.e. 
return a promise.') 16 | 17 | return p 18 | .then(result => done(null, result)) 19 | .catch(done) 20 | } 21 | }, 22 | 23 | 24 | compact(obj) { 25 | return Object.entries(obj) 26 | .filter(([, v]) => undefined !== v) 27 | .reduce((acc, [k, v]) => { 28 | acc[k] = v 29 | return acc 30 | }, {}) 31 | }, 32 | 33 | 34 | buildCtx(seneca, msg, meta) { 35 | let ctx = {} 36 | let transaction = seneca.entity.state().transaction 37 | 38 | if(transaction && !transaction.finish && false !== msg.transaction$) { 39 | transaction.trace.push({ 40 | when: Date.now(), 41 | msg, 42 | meta, 43 | }) 44 | ctx.transaction = transaction 45 | ctx.client = transaction.client 46 | } 47 | 48 | return ctx 49 | }, 50 | 51 | 52 | async withDbClient(dbPool, ctx, f) { 53 | ctx = ctx || {} 54 | 55 | let isTransaction = !!ctx.transaction 56 | 57 | ctx.client = ctx.client || await dbPool.connect() 58 | 59 | if(isTransaction) { 60 | if(null == ctx.transaction.client) { 61 | ctx.transaction.client = ctx.client 62 | await ctx.client.query('BEGIN') 63 | } 64 | } 65 | 66 | let result 67 | 68 | try { 69 | result = await f(ctx.client) 70 | } 71 | catch(e) { 72 | if(isTransaction) { 73 | await ctx.client.query('ROLLBACK') 74 | } 75 | throw e 76 | } 77 | finally { 78 | if(!isTransaction) { 79 | ctx.client.release() 80 | } 81 | } 82 | 83 | return result 84 | }, 85 | 86 | 87 | async execQuery(query, ctx) { 88 | const { client, seneca } = ctx 89 | 90 | if (!query) { 91 | const err = new Error('An empty query is not a valid query') 92 | return seneca.fail(err) 93 | } 94 | 95 | return client.query(query) 96 | }, 97 | 98 | 99 | async insertEnt(ent, ctx) { 100 | const { 101 | client, 102 | fromColumnName: fromColumn = intern.identity, 103 | toColumnName: toColumn = intern.identity 104 | } = ctx 105 | 106 | const ent_table = intern.tablename(ent) 107 | const entp = intern.makeentp(ent) 108 | 109 | const values = intern.deepXformKeys(toColumn, intern.compact(entp)) 110 | const escapeIdentifier = client.escapeIdentifier.bind(client) 111 | 112 | const ins_query = Q.insertstm({ 113 | into: ent_table, 114 | values, 115 | escapeIdentifier 116 | }) 117 | 118 | const insert = await intern.execQuery(ins_query, ctx) 119 | const ent_fields = intern.deepXformKeys(fromColumn, insert.rows[0]) 120 | 121 | return intern.makeent(ent, ent_fields) 122 | }, 123 | 124 | 125 | async findEnt(ent, q, ctx) { 126 | const { client } = ctx 127 | const ent_table = intern.tablename(ent) 128 | 129 | const query = Q.selectstm({ 130 | columns: '*', 131 | from: ent_table, 132 | where: intern.whereOfQ(q, ctx), 133 | limit: 1, 134 | offset: 0 <= q.skip$ ? q.skip$ : null, 135 | order_by: q.sort$ || null, 136 | escapeIdentifier: client.escapeIdentifier.bind(client) 137 | }) 138 | 139 | const { rows } = await intern.execQuery(query, ctx) 140 | 141 | if (rows.length > 0) { 142 | return intern.makeent(ent, rows[0]) 143 | } 144 | 145 | return null 146 | }, 147 | 148 | 149 | async listEnts(ent, q, ctx) { 150 | const ent_table = intern.tablename(ent) 151 | 152 | const { 153 | client, 154 | fromColumnName: fromColumn = intern.identity, 155 | toColumnName: toColumn = intern.identity 156 | } = ctx 157 | 158 | const columns = q.fields$ 159 | ? intern.deepXformKeys(toColumn, q.fields$) 160 | : '*' 161 | 162 | const where = intern.deepXformKeys(toColumn, intern.whereOfQ(q, ctx)) 163 | 164 | const query = Q.selectstm({ 165 | columns, 166 | from: ent_table, 167 | where, 168 | limit: 0 <= q.limit$ ? q.limit$ : null, 169 | offset: 0 <= q.skip$ ? 
q.skip$ : null, 170 | order_by: q.sort$ || null, 171 | escapeIdentifier: client.escapeIdentifier.bind(client) 172 | }) 173 | 174 | const { rows } = await intern.execQuery(query, ctx) 175 | 176 | return rows 177 | .map((row) => intern.deepXformKeys(fromColumn, row)) 178 | .map((row) => intern.makeent(ent, row)) 179 | }, 180 | 181 | 182 | filterObj(f, obj) { 183 | const out = {} 184 | 185 | for (const k in obj) { 186 | const v = obj[k] 187 | 188 | if (f(k, v)) { 189 | out[k] = v 190 | } 191 | } 192 | 193 | return out 194 | }, 195 | 196 | 197 | whereOfQ(q, ctx) { 198 | if ('string' === typeof q || Array.isArray(q)) { 199 | return { id: q } 200 | } 201 | 202 | 203 | const { seneca } = ctx 204 | 205 | const cq = seneca.util.clean(q) 206 | const ops = intern.filterObj(Q.isOp, q) 207 | 208 | 209 | return { ...cq, ...ops } 210 | }, 211 | 212 | 213 | async upsertEnt(upsert_fields, ent, q, ctx) { 214 | const { client } = ctx 215 | const escapeIdentifier = client.escapeIdentifier.bind(client) 216 | 217 | const ent_table = intern.tablename(ent) 218 | const entp = intern.makeentp(ent) 219 | 220 | const insert_values = intern.compact(entp) 221 | const set_values = intern.compact(entp); delete set_values.id 222 | 223 | const query = Q.insertstm({ 224 | into: ent_table, 225 | values: insert_values, 226 | on_conflict: { 227 | columns: upsert_fields, 228 | do_update: { 229 | set: set_values 230 | } 231 | }, 232 | escapeIdentifier 233 | }) 234 | 235 | const { rows } = await intern.execQuery(query, ctx) 236 | 237 | return intern.makeent(ent, rows[0]) 238 | }, 239 | 240 | 241 | async updateEnt(ent, ctx) { 242 | const { client } = ctx 243 | const escapeIdentifier = client.escapeIdentifier.bind(client) 244 | 245 | const ent_table = intern.tablename(ent) 246 | const entp = intern.makeentp(ent) 247 | 248 | const { id: ent_id } = ent 249 | 250 | const update_query = Q.updatestm({ 251 | table: ent_table, 252 | set: intern.compact(entp), 253 | where: { id: ent_id }, 254 | escapeIdentifier 255 | }) 256 | 257 | const update = await intern.execQuery(update_query, ctx) 258 | const updated_anything = update.rows.length > 0 259 | 260 | if (updated_anything) { 261 | return intern.makeent(ent, update.rows[0]) 262 | } 263 | 264 | 265 | // TODO: Re-write using upserts on the id column: 266 | // 267 | const ins_query = Q.insertstm({ 268 | into: ent_table, 269 | values: intern.compact(entp), 270 | escapeIdentifier 271 | }) 272 | 273 | const insert = await intern.execQuery(ins_query, ctx) 274 | 275 | return intern.makeent(ent, insert.rows[0]) 276 | }, 277 | 278 | 279 | async removeEnt(ent, q, ctx) { 280 | if (q.all$) { 281 | return removeManyEnts(ent, q, ctx) 282 | } 283 | 284 | return removeOneEnt(ent, q, ctx) 285 | 286 | 287 | async function removeOneEnt(ent, q, ctx) { 288 | const { seneca, client } = ctx 289 | 290 | const ent_table = intern.tablename(ent) 291 | const escapeIdentifier = client.escapeIdentifier.bind(client) 292 | 293 | 294 | const sel_query = Q.selectstm({ 295 | columns: ['id'], 296 | from: ent_table, 297 | where: seneca.util.clean(q), 298 | limit: 1, 299 | offset: 0 <= q.skip$ ? 
q.skip$ : null, 300 | order_by: q.sort$ || null, 301 | escapeIdentifier 302 | }) 303 | 304 | const { rows: sel_rows } = await intern.execQuery(sel_query, ctx) 305 | 306 | 307 | const del_query = Q.deletestm({ 308 | from: ent_table, 309 | where: { 310 | id: sel_rows.map(x => x.id) 311 | }, 312 | escapeIdentifier 313 | }) 314 | 315 | const { rows: del_rows } = await intern.execQuery(del_query, ctx) 316 | 317 | if (q.load$) { 318 | return 0 < del_rows.length 319 | ? intern.makeent(ent, del_rows[0]) 320 | : null 321 | } 322 | 323 | return null 324 | } 325 | 326 | 327 | async function removeManyEnts(ent, q, ctx) { 328 | const { seneca, client } = ctx 329 | 330 | const ent_table = intern.tablename(ent) 331 | const escapeIdentifier = client.escapeIdentifier.bind(client) 332 | 333 | 334 | const sel_query = Q.selectstm({ 335 | columns: ['id'], 336 | from: ent_table, 337 | where: seneca.util.clean(q), 338 | limit: 0 <= q.limit$ ? q.limit$ : null, 339 | offset: 0 <= q.skip$ ? q.skip$ : null, 340 | order_by: q.sort$ || null, 341 | escapeIdentifier 342 | }) 343 | 344 | const { rows } = await intern.execQuery(sel_query, ctx) 345 | 346 | 347 | const del_query = Q.deletestm({ 348 | from: ent_table, 349 | where: { 350 | id: rows.map(x => x.id) 351 | }, 352 | escapeIdentifier 353 | }) 354 | 355 | await intern.execQuery(del_query, ctx) 356 | 357 | return 358 | } 359 | }, 360 | 361 | 362 | msgForGenerateId(args) { 363 | const { role, target } = args 364 | return { role, target, hook: 'generate_id' } 365 | }, 366 | 367 | 368 | async askSenecaToGenerateId(args, ctx) { 369 | const { seneca } = ctx 370 | 371 | const act = Util.promisify(seneca.act).bind(seneca) 372 | const result = await act(intern.msgForGenerateId(args)) 373 | 374 | const { id: newId } = result 375 | 376 | return newId 377 | }, 378 | 379 | 380 | generateId() { 381 | return Uuid() 382 | }, 383 | 384 | 385 | getConfig(spec) { 386 | let conf 387 | 388 | if ('string' === typeof spec) { 389 | const urlM = /^postgres:\/\/((.*?):(.*?)@)?(.*?)(:?(\d+))?\/(.*?)$/.exec(spec) 390 | 391 | conf = {} 392 | conf.name = urlM[7] 393 | conf.port = urlM[6] 394 | conf.host = urlM[4] 395 | conf.username = urlM[2] 396 | conf.password = urlM[3] 397 | conf.port = conf.port ? 
parseInt(conf.port, 10) : null 398 | } else { 399 | conf = spec 400 | } 401 | 402 | // pg conf properties 403 | conf.user = conf.username 404 | conf.database = conf.name 405 | 406 | conf.host = conf.host || conf.server 407 | conf.username = conf.username || conf.user 408 | conf.password = conf.password || conf.pass 409 | 410 | return conf 411 | }, 412 | 413 | 414 | maybeUpsert(msg) { 415 | const { ent, q } = msg 416 | 417 | if (!Array.isArray(q.upsert$)) { 418 | return null 419 | } 420 | 421 | const upsertFields = q.upsert$.filter((p) => !p.includes('$')) 422 | 423 | if (0 === upsertFields.length) { 424 | return null 425 | } 426 | 427 | return upsertFields 428 | }, 429 | 430 | 431 | isUpdate(msg) { 432 | const { ent } = msg 433 | return null != ent.id 434 | }, 435 | 436 | 437 | isObject(x) { 438 | return null != x && '[object Object]' === toString.call(x) 439 | }, 440 | 441 | 442 | isDate(x) { 443 | return '[object Date]' === toString.call(x) 444 | }, 445 | 446 | 447 | deepXformKeys(f, x) { 448 | if (Array.isArray(x)) { 449 | return x.map(y => intern.deepXformKeys(f, y)) 450 | } 451 | 452 | if (intern.isObject(x)) { 453 | const out = {} 454 | 455 | for (const k in x) { 456 | out[f(k)] = intern.deepXformKeys(f, x[k]) 457 | } 458 | 459 | return out 460 | } 461 | 462 | return x 463 | }, 464 | 465 | 466 | /** 467 | * NOTE: makeentp is used to create a new persistable entity from the entity 468 | * object. 469 | */ 470 | makeentp(ent) { 471 | const fields = ent.fields$() 472 | const entp = {} 473 | 474 | for (const field of fields) { 475 | if (!intern.isDate(ent[field]) && intern.isObject(ent[field])) { 476 | entp[field] = JSON.stringify(ent[field]) 477 | } else { 478 | entp[field] = ent[field] 479 | } 480 | } 481 | 482 | return entp 483 | }, 484 | 485 | 486 | /** 487 | * NOTE: makeent is used to create a new entity using a row from a database. 488 | * 489 | */ 490 | makeent(ent, row) { 491 | if (!row) { 492 | return null 493 | } 494 | 495 | const fields = Object.keys(row) 496 | const entp = {} 497 | 498 | for (const field of fields) { 499 | let value = row[field] 500 | 501 | try { 502 | const parsed = JSON.parse(row[field]) 503 | 504 | if (intern.isObject(parsed)) { 505 | value = parsed 506 | } 507 | } catch (err) { 508 | if (!(err instanceof SyntaxError)) { 509 | throw err 510 | } 511 | } 512 | 513 | entp[field] = value 514 | } 515 | 516 | return ent.make$(entp) 517 | }, 518 | 519 | 520 | tablename(ent) { 521 | const canon = ent.canon$({ object: true }) 522 | 523 | return (canon.base ? 
canon.base + '_' : '') + canon.name 524 | }, 525 | 526 | 527 | identity(x) { 528 | return x 529 | } 530 | } 531 | 532 | module.exports = { intern } 533 | -------------------------------------------------------------------------------- /lib/qbuilder.js: -------------------------------------------------------------------------------- 1 | const Assert = require('assert') 2 | 3 | const Q = { 4 | insertstm(args) { 5 | const { 6 | into, 7 | values, 8 | escapeIdentifier, 9 | on_conflict = null 10 | } = args 11 | 12 | const col_names = Object.keys(values) 13 | const col_vals = Object.values(values) 14 | 15 | 16 | let bindings = [] 17 | let sql = '' 18 | let param_no = 1 19 | 20 | 21 | sql += 'insert into ' + escapeIdentifier(into) 22 | 23 | 24 | const safe_cols = col_names.map(col_name => escapeIdentifier(col_name)) 25 | sql += ' (' + safe_cols.join(', ') + ')' 26 | 27 | 28 | const val_placeholders = col_vals.map(_ => Q.valuePlaceholder(param_no++)) 29 | sql += ' values (' + val_placeholders.join(', ') + ')' 30 | bindings = bindings.concat(col_vals) 31 | 32 | 33 | if (null != on_conflict) { 34 | const { 35 | columns: confl_cols, 36 | do_update: { set } 37 | } = on_conflict 38 | 39 | 40 | const safe_confl_cols = 41 | confl_cols.map(col_name => escapeIdentifier(col_name)) 42 | 43 | sql += ' on conflict (' + safe_confl_cols + ')' 44 | 45 | const set_q = setstm({ 46 | set, 47 | escapeIdentifier, 48 | first_param_no: param_no 49 | }) 50 | 51 | sql += ' do update set ' + set_q.text 52 | bindings = bindings.concat(set_q.values) 53 | param_no = set_q.next_param_no 54 | } 55 | 56 | 57 | sql += ' returning *' 58 | 59 | 60 | return { text: sql, values: bindings } 61 | }, 62 | 63 | 64 | selectstm(args) { 65 | const { 66 | escapeIdentifier, 67 | from, 68 | columns = '*', 69 | where = null, 70 | offset = null, 71 | limit = null, 72 | order_by = null 73 | } = args 74 | 75 | 76 | let bindings = [] 77 | let sql = '' 78 | let param_no = 1 79 | 80 | 81 | sql += 'select ' 82 | 83 | 84 | if ('*' === columns) { 85 | sql += '*' 86 | } else { 87 | const safe_columns = columns.map(col_name => escapeIdentifier(col_name)) 88 | sql += safe_columns.join(', ') 89 | } 90 | 91 | 92 | sql += ' from ' + escapeIdentifier(from) 93 | 94 | if (null != where) { 95 | const where_q = wherestm({ 96 | where, 97 | escapeIdentifier, 98 | first_param_no: param_no 99 | }) 100 | 101 | sql += ' where ' + where_q.text 102 | bindings = bindings.concat(where_q.values) 103 | param_no = where_q.next_param_no 104 | } 105 | 106 | 107 | if (null != order_by) { 108 | const order_q = orderbystm({ 109 | order_by, 110 | escapeIdentifier, 111 | first_param_no: param_no 112 | }) 113 | 114 | sql += ' order by ' + order_q.text 115 | bindings = bindings.concat(order_q.values) 116 | param_no = order_q.next_param_no 117 | } 118 | 119 | 120 | if (null != limit) { 121 | sql += ' limit ' + Q.valuePlaceholder(param_no++) 122 | bindings.push(limit) 123 | } 124 | 125 | 126 | if (null != offset) { 127 | sql += ' offset ' + Q.valuePlaceholder(param_no++) 128 | bindings.push(offset) 129 | } 130 | 131 | return { text: sql, values: bindings } 132 | }, 133 | 134 | 135 | deletestm(args) { 136 | const { 137 | escapeIdentifier, 138 | from, 139 | where = null, 140 | limit = null 141 | } = args 142 | 143 | let sql = '' 144 | let bindings = [] 145 | let param_no = 1 146 | 147 | sql += 'delete from ' + escapeIdentifier(from) 148 | 149 | if (null != where) { 150 | const where_q = wherestm({ 151 | where, 152 | escapeIdentifier, 153 | first_param_no: param_no 154 | }) 155 | 156 | sql 
+= ' where ' + where_q.text 157 | bindings = bindings.concat(where_q.values) 158 | param_no = bindings.next_param_no 159 | } 160 | 161 | if (null != limit) { 162 | sql += ' limit ' + Q.valuePlaceholder(param_no++) 163 | bindings.push(limit) 164 | } 165 | 166 | sql += ' returning *' 167 | 168 | return { text: sql, values: bindings } 169 | }, 170 | 171 | 172 | updatestm(args) { 173 | const { 174 | table, 175 | set, 176 | where, 177 | escapeIdentifier 178 | } = args 179 | 180 | 181 | let bindings = [] 182 | let sql = '' 183 | let param_no = 1 184 | 185 | 186 | sql += 'update ' + escapeIdentifier(table) + ' set ' 187 | 188 | const set_q = setstm({ set, escapeIdentifier, first_param_no: param_no }) 189 | sql += set_q.text 190 | bindings = bindings.concat(set_q.values) 191 | param_no = set_q.next_param_no 192 | 193 | 194 | const where_q = wherestm({ 195 | where, 196 | first_param_no: param_no, 197 | escapeIdentifier 198 | }) 199 | 200 | sql += ' where ' + where_q.text 201 | bindings = bindings.concat(where_q.values) 202 | param_no = where_q.next_param_no 203 | 204 | 205 | sql += ' returning *' 206 | 207 | 208 | return { text: sql, values: bindings } 209 | }, 210 | 211 | 212 | valuePlaceholder(param_no) { 213 | Assert.strictEqual(typeof param_no, 'number', 'param_no') 214 | return '$' + param_no 215 | }, 216 | 217 | 218 | isOp(k) { 219 | const OPERATORS = [ 220 | 'ne$', 'gte$', 'gt$', 'lte$', 'lt$', 'eq$', 'in$', 'nin$', 'or$', 'and$' 221 | ] 222 | 223 | return OPERATORS.includes(k) 224 | } 225 | } 226 | 227 | 228 | function isObject(x) { 229 | return null != x && '[object Object]' === toString.call(x) 230 | } 231 | 232 | 233 | function normalizeOpsInQuery(q) { 234 | /* 235 | * NOTE: A query may include operators, which may take the following forms 236 | * inside a query, that may look like so: 237 | * 238 | * ``` 239 | * const q = { 240 | * in$: { color: ['orange', 'grey'] }, 241 | * price: { gt$: '9.95' } 242 | * } 243 | * ``` 244 | * 245 | * The purpose of this function is to leave non-operators as is, and put 246 | * the operators into the form: [operator]: { [col0]: v0, ..., [colN]: vN } 247 | * This is the format that the Qbuilder expects, as it should facilitate 248 | * conversion of a query into a SQL expression. 
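  *
  * For the query shown above, the normalized form would be:
  *
  * ```
  * const normalized = {
  *   in$: { color: ['orange', 'grey'] },
  *   gt$: { price: '9.95' }
  * }
  * ```
  *
  * Operator keys end up at the top level, keyed by column, while plain
  * (non-operator) keys pass through unchanged.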
249 | */ 250 | 251 | const out = [] 252 | 253 | for (const k in q) { 254 | if (isObject(q[k])) { 255 | for (const kk in q[k]) { 256 | if (Q.isOp(kk)) { 257 | out[kk] = { [k]: q[k][kk] } 258 | } else { 259 | out[k] = out[k] || {} 260 | out[k][kk] = q[k][kk] 261 | } 262 | } 263 | } else { 264 | out[k] = q[k] 265 | } 266 | } 267 | 268 | return out 269 | } 270 | 271 | 272 | function inexprstm(args) { 273 | const { 274 | column, 275 | in: ary, 276 | escapeIdentifier, 277 | negate = false, 278 | first_param_no = 1 279 | } = args 280 | 281 | Assert(Array.isArray(ary), 'ary must be an array') 282 | 283 | let sql = '' 284 | let bindings = [] 285 | let param_no = first_param_no 286 | 287 | const val_placeholders = ary 288 | .map(_ => Q.valuePlaceholder(param_no++)) 289 | .join(', ') 290 | 291 | if (0 === val_placeholders.length) { 292 | sql += 'false' 293 | } else { 294 | sql += escapeIdentifier(column) 295 | 296 | if (negate) { 297 | sql += ' not' 298 | } 299 | 300 | sql += ' in (' + val_placeholders + ')' 301 | bindings = bindings.concat(ary) 302 | } 303 | 304 | return { 305 | text: sql, 306 | values: bindings, 307 | next_param_no: param_no, 308 | expr: 'in', 309 | } 310 | } 311 | 312 | 313 | function eqexprstm(args) { 314 | const { 315 | column, 316 | value, 317 | escapeIdentifier, 318 | negate = false, 319 | first_param_no = 1 320 | } = args 321 | 322 | let sql = '' 323 | let bindings = [] 324 | let param_no = first_param_no 325 | 326 | 327 | if (null == value) { 328 | sql += escapeIdentifier(column) + ' is' + 329 | (negate ? ' not' : '') + ' null' 330 | } else { 331 | sql += escapeIdentifier(column) + 332 | (negate ? ' != ' : ' = ') + Q.valuePlaceholder(param_no++) 333 | 334 | bindings.push(value) 335 | } 336 | 337 | return { text: sql, values: bindings, next_param_no: param_no } 338 | } 339 | 340 | 341 | function ltexprstm(args) { 342 | const { 343 | column, 344 | value, 345 | escapeIdentifier, 346 | first_param_no = 1 347 | } = args 348 | 349 | let sql = '' 350 | let bindings = [] 351 | let param_no = first_param_no 352 | 353 | 354 | sql += escapeIdentifier(column) + ' < ' + 355 | Q.valuePlaceholder(param_no++) 356 | 357 | bindings.push(value) 358 | 359 | 360 | return { text: sql, values: bindings, next_param_no: param_no } 361 | } 362 | 363 | 364 | function lteexprstm(args) { 365 | const { 366 | column, 367 | value, 368 | escapeIdentifier, 369 | first_param_no = 1 370 | } = args 371 | 372 | let sql = '' 373 | let bindings = [] 374 | let param_no = first_param_no 375 | 376 | 377 | sql += escapeIdentifier(column) + ' <= ' + 378 | Q.valuePlaceholder(param_no++) 379 | 380 | bindings.push(value) 381 | 382 | 383 | return { text: sql, values: bindings, next_param_no: param_no } 384 | } 385 | 386 | 387 | function gtexprstm(args) { 388 | const { 389 | column, 390 | value, 391 | escapeIdentifier, 392 | first_param_no = 1 393 | } = args 394 | 395 | let sql = '' 396 | let bindings = [] 397 | let param_no = first_param_no 398 | 399 | 400 | sql += escapeIdentifier(column) + ' > ' + 401 | Q.valuePlaceholder(param_no++) 402 | 403 | bindings.push(value) 404 | 405 | 406 | return { text: sql, values: bindings, next_param_no: param_no } 407 | } 408 | 409 | 410 | function gteexprstm(args) { 411 | const { 412 | column, 413 | value, 414 | escapeIdentifier, 415 | first_param_no = 1 416 | } = args 417 | 418 | let sql = '' 419 | let bindings = [] 420 | let param_no = first_param_no 421 | 422 | 423 | sql += escapeIdentifier(column) + ' >= ' + 424 | Q.valuePlaceholder(param_no++) 425 | 426 | bindings.push(value) 427 | 
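  // Example (assuming pg's escapeIdentifier, which double-quotes identifiers):
  // gteexprstm({ column: 'price', value: 100, escapeIdentifier, first_param_no: 2 })
  // returns { text: '"price" >= $2', values: [100], next_param_no: 3 }.
  // ltexprstm, lteexprstm and gtexprstm above follow the same pattern with
  // their respective operators.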
428 | 429 | return { text: sql, values: bindings, next_param_no: param_no } 430 | } 431 | 432 | 433 | function wherestm(args) { 434 | const { 435 | escapeIdentifier, 436 | first_param_no = 1 437 | } = args 438 | 439 | const where = normalizeOpsInQuery(args.where) 440 | 441 | 442 | let sql = '' 443 | let bindings = [] 444 | let param_no = first_param_no 445 | 446 | 447 | const update_all = 0 === Object.keys(where).length 448 | 449 | if (update_all) { 450 | sql += 'true' 451 | } else { 452 | const where_cols = Object.keys(where) 453 | 454 | 455 | const qs = where_cols.reduce((acc, where_col) => { 456 | const where_val = where[where_col] 457 | 458 | 459 | if ('in$' === where_col) { 460 | const in_qs = Object.keys(where_val).map(column => { 461 | const in_q = inexprstm({ 462 | column, 463 | in: where_val[column], 464 | escapeIdentifier, 465 | first_param_no: param_no 466 | }) 467 | 468 | param_no = in_q.next_param_no 469 | 470 | return in_q 471 | }) 472 | 473 | return acc.concat(in_qs) 474 | } 475 | 476 | 477 | if ('nin$' === where_col) { 478 | const nin_qs = Object.keys(where_val).map(column => { 479 | const nin_q = inexprstm({ 480 | column, 481 | in: where_val[column], 482 | negate: true, 483 | escapeIdentifier, 484 | first_param_no: param_no 485 | }) 486 | 487 | param_no = nin_q.next_param_no 488 | 489 | return nin_q 490 | }) 491 | 492 | return acc.concat(nin_qs) 493 | } 494 | 495 | 496 | if ('eq$' === where_col) { 497 | const eq_qs = Object.keys(where_val).map(column => { 498 | const eq_q = eqexprstm({ 499 | column, 500 | value: where_val[column], 501 | escapeIdentifier, 502 | first_param_no: param_no 503 | }) 504 | 505 | param_no = eq_q.next_param_no 506 | 507 | return eq_q 508 | }) 509 | 510 | return acc.concat(eq_qs) 511 | } 512 | 513 | 514 | if ('ne$' === where_col) { 515 | const neq_qs = Object.keys(where_val).map(column => { 516 | const neq_q = eqexprstm({ 517 | column, 518 | value: where_val[column], 519 | negate: true, 520 | escapeIdentifier, 521 | first_param_no: param_no 522 | }) 523 | 524 | param_no = neq_q.next_param_no 525 | 526 | return neq_q 527 | }) 528 | 529 | return acc.concat(neq_qs) 530 | } 531 | 532 | 533 | if ('lt$' === where_col) { 534 | const lt_qs = Object.keys(where_val).map(column => { 535 | const lt_q = ltexprstm({ 536 | column, 537 | value: where_val[column], 538 | escapeIdentifier, 539 | first_param_no: param_no 540 | }) 541 | 542 | param_no = lt_q.next_param_no 543 | 544 | return lt_q 545 | }) 546 | 547 | return acc.concat(lt_qs) 548 | } 549 | 550 | 551 | if ('lte$' === where_col) { 552 | const lte_qs = Object.keys(where_val).map(column => { 553 | const lte_q = lteexprstm({ 554 | column, 555 | value: where_val[column], 556 | escapeIdentifier, 557 | first_param_no: param_no 558 | }) 559 | 560 | param_no = lte_q.next_param_no 561 | 562 | return lte_q 563 | }) 564 | 565 | return acc.concat(lte_qs) 566 | } 567 | 568 | 569 | if ('gt$' === where_col) { 570 | const gt_qs = Object.keys(where_val).map(column => { 571 | const gt_q = gtexprstm({ 572 | column, 573 | value: where_val[column], 574 | escapeIdentifier, 575 | first_param_no: param_no 576 | }) 577 | 578 | param_no = gt_q.next_param_no 579 | 580 | return gt_q 581 | }) 582 | 583 | return acc.concat(gt_qs) 584 | } 585 | 586 | 587 | if ('gte$' === where_col) { 588 | const gte_qs = Object.keys(where_val).map(column => { 589 | const gte_q = gteexprstm({ 590 | column, 591 | value: where_val[column], 592 | escapeIdentifier, 593 | first_param_no: param_no 594 | }) 595 | 596 | param_no = gte_q.next_param_no 597 | 598 | 
return gte_q 599 | }) 600 | 601 | return acc.concat(gte_qs) 602 | } 603 | 604 | 605 | if ('or$' === where_col) { 606 | const or_qs = where_val.map((or_where) => { 607 | const or_q = wherestm({ 608 | where: or_where, 609 | first_param_no: param_no, 610 | escapeIdentifier 611 | }) 612 | 613 | param_no = or_q.next_param_no 614 | 615 | return or_q 616 | }) 617 | 618 | const or_q = or_qs.reduce((acc, or_q) => { 619 | return { 620 | text: acc.text + ' or ' + or_q.text, 621 | values: acc.values.concat(or_q.values) 622 | } 623 | }) 624 | 625 | return acc.concat({ ...or_q, text: '(' + or_q.text + ')' }) 626 | } 627 | 628 | 629 | if ('and$' === where_col) { 630 | const and_qs = where_val.map((and_where) => { 631 | const and_q = wherestm({ 632 | where: and_where, 633 | first_param_no: param_no, 634 | escapeIdentifier 635 | }) 636 | 637 | param_no = and_q.next_param_no 638 | 639 | return and_q 640 | }) 641 | 642 | const and_q = and_qs.reduce((acc, and_q) => { 643 | return { 644 | text: acc.text + ' and ' + and_q.text, 645 | values: acc.values.concat(and_q.values) 646 | } 647 | }) 648 | 649 | return acc.concat({ ...and_q, text: '(' + and_q.text + ')' }) 650 | } 651 | 652 | 653 | if (Array.isArray(where_val)) { 654 | const in_q = inexprstm({ 655 | column: where_col, 656 | in: where_val, 657 | escapeIdentifier, 658 | first_param_no: param_no 659 | }) 660 | 661 | param_no = in_q.next_param_no 662 | 663 | return acc.concat(in_q) 664 | } 665 | 666 | 667 | const eq_q = eqexprstm({ 668 | column: where_col, 669 | value: where_val, 670 | escapeIdentifier, 671 | first_param_no: param_no 672 | }) 673 | 674 | param_no = eq_q.next_param_no 675 | 676 | 677 | return acc.concat(eq_q) 678 | }, []) 679 | 680 | 681 | const out = qs.reduce((acc, q) => { 682 | return { 683 | text: acc.text + ' and ' + q.text, 684 | values: acc.values.concat(q.values), 685 | } 686 | }) 687 | 688 | sql += out.text 689 | bindings = out.values 690 | } 691 | 692 | 693 | return { text: sql, values: bindings, next_param_no: param_no } 694 | } 695 | 696 | 697 | function orderbystm(args) { 698 | const { 699 | order_by, 700 | escapeIdentifier, 701 | first_param_no = 1 702 | } = args 703 | 704 | 705 | let sql = '' 706 | let bindings = [] 707 | let param_no = first_param_no 708 | 709 | 710 | let first_pair = true 711 | 712 | for (const order_col in order_by) { 713 | if (!first_pair) { 714 | sql += ', ' 715 | } 716 | 717 | first_pair = false 718 | 719 | 720 | const order_val = order_by[order_col] 721 | const order = 0 <= order_val ? 
'asc' : 'desc' 722 | 723 | sql += escapeIdentifier(order_col) + ' ' + order 724 | } 725 | 726 | 727 | return { text: sql, values: bindings, next_param_no: param_no } 728 | } 729 | 730 | function setstm(args) { 731 | const { 732 | set, 733 | escapeIdentifier, 734 | first_param_no = 1 735 | } = args 736 | 737 | 738 | let sql = '' 739 | let bindings = [] 740 | let param_no = first_param_no 741 | 742 | let first_set = true 743 | 744 | for (const set_col in set) { 745 | const set_val = set[set_col] 746 | 747 | if (!first_set) { 748 | sql += ',' 749 | } 750 | 751 | 752 | sql += ' ' + escapeIdentifier(set_col) + ' = ' + 753 | Q.valuePlaceholder(param_no++) 754 | 755 | bindings.push(set_val) 756 | 757 | 758 | first_set = false 759 | } 760 | 761 | return { text: sql, values: bindings, next_param_no: param_no } 762 | } 763 | 764 | 765 | 766 | module.exports = Q 767 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "seneca-postgres-store", 3 | "version": "2.4.0", 4 | "description": "Seneca data store plugin for PostgreSQL", 5 | "main": "postgresql-store.js", 6 | "author": "Marian Radulescu", 7 | "contributors": [ 8 | "Peter Elger (http://peterelger.com/)", 9 | "Nicolas Herment (https://github.com/nherment)", 10 | "Cristian Ianto (https://github.com/iantocristian)", 11 | "Brian Mullan", 12 | "Michele Capra (https://github.com/piccoloaiutante)", 13 | "Mircea Alexandru (http://alexandrumircea.ro/)", 14 | "Mihai Dima (https://github.com/mihaidma)", 15 | "Shane Lacey (https://github.com/shanel262)", 16 | "marianr (https://github.com/marianr)", 17 | "Adrien Becchis (https://github.com/AdrieanKhisbe)", 18 | "Greenkeeper (https://github.com/greenkeeperio-bot)", 19 | "Cristian Kiss (https://github.com/ckiss)", 20 | "Emer Rutherford (https://github.com/eeswr)", 21 | "Damian Beresford (https://github.com/dberesford)", 22 | "Matteo Collina (https://github.com/mcollina)", 23 | "Max Nachlinger (https://github.com/maxnachlinger)", 24 | "David Cahill (https://github.com/david-cahill)", 25 | "Hiro Asari (https://github.com/BanzaiMan)", 26 | "Christian Savard (https://github.com/savardc)" 27 | ], 28 | "license": "MIT", 29 | "readmeFilename": "README.md", 30 | "repository": { 31 | "type": "git", 32 | "url": "https://github.com/senecajs/seneca-postgres-store.git" 33 | }, 34 | "keywords": [ 35 | "seneca", 36 | "plugin", 37 | "store", 38 | "postgres", 39 | "postgresql", 40 | "pg" 41 | ], 42 | "files": [ 43 | "README.md", 44 | "LICENSE", 45 | "lib" 46 | ], 47 | "scripts": { 48 | "build": "docker-compose build", 49 | "start": "docker-compose up", 50 | "stop": "docker-compose kill", 51 | "test": "lab test -r console -v -t 60 -I AggregateError,atob,btoa,AbortController,AbortSignal,EventTarget,Event,MessageChannel,MessagePort,MessageEvent,performance", 52 | "test-some": "lab test -r console -v -g ", 53 | "coveralls": "lab test/*.test.js -v -r lcov | coveralls", 54 | "coverage": "lab -v -P test -t 80 -r html > docs/coverage.html", 55 | "annotate": "docco lib/postgresql-store.js -o docs/annotated", 56 | "reset": "npm run clean && npm i && npm test", 57 | "clean": "rm -rf node_modules dist package-lock.json yarn.lock", 58 | "repo-tag": "REPO_VERSION=`node -e \"console.log(require('./package').version)\"` && echo TAG: v$REPO_VERSION && git commit -a -m v$REPO_VERSION && git push && git tag v$REPO_VERSION && git push --tags;", 59 | "repo-publish": "npm run clean && npm i 
--registry=https://registry.npmjs.org && npm run repo-publish-quick", 60 | "repo-publish-quick": "npm test && npm run repo-tag && npm publish --access public --registry=https://registry.npmjs.org" 61 | 62 | }, 63 | "dependencies": { 64 | "pg": "8.9.0", 65 | "uuid": "9.0.0" 66 | }, 67 | "devDependencies": { 68 | "@hapi/code": "8", 69 | "@hapi/lab": "23", 70 | "async": "3.2.x", 71 | "code": "3.0.x", 72 | "coveralls": "3.1.x", 73 | "docco": "0.9.x", 74 | "seneca": "3.30.0", 75 | "seneca-entity": "20.0.1", 76 | "seneca-store-test": "5.2.0" 77 | } 78 | } 79 | -------------------------------------------------------------------------------- /postgresql-store.js: -------------------------------------------------------------------------------- 1 | const Assert = require('assert') 2 | const Pg = require('pg') 3 | 4 | const Q = require('./lib/qbuilder') 5 | const { intern } = require('./lib/intern') 6 | const { asyncMethod } = intern 7 | 8 | const STORE_NAME = 'postgresql-store' 9 | const ACTION_ROLE = 'sql' 10 | 11 | 12 | function postgres_store(options) { 13 | const seneca = this 14 | 15 | const { 16 | fromColumnName = intern.identity, 17 | toColumnName = intern.identity 18 | } = options 19 | 20 | 21 | let dbPool 22 | 23 | function configure(spec, done) { 24 | const conf = intern.getConfig(spec) 25 | 26 | dbPool = new Pg.Pool({ 27 | user: conf.user, 28 | host: conf.host, 29 | database: conf.database, 30 | password: conf.password, 31 | port: conf.port 32 | }) 33 | 34 | return done() 35 | } 36 | 37 | const store = { 38 | name: STORE_NAME, 39 | 40 | close: function (_msg, done) { 41 | dbPool.end().then(done).catch(done) 42 | }, 43 | 44 | save: asyncMethod(async function (msg, meta) { 45 | const seneca = this 46 | 47 | let ctx = intern.buildCtx(seneca, msg, meta) 48 | 49 | return intern.withDbClient(dbPool, ctx, async (client) => { 50 | const ctx = { seneca, client, fromColumnName, toColumnName } 51 | 52 | const { ent, q } = msg 53 | const { auto_increment$: autoIncrement = false } = q 54 | 55 | if (intern.isUpdate(msg)) { 56 | return intern.updateEnt(ent, ctx) 57 | } 58 | 59 | 60 | const newEnt = ent.clone$() 61 | 62 | if (!autoIncrement) { 63 | const generatedId = await intern 64 | .askSenecaToGenerateId({ role: ACTION_ROLE, target: STORE_NAME }, ctx) 65 | 66 | const newId = null == ent.id$ 67 | ? 
generatedId 68 | : ent.id$ 69 | 70 | newEnt.id = newId 71 | } 72 | 73 | 74 | const upsertFields = intern.maybeUpsert(msg) 75 | 76 | if (null != upsertFields) { 77 | return intern.upsertEnt(upsertFields, newEnt, q, ctx) 78 | } 79 | 80 | return intern.insertEnt(newEnt, ctx) 81 | }) 82 | }), 83 | 84 | 85 | load: asyncMethod(async function (msg, meta) { 86 | const seneca = this 87 | let ctx = intern.buildCtx(seneca, msg, meta) 88 | 89 | return intern.withDbClient(dbPool, ctx, async (client) => { 90 | const ctx = { seneca, client } 91 | const { qent, q } = msg 92 | 93 | return intern.findEnt(qent, q, ctx) 94 | }) 95 | }), 96 | 97 | 98 | list: asyncMethod(async function (msg, meta) { 99 | const seneca = this 100 | 101 | let ctx = intern.buildCtx(seneca, msg, meta) 102 | 103 | return intern.withDbClient(dbPool, ctx, async (client) => { 104 | const ctx = { seneca, client, fromColumnName, toColumnName } 105 | const { qent, q } = msg 106 | 107 | const nativeQuery = isNativeQuery(q) 108 | 109 | if (null == nativeQuery) { 110 | return intern.listEnts(qent, q, ctx) 111 | } 112 | 113 | const { rows } = await intern.execQuery(nativeQuery, ctx) 114 | 115 | return rows 116 | .map((row) => intern.deepXformKeys(fromColumnName, row)) 117 | .map((row) => intern.makeent(qent, row)) 118 | }) 119 | 120 | function isNativeQuery(q) { 121 | if ('string' === typeof q.native$) { 122 | return toPgSql(q.native$) 123 | } 124 | 125 | if (Array.isArray(q.native$)) { 126 | Assert(0 < q.native$.length, 'q.native$.length') 127 | const [sql, ...bindings] = q.native$ 128 | 129 | return { text: toPgSql(sql), values: bindings } 130 | } 131 | 132 | return null 133 | } 134 | 135 | function toPgSql(sql) { 136 | let param_no = 1 137 | return sql.replace(/\?/g, _ => Q.valuePlaceholder(param_no++)) 138 | } 139 | }), 140 | 141 | remove: asyncMethod(async function (msg, meta) { 142 | const seneca = this 143 | 144 | let ctx = intern.buildCtx(seneca, msg, meta) 145 | 146 | return intern.withDbClient(dbPool, ctx, async (client) => { 147 | const ctx = { seneca, client } 148 | const { qent, q } = msg 149 | 150 | return intern.removeEnt(qent, q, ctx) 151 | }) 152 | }), 153 | 154 | native: function (_msg, done) { 155 | dbPool.connect().then(done).catch(done) 156 | } 157 | } 158 | 159 | 160 | const meta = seneca.store.init(seneca, options, store) 161 | 162 | 163 | seneca.add({ init: store.name, tag: meta.tag }, function (_msg, done) { 164 | return configure(options, done) 165 | }) 166 | 167 | 168 | seneca.add(intern.msgForGenerateId({ role: ACTION_ROLE, target: STORE_NAME }), 169 | function (_msg, done) { 170 | const id = intern.generateId() 171 | return done(null, { id }) 172 | }) 173 | 174 | 175 | 176 | seneca.add('sys:entity,transaction:begin', function(msg,reply) { 177 | // NOTE: `BEGIN` is called in intern.withDbClient 178 | reply({ 179 | get_handle: () => ({ id: this.util.Nid(), name: 'postgres' }) 180 | }) 181 | }) 182 | 183 | seneca.add('sys:entity,transaction:end', function(msg,reply) { 184 | let transaction = msg.details() 185 | let client = transaction.client 186 | 187 | client.query('COMMIT') 188 | .then(()=>{ 189 | reply({ 190 | done: true 191 | }) 192 | }) 193 | .catch((err)=>reply(err)) 194 | }) 195 | 196 | seneca.add('sys:entity,transaction:rollback', function(msg,reply) { 197 | let transaction = msg.details() 198 | let client = transaction.client 199 | 200 | client.query('ROLLBACK') 201 | .then(()=>{ 202 | reply({ 203 | done: false, rollback: true 204 | }) 205 | }) 206 | .catch((err)=>reply(err)) 207 | }) 208 | 209 | // 
seneca.act('sys:entity,hook:intercept,intercept:act_error') 210 | 211 | 212 | return { name: store.name, tag: meta.tag } 213 | } 214 | 215 | 216 | module.exports = postgres_store 217 | -------------------------------------------------------------------------------- /test/postgres.test.js: -------------------------------------------------------------------------------- 1 | const Seneca = require('seneca') 2 | const Lab = require('@hapi/lab') 3 | const lab = (exports.lab = Lab.script()) 4 | const { before, beforeEach, afterEach, describe, it } = lab 5 | const { expect } = require('code') 6 | 7 | const PgStore = require('..') 8 | const DbConfig = require('./support/db/config') 9 | const Shared = require('seneca-store-test') 10 | 11 | const Async = require('async') 12 | const Uuid = require('uuid').v4 13 | const Util = require('util') 14 | 15 | const POSTGRES_STORE_NAME = 'postgresql-store' 16 | 17 | 18 | describe('seneca postgres plugin', () => { 19 | describe('shared tests', () => { 20 | const si = makeSenecaForTest() 21 | 22 | before(() => { 23 | return new Promise(done => { 24 | si.ready(done) 25 | }) 26 | }) 27 | 28 | describe('basic tests', () => { 29 | Shared.basictest({ 30 | seneca: si, 31 | senecaMerge: makeSenecaForTest({ postgres_opts: { merge: false } }), 32 | script: lab 33 | }) 34 | }) 35 | 36 | describe('sort tests', () => { 37 | Shared.sorttest({ 38 | seneca: si, 39 | script: lab 40 | }) 41 | }) 42 | 43 | describe('limit tests', () => { 44 | Shared.limitstest({ 45 | seneca: si, 46 | script: lab 47 | }) 48 | }) 49 | 50 | describe('sql tests', () => { 51 | Shared.sqltest({ 52 | seneca: si, 53 | script: lab 54 | }) 55 | }) 56 | 57 | describe('upsert tests', () => { 58 | Shared.upserttest({ 59 | seneca: si, 60 | script: lab 61 | }) 62 | }) 63 | }) 64 | 65 | describe('postgres', function () { 66 | const si = makeSenecaForTest() 67 | 68 | before(() => { 69 | return new Promise(done => { 70 | si.ready(done) 71 | }) 72 | }) 73 | 74 | beforeEach(clearDb(si)) 75 | 76 | beforeEach(createEntities(si, 'foo', [{ 77 | id$: 'foo1', 78 | p1: 'v1' 79 | }, { 80 | id$: 'foo2', 81 | p1: 'v2', 82 | p2: 'z2' 83 | }])) 84 | 85 | it('save with passing an external id', () => new Promise((resolve, reject) => { 86 | const idPrefix = 'test_' + Uuid() 87 | 88 | si.add({role: 'sql', hook: 'generate_id', target: POSTGRES_STORE_NAME}, function (args, done) { 89 | return done(null, {id: idPrefix + Uuid()}) 90 | }) 91 | 92 | const foo = si.make('foo') 93 | foo.p1 = 'v1' 94 | foo.p2 = 'v2' 95 | 96 | return foo.save$(function (err, foo1) { 97 | if (err) { 98 | return reject(err) 99 | } 100 | 101 | try { 102 | expect(foo1.id).to.exist() 103 | expect(foo1.id).to.startWith(idPrefix) 104 | } catch (err) { 105 | return reject(err) 106 | } 107 | 108 | return foo1.load$(foo1.id, function (err, foo2) { 109 | if (err) { 110 | return reject(err) 111 | } 112 | 113 | try { 114 | expect(foo2).to.exist() 115 | expect(foo2.id).to.equal(foo1.id) 116 | expect(foo2.p1).to.equal('v1') 117 | expect(foo2.p2).to.equal('v2') 118 | } catch (err) { 119 | return reject(err) 120 | } 121 | 122 | return resolve() 123 | }) 124 | }) 125 | })) 126 | 127 | it('should support opaque ids (array) and fields$', () => new Promise((resolve, reject) => { 128 | const foo = si.make('foo') 129 | 130 | return foo.list$({id: ['foo1', 'foo2'], fields$: ['p1']}, function (err, out) { 131 | if (err) { 132 | return reject(err) 133 | } 134 | 135 | const res = sortBy(out, x => x.p1) 136 | 137 | try { 138 | expect(2).to.equal(res.length) 139 | 
expect(res[0].p1).to.equal('v1') 140 | expect(res[0].p2).to.not.exist() 141 | expect(res[0].p3).to.not.exist() 142 | expect(res[1].p1).to.equal('v2') 143 | expect(res[1].p2).to.not.exist() 144 | expect(res[1].p3).to.not.exist() 145 | } catch (err) { 146 | return reject(err) 147 | } 148 | 149 | return resolve() 150 | }) 151 | })) 152 | }) 153 | 154 | describe('transaction', function () { 155 | const si = makeSenecaForTest({entity_opts: { transaction: {active:true} }}) 156 | si.use('promisify') 157 | 158 | before(() => { 159 | return new Promise(done => { 160 | si.ready(done) 161 | }) 162 | }) 163 | 164 | beforeEach(clearDb(si)) 165 | 166 | afterEach(clearDb(si)) 167 | 168 | it('happy', async () => { 169 | let s0 = await si.entity.begin() 170 | await s0.entity('foo').data$({p1:'t1'}).save$() 171 | let tx0 = await s0.entity.end() 172 | 173 | expect(tx0).include({ 174 | handle: { name: 'postgres' }, 175 | result: { done: true }, 176 | }) 177 | // console.log(tx0) 178 | // console.dir(tx0.trace) 179 | 180 | let foos = await si.entity('foo').list$() 181 | expect(foos.length).equal(1) 182 | expect(foos[0].p1).equal('t1') 183 | }) 184 | 185 | 186 | it('rollback', async () => { 187 | let s0 = await si.entity.begin() 188 | 189 | await s0.entity('foo').data$({p1:'t1'}).save$() 190 | 191 | let tx0 = await s0.entity.rollback() 192 | 193 | expect(tx0).include({ 194 | handle: { name: 'postgres' }, 195 | result: { done: false }, 196 | }) 197 | // console.log(tx0) 198 | // console.dir(tx0.trace) 199 | 200 | let foos = await si.entity('foo').list$() 201 | expect(foos.length).equal(0) 202 | }) 203 | 204 | 205 | it('rollback-on-error', async () => { 206 | si.message('foo:red', async function foo_red(msg) { 207 | await this.entity('foo').data$({p1:'t1'}).save$() 208 | throw new Error('BAD') 209 | await this.entity('foo').data$({p1:'t2'}).save$() 210 | return {ok:true} 211 | }) 212 | 213 | let s0 = await si.entity.begin() 214 | 215 | // console.log(s0.entity) 216 | 217 | try { 218 | await s0.post('foo:red') 219 | expect(false).toEqual(true) 220 | } 221 | catch(err) { 222 | // console.log(err.message) 223 | 224 | // Data NOT saved as rolled back 225 | let foos = await s0.entity('foo').list$() 226 | // console.log('FOOS', foos) 227 | expect(foos.length).equal(0) 228 | 229 | let t0 = s0.entity.state() 230 | // console.log(t0.transaction.trace) 231 | expect(t0.transaction.trace.length).equal(1) 232 | } 233 | }) 234 | 235 | 236 | 237 | // TODO: preserved in children 238 | 239 | }) 240 | 241 | 242 | describe('postgres store API V2.0.0', function () { 243 | const si = makeSenecaForTest() 244 | 245 | before(() => { 246 | return new Promise(done => { 247 | si.ready(done) 248 | }) 249 | }) 250 | 251 | beforeEach(clearDb(si)) 252 | 253 | afterEach(clearDb(si)) 254 | 255 | beforeEach(() => new Promise((resolve, reject) => { 256 | const Product = si.make('products') 257 | 258 | return Async.series( 259 | [ 260 | function clear(next) { 261 | Product.remove$({all$: true}, next) 262 | }, 263 | 264 | function create(next) { 265 | const products = [ 266 | Product.make$({label: 'apple', price: '100'}), 267 | Product.make$({label: 'pear', price: '200'}), 268 | Product.make$({label: 'cherry', price: '300'}) 269 | ] 270 | 271 | function saveproduct(product, next) { 272 | product.save$(next) 273 | } 274 | 275 | Async.forEach(products, saveproduct, next) 276 | } 277 | ], 278 | 279 | function (err) { 280 | if (err) { 281 | return reject(err) 282 | } 283 | 284 | return resolve() 285 | } 286 | ) 287 | })) 288 | 289 | it('use not equal 
ne$', () => new Promise((resolve, reject) => { 290 | const product = si.make('products') 291 | 292 | return product.list$({price: {ne$: '200'}, sort$: {price: 1}}, function (err, lst) { 293 | if (err) { 294 | return reject(err) 295 | } 296 | 297 | try { 298 | expect(2).to.equal(lst.length) 299 | expect('apple').to.equal(lst[0].label) 300 | expect('cherry').to.equal(lst[1].label) 301 | } catch (err) { 302 | return reject(err) 303 | } 304 | 305 | return resolve() 306 | }) 307 | })) 308 | 309 | it('use not equal ne$ string', () => new Promise((resolve, reject) => { 310 | const product = si.make('products') 311 | 312 | return product.list$({label: {ne$: 'pear'}, sort$: {price: 1}}, function (err, lst) { 313 | if (err) { 314 | return reject(err) 315 | } 316 | 317 | try { 318 | expect(2).to.equal(lst.length) 319 | expect('apple').to.equal(lst[0].label) 320 | expect('cherry').to.equal(lst[1].label) 321 | } catch (err) { 322 | return reject(err) 323 | } 324 | 325 | return resolve() 326 | }) 327 | })) 328 | 329 | it('use eq$', () => new Promise((resolve, reject) => { 330 | const product = si.make('products') 331 | 332 | return product.list$({price: {eq$: '200'}}, function (err, lst) { 333 | if (err) { 334 | return reject(err) 335 | } 336 | 337 | try { 338 | expect(1).to.equal(lst.length) 339 | expect('pear').to.equal(lst[0].label) 340 | } catch (err) { 341 | return reject(err) 342 | } 343 | 344 | return resolve() 345 | }) 346 | })) 347 | 348 | it('use eq$ string', () => new Promise((resolve, reject) => { 349 | const product = si.make('products') 350 | 351 | return product.list$({label: {eq$: 'pear'}}, function (err, lst) { 352 | if (err) { 353 | return reject(err) 354 | } 355 | 356 | try { 357 | expect(1).to.equal(lst.length) 358 | expect('pear').to.equal(lst[0].label) 359 | } catch (err) { 360 | return reject(err) 361 | } 362 | 363 | return resolve() 364 | }) 365 | })) 366 | 367 | it('use gte$', () => new Promise((resolve, reject) => { 368 | const product = si.make('products') 369 | 370 | return product.list$({price: {gte$: '200'}, sort$: {price: 1}}, function (err, lst) { 371 | if (err) { 372 | return reject(err) 373 | } 374 | 375 | try { 376 | expect(2).to.equal(lst.length) 377 | expect('pear').to.equal(lst[0].label) 378 | expect('cherry').to.equal(lst[1].label) 379 | } catch (err) { 380 | return reject(err) 381 | } 382 | 383 | return resolve() 384 | }) 385 | })) 386 | 387 | it('use gt$', () => new Promise((resolve, reject) => { 388 | const product = si.make('products') 389 | 390 | return product.list$({price: {gt$: '200'}, sort$: {price: 1}}, function (err, lst) { 391 | if (err) { 392 | return reject(err) 393 | } 394 | 395 | try { 396 | expect(1).to.equal(lst.length) 397 | expect('cherry').to.equal(lst[0].label) 398 | } catch (err) { 399 | return reject(err) 400 | } 401 | 402 | return resolve() 403 | }) 404 | })) 405 | 406 | it('use lte$', () => new Promise((resolve, reject) => { 407 | const product = si.make('products') 408 | 409 | return product.list$({price: {lte$: '200'}, sort$: {price: 1}}, function (err, lst) { 410 | if (err) { 411 | return reject(err) 412 | } 413 | 414 | try { 415 | expect(2).to.equal(lst.length) 416 | expect('apple').to.equal(lst[0].label) 417 | expect('pear').to.equal(lst[1].label) 418 | } catch (err) { 419 | return reject(err) 420 | } 421 | 422 | return resolve() 423 | }) 424 | })) 425 | 426 | it('use lt$', () => new Promise((resolve, reject) => { 427 | const product = si.make('products') 428 | 429 | return product.list$({price: {lt$: '200'}, sort$: {price: 1}}, 
function (err, lst) {
430 | if (err) {
431 | return reject(err)
432 | }
433 |
434 | try {
435 | expect(1).to.equal(lst.length)
436 | expect('apple').to.equal(lst[0].label)
437 | } catch (err) {
438 | return reject(err)
439 | }
440 |
441 | return resolve()
442 | })
443 | }))
444 |
445 | it('use in$', () => new Promise((resolve, reject) => {
446 | const product = si.make('products')
447 |
448 | return product.list$({price: {in$: ['200', '300']}, sort$: {price: 1}}, function (err, lst) {
449 | if (err) {
450 | return reject(err)
451 | }
452 |
453 | try {
454 | expect(2).to.equal(lst.length)
455 | expect('pear').to.equal(lst[0].label)
456 | expect('cherry').to.equal(lst[1].label)
457 | } catch (err) {
458 | return reject(err)
459 | }
460 |
461 | return resolve()
462 | })
463 | }))
464 |
465 | it('use in$ string', () => new Promise((resolve, reject) => {
466 | const product = si.make('products')
467 |
468 | return product.list$({label: {in$: ['cherry', 'pear']}, sort$: {price: 1}}, function (err, lst) {
469 | if (err) {
470 | return reject(err)
471 | }
472 |
473 | try {
474 | expect(2).to.equal(lst.length)
475 | expect('pear').to.equal(lst[0].label)
476 | expect('cherry').to.equal(lst[1].label)
477 | } catch (err) {
478 | return reject(err)
479 | }
480 |
481 | return resolve()
482 | })
483 | }))
484 |
485 | it('use in$ one matching', () => new Promise((resolve, reject) => {
486 | const product = si.make('products')
487 |
488 | return product.list$({price: {in$: ['200', '500', '700']}, sort$: {price: 1}}, function (err, lst) {
489 | if (err) {
490 | return reject(err)
491 | }
492 |
493 | try {
494 | expect(1).to.equal(lst.length)
495 | expect('pear').to.equal(lst[0].label)
496 | } catch (err) {
497 | return reject(err)
498 | }
499 |
500 | return resolve()
501 | })
502 | }))
503 |
504 | it('use in$ no matching', () => new Promise((resolve, reject) => {
505 | const product = si.make('products')
506 |
507 | return product.list$({price: {in$: ['250', '500', '700']}, sort$: {price: 1}}, function (err, lst) {
508 | if (err) {
509 | return reject(err)
510 | }
511 |
512 | try {
513 | expect(err).to.not.exist()
514 | expect(0).to.equal(lst.length)
515 | } catch (err) {
516 | return reject(err)
517 | }
518 |
519 | return resolve()
520 | })
521 | }))
522 |
523 | it('use nin$ three matching', () => new Promise((resolve, reject) => {
524 | const product = si.make('products')
525 |
526 | return product.list$({price: {nin$: ['250', '500', '700']}, sort$: {price: 1}}, function (err, lst) {
527 | if (err) {
528 | return reject(err)
529 | }
530 |
531 | try {
532 | expect(3).to.equal(lst.length)
533 | } catch (err) {
534 | return reject(err)
535 | }
536 |
537 | return resolve()
538 | })
539 | }))
540 |
541 | it('use nin$ one matching', () => new Promise((resolve, reject) => {
542 | const product = si.make('products')
543 |
544 | return product.list$({price: {nin$: ['200', '500', '300']}, sort$: {price: 1}}, function (err, lst) {
545 | if (err) {
546 | return reject(err)
547 | }
548 |
549 | try {
550 | expect(1).to.equal(lst.length)
551 | expect('apple').to.equal(lst[0].label)
552 | } catch (err) {
553 | return reject(err)
554 | }
555 |
556 | return resolve()
557 | })
558 | }))
559 |
560 | it('use complex in$ and nin$', () => new Promise((resolve, reject) => {
561 | const product = si.make('products')
562 |
563 | return product.list$({
564 | price: {nin$: ['250', '500', '300'],
565 | in$: ['200', '300']
566 | },
567 | sort$: {price: 1}
568 | }, function (err, lst) {
569 | if (err) {
570 | return reject(err)
571 | }
572 |
573 | try {
574 | expect(1).to.equal(lst.length)
575 | expect('pear').to.equal(lst[0].label)
576 | } catch (err) {
577 | return reject(err)
578 | }
579 |
580 | return resolve()
581 | })
582 | }))
583 |
584 | it('use nin$ string', () => new Promise((resolve, reject) => {
585 | const product = si.make('products')
586 |
587 | return product.list$({label: {nin$: ['cherry', 'pear']}, sort$: {price: 1}}, function (err, lst) {
588 | if (err) {
589 | return reject(err)
590 | }
591 |
592 | try {
593 | expect(1).to.equal(lst.length)
594 | expect('apple').to.equal(lst[0].label)
595 | } catch (err) {
596 | return reject(err)
597 | }
598 |
599 | return resolve()
600 | })
601 | }))
602 |
603 | it('use or$', () => new Promise((resolve, reject) => {
604 | const product = si.make('products')
605 |
606 | return product.list$({or$: [{label: 'cherry'}, {price: '200'}], sort$: {price: 1}}, function (err, lst) {
607 | if (err) {
608 | return reject(err)
609 | }
610 |
611 | try {
612 | expect(2).to.equal(lst.length)
613 | expect('pear').to.equal(lst[0].label)
614 | expect('cherry').to.equal(lst[1].label)
615 | } catch (err) {
616 | return reject(err)
617 | }
618 |
619 | return resolve()
620 | })
621 | }))
622 |
623 | it('use and$', () => new Promise((resolve, reject) => {
624 | const product = si.make('products')
625 |
626 | return product.list$({and$: [{label: 'cherry'}, {price: '300'}], sort$: {price: 1}}, function (err, lst) {
627 | if (err) {
628 | return reject(err)
629 | }
630 |
631 | try {
632 | expect(1).to.equal(lst.length)
633 | expect('cherry').to.equal(lst[0].label)
634 | } catch (err) {
635 | return reject(err)
636 | }
637 |
638 | return resolve()
639 | })
640 | }))
641 |
642 | it('use and$ & or$', () => new Promise((resolve, reject) => {
643 | const product = si.make('products')
644 |
645 | return product.list$({
646 | or$: [{price: {gte$: '200'}}, {and$: [{label: 'cherry'}, {price: '300'}]}],
647 | sort$: {price: 1}
648 | }, function (err, lst) {
649 | if (err) {
650 | return reject(err)
651 | }
652 |
653 | try {
654 | expect(2).to.equal(lst.length)
655 | expect('pear').to.equal(lst[0].label)
656 | expect('cherry').to.equal(lst[1].label)
657 | } catch (err) {
658 | return reject(err)
659 | }
660 |
661 | return resolve()
662 | })
663 | }))
664 |
665 | it('use and$ & or$ and limit$', () => new Promise((resolve, reject) => {
666 | const product = si.make('products')
667 |
668 | return product.list$({
669 | or$: [{price: {gte$: '200'}}, {and$: [{label: 'cherry'}, {price: '300'}]}],
670 | sort$: {price: 1},
671 | limit$: 1,
672 | fields$: ['label']
673 | }, function (err, lst) {
674 | if (err) {
675 | return reject(err)
676 | }
677 |
678 | try {
679 | expect(1).to.equal(lst.length)
680 | expect('pear').to.equal(lst[0].label)
681 | expect(lst[0].price).to.not.exist()
682 | } catch (err) {
683 | return reject(err)
684 | }
685 |
686 | return resolve()
687 | })
688 | }))
689 |
690 | it('use and$ & or$ and limit$, fields$ and skip$', () => new Promise((resolve, reject) => {
691 | const product = si.make('products')
692 |
693 | return product.list$({
694 | price: {gte$: '200'},
695 | sort$: {price: 1},
696 | limit$: 1,
697 | fields$: ['label'],
698 | skip$: 1
699 | }, function (err, lst) {
700 | if (err) {
701 | return reject(err)
702 | }
703 |
704 | try {
705 | expect(1).to.equal(lst.length)
706 | expect('cherry').to.equal(lst[0].label)
707 | expect(lst[0].price).to.not.exist()
708 | } catch (err) {
709 | return reject(err)
710 | }
711 |
712 | return resolve()
713 | })
714 | }))
715 |
716 | describe('#save$', () => {
717 |
describe('auto_increment$:true', () => { 718 | describe('normally', () => { 719 | it('relies on the database to generate the id', () => new Promise((resolve, reject) => { 720 | si.make('auto_incrementors') 721 | .data$({ value: 37 }) 722 | .save$({ auto_increment$: true }, function (err, ent) { 723 | if (err) { 724 | return reject(err) 725 | } 726 | 727 | try { 728 | expect(ent).to.exist() 729 | expect(typeof ent.id).to.equal('number') 730 | expect(ent.value).to.equal(37) 731 | } catch (err) { 732 | return reject(err) 733 | } 734 | 735 | return si.make('auto_incrementors').load$(ent.id, function (err, ent) { 736 | if (err) { 737 | return reject(err) 738 | } 739 | 740 | try { 741 | expect(ent).to.exist() 742 | expect(typeof ent.id).to.equal('number') 743 | expect(ent.value).to.equal(37) 744 | } catch (err) { 745 | return reject(err) 746 | } 747 | 748 | return resolve() 749 | }) 750 | }) 751 | })) 752 | }) 753 | 754 | describe('when upserting', () => { 755 | describe('no match exists', () => { 756 | it('relies on the database to generate the id', () => new Promise((resolve, reject) => { 757 | si.make('auto_incrementors') 758 | .data$({ value: 37 }) 759 | .save$({ auto_increment$: true, upsert$: ['value'] }, function (err, ent) { 760 | if (err) { 761 | return reject(err) 762 | } 763 | 764 | try { 765 | expect(ent).to.exist() 766 | expect(typeof ent.id).to.equal('number') 767 | expect(ent.value).to.equal(37) 768 | } catch (err) { 769 | return reject(err) 770 | } 771 | 772 | return si.make('auto_incrementors').load$(ent.id, function (err, ent) { 773 | if (err) { 774 | return reject(err) 775 | } 776 | 777 | try { 778 | expect(ent).to.exist() 779 | expect(typeof ent.id).to.equal('number') 780 | expect(ent.value).to.equal(37) 781 | } catch (err) { 782 | return reject(err) 783 | } 784 | 785 | return resolve() 786 | }) 787 | }) 788 | })) 789 | }) 790 | }) 791 | }) 792 | }) 793 | }) 794 | 795 | describe('Column Names conversions', function () { 796 | describe('Default CamelCase to snake_case conversion', function () { 797 | const si = makeSenecaForTest() 798 | 799 | before(() => { 800 | return new Promise(done => { 801 | si.ready(done) 802 | }) 803 | }) 804 | 805 | beforeEach(clearDb(si)) 806 | 807 | beforeEach(createEntities(si, 'foo', [{ 808 | fooBar: 'fooBar', 809 | bar_foo: 'bar_foo' 810 | }])) 811 | 812 | it('should not alter CamelCase column names, in list$', () => new Promise((resolve, reject) => { 813 | const foo = si.make('foo') 814 | 815 | return foo.list$({native$: 'SELECT * FROM foo WHERE "fooBar" = \'fooBar\''}, function (err, res) { 816 | if (err) { 817 | return reject(err) 818 | } 819 | 820 | try { 821 | expect(res.length).to.equal(1) 822 | expect(res[0].fooBar).to.equal('fooBar') 823 | } catch (err) { 824 | return reject(err) 825 | } 826 | 827 | return resolve() 828 | }) 829 | })) 830 | 831 | it('should not alter snake_case column names, in list$', () => new Promise((resolve, reject) => { 832 | const foo = si.make('foo') 833 | 834 | return foo.list$({native$: 'SELECT * FROM foo WHERE bar_foo = \'bar_foo\''}, function (err, res) { 835 | if (err) { 836 | return reject(err) 837 | } 838 | 839 | try { 840 | expect(res.length).to.equal(1) 841 | expect(res[0].bar_foo).to.equal('bar_foo') 842 | 843 | return resolve() 844 | } catch (err) { 845 | return reject(err) 846 | } 847 | }) 848 | })) 849 | 850 | it('should not alter snake_case column names, in list$', () => new Promise((resolve, reject) => { 851 | const foo = si.make('foo') 852 | 853 | return foo.list$({ bar_foo: 'bar_foo' }, function 
(err, res) { 854 | if (err) { 855 | return reject(err) 856 | } 857 | 858 | try { 859 | expect(res.length).to.equal(1) 860 | expect(res[0].bar_foo).to.equal('bar_foo') 861 | } catch (err) { 862 | return reject(err) 863 | } 864 | 865 | return resolve() 866 | }) 867 | })) 868 | }) 869 | 870 | describe('Custom CamelCase to snake_case conversion', function () { 871 | const si = makeSenecaForTest({ 872 | postgres_opts: { 873 | toColumnName: camelToSnakeCase, 874 | fromColumnName: snakeToCamelCase 875 | } 876 | }) 877 | 878 | before(() => { 879 | return new Promise(done => { 880 | si.ready(done) 881 | }) 882 | }) 883 | 884 | beforeEach(clearDb(si)) 885 | 886 | beforeEach(createEntities(si, 'foo', [{ 887 | barFoo: 'barFoo' 888 | }])) 889 | 890 | it('should convert the CamelCase column name to snake case, in list$', () => new Promise((resolve, reject) => { 891 | const foo = si.make('foo') 892 | 893 | return foo.list$({native$: 'SELECT * FROM foo WHERE "bar_foo" = \'barFoo\''}, function (err, res) { 894 | if (err) { 895 | return reject(err) 896 | } 897 | 898 | try { 899 | expect(res.length).to.equal(1) 900 | expect(res[0].barFoo).to.equal('barFoo') 901 | } catch (err) { 902 | return reject(err) 903 | } 904 | 905 | return resolve() 906 | }) 907 | })) 908 | 909 | it('should convert the CamelCase column name to snake case, in list$', () => new Promise((resolve, reject) => { 910 | const foo = si.make('foo') 911 | 912 | return foo.list$({ barFoo: 'barFoo' }, function (err, res) { 913 | if (err) { 914 | return reject(err) 915 | } 916 | 917 | try { 918 | expect(res.length).to.equal(1) 919 | expect(res[0].barFoo).to.equal('barFoo') 920 | } catch (err) { 921 | return reject(err) 922 | } 923 | 924 | return resolve() 925 | }) 926 | })) 927 | 928 | 929 | const UpperCaseRegExp = /[A-Z]/g 930 | 931 | // Replace "camelCase" with "camel_case" 932 | function camelToSnakeCase (field) { 933 | UpperCaseRegExp.lastIndex = 0 934 | return field.replace(UpperCaseRegExp, function (str, offset) { 935 | return ('_' + str.toLowerCase()) 936 | }) 937 | } 938 | 939 | // Replace "snake_case" with "snakeCase" 940 | function snakeToCamelCase (column) { 941 | const arr = column.split('_') 942 | let field = arr[0] 943 | 944 | for (let i = 1; i < arr.length; i++) { 945 | field += arr[i][0].toUpperCase() + arr[i].slice(1, arr[i].length) 946 | } 947 | 948 | return field 949 | } 950 | }) 951 | }) 952 | }) 953 | 954 | function makeSenecaForTest(opts = {}) { 955 | const si = Seneca().test() 956 | 957 | const { entity_opts = {}, postgres_opts = {} } = opts 958 | 959 | si.use('seneca-entity', { 960 | mem_store: false, 961 | ...entity_opts 962 | }) 963 | 964 | si.use(PgStore, { ...DbConfig, ...postgres_opts }) 965 | 966 | return si 967 | } 968 | 969 | function clearDb(si) { 970 | return () => new Promise(done => { 971 | Async.series([ 972 | function clearFoo(next) { 973 | si.make('foo').remove$({ all$: true }, next) 974 | }, 975 | 976 | function clearBar(next) { 977 | si.make('zen', 'moon', 'bar').remove$({ all$: true }, next) 978 | }, 979 | 980 | function clearProduct(next) { 981 | si.make('products').remove$({ all$: true }, next) 982 | }, 983 | 984 | function clearAutoIncrementors(next) { 985 | si.make('auto_incrementors').remove$({ all$: true }, next) 986 | } 987 | ], done) 988 | }) 989 | } 990 | 991 | function createEntities(si, name, data) { 992 | return () => { 993 | return new Promise((done) => { 994 | Async.each( 995 | data, 996 | function (el, next) { 997 | si.make$(name, el).save$(next) 998 | }, 999 | done 1000 | ) 1001 | }) 1002 | 
} 1003 | } 1004 | 1005 | function sortBy(ary, f) { 1006 | return [...ary].sort((a, b) => { 1007 | const x = f(a) 1008 | const y = f(b) 1009 | 1010 | if (x < y) { 1011 | return -1 1012 | } 1013 | 1014 | if (x > y) { 1015 | return 1 1016 | } 1017 | 1018 | return 0 1019 | }) 1020 | } 1021 | 1022 | -------------------------------------------------------------------------------- /test/support/db/config.js: -------------------------------------------------------------------------------- 1 | function getConfig() { 2 | if (process.env.CI) { 3 | return { 4 | name: "senecatest_ci_629vv14", 5 | host: "localhost", 6 | port: 5432, 7 | username: "senecatest", 8 | password: "senecatest_ci_07y71809h1", 9 | options: {} 10 | } 11 | } 12 | 13 | return { 14 | name: "senecatest_71v94h", 15 | host: "localhost", 16 | port: 5432, 17 | username: "senecatest", 18 | password: "senecatest_2086hab80y", 19 | options: {} 20 | } 21 | } 22 | 23 | module.exports = getConfig() 24 | -------------------------------------------------------------------------------- /test/support/docker/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM postgres 2 | 3 | ADD dbschema.sql /docker-entrypoint-initdb.d 4 | 5 | EXPOSE 5432 6 | -------------------------------------------------------------------------------- /test/support/docker/dbschema.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE foo 2 | ( 3 | id character varying PRIMARY KEY, 4 | p1 character varying, 5 | p2 character varying, 6 | p3 character varying, 7 | x int, 8 | y int, 9 | int_arr integer[], 10 | "fooBar" character varying, 11 | bar_foo character varying, 12 | unique(x) 13 | ); 14 | 15 | CREATE TABLE moon_bar 16 | ( 17 | str character varying, 18 | id character varying PRIMARY KEY, 19 | "int" integer, 20 | bol boolean, 21 | wen timestamp with time zone, 22 | mark character varying, 23 | "dec" real, 24 | arr integer[], 25 | obj json, 26 | seneca text 27 | ); 28 | 29 | CREATE TABLE auto_incrementors 30 | ( 31 | id SERIAL PRIMARY KEY, 32 | value integer not null, 33 | unique(value) 34 | ); 35 | 36 | CREATE TABLE products 37 | ( 38 | id character varying PRIMARY KEY, 39 | price decimal not null, 40 | label character varying default null, 41 | coolness_factor integer default null, 42 | unique(label), 43 | unique(label, price) 44 | ); 45 | 46 | CREATE TABLE players 47 | ( 48 | id character varying PRIMARY KEY, 49 | username character varying not null, 50 | favorite_car character varying default null, 51 | points integer default null, 52 | points_history integer[] default null, 53 | unique(username) 54 | ); 55 | 56 | CREATE TABLE racers 57 | ( 58 | id character varying PRIMARY KEY, 59 | points integer not null default 0, 60 | username character varying not null, 61 | favorite_car character varying not null, 62 | unique(username) 63 | ); 64 | 65 | CREATE TABLE users 66 | ( 67 | id character varying PRIMARY KEY, 68 | username character varying not null, 69 | email character varying not null, 70 | unique(email) 71 | ); 72 | 73 | CREATE TABLE customers 74 | ( 75 | id character varying PRIMARY KEY, 76 | first_name character varying not null, 77 | last_name character varying not null, 78 | credits integer not null, 79 | unique(first_name, last_name) 80 | ); 81 | 82 | --------------------------------------------------------------------------------
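
For reference, the following is a minimal usage sketch (not a file in the repository) showing how the store can be wired up against the local test database described by test/support/db/config.js and test/support/docker/dbschema.sql. It assumes the published package name matches the repository name (seneca-postgres-store) and that the Postgres instance built from test/support/docker is running on localhost:5432 with dbschema.sql applied.

// Minimal sketch under the assumptions above: connect the store to the local
// test database and round-trip one row of the "foo" table from dbschema.sql.
const Seneca = require('seneca')

const seneca = Seneca()
  .use('seneca-entity')
  .use('seneca-postgres-store', {
    // Connection values mirror the non-CI branch of test/support/db/config.js.
    name: 'senecatest_71v94h',
    host: 'localhost',
    port: 5432,
    username: 'senecatest',
    password: 'senecatest_2086hab80y',
    options: {}
  })

seneca.ready(function () {
  // Save a row, then load it back by id to confirm it reached Postgres.
  seneca.make('foo', { p1: 'v1', p2: 'v2' }).save$(function (err, foo) {
    if (err) throw err

    seneca.make('foo').load$(foo.id, function (err, found) {
      if (err) throw err
      console.log(found.id, found.p1, found.p2)
      seneca.close()
    })
  })
})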