├── .gitignore
├── .jshintrc
├── .npmignore
├── .travis.yml
├── LICENSE
├── Makefile
├── README.md
├── gulpfile.js
├── lib
│   ├── adapter.js
│   ├── error.js
│   ├── procedures.js
│   ├── spatial.js
│   ├── sql.js
│   └── util.js
├── package.json
└── test
    └── integration
        ├── pg
        │   ├── index.js
        │   └── models.js
        ├── runner.js
        └── wl
            ├── index.js
            └── models.js
/.gitignore:
--------------------------------------------------------------------------------
1 | dist/
2 | *.sw*
3 |
4 | # Logs
5 | logs
6 | *.log
7 |
8 | # Runtime data
9 | pids
10 | *.pid
11 | *.seed
12 |
13 | # Directory for instrumented libs generated by jscoverage/JSCover
14 | lib-cov
15 |
16 | # Coverage directory used by tools like istanbul
17 | coverage
18 |
19 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
20 | .grunt
21 |
22 | # Compiled binary addons (http://nodejs.org/api/addons.html)
23 | build/*
24 |
25 | # Dependency directory
26 | # Deployed apps should consider commenting this line out:
27 | # see https://npmjs.org/doc/faq.html#Should-I-check-my-node_modules-folder-into-git
28 | node_modules
29 |
--------------------------------------------------------------------------------
/.jshintrc:
--------------------------------------------------------------------------------
1 | {
2 | "asi": true,
3 | "indent": 2,
4 | "maxdepth": 6,
5 | "maxlen": 120,
6 | "expr": true,
7 | "trailing": true,
8 | "node": true,
9 | "esnext": true
10 | }
11 |
--------------------------------------------------------------------------------
/.npmignore:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/waterlinejs/postgresql-adapter/5fd72e442d1476041150cd44ed5959bc17cbc4ce/.npmignore
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: node_js
2 | node_js:
3 | - 4
4 | - stable
5 |
6 | sudo: false
7 |
8 | addons:
9 | postgresql: '9.4'
10 |
11 | notifications:
12 | email: false
13 |
14 | before_script:
15 | - psql -U postgres -c "create extension postgis"
16 |
17 | deploy:
18 | provider: npm
19 | email: waterline@balderdash.io
20 | api_key:
21 | secure: VV3RBq1A5kJetCCeoFzBAk6ZLnjyWxQ4rh4M6ApbEjgzfa+xHRKA8jCtulx2dh9cs/DL9QjiO33kINo/H34uB2Vb64yuNSoexDYVo4gtNgtqjCkrQHpOq9tegV0nU9k5O/A9N75iP0GtBtbvONGoEUfZBB4kAWGTiGzBJrQXrMooSPYzOaY9yrUI005oCtNzp+fcVJEEaq7/mxUnBjbjQQvXfK1vElaVdIul2c8t7byIs5S5JTbhdggSyOIF8ugU420i09++ZafVZU3VRRggTN9u72oRHW1wvq2rEdkx8kuTYIbwv8nAjd/YYSID+OtPwJjXXkA4TlDT7dYQ4AHKbfARHpz3RARaFfoIV1qo/qBJQRYIU58VYucAsyzOIlJzZmX8A+Bc09NTR9/W8H9selas5iEUIDMCy6fDha3ReDOKuJOaScIeKRat5tjRvTTLB5I8lAK1oWcoZ3e0RsZzEzYG8hQP+Jfs8yVFJbKOmbg8lYKVRtO5I58QaSXr2uNz8iMrOT3Ib5BPFiMgytauFucHWqRrqpfdlDdUHVwqXC0lt4rwTNrAUoRhA+oOhVtUaa8R2P2XWlh9pw0OvjuPW+X5hJErSHsi+URXFm9ivvBUV6oMBVCSmBPej3EiEpMRbQymZArIAuAVwzC9abMeP0qcYHw9I/8L+L4N4NXHB1A=
22 | on:
23 | tags: true
24 | repo: waterlinejs/postgresql-adapter
25 | all_branches: true
26 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2015 Balderdash, Inc.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | MOCHA_OPTS= --check-leaks --compilers js:babel/register
2 | REPORTER = spec
3 |
4 | test: build test-unit test-integration
5 |
6 | build:
7 | gulp
8 |
9 | test-unit:
10 | @NODE_ENV=test ./node_modules/.bin/mocha \
11 | --reporter $(REPORTER) \
12 | $(MOCHA_OPTS)
13 |
14 | test-integration:
15 | @NODE_ENV=test node test/integration/runner.js
16 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # PostgreSQL Waterline Adapter
2 |
3 | [![NPM version][npm-image]][npm-url]
4 | [![Build status][ci-image]][ci-url]
5 | [![Dependency Status][daviddm-image]][daviddm-url]
6 | [![Code Climate][codeclimate-image]][codeclimate-url]
7 |
8 | A Waterline adapter for [PostgreSQL](http://www.postgresql.org/), with [PostGIS](http://postgis.net/) support.
9 |
10 | ## Features
 11 | - A complete rewrite of the original sails-postgresql adapter in ES6, and *way* faster
12 | - Uses [knex.js](http://knexjs.org/) for query building and connection pooling
13 | - PostGIS 2.1+ Support
14 |
15 | ## Compatibility
16 | - Waterline v0.11 and later
17 | - PostgreSQL 9.4 and later
18 |
19 | ## Install
20 |
21 | ```sh
22 | $ npm install waterline-postgresql --save
23 | ```
24 |
25 | ## Configuration
26 |
27 | #### `config/connections.js`
28 |
29 | ```js
30 | module.exports.connections = {
31 | // ...
32 | postgresdb: {
33 | /**
34 | * This 'connection' object could also be a connection string
35 | * e.g. 'postgresql://user:password@localhost:5432/databaseName?ssl=false'
36 | */
37 | connection: {
38 | database: 'databaseName',
39 | host: 'localhost',
40 | user: 'user',
41 | password: 'password',
42 | port: 5432,
43 | ssl: false
44 | },
45 | /**
46 | * Pool configuration
47 | */
48 | pool: {
49 | min: 2,
50 | max: 20
51 | }
52 | }
53 | }
54 | ```
55 |
56 | ## License
57 | MIT
58 |
59 | ## Maintained By
 60 | [langa.io](http://langa.io)
61 |
62 | [waterline-version-image]: https://goo.gl/goisO1
63 | [waterline-url]: http://sailsjs.org
64 | [npm-image]: https://img.shields.io/npm/v/waterline-postgresql.svg?style=flat
65 | [npm-url]: https://npmjs.org/package/waterline-postgresql
66 | [ci-image]: https://img.shields.io/travis/waterlinejs/postgresql-adapter/master.svg?style=flat
67 | [ci-url]: https://travis-ci.org/waterlinejs/postgresql-adapter
68 | [daviddm-image]: http://img.shields.io/david/waterlinejs/postgresql-adapter.svg?style=flat
69 | [daviddm-url]: https://david-dm.org/waterlinejs/postgresql-adapter
70 | [codeclimate-image]: https://img.shields.io/codeclimate/github/waterlinejs/postgresql-adapter.svg?style=flat
71 | [codeclimate-url]: https://codeclimate.com/github/waterlinejs/postgresql-adapter
72 |
--------------------------------------------------------------------------------
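A minimal usage sketch tying the configuration above to a model. The `user` model, the adapter key, and the credentials are illustrative, not part of this repo; the pattern mirrors the harnesses in `test/integration/`:

```js
const Waterline = require('waterline')
const Adapter = require('waterline-postgresql')

const waterline = new Waterline()

// Hypothetical model bound to the `postgresdb` connection from the README
waterline.loadCollection(Waterline.Collection.extend({
  identity: 'user',
  connection: 'postgresdb',
  attributes: {
    name: 'string'
  }
}))

waterline.initialize({
  adapters: { postgresql: Adapter },
  connections: {
    postgresdb: {
      adapter: 'postgresql',
      connection: {
        database: 'databaseName',
        host: 'localhost',
        user: 'user',
        password: 'password'
      }
    }
  }
}, (err, orm) => {
  if (err) throw err
  orm.collections.user.create({ name: 'alice' })
    .then(record => console.log(record.id, record.name))
})
```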
/gulpfile.js:
--------------------------------------------------------------------------------
1 | var gulp = require('gulp');
2 | var babel = require('gulp-babel');
3 |
4 | gulp.task('default', function () {
5 | return gulp.src([ 'lib/**' ])
6 | .pipe(babel())
7 | .pipe(gulp.dest('dist'));
8 | });
9 |
--------------------------------------------------------------------------------
/lib/adapter.js:
--------------------------------------------------------------------------------
1 | import Knex from 'knex'
2 | import _ from 'lodash'
3 | import camelize from 'camelize'
4 | import WaterlineSequel from 'waterline-sequel'
5 |
6 | import KnexPostgis from 'knex-postgis'
7 | import WaterlineError from 'waterline-errors'
8 | import AdapterError from './error'
9 | import Util from './util'
10 | import SpatialUtil from './spatial'
11 | import SQL from './sql'
12 |
13 | const Adapter = {
14 |
15 | identity: 'waterline-postgresql',
16 |
17 | wlSqlOptions: {
18 | parameterized: true,
19 | caseSensitive: false,
20 | escapeCharacter: '"',
21 | wlNext: {
22 | caseSensitive: true
23 | },
24 | casting: true,
25 | canReturnValues: true,
26 | escapeInserts: true,
27 | declareDeleteAlias: false
28 | },
29 |
30 | /**
31 | * Local connections store
32 | */
33 | connections: new Map(),
34 |
35 | //pkFormat: 'string',
36 | syncable: true,
37 |
38 | /**
39 | * Adapter default configuration
40 | */
41 | defaults: {
42 | schema: true,
43 | debug: process.env.WL_DEBUG || false,
44 |
45 | connection: {
46 | host: 'localhost',
47 | user: 'postgres',
48 | password: 'postgres',
49 | database: 'postgres',
50 | port: 5432
51 | },
52 |
53 | pool: {
54 | min: 1,
55 | max: 16,
56 | ping (knex, cb) {
57 | return knex.query('SELECT 1', cb)
58 | },
59 | pingTimeout: 10 * 1000,
60 | syncInterval: 2 * 1000,
61 | idleTimeout: 30 * 1000,
62 | acquireTimeout: 300 * 1000
63 | }
64 | },
65 |
66 | /**
67 | * This method runs when a connection is initially registered
68 | * at server-start-time. This is the only required method.
69 | *
70 | * @param {[type]} connection [description]
 71 |    * @param {[type]}   collections  [description]
72 | * @param {Function} cb [description]
73 | * @return {[type]} [description]
74 | */
75 | registerConnection (connection, collections, cb) {
76 | if (!connection.identity) {
77 | return cb(WaterlineError.adapter.IdentityMissing)
78 | }
79 | if (Adapter.connections.get(connection.identity)) {
80 | return cb(WaterlineError.adapter.IdentityDuplicate)
81 | }
82 |
83 | _.defaultsDeep(connection, Adapter.defaults)
84 |
85 | let knex = Knex({
86 | client: 'pg',
87 | connection: connection.url || connection.connection,
88 | pool: connection.pool,
89 | debug: process.env.WATERLINE_DEBUG_SQL || connection.debug
90 | })
91 | let cxn = {
92 | identity: connection.identity,
93 | schema: Adapter.buildSchema(connection, collections),
94 | collections: collections,
95 | config: connection,
96 | knex: knex,
97 | st: KnexPostgis(knex)
98 | }
99 |
100 | return Util.initializeConnection(cxn)
101 | .then(() => {
102 | Adapter.connections.set(connection.identity, cxn)
103 | cb()
104 | })
105 | .catch(cb)
106 | },
107 |
108 | /**
109 | * Construct the waterline schema for the given connection.
110 | *
111 | * @param connection
112 | * @param collections[]
113 | */
114 | buildSchema (connection, collections) {
115 | return _.chain(collections)
116 | .map((model, modelName) => {
117 | let definition = _.get(model, [ 'waterline', 'schema', model.identity ])
118 | return _.defaultsDeep(definition, {
119 | attributes: { },
120 | tableName: modelName
121 | })
122 | })
123 | .keyBy('tableName')
124 | .value()
125 | },
126 |
127 | /**
128 | * Return the version of the PostgreSQL server as an array
129 | * e.g. for Postgres 9.3.9, return [ '9', '3', '9' ]
130 | */
131 | getVersion (cxn) {
132 | return cxn.knex
133 | .raw('select version() as version')
134 | .then(({ rows: [row] }) => {
135 | return row.version.split(' ')[1].split('.')
136 | })
137 | },
138 |
139 | /**
140 | * Describe a table. List all columns and their properties.
141 | *
142 | * @param connectionName
143 | * @param tableName
144 | */
145 | describe (connectionName, tableName, cb) {
146 | let cxn = Adapter.connections.get(connectionName)
147 |
148 | return cxn.knex(tableName).columnInfo()
149 | .then(columnInfo => {
150 | if (_.isEmpty(columnInfo)) {
151 | return cb()
152 | }
153 |
154 | return Adapter._query(cxn, SQL.indexes, [ tableName ])
155 | .then(({ rows }) => {
156 | _.merge(columnInfo, _.keyBy(camelize(rows), 'columnName'))
157 | _.isFunction(cb) && cb(null, columnInfo)
158 | })
159 | })
160 | .catch(AdapterError.wrap(cb))
161 | },
162 |
163 | /**
164 | * Perform a direct SQL query on the database
165 | *
166 | * @param connectionName
167 | * @param tableName
168 | * @param queryString
169 | * @param data
170 | */
171 | query (connectionName, tableName, queryString, args, cb) {
172 | let cxn = Adapter.connections.get(connectionName)
173 |
174 | return Adapter._query(cxn, queryString, args)
175 | .then((result = { }) => {
176 | _.isFunction(cb) && cb(null, result)
177 | return result
178 | })
179 | .catch(AdapterError.wrap(cb))
180 | },
181 |
182 | _query (cxn, query, values) {
183 | return cxn.knex.raw(Util.toKnexRawQuery(query), Util.castValues(values))
184 | .then((result = { }) => result)
185 | },
186 |
187 | /**
188 | * Create a new table
189 | *
190 | * @param connectionName
191 | * @param tableName
192 | * @param definition - the waterline schema definition for model
193 | * @param cb
194 | */
195 | define (connectionName, _tableName, definition, cb) {
196 | let cxn = Adapter.connections.get(connectionName)
197 | let schema = cxn.collections[_tableName]
198 | let tableName = _tableName.substring(0, 63)
199 |
200 | return cxn.knex.schema
201 | .hasTable(tableName)
202 | .then(exists => {
203 | if (exists) return
204 |
205 | return cxn.knex.schema.createTable(tableName, table => {
206 | _.each(definition, (definition, attributeName) => {
207 | let newColumn = Util.toKnexColumn(table, attributeName, definition, schema, cxn.collections)
208 | Util.applyColumnConstraints(newColumn, definition)
209 | })
210 | Util.applyTableConstraints(table, definition)
211 | })
212 | })
213 | .then(() => {
214 | //console.log('created table', tableName, schema)
215 | _.isFunction(cb) && cb()
216 | })
217 | .catch(AdapterError.wrap(cb))
218 | },
219 |
220 | /**
221 | * Drop a table
222 | */
223 | drop (connectionName, tableName, relations = [ ], cb = relations) {
224 | let cxn = Adapter.connections.get(connectionName)
225 |
226 | return cxn.knex.schema.dropTableIfExists(tableName)
227 | .then(() => {
228 | return Promise.all(_.map(relations, relation => {
229 | return cxn.knex.schema.dropTableIfExists(relation)
230 | }))
231 | })
232 | .then(() => {
233 | _.isFunction(cb) && cb()
234 | })
235 | .catch(AdapterError.wrap(cb))
236 | },
237 |
238 | /**
239 | * Add a column to a table
240 | */
241 | addAttribute (connectionName, tableName, attributeName, definition, cb) {
242 | let cxn = Adapter.connections.get(connectionName)
243 | let schema = cxn.collections[tableName]
244 |
245 | return cxn.knex.schema
246 | .table(tableName, table => {
247 | let newColumn = Util.toKnexColumn(table, attributeName, definition, schema, cxn.collections)
248 | Util.applyColumnConstraints(newColumn, definition)
249 | })
250 | .then(() => {
251 | _.isFunction(cb) && cb()
252 | })
253 | .catch(AdapterError.wrap(cb))
254 | },
255 |
256 | /**
257 | * Remove a column from a table
258 | */
259 | removeAttribute (connectionName, tableName, attributeName, cb) {
260 | let cxn = Adapter.connections.get(connectionName)
261 |
262 | return cxn.knex.schema
263 | .table(tableName, table => {
264 | table.dropColumn(attributeName)
265 | })
266 | .then(result => {
267 | _.isFunction(cb) && cb(null, result)
268 | return result
269 | })
270 | .catch(AdapterError.wrap(cb))
271 | },
272 |
273 | /**
274 | * Create a new record
275 | */
276 | create (connectionName, tableName, data, cb) {
277 | let cxn = Adapter.connections.get(connectionName)
278 | let insertData = Util.sanitize(data, cxn.collections[tableName], cxn)
279 | let schema = cxn.collections[tableName]
280 | let spatialColumns = SpatialUtil.buildSpatialSelect(schema.definition, tableName, cxn)
281 |
282 | return cxn.knex(tableName)
283 | .insert(insertData)
284 | .returning([ '*', ...spatialColumns ])
285 | .then(rows => {
286 | let casted = Util.castResultRows(rows, schema)
287 | let result = _.isArray(data) ? casted : casted[0]
288 |
289 | _.isFunction(cb) && cb(null, result)
290 | return result
291 | })
292 | .catch(AdapterError.wrap(cb, null, data))
293 | },
294 |
295 | /**
296 | * Create multiple records
297 | */
298 | createEach (connectionName, tableName, records, cb) {
299 | // TODO use knex.batchInsert
300 | return Adapter.create(connectionName, tableName, records, cb)
301 | },
302 |
303 | /**
304 | * Update a record
305 | */
306 | update (connectionName, tableName, options, data, cb) {
307 | let cxn = Adapter.connections.get(connectionName)
308 | let schema = cxn.collections[tableName]
309 | let wlsql = new WaterlineSequel(cxn.schema, Adapter.wlSqlOptions)
310 | let spatialColumns = SpatialUtil.getSpatialColumns(schema.definition)
311 | let updateData = _.omit(data, _.keys(spatialColumns))
312 |
313 | return new Promise((resolve, reject) => {
314 | if (_.isEmpty(data)) {
315 | return Adapter.find(connectionName, tableName, options, cb)
316 | }
317 | resolve(wlsql.update(tableName, options, updateData))
318 | })
319 | .then(({ query, values }) => {
320 | return Adapter._query(cxn, query, values)
321 | })
322 | .then(({ rows }) => {
323 | cb && cb(null, rows)
324 | })
325 | .catch(AdapterError.wrap(cb, null, data))
326 | },
327 |
328 | /**
329 | * Destroy a record
330 | */
331 | destroy (connectionName, tableName, options, cb) {
332 | let cxn = Adapter.connections.get(connectionName)
333 | let wlsql = new WaterlineSequel(cxn.schema, Adapter.wlSqlOptions)
334 |
335 | return new Promise((resolve, reject) => {
336 | resolve(wlsql.destroy(tableName, options))
337 | })
338 | .then(({ query, values }) => {
339 | return Adapter._query(cxn, query, values)
340 | })
341 | .then(({ rows }) => {
342 | cb(null, rows)
343 | })
344 | .catch(AdapterError.wrap(cb))
345 | },
346 |
347 | /**
348 | * Populate record associations
349 | */
350 | join (connectionName, tableName, options, cb) {
351 | let cxn = Adapter.connections.get(connectionName)
352 | let schema = cxn.collections[tableName]
353 |
354 | return Util.buildKnexJoinQuery (cxn, tableName, options)
355 | .then(result => {
356 | // return unique records only.
357 | // TODO move to SQL
358 | _.each(_.reject(options.joins, { select: false }), join => {
359 | let alias = Util.getJoinAlias(join)
360 | let pk = Adapter.getPrimaryKey(cxn, join.child)
361 | let schema = cxn.collections[join.child]
362 |
363 | _.each(result, row => {
364 | row[alias] = Util.castResultRows(_.compact(_.uniqBy(row[alias], pk)), schema)
365 | })
366 | })
367 |
368 | return result
369 | })
370 | .then(result => {
371 | result = Util.castResultRows(result, schema)
372 | _.isFunction(cb) && cb(null, result)
373 | return result
374 | })
375 | .catch(AdapterError.wrap(cb))
376 | },
377 |
378 | /**
379 | * Get the primary key column of a table
380 | */
381 | getPrimaryKey ({ collections }, tableName) {
382 | let definition = collections[tableName].definition
383 |
384 |
385 | if (!definition._pk) {
386 | let pk = _.findKey(definition, (attr, name) => {
387 | return attr.primaryKey === true
388 | })
389 | definition._pk = pk || 'id'
390 | }
391 |
392 | return definition._pk
393 | },
394 |
395 | /**
396 | * Find records
397 | */
398 | find (connectionName, tableName, options, cb) {
399 | let cxn = Adapter.connections.get(connectionName)
400 | let wlsql = new WaterlineSequel(cxn.schema, Adapter.wlSqlOptions)
401 | let schema = cxn.collections[tableName]
402 |
403 | //console.log('find', tableName, options)
404 | //console.log('schema types', schema._types)
405 |
406 | return new Promise((resolve, reject) => {
407 | resolve(wlsql.find(tableName, options))
408 | })
409 | .then(({ query: [query], values: [values] }) => {
410 | let spatialColumns = SpatialUtil.buildSpatialSelect(schema.definition, tableName, cxn)
411 | let fullQuery = Util.addSelectColumns(spatialColumns, query)
412 |
413 | //console.log('fullQuery', fullQuery)
414 | //console.log('values', values)
415 |
416 | return Adapter._query(cxn, fullQuery, values)
417 | })
418 | .then(({ rows }) => {
419 | let result = Util.castResultRows(rows, schema)
420 | _.isFunction(cb) && cb(null, result)
421 | return result
422 | })
423 | .catch(AdapterError.wrap(cb))
424 | },
425 |
426 | /**
427 | * Count the number of records
428 | */
429 | count (connectionName, tableName, options, cb) {
430 | let cxn = Adapter.connections.get(connectionName)
431 | let wlsql = new WaterlineSequel(cxn.schema, Adapter.wlSqlOptions)
432 |
433 | return new Promise((resolve, reject) => {
434 | resolve(wlsql.count(tableName, options))
435 | })
436 | .then(({ query: [query], values: [values] }) => {
437 | return Adapter._query(cxn, query, values)
438 | })
439 | .then(({ rows: [row] }) => {
440 | let count = Number(row.count)
441 | _.isFunction(cb) && cb(null, count)
442 | return count
443 | })
444 | .catch(AdapterError.wrap(cb))
445 | },
446 |
447 | /**
448 | * Run queries inside of a transaction.
449 | *
450 | * Model.transaction(txn => {
451 | * Model.create({ ... }, txn)
452 | * .then(newModel => {
453 | * return Model.update(..., txn)
454 | * })
455 | * })
456 | * .then(txn.commit)
457 | * .catch(txn.rollback)
458 | */
459 | transaction (connectionName, tableName, cb) {
460 | let cxn = Adapter.connections.get(connectionName)
461 |
462 | return new Promise(resolve => {
463 | cxn.knex.transaction(txn => {
464 | _.isFunction(cb) && cb(null, txn)
465 | resolve(txn)
466 | })
467 | })
468 | },
469 |
470 | /**
471 | * Invoke a database function, aka "stored procedure"
472 | *
473 | * @param connectionName
474 |    *
475 | * @param procedureName the name of the stored procedure to invoke
476 | * @param args An array of arguments to pass to the stored procedure
477 | */
478 | procedure (connectionName, procedureName, args = [ ], cb = args) {
479 | let cxn = Adapter.connections.get(connectionName)
480 | let procedure = cxn.storedProcedures[procedureName.toLowerCase()]
481 |
482 | if (!procedure) {
483 | let error = new Error(`No stored procedure found with the name ${procedureName}`)
484 | return (_.isFunction(cb) ? cb(error) : Promise.reject(error))
485 | }
486 |
487 | return procedure.invoke(args)
488 | .then(result => {
489 | _.isFunction(cb) && cb(null, result)
490 | return result
491 | })
492 | .catch(AdapterError.wrap(cb))
493 | },
494 |
495 | /**
496 | * Stream query results
497 | *
498 | * TODO not tested
499 | */
500 | stream (connectionName, tableName, options, outputStream) {
501 | let cxn = Adapter.connections.get(connectionName)
502 | let wlsql = new WaterlineSequel(cxn.schema, Adapter.wlSqlOptions)
503 |
504 | return new Promise((resolve, reject) => {
505 | resolve(wlsql.find(tableName, options))
506 | })
507 | .then(({ query: [query], values: [values] }) => {
508 | let resultStream = cxn.knex.raw(query, values)
509 | resultStream.pipe(outputStream)
510 |
511 | return new Promise((resolve, reject) => {
512 | resultStream.on('end', resolve)
513 | })
514 | })
515 |       .catch(error => outputStream.emit('error', error)) // no 'cb' in stream()'s signature
516 | },
517 |
518 | /**
519 | * Fired when a model is unregistered, typically when the server
520 | * is killed. Useful for tearing-down remaining open connections,
521 | * etc.
522 | *
523 | * @param {Function} cb [description]
524 | * @return {[type]} [description]
525 | */
526 | teardown (conn, cb = conn) {
527 | let connections = conn ? [ Adapter.connections.get(conn) ] : Adapter.connections.values()
528 | let teardownPromises = [ ]
529 |
530 | for (let cxn of connections) {
531 | if (!cxn) continue
532 |
533 | teardownPromises.push(cxn.knex.destroy())
534 | }
535 | return Promise.all(teardownPromises)
536 | .then(() => {
537 | // only delete connection references after all open sessions are closed
538 | for (let cxn of connections) {
539 | if (!cxn) continue
540 | Adapter.connections.delete(cxn.identity)
541 | }
542 | cb()
543 | })
544 | .catch(cb)
545 | },
546 |
547 | /**
548 | * Return the knex object
549 | *
550 | * @param connectionName
551 | */
552 | knex (connectionName) {
553 | let cnx = Adapter.connections.get(connectionName)
554 | if (cnx) {
555 | return cnx.knex
556 | }
557 | }
558 | }
559 | export default Adapter
560 |
--------------------------------------------------------------------------------
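As a sketch of the stored-procedure path above: `initializeConnection` registers a built-in `version` entry for every connection (see `lib/procedures.js`), so `procedure()` can be exercised without defining any database functions. The connection identity `'default'` is assumed:

```js
const Adapter = require('waterline-postgresql')

// Assumes a connection registered under the identity 'default'.
// `version` is the entry Procedures.describeAll() adds for every connection.
Adapter.procedure('default', 'version', [ ], (err, result) => {
  if (err) return console.error(err)
  // knex.raw() resolves to a pg result, e.g. { rows: [ { version: 'PostgreSQL 9.4 ...' } ] }
  console.log(result.rows[0].version)
})
```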
/lib/error.js:
--------------------------------------------------------------------------------
1 | import _ from 'lodash'
2 | import AdapterUtil from './util'
3 |
4 |
5 | const Errors = {
6 |
7 | E_UNIQUE (pgError) {
8 | return {
9 | code: 'E_UNIQUE',
10 | message: pgError.message,
11 | invalidAttributes: [ pgError.column ]
12 | }
13 | },
14 |
15 | E_NOTNULL (pgError) {
16 | return {
 17 |       code: 'E_NOTNULL',
18 | message: pgError.message,
19 | invalidAttributes: [ pgError.column ]
20 | }
21 | },
22 |
23 | E_PGERROR (pgError) {
24 | return pgError
25 | }
26 | }
27 |
28 | const PostgresErrorMapping = {
29 | // uniqueness constraint violation
30 | '23505': Errors.E_UNIQUE,
31 |
32 | // null-constraint violation
33 | '22002': Errors.E_NOTNULL,
34 | '22004': Errors.E_NOTNULL,
35 | '23502': Errors.E_NOTNULL,
36 | '39004': Errors.E_NOTNULL,
37 |
38 | // todo finish mapping
39 | }
40 |
41 | const AdapterError = {
42 | wrap (cb, txn, payload) {
43 | return function (pgError) {
44 | let errorWrapper = PostgresErrorMapping[pgError.code]
45 | let error = pgError
46 |
47 | if (_.isFunction(errorWrapper)) {
48 | error = errorWrapper(pgError)
49 | }
50 |
51 | console.error(error)
52 | if (AdapterUtil.isTransaction(txn)) {
53 | return txn.rollback().then(AdapterError.wrap(cb))
54 | }
55 |
56 | _.isFunction(cb) && cb(error)
57 | }
58 | }
59 | }
60 |
61 | export default AdapterError
62 |
--------------------------------------------------------------------------------
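To illustrate the mapping above: a pg uniqueness violation reports SQLSTATE `23505`, which `wrap()` routes through `PostgresErrorMapping` before invoking the callback. The error object below is a hand-built stand-in for what the pg driver would pass to `.catch()`:

```js
import AdapterError from './error'

const handler = AdapterError.wrap(error => {
  console.log(error.code)              // 'E_UNIQUE'
  console.log(error.invalidAttributes) // [ 'email' ]
})

// Simulated pg error object (shape is illustrative):
handler({ code: '23505', message: 'duplicate key value', column: 'email' })
```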
/lib/procedures.js:
--------------------------------------------------------------------------------
1 | import _ from 'lodash'
2 | import SQL from './sql'
3 |
4 | export const Procedures = {
5 |
6 | /**
7 | * Return a collection of all stored procedures accessible to the current
8 | * database connection
9 | */
10 | describeAll (cxn) {
11 | let sp = cxn.knex.raw(SQL.storedProcedures)
12 |
13 | return sp
14 | .then(({ rows }) => {
15 | let procedures = _.map(rows, row => {
16 | return Procedures.buildStoredProcedure(row, cxn)
17 | })
18 |
19 | procedures.push(Procedures.buildStoredProcedure({ name: 'version' }, cxn))
20 |
21 | return _.isEmpty(procedures) ? { } : _.keyBy(procedures, 'name')
22 | })
23 | },
24 |
25 | /**
26 | * Build a function that invokes the SP with the required arguments
27 | */
28 | buildStoredProcedure ({ schema, name, returntype, signature }, cxn) {
29 | let argTemplate = Procedures.buildArgumentTemplate(signature)
30 | let fullName = (!schema || (schema == 'public')) ? name : `${schema}.${name}`
31 |
32 | return {
33 | name: fullName,
34 | signature: Procedures.parseSignature(signature),
35 | invoke (args) {
36 | if (!schema) {
37 | return cxn.knex.raw(`select ${name}(${argTemplate})`, args)
38 | }
39 | else {
40 | return cxn.knex.raw(`select ${schema}.${name}(${argTemplate})`, args)
41 | }
42 | }
43 | }
44 | },
45 |
46 | buildArgumentTemplate (signature) {
47 | if (!signature) return ''
48 |
49 | let args = signature.split(', ')
50 | return args.map(arg => '?').join(',')
51 | },
52 |
53 | parseSignature (signature = '') {
54 | let parameters = signature.split(', ')
55 | return _.map(parameters, param => {
56 | return param.split(' ')[0]
57 | })
58 | }
59 | }
60 |
61 | export default Procedures
62 |
--------------------------------------------------------------------------------
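For example, given a signature string in the form `pg_get_function_arguments()` returns it (the `add` procedure is hypothetical), the helpers above produce:

```js
import Procedures from './procedures'

Procedures.buildArgumentTemplate('x integer, y integer') // => '?,?'
Procedures.parseSignature('x integer, y integer')        // => [ 'x', 'y' ]

// invoke([ 1, 2 ]) on the built procedure then runs: select add(?,?)
```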
/lib/spatial.js:
--------------------------------------------------------------------------------
1 | import _ from 'lodash'
2 |
3 | const SpatialUtil = {
4 |
5 | spatialTypeRegex: /^(\w+)(?:\((\w+), (\d+)\))?$/,
6 |
7 | /**
8 | * Get the version of the installed postgis extension
9 | */
10 | getPostgisVersion (cxn) {
11 | return cxn.knex
 12 |       .raw('select postgis_lib_version() as version')
13 | .then(({ rows: [{ version }] }) => {
14 | return version.split('.')
15 | })
16 | },
17 |
18 | /**
19 | * Parse and validate the installed postgis version
20 | * (must be newer than 2.1)
21 | */
22 | validatePostgisVersion ([ major, minor, patch ]) {
23 | if (major < 2 || (major == 2 && minor < 1)) {
24 | throw new Error(`
25 | PostGIS ${major}.${minor}.${patch} detected. This adapter requires PostGIS 2.1 or higher.
26 | Please either:
27 | 1. Upgrade your PostGIS extension to at least 2.1.0
28 | 2. Disable the spatial extension on this adapter (see README)
29 | `)
30 | }
31 |
32 | return parseFloat(`${major}.${minor}`)
33 | },
34 |
35 | /*
36 | addGeometryColumns (cxn, tableName, tableDefinition) {
37 | let geometryColumns = _.chain(tableDefinition)
38 | .pick(SpatialUtil.isSpatialColumn)
39 | .map((attr, name) => {
40 | return SpatialUtil.addGeometryColumn(cxn, tableName, name, attr)
41 | })
42 | .value()
43 |
44 | return Promise.all(geometryColumns)
45 | },
46 | */
47 |
48 | /**
49 | * Add a geometry column to a table
50 | * http://postgis.net/docs/AddGeometryColumn.html
51 | addGeometryColumn (cxn, tableName, attributeName, definition) {
52 | let columnName = attributeName || definition.columnName
53 | let srid = definition.srid || 4326
54 |
55 | return cxn.knex.raw(`
56 | select AddGeometryColumn('${tableName}', '${columnName}', ${srid}, 'GEOMETRY', 2)
57 | `)
58 | },
59 | */
60 |
61 | /**
62 | * Convert geojson into postgis 'geometry' type. Re-project geometry if necessary.
63 | *
64 | * http://postgis.net/docs/ST_GeomFromGeoJSON.html
65 | * http://postgis.org/docs/ST_Transform.html
66 | */
67 | fromGeojson (geojson, definition, cxn) {
68 | if (_.isEmpty(geojson)) return
69 |
70 | let obj = _.isString(geojson) ? JSON.parse(geojson) : geojson
71 | let geometry = obj.geometry || obj
72 |
73 | _.defaultsDeep(geometry, {
74 | crs: {
75 | type: 'name',
76 | properties: {
77 | name: 'EPSG:' + SpatialUtil.getDeclaredSrid(geometry, definition)
78 | }
79 | }
80 | })
81 |
82 | return cxn.st.transform(
83 | cxn.st.geomFromGeoJSON(geometry),
84 | SpatialUtil.getNativeSrid(definition)
85 | )
86 | },
87 |
88 | /**
89 | * Get "declared srid". This is the SRID that we're expecting of geometries
90 | * that we're inserting into the database.
91 | */
92 | getDeclaredSrid (geometry, definition) {
93 | let [ $, declaredSrid ] = (_.get(geometry, [ 'crs', 'properties', 'name' ]) || '').split(':')
94 | return declaredSrid || SpatialUtil.getNativeSrid(definition)
95 | },
96 |
97 | /**
98 | * Get "native srid". This is the SRID that we're using to store geometries
99 | * in the database.
100 | *
101 | * examples:
102 | * geometry(Point, 4326)
103 | */
104 | getNativeSrid (definition) {
105 | let [ $, dbType, geoType, srid ] = SpatialUtil.spatialTypeRegex.exec(definition.dbType)
106 | return srid || 0
107 | },
108 |
109 | buildSpatialSelect (tableDefinition, tableName, cxn) {
110 | return _.map(SpatialUtil.getSpatialColumns(tableDefinition), (definition, attr) => {
111 | return cxn.st.asGeoJSON(`${tableName}.${attr}`).as(attr)
112 | })
113 | },
114 |
115 | getSpatialColumns (tableDefinition) {
116 | return _.pickBy(tableDefinition, SpatialUtil.isSpatialColumn)
117 | },
118 |
119 | hasSpatialColumn (tableDefinition) {
120 | return !!_.find(tableDefinition, SpatialUtil.isSpatialColumn)
121 | },
122 |
123 | isSpatialColumn (definition) {
124 | if (!definition || !definition.dbType) return false
125 |
126 | let [ $, dbType, geoType, srid ] = SpatialUtil.spatialTypeRegex.exec(definition.dbType) || [ ]
127 | return dbType === 'geometry'
128 | }
129 | }
130 |
131 | export default SpatialUtil
132 |
--------------------------------------------------------------------------------
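A sketch of how these helpers treat a PostGIS column; the attribute definition is assumed, matching the `geometry(<type>, <srid>)` form that `spatialTypeRegex` parses:

```js
import SpatialUtil from './spatial'

const definition = { dbType: 'geometry(Point, 4326)' }

SpatialUtil.isSpatialColumn(definition) // => true
SpatialUtil.getNativeSrid(definition)   // => '4326'

// On insert, fromGeojson() wraps the value so PostGIS parses it and,
// when the declared CRS differs from the native SRID, re-projects it:
//   ST_Transform(ST_GeomFromGeoJSON('...'), 4326)
```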
/lib/sql.js:
--------------------------------------------------------------------------------
1 | const SQL = {
2 |
3 | indexes: `
4 | select
5 | attname as column_name,
6 | indisprimary as primary_key,
7 | indisunique as unique,
8 | true as indexed
9 |
10 | from
11 | pg_index
12 |
13 | inner join pg_attribute
14 | on (pg_attribute.attnum = any (pg_index.indkey) and pg_attribute.attrelid = pg_index.indrelid)
15 | inner join pg_class
16 | on (pg_class.oid = pg_index.indrelid)
17 |
18 | where
19 | pg_class.relname = ?
20 | `,
21 |
22 | storedProcedures: `
23 | select n.nspname as schema,
24 | p.proname as name,
25 | pg_catalog.pg_get_function_result(p.oid) as returntype,
26 | pg_catalog.pg_get_function_arguments(p.oid) as signature
27 |
28 | from pg_catalog.pg_proc p
29 |
30 | left join pg_catalog.pg_namespace n on n.oid = p.pronamespace
31 |
32 | where
33 | pg_catalog.pg_function_is_visible(p.oid)
34 | and n.nspname not in ('pg_catalog', 'information_schema')
35 | and p.proname not like '\\_%'
36 | order by schema, name
37 | `
38 | }
39 |
40 | export default SQL
41 |
--------------------------------------------------------------------------------
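For reference, `SQL.indexes` feeds `Adapter.describe()`, which camelizes each row and merges it into knex's `columnInfo()` output. A hypothetical row for a table whose `id` column is the primary key:

```js
// Shape after camelize(): column_name -> columnName
const exampleIndexRow = {
  columnName: 'id',
  primaryKey: true,
  unique: true,
  indexed: true
}
```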
/lib/util.js:
--------------------------------------------------------------------------------
1 | import _ from 'lodash'
2 | import Adapter from './adapter'
3 | import CriteriaParser from 'waterline-sequel/sequel/lib/criteriaProcessor'
4 | import SpatialUtil from './spatial'
5 | import Procedures from './procedures'
6 | import knex from 'knex'
7 |
8 | const Util = {
9 |
10 | PG_MAX_INT: 2147483647,
11 |
12 | initializeConnection (cxn) {
13 | return Adapter.getVersion(cxn)
14 | .then(version => {
15 | cxn.version = Util.validateVersion(version)
16 |
17 | return Procedures.describeAll(cxn)
18 | })
19 | .then(procedures => {
20 | cxn.storedProcedures = procedures
21 | })
22 | },
23 |
24 | getTransaction (txn, query) {
25 | if (Util.isTransaction(txn)) {
26 | return txn
27 | }
28 | else {
29 | return query
30 | }
31 | },
32 |
33 | isTransaction (txn) {
34 | return txn && _.isFunction(txn.commit)
35 | },
36 |
37 |
38 | /**
39 | * Apply a primary key constraint to a table
40 | *
41 | * @param table - a knex table object
42 | * @param definition - a waterline attribute definition
43 | */
44 | applyPrimaryKeyConstraints (table, definition) {
45 | let primaryKeys = _.keys(_.pickBy(definition, attribute => {
46 | return attribute.primaryKey
47 | }))
48 |
49 | if (!primaryKeys.length) return
50 |
51 | return table.primary(primaryKeys)
52 | },
53 |
54 | applyCompositeUniqueConstraints (table, definition) {
55 | _.each(definition, (attribute, name) => {
56 | let uniqueDef = attribute.unique || { }
57 | if (attribute.primaryKey) return
58 | if (_.isEmpty(uniqueDef)) return
59 | if (!_.isArray(uniqueDef.composite)) return
60 |
61 | let uniqueKeys = _.uniq([ name, ...uniqueDef.composite ])
62 |
63 | table.unique(uniqueKeys)
64 | })
65 | },
66 |
67 | applyEnumConstraints (table, definition) {
68 | _.each(definition, (attribute, name) => {
69 | if (_.isArray(attribute.enum)) {
70 | table.enu(name, attribute.enum)
71 | }
72 | })
73 | },
74 |
75 | applyTableConstraints (table, definition) {
76 | return Promise.all([
77 | Util.applyPrimaryKeyConstraints(table, definition),
78 | Util.applyCompositeUniqueConstraints(table, definition),
79 | //Util.applyEnumConstraints(table, definition)
80 | ])
81 | },
82 |
83 | applyColumnConstraints (column, definition) {
84 | if (_.isString(definition)) {
85 | return
86 | }
87 | return _.map(definition, (value, key) => {
88 | if (key == 'defaultsTo' && definition.autoIncrement && value == 'AUTO_INCREMENT') {
89 | return
90 | }
91 |
92 | return Util.applyParticularColumnConstraint(column, key, value, definition)
93 | })
94 | },
95 |
96 | /**
97 | * Apply value constraints to a particular column
98 | */
99 | applyParticularColumnConstraint (column, constraintName, value, definition) {
100 | if (!value) return
101 |
102 | switch (constraintName) {
103 |
104 | case 'index':
105 | return column.index(_.get(value, 'indexName'), _.get(value, 'indexType'))
106 |
107 | /**
108 | * Acceptable forms:
109 | * attr: { unique: true }
110 | * attr: {
111 | * unique: {
112 | * unique: true, // or false
113 | * composite: [ 'otherAttr' ]
114 | * }
115 | * }
116 | */
117 | case 'unique':
118 | if ((value === true || _.get(value, 'unique') === true) && !definition.primaryKey) {
119 | column.unique()
120 | }
121 | return
122 |
123 | case 'notNull':
124 | return column.notNullable()
125 |
126 | case 'defaultsTo':
127 | if (_.isArray(value) && definition.type == 'array') {
128 | return column.defaultTo('{' + value.join(',') + '}')
129 | }
130 | if (!_.isFunction(value)) {
131 | return column.defaultTo(value)
132 | }
133 |
134 | /*
135 | * TODO
136 | case 'comment':
137 | return table.comment(attr.comment || attr.description)
138 | */
139 |
140 | case 'primaryKey':
141 | case 'autoIncrement':
142 | if (definition.dbType == 'uuid') {
143 | return column.defaultTo(knex.raw('uuid_generate_v4()'))
144 | }
145 | }
146 | },
147 |
148 |
149 |
150 | /**
151 | * Create a column for Knex from a Waterline attribute definition
152 | */
153 | toKnexColumn (table, _name, attrDefinition, wlModel, schema) {
154 | let attr = _.isObject(attrDefinition) ? attrDefinition : { type: attrDefinition }
155 | let type = attr.autoIncrement ? 'serial' : attr.type
156 | let name = attr.columnName || _name
157 |
158 | if (_.includes(wlModel.meta.uuids, _name) && !wlModel.meta.junctionTable) {
159 | wlModel._attributes[_name].type = 'uuid'
160 | wlModel.definition[_name].type = 'uuid'
161 | wlModel._cast._types[_name] = 'uuid'
162 |
163 | type = 'uuid'
164 | }
165 |
166 | if (attrDefinition.foreignKey && attrDefinition.model) {
167 | const refModel = schema[attrDefinition.model]
168 | try {
169 | const fpk = Adapter.getPrimaryKey({ collections: schema }, attrDefinition.model)
170 | if (_.includes(refModel.meta.uuids, fpk) && !refModel.meta.junctionTable) {
171 | type = 'uuid'
172 | }
173 | }
174 | catch (e) { }
175 | }
176 |
177 | // set key types for m2m
178 | if (attrDefinition.foreignKey && attrDefinition.references && attrDefinition.on) {
179 | try {
180 | type = schema[attrDefinition.references].attributes[attrDefinition.on].type
181 | }
182 | catch (e) { }
183 | }
184 |
185 | /**
186 | * Perform a special check for ENUM. ENUM is both a type and a constraint.
187 | *
188 | * table.enu(col, values)
189 |    * Adds an enum column (aliased to enu, as enum is a reserved word in JavaScript).
190 | */
191 | if (_.isArray(attr.enum)) {
192 | return table.enu(name, attr.enum)
193 | }
194 |
195 | switch (attr.dbType || type.toLowerCase()) {
196 | /**
197 | * table.text(name, [textType])
198 | * Adds a text column, with optional textType for MySql text datatype preference.
199 | * textType may be mediumtext or longtext, otherwise defaults to text.
200 | */
201 | case 'string':
202 | case 'text':
203 | case 'mediumtext':
204 | case 'longtext':
205 | return table.text(name, type)
206 |
207 | /**
208 | * table.string(name, [length])
209 | * Adds a string column, with optional length defaulting to 255.
210 | */
211 | case 'character varying':
212 | return table.string(name, attr.length)
213 |
214 | case 'serial':
215 | case 'smallserial':
216 | return table.specificType(name, 'serial')
217 | case 'bigserial':
218 | return table.specificType(name, 'bigserial')
219 |
220 | /**
221 | * table.boolean(name)
222 | * Adds a boolean column.
223 | */
224 | case 'boolean':
225 | return table.boolean(name)
226 |
227 | /**
228 | * table.integer(name)
229 | * Adds an integer column.
230 | */
231 | case 'int':
232 | case 'integer':
233 | case 'smallint':
234 | return table.integer(name)
235 |
236 | /**
237 | * table.bigInteger(name)
238 | * In MySQL or PostgreSQL, adds a bigint column, otherwise adds a normal integer.
239 | * Note that bigint data is returned as a string in queries because JavaScript may
240 | * be unable to parse them without loss of precision.
241 | */
242 | case 'bigint':
243 | case 'biginteger':
244 | return table.bigInteger(name)
245 |
246 | /**
247 | * table.float(column, [precision], [scale])
248 | * Adds a float column, with optional precision and scale.
249 | */
250 | case 'real':
251 | case 'float':
252 | return table.float(name, attr.precision, attr.scale)
253 |
254 | case 'double':
255 | return table.float(name, 15, attr.scale)
256 |
257 | /**
258 | * table.decimal(column, [precision], [scale])
259 | * Adds a decimal column, with optional precision and scale.
260 | */
261 | case 'decimal':
262 | return table.decimal(name, attr.precision, attr.scale)
263 |
264 | /**
265 | * table.time(name)
266 | * Adds a time column.
267 | */
268 | case 'time':
269 | return table.time(name)
270 |
271 | /**
272 | * table.date(name)
273 | * Adds a date column.
274 | */
275 | case 'date':
276 | return table.date(name)
277 |
278 | /**
279 | * table.timestamp(name, [standard])
280 | * Adds a timestamp column, defaults to timestamptz in PostgreSQL,
281 | * unless true is passed as the second argument.
282 | *
283 | * Note that the method for defaulting to the current datetime varies from one
284 |    * database to another. For example: PostgreSQL requires .defaultTo(knex.raw('now()')),
285 | * but SQLite3 requires .defaultTo(knex.raw("date('now')")).
286 | */
287 | case 'datestamp':
288 | case 'datetime':
289 | return table.timestamp(name, attr.standard)
290 |
291 | case 'array':
292 | return table.specificType(name, 'text ARRAY')
293 |
294 | /**
295 | * table.json(name, [jsonb])
296 | * Adds a json column, using the built-in json type in postgresql,
297 | * defaulting to a text column in older versions of postgresql or in unsupported databases.
298 | * jsonb can be used by passing true as the second argument.
299 | */
300 | case 'json':
301 | case 'jsonb':
302 | return table.jsonb(name)
303 |
304 | case 'binary':
305 | return table.binary(name)
306 |
307 | /**
308 | * table.uuid(name)
309 | * Adds a uuid column - this uses the built-in uuid type in postgresql,
310 | * and falling back to a char(36) in other databases.
311 | */
312 | case 'uuid':
313 | return table.uuid(name)
314 |
315 | default:
316 | return table.specificType(name, attr.dbType || type)
317 | }
318 | },
319 |
320 | /**
321 | * Convert a parameterized waterline query into a knex-compatible query string
322 | */
323 | toKnexRawQuery (sql) {
324 | const wlSqlOptions = Adapter.wlSqlOptions
325 |
326 | sql = (sql || '').replace(/\$\d+/g, '?')
327 | if (_.get(wlSqlOptions, 'wlNext.caseSensitive')) {
328 | sql = sql.replace(/LOWER\(("\w+"."\w+")\)/ig, '$1')
329 | }
330 |
331 | return sql
332 | },
333 |
334 | /**
335 | * Cast values to the correct type
336 | */
337 | castValues (values) {
338 | // No special handling currently
339 |     return values
340 | },
341 |
342 | castResultRows (rows, schema) {
343 | if (_.isPlainObject(rows)) {
344 | return Util.castResultValues(rows, schema)
345 | }
346 | else {
347 | return _.map(rows, row => {
348 | return Util.castResultValues(row, schema)
349 | })
350 | }
351 | },
352 |
353 | castResultValues (values, schema) {
354 | return _.mapValues(values, (value, attr) => {
355 | let definition = schema.definition[attr]
356 | if (!definition) return value
357 |
358 | if (SpatialUtil.isSpatialColumn(definition)) {
359 | try {
360 | return JSON.parse(value)
361 | }
362 | catch (e) {
363 | return null
364 | }
365 | }
366 |
367 | if (_.isArray(value)) {
368 | return _.map(value, (item) => {
369 | try {
370 | return JSON.parse(item)
371 | }
372 | catch (e) {
373 | return item
374 | }
375 | })
376 | }
377 |
378 | return value
379 | })
380 | },
381 |
382 | sanitize (data, schema, cxn) {
383 | if (_.isArray(data)) {
384 | return _.map(data, record => {
385 | return Util.sanitizeRecord(record, schema, cxn)
386 | })
387 | }
388 | else {
389 | return Util.sanitizeRecord(data, schema, cxn)
390 | }
391 | },
392 |
393 | sanitizeRecord (data, schema, cxn) {
394 | _.each(data, (value, attr) => {
395 | let definition = schema.definition[attr]
396 |
397 | // remove unrecognized fields (according to schema) from data
398 | if (!definition) {
399 | delete data[attr]
400 | return
401 | }
402 |
403 | // remove any autoIncrement fields from data
404 | if (!definition || definition.autoIncrement) {
405 | delete data[attr]
406 | }
407 | if (SpatialUtil.isSpatialColumn(definition)) {
408 | data[attr] = SpatialUtil.fromGeojson(data[attr], definition, cxn)
409 | }
410 | })
411 |
412 | return data
413 | },
414 |
415 | /**
416 | * Construct a knex query that joins one or more tables for populate()
417 | */
418 | buildKnexJoinQuery (cxn, tableName, options) {
419 | let schema = cxn.collections[tableName]
420 | let pk = Adapter.getPrimaryKey(cxn, tableName)
421 |
422 | let query = cxn.knex
423 | .select(`${tableName}.*`)
424 | .select(SpatialUtil.buildSpatialSelect(schema.definition, tableName, cxn))
425 | .select(cxn.knex.raw(Util.buildSelectAggregationColumns(cxn, options)))
426 | .from(tableName)
427 | .where(Util.buildWhereClause(cxn, tableName, options))
428 | .groupBy(`${tableName}.${pk}`)
429 | .orderByRaw(Util.buildOrderByClause(tableName, options))
430 | .limit(options.limit || Util.PG_MAX_INT)
431 | .offset(options.skip || 0)
432 |
433 | Util.buildKnexJoins(cxn, options, query)
434 |
435 | return query
436 | },
437 |
438 | addSelectColumns (columns, query) {
439 | let [ oldSelectClause, fromClause ] = query.split('FROM')
440 | let newSelectClause = [ oldSelectClause.split(','), ...columns ].join(',')
441 |
442 | return `${newSelectClause} FROM ${fromClause}`
443 | },
444 |
445 | buildKnexJoins (cxn, { joins }, query) {
446 | _.each(joins, join => {
447 | let parentAlias = Util.getParentAlias(join)
448 | let alias = Util.getSubqueryAlias(join)
449 | let subquery = Util.buildKnexJoinSubquery(cxn, join)
450 |
451 | query.leftJoin(
452 | cxn.knex.raw(`(${subquery}) as "${alias}"`),
453 | `${alias}.${join.childKey}`,
454 | `${parentAlias}.${join.parentKey}`
455 | )
456 | })
457 | },
458 |
459 | buildKnexJoinSubquery (cxn, { criteria, child }) {
460 | let schema = cxn.collections[child]
461 |
462 | return cxn.knex
463 | .select('*')
464 | .select(SpatialUtil.buildSpatialSelect(schema.definition, child, cxn))
465 | .from(child)
466 | .where(Util.buildWhereClause(cxn, child, criteria))
467 | },
468 |
469 | buildOrderByClause (tableName, { sort }) {
470 | if (_.isEmpty(sort)) {
471 | return '1'
472 | }
473 |
474 | let queryTokens = _.map(sort, (_direction, field) => {
475 | let direction = _direction === 1 ? '' : 'desc'
476 | return `"${tableName}"."${field}" ${direction}`
477 | })
478 | return queryTokens.join(', ')
479 | },
480 |
481 | buildWhereClause (cxn, tableName, options) {
482 | let parser = new CriteriaParser(tableName, cxn.schema, Adapter.wlSqlOptions)
483 | let { query, values } = parser.read(_.omit(options, [
484 | 'sort', 'limit', 'groupBy', 'skip'
485 | ]))
486 |
487 | return cxn.knex.raw(Util.toKnexRawQuery(query), Util.castValues(values))
488 | },
489 |
490 | getJoinAlias ({ alias, parentKey, removeParentKey }) {
491 | if (alias != parentKey && removeParentKey === true) {
492 | return parentKey
493 | }
494 | else {
495 | return alias
496 | }
497 | },
498 |
499 | getParentAlias (join) {
500 | if (join.junctionTable) {
501 | return Util.getJoinAlias(join) + join.parent
502 | }
503 | else {
504 | return join.parent
505 | }
506 | },
507 |
508 | getSubqueryAlias (join) {
509 | return Util.getJoinAlias(join) + join.child
510 | },
511 |
512 | buildSelectAggregationColumns (cxn, { joins }) {
513 | return _.map(_.reject(joins, { select: false }), join => {
514 |
515 | let criteria = join.criteria || { }
516 | let subqueryAlias = Util.getSubqueryAlias(join)
517 | let asColumn = Util.getJoinAlias(join)
518 | let orderBy = Util.buildOrderByClause(subqueryAlias, criteria)
519 | let start = (criteria.skip || 0) + 1
520 | let end = (criteria.limit || (Util.PG_MAX_INT - start)) + start - 1
521 |
522 | if (!criteria.skip && !criteria.limit) {
523 | return `json_agg("${subqueryAlias}".* order by ${orderBy}) as "${asColumn}"`
524 | }
525 |
526 | return `array_to_json((array_agg("${subqueryAlias}".* order by ${orderBy}))[${start}:${end}]) as "${asColumn}"`
527 | })
528 | },
529 |
530 | /**
531 | * Parse and validate a Postgres "select version()" result
532 | */
533 | validateVersion ([ major, minor, patch ]) {
534 |     if (major < 9 || (major == 9 && minor < 4)) { // version parts are strings; compare loosely
535 | throw new Error(`
536 | PostgreSQL ${major}.${minor}.${patch} detected. This adapter requires PostgreSQL 9.4 or higher.
537 | Please either:
538 | 1. Upgrade your Postgres server to at least 9.4.0 -or-
539 | 2. Use the sails-postgresql adapter instead: https://www.npmjs.com/package/sails-postgresql
540 | `)
541 | }
542 |
543 | return parseFloat(`${major}.${minor}`)
544 | }
545 | }
546 |
547 | export default Util
548 |
--------------------------------------------------------------------------------
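For example, `toKnexRawQuery` rewrites the `$n` placeholders that waterline-sequel emits into the `?` placeholders knex expects, and (because `Adapter.wlSqlOptions.wlNext.caseSensitive` is set) strips the `LOWER(...)` wrappers. The queries below are illustrative:

```js
import Util from './util'

Util.toKnexRawQuery('select * from "user" where "user"."name" = $1 limit $2')
// => 'select * from "user" where "user"."name" = ? limit ?'

Util.toKnexRawQuery('select * from "user" where LOWER("user"."name") = $1')
// => 'select * from "user" where "user"."name" = ?'
```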
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "waterline-postgresql",
3 | "description": "PostgreSQL Adapter for Sails and Waterline",
4 | "version": "0.14.8",
5 | "author": "Travis Webb ",
6 | "url": "http://github.com/waterlinejs/postgresql-adapter",
7 | "keywords": [
8 | "postgresql",
9 | "postgres",
10 | "pg",
11 | "postgis",
12 | "node orm",
13 | "orm",
14 | "waterline",
15 | "waterlinejs",
16 | "driver",
17 | "sails",
18 | "sailsjs",
19 | "es6"
20 | ],
21 | "repository": {
22 | "type": "git",
23 | "url": "git://github.com/waterlinejs/postgresql-adapter.git"
24 | },
25 | "dependencies": {
26 | "camelize": "^1.0.0",
27 | "knex": "^0.10.0",
28 | "knex-postgis": "^0.1.8",
29 | "lodash": "^4.6",
30 | "pg": "^4.5",
31 | "pg-query-stream": "^1.0.0",
32 | "waterline-errors": "^0.10.1",
33 | "waterline-sequel": "^0.5.0"
34 | },
35 | "devDependencies": {
36 | "babel": "^5.6.23",
37 | "gulp": "^3.9.0",
38 | "gulp-babel": "^5.2.0",
39 | "mocha": "*",
40 | "should": "*",
41 | "waterline": "^0.11.0",
42 | "waterline-adapter-tests": "^0.11.0"
43 | },
44 | "bundledDependencies": [
45 | "camelize",
46 | "knex",
47 | "knex-postgis",
48 | "lodash",
49 | "pg",
50 | "pg-query-stream",
51 | "waterline-sequel"
52 | ],
53 | "scripts": {
54 | "test": "make test",
55 | "prepublish": "gulp"
56 | },
57 | "main": "dist/adapter",
58 | "license": "MIT",
59 | "bugs": "https://github.com/waterlinejs/postgresql-adapter/issues",
60 | "waterlineAdapter": {
61 | "waterlineVersion": ">0.10.0",
62 | "interfaces": [
63 | "associations",
64 | "migratable",
65 | "queryable",
66 | "semantic",
67 | "sql"
68 | ],
69 | "features": [
70 | "autoIncrement",
71 | "crossAdapter",
72 | "unique"
73 | ]
74 | }
75 | }
76 |
--------------------------------------------------------------------------------
/test/integration/pg/index.js:
--------------------------------------------------------------------------------
1 | 'use strict'
2 |
3 | const _ = require('lodash')
4 | const assert = require('assert')
5 | const Waterline = require('waterline')
6 | const models = require('./models')
7 | const Adapter = require('../../../dist/adapter')
8 |
9 | describe('pg edge cases', () => {
10 | const wlconfig = {
11 | adapters: {
12 | edgetests: Adapter
13 | },
14 | connections: {
15 | edgetests: {
16 | migrate: 'drop',
17 | adapter: 'edgetests',
18 | connection: {
19 | }
20 | }
21 | }
22 | }
23 | let waterline, orm
24 |
25 | before(done => {
26 | waterline = new Waterline();
27 | waterline.loadCollection(models.ArrayModel)
28 | waterline.loadCollection(models.JsonModel)
29 | waterline.initialize(wlconfig, (err, _orm) => {
30 | if (err) return done(err)
31 |
32 | orm = _orm.collections
33 | done()
34 | })
35 | })
36 |
37 | describe('array type', () => {
38 | it('should initialize without error', () => {
39 | assert(orm.arraymodel)
40 | })
41 | it('should support insertion with list field', done => {
42 | orm.arraymodel.create({
43 | list: [1,2,3],
44 | listSyntaxA: [4,5,6],
45 | listOfObjects: [{ index: 1 }, { index: 2 }]
46 | })
47 | .then(record => {
48 | assert.equal(record.list.length, 3)
49 | assert.equal(record.listSyntaxA.length, 3)
50 | assert.equal(record.listOfObjects.length, 2)
51 | done()
52 | })
53 | })
54 | it('should parse array of objects on load', done => {
55 | orm.arraymodel.create({
56 | list: [1,2,3],
57 | listSyntaxA: [4,5,6],
58 | listOfObjects: [{ index: 1 }, { index: 2 }]
59 | })
60 | .then(record => {
61 | assert.ok(record.id)
62 | assert.equal(record.list.length, 3)
63 | assert.equal(record.listSyntaxA.length, 3)
64 | assert.equal(record.listOfObjects.length, 2)
65 |
66 | return orm.arraymodel.findOne(record.id)
67 | })
68 | .then(record => {
69 | assert.equal(record.listOfObjects.length, 2)
70 | assert.equal(typeof record.listOfObjects[0], 'object')
71 | done()
72 | })
73 | })
74 |
75 | })
76 |
77 | describe('jsonb type', () => {
78 | it('should initialize without error', () => {
79 | assert(orm.jsonmodel)
80 | })
81 |
82 | it('should support insertion with json field', done => {
83 | orm.jsonmodel.create({
84 | json: { foo: 'bar' },
85 | jsonb: { foo: 'bar' },
86 | jsonbSyntaxA: {
87 | a: 1,
88 | b: { foo: 'bar' }
89 | }
90 | })
91 | .then(record => {
92 | assert.equal(record.json.foo, 'bar')
93 | assert.equal(record.jsonbSyntaxA.b.foo, 'bar')
94 | done()
95 | })
96 | })
97 | })
98 |
99 | })
100 |
--------------------------------------------------------------------------------
/test/integration/pg/models.js:
--------------------------------------------------------------------------------
1 | const Waterline = require('waterline')
2 |
3 | module.exports = {
4 |
5 | ArrayModel: Waterline.Collection.extend({
6 | identity: 'arraymodel',
7 | connection: 'edgetests',
8 | dynamicFinders: false,
9 | associationFinders: false,
10 |
11 | attributes: {
12 | list: {
13 | type: 'array'
14 | },
15 | listSyntaxA: {
16 | type: 'array',
17 | defaultsTo: '{}'
18 | },
19 | listOfObjects: {
20 | type: 'array',
21 | defaultsTo: []
22 | }
23 | }
24 | }),
25 |
26 | JsonModel: Waterline.Collection.extend({
27 | identity: 'jsonmodel',
28 | connection: 'edgetests',
29 | dynamicFinders: false,
30 | associationFinders: false,
31 |
32 | attributes: {
33 | json: {
34 | type: 'json'
35 | },
36 | jsonb: {
37 | type: 'json'
38 | },
39 | jsonbSyntaxA: {
40 | type: 'json',
41 | defaultsTo: '[]'
42 | },
43 | jsonbSyntaxB: {
44 | type: 'json',
45 | defaultsTo: '{}'
46 | }
47 | }
48 | })
49 |
50 | }
51 |
--------------------------------------------------------------------------------
/test/integration/runner.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Run integration tests
3 | *
4 | * Uses the `waterline-adapter-tests` module to
5 | * run mocha tests against the appropriate version
6 | * of Waterline. Only the interfaces explicitly
7 | * declared in this adapter's `package.json` file
8 | * are tested. (e.g. `queryable`, `semantic`, etc.)
9 | */
10 |
11 |
12 | /**
13 | * Module dependencies
14 | */
15 | var util = require('util');
16 | var mocha = require('mocha');
17 | var TestRunner = require('waterline-adapter-tests');
18 | var Adapter = require('../../dist/adapter');
19 |
20 | // Grab targeted interfaces from this adapter's `package.json` file:
21 | var package = {},
22 | interfaces = [],
23 | features = [];
24 | try {
25 | package = require('../../package.json');
26 | interfaces = package.waterlineAdapter.interfaces;
27 | features = package.waterlineAdapter.features;
28 | } catch (e) {
29 | throw new Error(
30 | '\n' +
31 | 'Could not read supported interfaces from `waterlineAdapter.interfaces`' + '\n' +
32 | 'in this adapter\'s `package.json` file ::' + '\n' +
33 | util.inspect(e)
34 | );
35 | }
36 |
37 |
38 |
39 | console.log('Testing `' + package.name + '`, a Sails/Waterline adapter.');
40 | console.log('Running `waterline-adapter-tests` against ' + interfaces.length + ' interfaces...');
41 | console.log('( ' + interfaces.join(', ') + ' )');
42 | console.log();
43 | console.log('Latest draft of Waterline adapter interface spec:');
44 | console.log('http://links.sailsjs.org/docs/plugins/adapters/interfaces');
45 | console.log();
46 |
47 |
48 |
49 | /**
50 | * Integration Test Runner
51 | *
52 | * Uses the `waterline-adapter-tests` module to
53 | * run mocha tests against the specified interfaces
54 | * of the currently-implemented Waterline adapter API.
55 | */
56 | new TestRunner({
57 |
58 | // Mocha opts
59 | mocha: {
60 | bail: false,
61 | grep: /(case in)|(case se)|(greaterThanOrEqual key when searching strings)|(>= usage when searching strings)/,
62 | invert: true
63 | },
64 |
65 | // Load the adapter module.
66 | adapter: Adapter,
67 |
68 | // Default connection config to use.
69 | config: {
70 | },
71 |
72 | failOnError: true,
73 | // The set of adapter interfaces to test against.
74 | // (grabbed these from this adapter's package.json file above)
75 | interfaces: interfaces,
76 |
77 | // The set of adapter features to test against.
78 | // (grabbed these from this adapter's package.json file above)
79 | features: features,
80 |
81 | // Most databases implement 'semantic' and 'queryable'.
82 | //
83 | // As of Sails/Waterline v0.10, the 'associations' interface
84 | // is also available. If you don't implement 'associations',
85 | // it will be polyfilled for you by Waterline core. The core
86 | // implementation will always be used for cross-adapter / cross-connection
87 | // joins.
88 | //
89 | // In future versions of Sails/Waterline, 'queryable' may be also
90 | // be polyfilled by core.
91 | //
92 | // These polyfilled implementations can usually be further optimized at the
93 | // adapter level, since most databases provide optimizations for internal
94 | // operations.
95 | //
96 | // Full interface reference:
97 | // https://github.com/balderdashy/sails-docs/blob/master/adapter-specification.md
98 | });
99 |
--------------------------------------------------------------------------------
/test/integration/wl/index.js:
--------------------------------------------------------------------------------
1 | 'use strict'
2 |
3 | const _ = require('lodash')
4 | const assert = require('assert')
5 | const Waterline = require('waterline')
6 | const models = require('./models')
7 | const Adapter = require('../../../dist/adapter')
8 |
9 | describe('wl edge cases', () => {
10 | const wlconfig = {
11 | adapters: {
12 | edgetests: Adapter
13 | },
14 | connections: {
15 | edgetests: {
16 | adapter: 'edgetests',
17 | connection: {
18 |
19 | }
20 | }
21 | }
22 | }
23 | let waterline, orm, wl
24 |
25 | before(done => {
26 | waterline = new Waterline();
27 | waterline.loadCollection(models.NoisyModel)
28 | waterline.loadCollection(models.NormalModel)
29 | waterline.initialize(wlconfig, (err, _orm) => {
30 | if (err) return done(err)
31 |
32 | wl = _orm
33 | orm = _orm.collections
34 | done()
35 | })
36 | })
37 |
38 | describe('update order by', () => {
39 | it('update should ignore orderBy', () => {
 40 |       return orm.normalmodel.update({ id: 1 }, { // return so mocha waits on the promise
41 | where: {
42 | name: 'hello'
43 | },
44 | orderBy: 'id asc'
45 | })
46 | })
47 | })
48 |
49 | describe('model definition noise', () => {
50 | it('should insert itself normally when .create invoked on the model', done => {
51 | const model = orm.noisymodel
52 | const modelObject = {
53 | name: model.globalId,
54 | identity: model.identity,
55 | attributes: _.omit(model.attributes, _.functions(model.attributes)),
56 | noise: model.noise,
57 | description: model.description
58 | }
59 | orm.noisymodel.create(modelObject)
60 | .then(record => {
61 | assert.equal(record.identity, 'noisymodel')
62 | done()
63 | })
64 | })
65 | it('should insert normally when .create invoked on the adapter', done => {
66 | const model = orm.noisymodel
67 | const modelObject = {
68 | name: model.globalId,
69 | identity: model.identity,
70 | attributes: _.omit(model.attributes, _.functions(model.attributes)),
71 | noise: model.noise,
72 | description: model.description
73 | }
74 | wl.connections.edgetests._adapter.create('edgetests', 'noisymodel', modelObject, (err, record) => {
75 | assert.equal(record.identity, 'noisymodel')
76 | done()
77 | })
78 | })
79 | })
80 | })
81 |
82 |
--------------------------------------------------------------------------------
/test/integration/wl/models.js:
--------------------------------------------------------------------------------
1 | const Waterline = require('waterline')
2 |
3 | module.exports = {
4 |
5 | NormalModel: Waterline.Collection.extend({
6 | identity: 'normalmodel',
7 | connection: 'edgetests',
8 | dynamicFinders: false,
9 | associationFinders: false,
10 |
11 | attributes: {
12 | name: 'string'
13 | }
14 | }),
15 |
16 | NoisyModel: Waterline.Collection.extend({
17 | identity: 'noisymodel',
18 | connection: 'edgetests',
19 | dynamicFinders: false,
20 | associationFinders: false,
21 |
22 | // noise
23 | description: 'hello',
24 | noise: {
25 | foo: 'bar'
26 | },
27 |
28 | attributes: {
29 | id: {
30 | type: 'integer',
31 | primaryKey: true,
32 | autoIncrement: true
33 | },
34 | name: 'string',
35 | identity: 'string',
36 | attributes: 'json',
37 | //noise: 'json',
38 | description: 'string'
39 | }
40 | })
41 | }
42 |
43 |
--------------------------------------------------------------------------------