├── .npmrc ├── test ├── mocha.opts ├── .eslintrc ├── config │ └── config.example.json ├── fixtures │ ├── tables │ │ └── artist.cql │ ├── dog.js │ ├── artist-entity.js │ └── schemas.js ├── mocks │ ├── index.js │ └── priam.js ├── unit │ ├── index.tests.js │ ├── partial-statements.tests.js │ ├── await-wrap.tests.js │ ├── model.instance.tests.js │ ├── schema.tests.js │ ├── model.tests.js │ └── statement-builder.tests.js ├── integration │ ├── index.tests.js │ └── model.tests.js └── helpers │ └── index.js ├── lib ├── statement-builder │ ├── partial-statements │ │ ├── index.js │ │ └── with.js │ ├── statements │ │ ├── index.js │ │ ├── create.js │ │ ├── remove.js │ │ ├── alter.js │ │ ├── find.js │ │ ├── table.js │ │ └── update.js │ ├── compound-statement.js │ ├── statement.js │ └── index.js ├── camel-case.js ├── snake-case.js ├── memoize.js ├── pick.js ├── attributes.js ├── statement-collection.js ├── await-wrap.js ├── index.js ├── model.js └── schema.js ├── .eslintrc ├── examples └── music │ ├── config.js │ ├── models.js │ ├── package.json │ ├── album.js │ ├── artist.js │ └── index.js ├── .travis.yml ├── .gitignore ├── CHANGELOG.md ├── LICENSE └── package.json /.npmrc: -------------------------------------------------------------------------------- 1 | registry=https://registry.npmjs.org/ 2 | -------------------------------------------------------------------------------- /test/mocha.opts: -------------------------------------------------------------------------------- 1 | --reporter spec 2 | --recursive 3 | -t 20000 4 | -------------------------------------------------------------------------------- /lib/statement-builder/partial-statements/index.js: -------------------------------------------------------------------------------- 1 | 2 | 3 | exports.With = require('./with'); 4 | -------------------------------------------------------------------------------- /test/.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "root": 
false, 3 | "rules": { 4 | "max-statements": "off" 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "root": true, 3 | "extends": "godaddy", 4 | "rules": { 5 | "eqeqeq": "off" 6 | } 7 | } 8 | -------------------------------------------------------------------------------- /lib/camel-case.js: -------------------------------------------------------------------------------- 1 | const memoize = require('./memoize').memoize1; 2 | 3 | module.exports = memoize(require('to-camel-case')); 4 | -------------------------------------------------------------------------------- /lib/snake-case.js: -------------------------------------------------------------------------------- 1 | const memoize = require('./memoize').memoize1; 2 | 3 | module.exports = memoize(require('to-snake-case')); 4 | -------------------------------------------------------------------------------- /examples/music/config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | user: 'cassandra', 3 | password: 'cassandra', 4 | keyspace: 'datastar', 5 | hosts: ['127.0.0.1'] 6 | } 7 | -------------------------------------------------------------------------------- /examples/music/models.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = function (datastar) { 4 | return { 5 | Album: require('./album')(datastar), 6 | Artist: require('./artist')(datastar) 7 | }; 8 | }; 9 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: required 2 | dist: trusty 3 | language: node_js 4 | matrix: 5 | fast_finish: true 6 | include: 7 | - node_js: "13" 8 | - node_js: "12.3" 9 | - node_js: "10.17" 10 | services: 11 | - 
cassandra 12 | -------------------------------------------------------------------------------- /lib/statement-builder/statements/index.js: -------------------------------------------------------------------------------- 1 | 2 | 3 | exports.create = require('./create'); 4 | exports.find = require('./find'); 5 | exports.remove = require('./remove'); 6 | exports.table = require('./table'); 7 | exports.alter = require('./alter'); 8 | exports.update = require('./update'); 9 | -------------------------------------------------------------------------------- /test/config/config.example.json: -------------------------------------------------------------------------------- 1 | { 2 | "cassandra": { 3 | "credentials": { 4 | "username": "cassandra", 5 | "password": "cassandra" 6 | }, 7 | "keyspace": "datastar", 8 | "contactPoints": ["127.0.0.1"], 9 | "localDataCenter": "datacenter1" 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /lib/memoize.js: -------------------------------------------------------------------------------- 1 | module.exports.memoize1 = function (fn) { 2 | const cache = new Map(); 3 | return function (arg) { 4 | if (cache.has(arg)) { 5 | return cache.get(arg); 6 | } 7 | const result = fn(arg); 8 | cache.set(arg, result); 9 | return result; 10 | }; 11 | }; 12 | -------------------------------------------------------------------------------- /test/fixtures/tables/artist.cql: -------------------------------------------------------------------------------- 1 | CREATE TABLE IF NOT EXISTS artist ( 2 | artist_id uuid, 3 | name text, 4 | create_date timestamp, 5 | update_date timestamp, 6 | members set, 7 | related_artists set, 8 | traits set, 9 | metadata map, 10 | PRIMARY KEY (artist_id) 11 | ); 12 | -------------------------------------------------------------------------------- /test/mocks/index.js: -------------------------------------------------------------------------------- 1 | const proxyquire = 
require('proxyquire'); 2 | 3 | // 4 | // Exports our prototypal mocks. 5 | // 6 | exports.Priam = require('./priam'); 7 | 8 | /* 9 | * function datastar () 10 | * Returns a new mock Datastar prototype 11 | */ 12 | exports.datastar = function () { 13 | return proxyquire('../../lib', { 14 | priam: exports.Priam 15 | }); 16 | }; 17 | -------------------------------------------------------------------------------- /examples/music/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "music", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "dependencies": { 7 | "datastar": "~1.0.0", 8 | "stringify-stream": "~1.0.5" 9 | }, 10 | "scripts": { 11 | "test": "echo \"Error: no test specified\" && exit 1" 12 | }, 13 | "keywords": [], 14 | "author": "", 15 | "license": "ISC" 16 | } 17 | -------------------------------------------------------------------------------- /lib/pick.js: -------------------------------------------------------------------------------- 1 | // 2 | // Simple pick function to select specified properties from an object 3 | // Replacement for lodash.pick to avoid dependency vulnerabilities 4 | // 5 | module.exports = function pick(obj, keys) { 6 | const result = {}; 7 | if (obj != null) { 8 | for (const key of keys) { 9 | if (Object.prototype.hasOwnProperty.call(obj, key)) { 10 | result[key] = obj[key]; 11 | } 12 | } 13 | } 14 | return result; 15 | }; -------------------------------------------------------------------------------- /test/unit/index.tests.js: -------------------------------------------------------------------------------- 1 | 2 | const assume = require('assume'), 3 | mocks = require('../mocks'); 4 | 5 | describe('Datastar (unit)', () => { 6 | let Datastar; 7 | 8 | beforeEach(() => { 9 | Datastar = mocks.datastar(); 10 | }); 11 | 12 | it('should create datastar instance', () => { 13 | const datastar = new Datastar(); 14 | datastar.connect(err => { 15 | 
assume(err).is.an('undefined'); 16 | }); 17 | }); 18 | }); 19 | -------------------------------------------------------------------------------- /test/fixtures/dog.js: -------------------------------------------------------------------------------- 1 | 2 | 3 | module.exports = { 4 | id: '00000000-0000-0000-0000-000002588490', 5 | name: 'Fido', 6 | color: 'brown', 7 | weight: 75, 8 | dogThing: 'hello', 9 | owner: JSON.stringify({ 10 | name: 'John Doe', 11 | address: { 12 | street: '123 Somewhere Lane', 13 | locality: 'Chandler', 14 | region: 'Arizona', 15 | country: 'United States', 16 | code: '12345' 17 | } 18 | }), 19 | vaccinations: [ 20 | JSON.stringify({ date: new Date('2015-09-09'), types: ['rabies', 'heartworms'] }) 21 | ] 22 | }; 23 | -------------------------------------------------------------------------------- /examples/music/album.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = function (datastar) { 4 | const cql = datastar.schema.cql; 5 | 6 | return datastar.define('album', { 7 | schema: datastar.schema.object({ 8 | album_id: cql.uuid(), 9 | artist_id: cql.uuid(), 10 | name: cql.text(), 11 | track_list: cql.list(cql.text()), 12 | song_list: cql.list(cql.uuid()), 13 | release_date: cql.timestamp(), 14 | create_date: cql.timestamp(), 15 | producer: cql.text() 16 | }).partitionKey('artist_id') 17 | .clusteringKey('album_id') 18 | }); 19 | }; 20 | -------------------------------------------------------------------------------- /test/fixtures/artist-entity.js: -------------------------------------------------------------------------------- 1 | 2 | /* jscs: disable */ 3 | 4 | module.exports = { 5 | artistId: '17a72421-d76a-4c7b-99f1-1ce520507c8b', 6 | name: 'nirvana', 7 | createDate: new Date(), 8 | members: ['Kurt Cobain', 'Krist Novoselic', 'Dave Grohl'], 9 | relatedArtists: [ 10 | '64605492-3e0f-4055-bbbd-3cb15fd2d5d4', 11 | '762bfbf0-0e92-4fe7-977b-a65e8f86dfd1', 12 | 
'72074a3b-5a46-4f51-b874-912d00fb7d96', 13 | '04984b08-967c-46f9-9d0e-7b20f17c673d', 14 | '53a1e63d-8ab6-4f05-903b-78128b7af21d' 15 | ], 16 | traits: ['dark', 'alternative rock', 'rock', 'punk', 'grunge'] 17 | }; 18 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | 5 | # Runtime data 6 | pids 7 | *.pid 8 | *.seed 9 | 10 | # Directory for instrumented libs generated by jscoverage/JSCover 11 | lib-cov 12 | 13 | # Coverage directory used by tools like istanbul 14 | coverage 15 | .nyc_output 16 | 17 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 18 | .grunt 19 | 20 | # Compiled binary addons (http://nodejs.org/api/addons.html) 21 | build/Release 22 | 23 | # Dependency directory 24 | # Deployed apps should consider commenting this line out: 25 | # see https://npmjs.org/doc/faq.html#Should-I-check-my-node_modules-folder-into-git 26 | node_modules 27 | 28 | examples/**/node_modules 29 | 30 | #IntelliJ project files 31 | *.iml 32 | .idea 33 | -------------------------------------------------------------------------------- /examples/music/artist.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = function (datastar) { 4 | const cql = datastar.schema.cql; 5 | 6 | return datastar.define('artist', { 7 | schema: datastar.schema.object({ 8 | artist_id: cql.uuid(), 9 | name: cql.text(), 10 | create_date: cql.timestamp({ default: 'create' }), 11 | update_date: cql.timestamp({ default: 'update' }), 12 | members: cql.set(cql.text()), 13 | related_artists: cql.set(cql.uuid()).allow(null), 14 | traits: cql.set(cql.text()), 15 | metadata: cql.map(cql.text(), cql.text()).allow(null) 16 | }).partitionKey('artist_id'), 17 | readConsistency: 'one', 18 | writeConsistency: 'localQuorum', 19 | with: { 20 | compaction: { 
21 | class: 'LeveledCompactionStrategy' 22 | } 23 | } 24 | }); 25 | }; 26 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## 4.0.4 4 | 5 | - (fix) address vulnerable packages 6 | 7 | ## 4.0.3 8 | 9 | - (fix) address vulnerable packages 10 | 11 | ## 4.0.1 12 | 13 | - (fix) bring back the publishing of `datastar/test` libraries used in some consumers' unit tests. 14 | 15 | ## 4.0.0 16 | 17 | - (breaking) support for node 8 is dropped; minimum node version is now 10.17.x. 18 | - (breaking) the configuration schema is now aligned with `priam` version 4. See the [`priam` migration guide](https://github.com/godaddy/node-priam/blob/master/MIGRATION.md) for help with converting your config settings. 19 | - (feature) data can now be queried as an async iterable for a more lightweight alternative to streams. 20 | - (feature) assigning a `transform` function to your model now gives you the ability to convert all queried records regardless of whether you used the callback or streaming interface. 21 | - (fix) data processing has been streamlined. 22 | -------------------------------------------------------------------------------- /lib/statement-builder/compound-statement.js: -------------------------------------------------------------------------------- 1 | 2 | 3 | module.exports = CompoundStatement; 4 | /* 5 | * A simple wrapper around a set of statements 6 | */ 7 | function CompoundStatement(schema) { 8 | this.statements = []; 9 | // 10 | // Execute compound statements as a single batch, ALWAYS. 
Must ensure 11 | // determinism for these operations 12 | // 13 | this.batch = true; 14 | this.schema = schema; 15 | this.table = this.schema.name; 16 | this.typeOf = this.schema.type; 17 | } 18 | 19 | // 20 | // Return the options so we don't unnecessarily keep them on the object and 21 | // handle errors 22 | // 23 | CompoundStatement.prototype.init = function (options, entity) { 24 | return this._init(options || {}, entity) || {}; 25 | }; 26 | 27 | // 28 | // Override this in higher level statements 29 | // 30 | CompoundStatement.prototype._init = function () { 31 | }; 32 | 33 | CompoundStatement.prototype.add = function (statement) { 34 | this.statements.push(statement); 35 | }; 36 | -------------------------------------------------------------------------------- /test/unit/partial-statements.tests.js: -------------------------------------------------------------------------------- 1 | 2 | const assume = require('assume'); 3 | const With = require('../../lib/statement-builder/partial-statements/with'); 4 | 5 | describe('Partial Statements', () => { 6 | 7 | describe('with', () => { 8 | 9 | it('should return a with partial statement given appropriate options', () => { 10 | const w = new With({ 11 | compaction: { 12 | cool: 'things can happen', 13 | when: 'you use proper compaction' 14 | }, 15 | gcGraceSeconds: 9600 16 | }); 17 | 18 | assume(w.cql).to.be.a('string'); 19 | assume(w.cql).to.contain('compaction'); 20 | assume(w.cql).to.contain('gc_grace_seconds'); 21 | assume(w.error).is.falsey(); 22 | }); 23 | 24 | it('should return an error on the partial statement when it cannot process the given options', () => { 25 | const w = new With({ 26 | what: new RegExp() 27 | }); 28 | 29 | assume(w.error).is.instanceof(Error); 30 | }); 31 | }); 32 | }); 33 | -------------------------------------------------------------------------------- /test/integration/index.tests.js: -------------------------------------------------------------------------------- 1 | 2 | const assume 
= require('assume'), 3 | Datastar = require('../../lib'), 4 | helpers = require('../helpers'); 5 | 6 | describe('Datastar', () => { 7 | let config; 8 | 9 | beforeEach(done => { 10 | // 11 | // Load our config for the current environment once. 12 | // 13 | if (config) { 14 | return done(); 15 | } 16 | helpers.load((err, data) => { 17 | assume(err).equals(null); 18 | config = data; 19 | done(); 20 | }); 21 | }); 22 | 23 | it('should create datastar instance without pre-heating connection', done => { 24 | const datastar = new Datastar({ config: config.cassandra }); 25 | datastar.connect(); 26 | assume(datastar.connection).is.not.an('undefined'); 27 | datastar.close(done); 28 | }); 29 | 30 | it('should create datastar instance with pre-heating connection', done => { 31 | const datastar = new Datastar({ config: config.cassandra }); 32 | datastar.connect(err => { 33 | assume(err).is.falsey(); 34 | assume(datastar.connection).is.not.an('undefined'); 35 | datastar.close(done); 36 | }); 37 | }); 38 | }); 39 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2016 GoDaddy Operating Company LLC 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. 22 | -------------------------------------------------------------------------------- /lib/statement-builder/statements/create.js: -------------------------------------------------------------------------------- 1 | const Statement = require('../statement'); 2 | 3 | class CreateStatement extends Statement { 4 | _init(options, entity) { 5 | const opts = {}; 6 | const ret = this.schema.validate(this.schema.fixKeys(entity), 'create'); 7 | opts.entity = this.schema.deNull(ret); 8 | 9 | // 10 | // Allow ttl to be passed into an insert 11 | // 12 | if (options.ttl) opts.ttl = options.ttl; 13 | 14 | return opts; 15 | } 16 | 17 | build(options) { 18 | const allFields = this.schema.fields(); 19 | const entity = options.entity; 20 | // 21 | // Handle lookup table writes. 22 | // 23 | const table = options.table || this.table; 24 | 25 | const placeholders = new Array(allFields.length).join(', ?'); 26 | const ttlClause = options.ttl ? 
` USING TTL ${options.ttl}` : ''; 27 | this.cql = `INSERT INTO ${table} (${allFields.join(', ')}) VALUES (?${placeholders})${ttlClause};`; 28 | 29 | this.options = { executeAsPrepared: true, queryName: `insert-${this.name}` }; 30 | // 31 | // Remark: This could be preparsed and put on options so we wouldn't have to know 32 | // about the schema 33 | // 34 | this.params = this.schema.getValues(entity); 35 | 36 | return this; 37 | } 38 | } 39 | 40 | module.exports = CreateStatement; 41 | -------------------------------------------------------------------------------- /lib/statement-builder/statements/remove.js: -------------------------------------------------------------------------------- 1 | const Statement = require('../statement'); 2 | 3 | class RemoveStatement extends Statement { 4 | _init(options, entity) { 5 | const opts = {}; 6 | 7 | const conditions = options.conditions || entity; 8 | 9 | opts.conditionals = this.schema.createRemoveConditions(conditions, options.table); 10 | 11 | if (!Object.keys(opts.conditionals.query).length) { 12 | throw new Error(`Insufficient conditions to remove ${JSON.stringify(conditions)}`); 13 | } 14 | 15 | return opts; 16 | } 17 | 18 | build(options) { 19 | const conditionals = options.conditionals; 20 | // 21 | // Handle lookup table deletes by being able to pass in the table; 22 | // Public API is not allowed to do this 23 | // 24 | const table = conditionals.table || this.table; 25 | 26 | // 27 | // The actual CQL 28 | // 29 | this.cql = `DELETE FROM ${table}`; 30 | this.cql += ` WHERE ${conditionals.query.join(' AND ')}`; 31 | 32 | // 33 | // Name of the query to pass to priam 34 | // 35 | this.name += `remove-${table}${conditionals.fields.sort().join('-')}`; 36 | this.options = { executeAsPrepared: true, queryName: this.name }; 37 | this.params = conditionals.params; 38 | 39 | return this; 40 | } 41 | } 42 | 43 | module.exports = RemoveStatement; 44 | 
-------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "datastar", 3 | "version": "4.0.4", 4 | "description": "Now witness the power of this FULLY ARMED AND OPERATIONAL DATASTAR!", 5 | "main": "lib", 6 | "files": [ 7 | "lib", 8 | "test/helpers", 9 | "test/mocks" 10 | ], 11 | "dependencies": { 12 | "async": "^3.2.4", 13 | "clone": "^1.0.4", 14 | "joi-of-cql": "^2.0.4", 15 | "object-assign": "^4.0.1", 16 | "priam": "^4.0.0", 17 | "tinythen": "^1.0.1", 18 | "to-camel-case": "^1.0.0", 19 | "to-snake-case": "^1.0.0", 20 | "understudy": "^4.1.0", 21 | "uuid": "^2.0.1" 22 | }, 23 | "devDependencies": { 24 | "assume": "^2.1.0", 25 | "assume-sinon": "^1.0.0", 26 | "cassandra-driver": "^4.1.0", 27 | "eslint": "^8.29.0", 28 | "eslint-config-godaddy": "^7.0.0", 29 | "lodash.clonedeep": "^4.5.0", 30 | "mocha": "^10.2.0", 31 | "nyc": "^15.1.0", 32 | "proxyquire": "^1.7.3", 33 | "sinon": "^1.17.2" 34 | }, 35 | "scripts": { 36 | "mocha": "mocha -R spec test/**/*.tests.js", 37 | "coverage": "nyc npm run mocha", 38 | "pretest": "npm run lint", 39 | "lint": "eslint-godaddy lib/ test/", 40 | "test": "npm run coverage" 41 | }, 42 | "repository": { 43 | "type": "git", 44 | "url": "git@github.com:godaddy/datastar.git" 45 | }, 46 | "keywords": [ 47 | "C*", 48 | "cassandra", 49 | "ODM", 50 | "models", 51 | "schemas", 52 | "data modeling" 53 | ], 54 | "engines": { 55 | "node": "^10.17.0 || >=12.3.0" 56 | }, 57 | "author": "GoDaddy Engineers", 58 | "license": "MIT" 59 | } 60 | -------------------------------------------------------------------------------- /test/unit/await-wrap.tests.js: -------------------------------------------------------------------------------- 1 | const assume = require('assume'); 2 | const { AwaitWrap } = require('../..'); 3 | const Stream = require('stream'); 4 | const mocks = require('../mocks'); 5 | const helpers = 
require('../helpers'); 6 | 7 | describe('datastar-await-wrap', () => { 8 | let Model; 9 | let wrapped; 10 | const data = { name: 'what' }; 11 | 12 | before(() => { 13 | const datastar = helpers.connectDatastar({ mock: true }, mocks.datastar()); 14 | const cql = datastar.schema.cql; 15 | Model = datastar.define('model', { 16 | schema: datastar.schema.object({ 17 | name: cql.text() 18 | }).partitionKey('name') 19 | }); 20 | wrapped = new AwaitWrap(Model); 21 | }); 22 | 23 | it('should correctly wrap the create function', async () => { 24 | await wrapped.create(data); 25 | }); 26 | 27 | it('should correctly wrap the update function', async () => { 28 | await wrapped.update(data); 29 | }); 30 | 31 | it('should correctly wrap the remove function', async () => { 32 | await wrapped.remove(data); 33 | }); 34 | 35 | it('should correctly wrap the findOne function', async () => { 36 | await wrapped.findOne(data); 37 | }); 38 | 39 | it('should correctly wrap the findAll function', async () => { 40 | await wrapped.findAll(data); 41 | }); 42 | 43 | it('should return a stream from the findAllStream function', () => { 44 | const strm = wrapped.findAllStream(data); 45 | assume(strm).is.instanceof(Stream); 46 | }); 47 | 48 | it('should wrap ensureTables as the ensure function', async () => { 49 | await wrapped.ensure(); 50 | }); 51 | 52 | it('should wrap dropTables as the drop function', async () => { 53 | await wrapped.drop(); 54 | }); 55 | 56 | }); 57 | -------------------------------------------------------------------------------- /examples/music/index.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const http = require('http'); 4 | const url = require('url'); 5 | const qs = require('querystring'); 6 | const Datastar = require('datastar'); 7 | const stringify = require('stringify-stream'); 8 | 9 | // 10 | // Setup connection and instances, it will lazily connect since 11 | // we are just using a simple http server 12 
| // 13 | const datastar = new Datastar({ 14 | config: require('./config') 15 | }).connect(); 16 | 17 | const models = require('./models')(datastar); 18 | 19 | const stringifyOpts = { open: '[', close: ']' }; 20 | 21 | http.createServer(function respond(req, res) { 22 | const parsed = url.parse(req.url); 23 | const params = qs.parse(parsed.query); 24 | 25 | // 26 | // Naive routes that just return all of a given resource 27 | // to keep it simple 28 | // 29 | if (/^\/album/.test(parsed.path)) { 30 | res.writeHead(200, { 31 | 'content-type': 'application/json', 32 | 'Trailer': 'Error' 33 | }); 34 | 35 | return models.Album.findAll({ 36 | albumId: params.albumId 37 | }) 38 | .once('error', writeTrailers(res)) 39 | .pipe(stringify(stringifyOpts)) 40 | .pipe(res, { end: false }) 41 | .on('finish', () => res.end()) 42 | } 43 | 44 | if (/^\/artist/.test(parsed.path)) { 45 | res.writeHead(200, { 46 | 'content-type': 'application/json', 47 | 'Trailer': 'Error' 48 | }); 49 | 50 | return models.Artist.findAll({}) 51 | .once('error', writeTrailers(res)) 52 | .pipe(stringify(stringifyOpts)) 53 | .pipe(res, { end: false }) 54 | .on('finish', () => res.end()); 55 | } 56 | 57 | res.writeHead(404, { 58 | 'content-type': 'application/json' 59 | }); 60 | res.end(JSON.stringify({ error: 'No Resource found' })); 61 | 62 | }).listen(3000); 63 | 64 | function writeTrailers(res) { 65 | return (err) => { 66 | res.addTrailers({ 'Error': err.message }); 67 | res.end(); 68 | } 69 | }; 70 | 71 | -------------------------------------------------------------------------------- /test/unit/model.instance.tests.js: -------------------------------------------------------------------------------- 1 | 2 | const assume = require('assume'), 3 | dogFixture = require('../fixtures/dog'), 4 | schemas = require('../fixtures/schemas'), 5 | mocks = require('../mocks'), 6 | helpers = require('../helpers'), 7 | cloneDeep = require('lodash.clonedeep'); 8 | 9 | assume.use(require('assume-sinon')); 10 | 11 | 
describe('Model instance (unit)', () => { 12 | let dog, Dog, datastar, entity; 13 | 14 | beforeEach(() => { 15 | datastar = helpers.connectDatastar({ mock: true }, mocks.datastar()); 16 | Dog = datastar.define('dog', { 17 | schema: schemas.dog 18 | }); 19 | entity = cloneDeep(dogFixture); 20 | dog = Dog.toInstance(entity); 21 | }); 22 | 23 | it('should "transform" data into an instance of the defined model', () => { 24 | assume(dog).is.instanceof(Dog); 25 | }); 26 | 27 | describe('json type handling', () => { 28 | it('should deserialize a json property', () => { 29 | assume(dog.owner.name).is.equal('John Doe'); 30 | }); 31 | }); 32 | 33 | describe('Stringify an array, toJSON handling', () => { 34 | it('should contain the camelCase key when an array is stringified rather than snake_case', () => { 35 | const ary = [dog]; 36 | assume(JSON.stringify(ary).indexOf('dogThing')).is.not.equal(-1); 37 | }); 38 | }); 39 | 40 | describe('#validate', () => { 41 | it('should validate the current data against the schema validation', () => { 42 | dog.weight = 80; 43 | assume(dog.validate()).is.deep.equal({ id: dog.id, weight: 80 }); 44 | }); 45 | 46 | it('should throw the validation error', () => { 47 | dog.id = 'invalid guid'; 48 | assume(() => dog.validate()).throws(); 49 | }); 50 | }); 51 | 52 | it('should handle de-nulling cyclic objects', () => { 53 | dog.owner.puppies = [dog]; // inject cyclic reference 54 | // See https://github.com/godaddy/datastar/pull/27 55 | // Previously, this would cause recursive loop (Max call stack size exceeded error) 56 | assume(dog.owner).exists(); 57 | }); 58 | }); 59 | 60 | -------------------------------------------------------------------------------- /test/mocks/priam.js: -------------------------------------------------------------------------------- 1 | const { PassThrough } = require('stream'); 2 | 3 | class Chainable { 4 | /* 5 | * function Chainable 6 | * Constructor function for a mock batch 7 | * or query. 
8 | */ 9 | constructor() { 10 | this.statements = []; 11 | } 12 | 13 | /* 14 | * function add (statement) 15 | * Adds the statement to this Chainable instance. 16 | */ 17 | add(statement) { 18 | this.statements.push(statement); 19 | return this; 20 | } 21 | 22 | stream() { 23 | const stream = new PassThrough({ objectMode: true }); 24 | stream.end(); 25 | return stream; 26 | } 27 | 28 | *iterate() {} 29 | 30 | /* 31 | * function execute (callback) 32 | * Invokes the callback in the next tick 33 | */ 34 | execute(callback) { 35 | setImmediate(callback); 36 | return this; 37 | } 38 | } 39 | 40 | /* 41 | * function query (cql) 42 | * function options (obj) 43 | * function params (obj) 44 | * function single() -> changes how results are displayed from query 45 | * function first() -> changes how results are displayed from query 46 | * Invokes the callback in the next tick 47 | */ 48 | Chainable.prototype.single = 49 | Chainable.prototype.consistency = 50 | Chainable.prototype.first = 51 | Chainable.prototype.query = 52 | Chainable.prototype.options = 53 | Chainable.prototype.params = function () { 54 | return this; 55 | }; 56 | 57 | class Priam { 58 | 59 | /* 60 | * function Priam (opts) 61 | * Constructor function for the Priam mock responsible for 62 | * mocking our communication with Cassandra. 63 | */ 64 | constructor(opts) { 65 | this.options = opts; 66 | } 67 | 68 | /* 69 | * Alias the priam connect function 70 | */ 71 | connect(keyspace, callback) { 72 | if (!callback) { 73 | callback = keyspace; 74 | keyspace = null; 75 | } 76 | 77 | setImmediate(callback); 78 | } 79 | 80 | beginQuery() { 81 | return new Chainable(); 82 | } 83 | } 84 | 85 | /* 86 | * function beginBatch () 87 | * function beginQuery () 88 | * Begins a new batch. 
89 | */ 90 | Priam.prototype.beginBatch = Priam.prototype.beginQuery; 91 | 92 | module.exports = Priam; 93 | -------------------------------------------------------------------------------- /lib/statement-builder/statement.js: -------------------------------------------------------------------------------- 1 | 2 | 3 | module.exports = Statement; 4 | 5 | /** 6 | * 7 | * @param schema {object} - Schema object 8 | * @constructor Statement 9 | * @options Object representing what creates a statement 10 | * @entities (optional) Possible entities to compile into a statement 11 | * @conditionals (optional) Used to compile into a proper query 12 | * @fields (optional) Used to compile the beginning of a select to only fetch certain fields 13 | * 14 | */ 15 | 16 | function Statement(schema) { 17 | // 18 | // The point here is to internally transform these options into a proper CQL 19 | // statement with params to be used in priam. 20 | // 21 | 22 | this.cql = ''; 23 | this.params = []; 24 | this.options = {}; 25 | this.name = ''; 26 | this.schema = schema; 27 | this.table = this.schema.name; 28 | // 29 | // Proxy typeOf function 30 | // 31 | this.typeOf = this.schema.type; 32 | } 33 | 34 | // 35 | // Return the options so we don't unnecessarily keep them on the object and 36 | // handle errors 37 | // 38 | Statement.prototype.init = function (options, entity) { 39 | return this._init(options || {}, entity) || {}; 40 | }; 41 | 42 | // 43 | // Override this in higher level statements 44 | // 45 | Statement.prototype._init = function () { 46 | }; 47 | 48 | /* 49 | * Adds this Statement instance to a new query against an 50 | * arbitrary Cassandra connection which could be (or could not be) 51 | * associated with a StatementCollection, Model, or other utility. 52 | */ 53 | Statement.prototype.extendQuery = function (query) { 54 | // 55 | // Remark: not sure if we can fully drop support from reading 56 | // "cached" queries from disk to support all current scenarios. 
57 | // 58 | // if (this.filename) { 59 | // query = query.namedQuery(this.filename); 60 | // } 61 | 62 | // 63 | // Allows for invocation of arbitrary additional `priam` 64 | // methods as necessary by concrete statements. e.g. 65 | // `query.first()` or `query.single()` in `FindStatement` 66 | // 67 | if (this.mutate) { 68 | query = this.mutate(query); 69 | } 70 | 71 | return query 72 | .query(this.cql) 73 | .options(this.options) 74 | .params(this.params); 75 | }; 76 | -------------------------------------------------------------------------------- /lib/statement-builder/statements/alter.js: -------------------------------------------------------------------------------- 1 | const 2 | With = require('../partial-statements/with'), 3 | Statement = require('../statement'); 4 | 5 | // 6 | // Remark: A Statement to handle Alter operations. Its not yet known if this will 7 | // be generic enough for all Alter statements but we will see 8 | // 9 | class AlterStatement extends Statement { 10 | constructor(...args) { 11 | super(...args); 12 | 13 | this.types = ['TABLE']; 14 | 15 | this.typesLookup = this.types.reduce(function (acc, type) { 16 | acc[type] = true; 17 | return acc; 18 | }, {}); 19 | } 20 | 21 | // 22 | // Remark: this returns the options passed into build 23 | // 24 | _init(options) { 25 | const opts = {}; 26 | 27 | const actions = options.alter || options.actions || options.with || {}; 28 | opts.type = options.type && options.type.toUpperCase(); 29 | opts.table = options.table; 30 | // 31 | // Simple validation on type of alter statement 32 | // 33 | if (!opts.type || !this.typesLookup[opts.type]) { 34 | throw new Error(`Invalid type ${opts.type}`); 35 | } 36 | 37 | // 38 | // Since the partial statement can error, we generate it in the init step 39 | // and use it later 40 | // 41 | if (actions && Object.keys(actions).length) { 42 | const w = new With(actions); 43 | if (w.error) throw w.error; 44 | opts.with = w.cql; 45 | } 46 | 47 | return opts; 48 | } 
const joi = require('joi-of-cql');
const cql = joi.cql;

/**
 * @property schemas {Object}
 * joi-of-cql schema fixtures shared by the unit and integration tests.
 * Each schema declares its partition key (and clustering key where
 * relevant) and most rename the generic `id` property to the table
 * specific key column so tests can use `id` uniformly.
 */
module.exports = {
  // Artist rows keyed solely by artist_id; related_artists explicitly
  // allows null to exercise null-conversion paths.
  artist: joi.object({
    artist_id: cql.uuid(),
    name: cql.text(),
    create_date: cql.timestamp(),
    update_date: cql.timestamp(),
    members: cql.set(cql.text()),
    related_artists: cql.set(cql.uuid()).allow(null),
    traits: cql.set(cql.text()),
    metadata: cql.map(cql.text(), cql.text())
  }).partitionKey('artist_id')
    .rename('id', 'artist_id', { ignoreUndefined: true }),
  // Albums are partitioned by artist and clustered by album, so an
  // artist's albums live on one partition.
  album: joi.object({
    artist_id: cql.uuid(),
    album_id: cql.uuid(),
    name: cql.text(),
    track_list: cql.list(cql.text()),
    song_list: cql.list(cql.uuid()),
    release_date: cql.timestamp(),
    create_date: cql.timestamp(),
    update_date: cql.timestamp(),
    producer: cql.text()
  }).partitionKey('artist_id')
    .clusteringKey('album_id')
    .rename('id', 'album_id', { ignoreUndefined: true }),
  // Song has several uuid columns; presumably unique_id/other_id back
  // lookup tables in the model tests -- TODO confirm against callers.
  song: joi.object({
    song_id: cql.uuid(),
    unique_id: cql.uuid(),
    other_id: cql.uuid(),
    name: cql.text(),
    length: cql.text(),
    artists: cql.set(cql.uuid())
  }).partitionKey('song_id')
    .rename('id', 'song_id', { ignoreUndefined: true }),
  person: joi.object({
    person_id: cql.uuid(),
    name: cql.text(),
    create_date: cql.timestamp(),
    characteristics: cql.list(cql.text())
  }).partitionKey('person_id').rename('id', 'person_id', { ignoreUndefined: true }),
  // Composite partition key example: both cat_id and hash are required
  // to address a row.
  cat: joi.object({
    cat_id: cql.uuid(),
    hash: cql.int(),
    name: cql.text(),
    create_date: cql.timestamp()
  }).partitionKey(['cat_id', 'hash']).rename('id', 'cat_id', { ignoreUndefined: true }),
  // Dog uses a plain `id` column (no rename) and exercises validation
  // (`name` disallows null and requires min length 2) plus json columns.
  dog: joi.object({
    id: cql.uuid(),
    name: cql.text().disallow(null).min(2),
    color: cql.text(),
    weight: cql.int(),
    owner: cql.json(),
    vaccinations: cql.list(cql.json()),
    dog_thing: cql.text()
  }).partitionKey('id'),
  // Foo exercises the nullConversion meta flag on non_nullable_id.
  foo: joi.object({
    foo_id: cql.uuid(),
    secondary_id: cql.uuid(),
    non_nullable_id: cql.uuid().meta({ nullConversion: false }),
    nullable_id: cql.uuid(),
    something: cql.text()
  }).partitionKey('foo_id')
    .clusteringKey('secondary_id')
};
  it('should throw an error when given an invalid schema', () => {
    // A schema without a partitionKey cannot map to a Cassandra table,
    // so the Schema constructor must reject it.
    const invalid = joi.object({
      id: cql.uuid()
    });

    function init() {
      // eslint-disable-next-line
      new Schema('invalid', invalid);
    }

    assume(init).throws(/must define a partitionKey/);
  });
const Statement = require('../statement');

/**
 * Builds SELECT statements (find / first / one / count) from a set of
 * conditions, an optional field list, and optional limit / filtering
 * modifiers.
 */
class FindStatement extends Statement {
  /*
   * Validate and normalize the find options. Throws when conditions were
   * supplied but none of them produced a usable WHERE clause.
   */
  _init(options, entity) {
    const conditions = options.conditions || entity;
    //
    // Capture how many conditions the caller intended to pass BEFORE they
    // are parsed: a bare `SELECT * FROM table` is only acceptable when
    // zero conditions were given on purpose.
    //
    const intended = Object.keys(conditions).length;

    //
    // Parse the conditions into the structure we need; see the schema
    // code for details.
    //
    const conditionals = this.schema.createConditions(conditions);

    if (!Object.keys(conditionals.query).length && intended) {
      throw new Error(`Insufficient conditions for find, ${JSON.stringify(conditions)}`);
    }

    return {
      conditionals,
      type: options.type,
      // Translate aliased / camelCase field names into column names
      fields: this.schema.fixKeys(options.fields || []),
      limit: options.limit,
      allowFiltering: options.allowFiltering
    };
  }

  build({ conditionals, fields, limit, type, allowFiltering }) {
    //
    // Lookup-table finds carry their own table on the conditionals
    // (established when conditions are created); otherwise fall back to
    // the schema's main table.
    //
    const table = conditionals.table || this.table;

    const selected = type === 'count'
      ? 'COUNT(*)'
      : this.schema.fieldString(fields);

    this.cql = `SELECT ${selected} FROM ${table}`;
    this.name += `${fields.sort().join('-') || type}-from-${table}`;

    if (conditionals.query && conditionals.query.length) {
      this.cql += ` WHERE ${conditionals.query.join(' AND ')}`;
      this.name += `-by-${conditionals.fields.sort().join('-')}`;
    }

    //
    // Limit the query
    //
    if (typeof limit === 'number' && limit > 0) {
      this.cql += ` LIMIT ${limit}`;
      this.name += `-limit-${limit}`;
    }

    if (allowFiltering) {
      this.cql += ' ALLOW FILTERING';
      this.name += '-allow-filtering';
    }

    //
    // These should ideally be configurable.
    //
    this.options = {
      executeAsPrepared: true,
      queryName: this.name,
      //
      // For streaming and large queries
      //
      autoPage: true
    };
    this.params = conditionals.params;

    //
    // `first` and `count` only ever need one row; `one` asserts exactly
    // one row comes back.
    //
    if (type === 'first' || type === 'count') {
      this.mutate = (query) => query.first();
    } else if (type === 'one') {
      this.mutate = (query) => query.single();
    }

    return this;
  }
}

module.exports = FindStatement;
/*
 * Constructor function for the StatementBuilder responsible for creating
 * individual statements to be used individually or as part of a
 * StatementCollection.
 *
 * @param {Schema} schema Underlying schema we are building statements for
 * @param {Object} options Options for building statements.
 *
 */
const StatementBuilder = module.exports = function StatementBuilder(schema, options) {
  this.schema = schema;
  this.options = options || {};
  // Proxy the schema's type function for convenience.
  this.typeOf = this.schema.type;

  //
  // Methods that require us to build a specific compound statement to execute
  // multiple queries on multiple tables (the write paths, which must also
  // touch any lookup tables).
  //
  this.multiMethods = ['create', 'update', 'remove'].reduce(function (acc, key) {
    acc[key] = true;
    return acc;
  }, {});
};

//
// Take the conditions and fields and build CQL from it. One prototype
// method is generated per supported action.
//
['create', 'update', 'remove', 'find', 'table', 'alter'].forEach(function (action) {
  StatementBuilder.prototype[action] = function (options, entity) {
    options = options || {};
    options.action = action;
    //
    // Construct and build the statement
    //
    const statement = new statements[action](this.schema);
    const opts = statement.init(options, entity);

    //
    // If we don't have lookup tables, return the standard built statement,
    // otherwise build a compound statement directly here.
    //
    if (!this.schema.lookups || !this.multiMethods[action]) {
      return statement.build(opts);
    }

    //
    // Build a compound statement and add the initial statement to it
    //
    const compound = new CompoundStatement(this.schema);
    compound.add(statement.build(opts));
    //
    // Use the lookup tables map and build one additional statement per
    // lookup table. Since we already validated the first statement above,
    // re-initializing the same options cannot error.
    //
    const lookupMap = this.schema.lookupTables;
    const keys = Object.keys(lookupMap);

    for (let i = 0; i < keys.length; i++) {
      const table = lookupMap[keys[i]];
      //
      // Create a fresh statement for this lookup table.
      //
      const stmnt = new statements[action](this.schema);
      //
      // Set `table` both on the raw options and on the initialized result.
      // We do it this way (rather than letting init consume it) because we
      // don't want a statement to accept a table from a user's input; that
      // would cause unintended behavior.
      //
      options.table = table;
      //
      // This can never error because we ran `init` on this SAME set of
      // options previously and only added a table property.
      //
      const op = stmnt.init(options, entity);
      op.table = table;
      compound.add(stmnt.build(op));
    }

    //
    // Hand back the fully populated compound statement.
    //
    return compound;
  };
});
'development' : env; 29 | if (exports.configs[env]) { 30 | return callback(null, exports.configs[env]); 31 | } 32 | 33 | function createKeyspace(data) { 34 | const client = new cassandra.Client({ 35 | contactPoints: data.cassandra.contactPoints, 36 | localDataCenter: data.cassandra.localDataCenter, 37 | authProvider: new cassandra.auth.PlainTextAuthProvider( 38 | data.cassandra.credentials.username, 39 | data.cassandra.credentials.password 40 | ) 41 | }); 42 | 43 | client.execute(`CREATE KEYSPACE IF NOT EXISTS ${data.cassandra.keyspace} WITH replication = {'class' : 'SimpleStrategy', 'replication_factor' : 1};`, function (err) { 44 | if (err) return callback(err); 45 | client.shutdown(); 46 | setConfig(data); 47 | }); 48 | 49 | } 50 | 51 | /* 52 | * function setConfig(data) 53 | * Sets the config for this env. 54 | */ 55 | function setConfig(data) { 56 | exports.configs[env] = data; 57 | callback(null, exports.configs[env]); 58 | } 59 | 60 | // 61 | // If `DATASTAR_CONFIG` is set then load from 62 | // that file. 63 | // 64 | const configFile = process.env.DATASTAR_CONFIG || path.join(__dirname, '..', 'config', 'config.example.json'); 65 | 66 | return createKeyspace(require(configFile)); 67 | }; 68 | 69 | /* 70 | * function debug (obj) 71 | * Simple debug function when `process.env.DEBUG` is set. 72 | */ 73 | exports.debug = function debug(obj) { 74 | if (process.env.DEBUG) { 75 | console.log(util.inspect(obj, { depth: 20, color: true })); 76 | } 77 | }; 78 | 79 | /* 80 | * function createDatastar(opts) 81 | * Returns a new Datastar instance with the specified opts. 82 | */ 83 | exports.createDatastar = function (opts, Proto) { 84 | Proto = Proto || Datastar; 85 | return new Proto(opts); 86 | }; 87 | 88 | /* 89 | * function connectDatastar(opts) 90 | * Returns a new Datastar instance with the specified opts. 
const
  snakeCase = require('./snake-case'),
  camelCase = require('./camel-case'),
  clone = require('clone');

//
// A class for managing state change and alias manipulation of modeled
// data. It tracks which attributes have been written (`_changed`), what
// their original values were (`_was`), and whether anything changed at
// all (`_isDirty`).
//
class Attributes {
  /**
   * @param {Object} instance - Model instance that owns this data
   * @param {Object} data - Raw attribute data, normalized via the schema
   */
  constructor(instance, data) {
    this.Model = instance.Model;
    this.instance = instance;
    this.schema = this.Model.schema;
    this.data = this.schema.prepareForUse(data);
    this._isDirty = false;
    this._was = {};     // original values, keyed by snake_case column name
    this._changed = {}; // latest written values, keyed the same way
  }

  //
  // An explicit getter method for the properties in `data`
  // that manages aliases and null conversion.
  //
  get(name) {
    const key = this.schema.entityKeyToColumnName(name);
    const value = this.data[key];
    if (this.schema.requiresNullConversion(key)) {
      return this.schema.valueToNull(value);
    }
    // Key columns are returned verbatim; other columns get their stored
    // null placeholder converted back via the schema.
    if (this.schema.isKey(key)) { return value; }
    return this.schema.nullToValue(this.schema.fieldMeta(key), value);
  }

  //
  // An explicit setter method for the properties in `data`
  // that manages aliases and records a change state as well
  // as emitting a state change event if the Model has enabled it.
  //
  set(name, value) {
    if (this.schema._aliases[name]) name = this.schema._aliases[name];
    const camelName = camelCase(name);
    const snakeName = snakeCase(name);
    this._isDirty = true;
    const oldData = this.data[snakeName];

    //
    // Only track the original value in case of multiple changes. Use an
    // existence check rather than `||` so a falsy original (0, '', false,
    // null) is preserved instead of being overwritten by later sets.
    //
    if (!(snakeName in this._was)) {
      this._was[snakeName] = oldData;
    }
    this._changed[snakeName] = this.data[snakeName] = value;
    if (this.Model.options.notifyAttributeChanges) {
      this.Model.emit('attribute:change', this.instance, camelName, value, oldData);
    }
  }

  //
  // Return the original (pre-change) value for the given attribute.
  //
  was(name) {
    return this._was[snakeCase(name)];
  }

  //
  // Return the previous value generated based on current and previous data
  //
  previous() {
    return Object.keys(this.data).reduce((prev, key) => {
      if (!(key in prev)) {
        prev[key] = this.data[key];
      }

      return prev;
    }, clone(this._was));
  }

  //
  // Collect the set of values that must be (re)validated: everything that
  // changed, plus the key columns (and lookup-table keys) merged in from
  // the current data.
  //
  needsValidation() {
    let names = this.schema.keys();
    const data = this.data;

    if (this.schema.lookups) {
      names = names.concat(Object.keys(this.schema.lookupTables));
    }

    return names.reduce(function (memo, key) {
      // don't include keys that are undefined
      if (key in memo || key in data) {
        memo[key] = key in memo ? memo[key] : data[key];
      }
      return memo;
    }, clone(this._changed));
  }

  //
  // Public getter for state change management: with a name, whether that
  // attribute changed; without, whether anything changed.
  //
  isDirty(name) {
    return name ? snakeCase(name) in this._changed : this._isDirty;
  }

  //
  // Serialize the data with nulls restored; camelCase keys unless `snake`
  // is truthy.
  //
  toJSON(snake) {
    const data = this.schema.reNull(this.data);
    return snake ? data : this.schema.toCamelCase(data);
  }
}

module.exports = Attributes;
this.returnBatch(statement.statements) 59 | : statement.extendQuery(this.connection.beginQuery()); 60 | 61 | query.consistency(this._consistency); 62 | query.execute(next); 63 | }, 64 | callback 65 | ); 66 | } 67 | 68 | /* 69 | * Executes the set of statements as a Cassandra batch representing 70 | * a single transation. 71 | */ 72 | executeBatch(callback) { 73 | const batch = this.returnBatch(this.statements) 74 | .consistency(this._consistency); 75 | // 76 | // TODO: How do we handle these additional "options": 77 | // - `.timestamp()` 78 | // This should be exposed via some option passed to the model in the future 79 | // 80 | 81 | batch.execute(callback); 82 | } 83 | 84 | // 85 | // Recursively build the batch including any nested batch statements that need 86 | // to be built as well. This allows us to have nested batches within an 87 | // execution of just individual statements as well! 88 | // 89 | returnBatch(statements) { 90 | const batch = this.connection.beginBatch(); 91 | statements.forEach(function (statement) { 92 | if (statement.batch) { 93 | return batch.add(this.returnBatch(statement.statements)); 94 | } 95 | batch.add(statement.extendQuery(this.connection.beginQuery())); 96 | }, this); 97 | 98 | return batch; 99 | } 100 | 101 | /* 102 | * Add a statement to the collection associated with this instance 103 | */ 104 | add(statement) { 105 | this.statements.push(statement); 106 | } 107 | } 108 | 109 | module.exports = StatementCollection; 110 | -------------------------------------------------------------------------------- /lib/await-wrap.js: -------------------------------------------------------------------------------- 1 | const thenify = require('tinythen'); 2 | 3 | /** 4 | * The beginning experiment for async/await support for Datastar models. 5 | * We use this class to wrap each model and the used methods with thenables. 
/**
 * Experimental async/await support for Datastar models. Each model method
 * we use is wrapped in a thenable so callers can simply `await` it; a
 * dedicated `findAllStream` keeps streaming support for findAll.
 *
 * @class AwaitWrap
 */
class AwaitWrap {
  /**
   * @param {Datastar.Model} model A defined model from datastar
   * @constructor
   */
  constructor(model) {
    this.model = model;
  }

  /**
   * Thenable wrap the create method
   * @returns {Thenable} wrapped result
   */
  create(...args) {
    return thenify(this.model, 'create', ...args);
  }

  /**
   * Thenable wrap the update method
   * @returns {Thenable} wrapped result
   */
  update(...args) {
    return thenify(this.model, 'update', ...args);
  }

  /**
   * Thenable wrap the remove method
   * @returns {Thenable} wrapped result
   */
  remove(...args) {
    return thenify(this.model, 'remove', ...args);
  }

  /**
   * Thenable wrap the findOne method
   * @returns {Thenable} wrapped result
   */
  findOne(...args) {
    return thenify(this.model, 'findOne', ...args);
  }

  /**
   * Alias of findOne.
   * @returns {Thenable} wrapped result
   */
  get(...args) {
    return this.findOne(...args);
  }

  /**
   * Return the normal model findAll so callers can keep the stream it
   * may hand back -- deliberately NOT wrapped in a thenable.
   * @returns {Stream} of results
   */
  findAllStream(...args) {
    return this.model.findAll(...args);
  }

  /**
   * Thenable wrap the findAll method
   * @returns {Thenable} wrapped result
   */
  findAll(...args) {
    return thenify(this.model, 'findAll', ...args);
  }

  /**
   * Thenable wrap the ensureTables method
   * @returns {Thenable} wrapped result
   */
  ensure() {
    return thenify(this.model, 'ensureTables');
  }

  /**
   * Alias of ensure.
   * @returns {Thenable} wrapped result
   */
  ensureTables() {
    return this.ensure();
  }

  /**
   * Thenable wrap the dropTables method
   * @returns {Thenable} wrapped result
   */
  drop() {
    return thenify(this.model, 'dropTables');
  }

  /**
   * Alias of drop.
   * @returns {Thenable} wrapped result
   */
  dropTables() {
    return this.drop();
  }

  /**
   * Fetch the raw cassandra-driver connection; needed for operations
   * (e.g. counters) the model API does not expose.
   * @returns {Thenable} raw cassandra driver
   */
  _getConnection() {
    return thenify(this.model.connection, 'getConnectionPool', null, false);
  }
}

module.exports = AwaitWrap;
//
// What is a partial-statement? A simple string building helper that gets
// passed options and produces either a CQL fragment or an error. Partial
// statements are assumed to be context unaware.
//

class With {
  /**
   * Build the `WITH ...` clause for the given options. Processing errors
   * are captured on `this.error` rather than thrown, so callers check
   * `w.error` after construction (see AlterStatement/TableStatement).
   *
   * Note: the old `if (!(this instanceof With)) return new With(opts)`
   * guard was removed as unreachable dead code -- ES2015 class
   * constructors throw a TypeError before the body runs when called
   * without `new`.
   *
   * @param {Object} opts Map of table options (e.g. compaction, orderBy)
   */
  constructor(opts) {
    this.cql = 'WITH ';
    this.error = null;

    try {
      this.cql += this.process(opts);
    } catch (err) {
      this.error = err;
    }
  }

  //
  // Turn a map of table options into the body of a WITH clause.
  //
  // Example input:
  // {
  //   compaction: { /* object of compaction options */ },
  //   gcGraceSeconds: 9680
  // }
  //
  // and the corresponding output:
  // WITH compaction = {
  //   'some_setting' : 'someValue'
  // } AND gc_grace_seconds = 9680
  //
  // Each option is rendered according to the type of its value and the
  // pieces are joined with ' AND '. (If other separators ever become
  // necessary this will need more variability.)
  //
  process(opts) {
    return Object.keys(opts)
      .map(function (action) {
        const args = opts[action];
        const executed = snakeCase(action);
        const typeArg = type(args);
        //
        // Figure out what to do based on the type of args
        // and the action
        //
        switch (typeArg) {
          case 'object':
            //
            // Special cases (e.g. orderBy) have dedicated CQL syntax.
            //
            if (specialActionMap[action])
              return this[action](args, specialActionMap[action]);
            //
            // Render plain objects as a quoted map literal; this is
            // currently used for compaction, for example.
            //
            return `${executed} = ${this[typeArg](args)}`;
          //
          // Wrap quotes around the string types
          //
          case 'string':
            return `${executed} = '${args}'`;
          case 'number':
            return `${executed} = ${args}`;
          default:
            throw new Error(
              `Cannot create with statement with ${typeArg} ${args}`
            );
        }
      }, this).join(' AND ');
  }

  //
  // Handle turning an object into a CQL map literal for certain
  // configuration. Keys are snake_cased; keys and values single quoted.
  //
  object(mapping) {
    return '{ \n' + Object.keys(mapping).map(function (key, i) {
      //
      // Check if we have to prefix with a comma to build the json object
      //
      const sub = i !== 0 ? ' ,' : ' ';
      //
      // Translate any keys to snake_case because thats what seems
      // reasonable
      //
      return `${sub} '${snakeCase(key)}' : '${mapping[key]}'`;
    }).join('\n') + ' }';
  }

  //
  // Handle the dedicated CLUSTERING ORDER BY syntax.
  //
  orderBy(args, opts) {
    let cql = util.format(opts.cql, args.key);
    if (args.order) cql += ` ${args.order}`;
    cql += ')';
    return cql;
  }
}

//
// typeof-style helper that distinguishes object/string/number/etc. via
// Object.prototype.toString, avoiding `typeof null === 'object'` style
// surprises.
//
function type(of) {
  return Object.prototype.toString.call(of).slice(8, -1).toLowerCase();
}

module.exports = With;
15 | * 16 | * @constructor 17 | * @type {module.Datastar} 18 | * @param {Object} options - Config options options 19 | * @param {function} connect - 20 | */ 21 | constructor(options, connect) { 22 | this.options = options || {}; 23 | this.connect = connect || this.connect; 24 | } 25 | 26 | /** 27 | * Attach the connection to the model constructor 28 | * @param {Object} objectModel - Defined object model 29 | * @returns {Datastar} - The datastar object with the attached object model 30 | */ 31 | attach(objectModel) { 32 | if (!this.connection) { 33 | this.connect(); 34 | } 35 | 36 | objectModel.connection = this.connection; 37 | 38 | return this; 39 | } 40 | 41 | /** 42 | * Default connection logic which works with `priam`. This abstract 43 | * exists for future extensibility. 44 | * @param {Function} callback - The async callback 45 | * @returns {Datastar} - The datastar object 46 | */ 47 | connect(callback) { 48 | const config = this.options.config; 49 | // 50 | // Use the cached connection if a model has been defined already via `attach` 51 | // so we dont create more than 1 priam instance. This allows `connect` to be 52 | // called with the callback to ensure the connection is pre-heated for all 53 | // models 54 | // 55 | this.connection = this.connection || new Priam(this.options); 56 | 57 | let create; 58 | if (config && config.keyspaceOptions) { 59 | const replClause = JSON.stringify(config.keyspaceOptions).replace(/"/g, "'"); 60 | create = `CREATE KEYSPACE IF NOT EXISTS ${config.keyspace} WITH replication = ${replClause};`; 61 | } 62 | 63 | if (create) { 64 | // 65 | // Try to create the keyspace. 
As a side effect pre-heat the connection 66 | // 67 | this.connection.cql(create, [], { keyspace: 'system' }, callback); 68 | } else if (callback) { 69 | // 70 | // If a callback is passed, we pre-heat the connection 71 | // 72 | this.connection.connect(callback); 73 | } 74 | return this; 75 | } 76 | 77 | /** 78 | * Close the underlying connection 79 | * 80 | * @param {Function} callback - The async callback 81 | * @returns {Datastar} - The datastar object 82 | */ 83 | close(callback) { 84 | this.connection.close(callback); 85 | return this; 86 | } 87 | 88 | /* 89 | * Defines a new Model with the given `name` using the 90 | * `definition` function provided. 91 | */ 92 | define(name, definition, options) { 93 | 94 | if (!definition && typeof name === 'function') { 95 | options = definition; 96 | definition = name; 97 | name = definition.name; 98 | } else if (!options && typeof definition === 'object') { 99 | options = definition; 100 | definition = function () { 101 | }; 102 | } 103 | 104 | if (!name) { 105 | throw new Error('A name for the model is required.'); 106 | } 107 | if (!definition && !options) { 108 | throw new Error('A definition function or options are required.'); 109 | } 110 | 111 | // 112 | // Adapted from resourceful 113 | // https://github.com/flatiron/resourceful/blob/master/lib/resourceful/core.js#L82-L219 114 | // 115 | // A simple factory stub where we attach anything to the instance of the Model 116 | // that we deem necessary 117 | // 118 | const Factory = function Factory(data) { 119 | this.Model = Factory; 120 | this.init(data); 121 | }; 122 | 123 | // 124 | // Setup inheritance 125 | // "Trust me, I'm a scientist" 126 | // "Back off, man. I'm a scientist." - Bill Murray 127 | // 128 | 129 | Factory.__proto__ = Model; 130 | Factory.prototype.__proto__ = Model.prototype; 131 | 132 | Understudy.call(Factory); 133 | // NOTE: Call definition here. Beneficial if 134 | // there's any non-function props being set. 
135 | definition.call(Factory); 136 | 137 | // 138 | // Attach the connection to the factory constructor 139 | // 140 | this.attach(Factory); 141 | 142 | options = options || this.options; 143 | options.schema = options.schema || {}; 144 | options.name = options.name || name; 145 | // 146 | // Initialize the model and the various attributes that belong there 147 | // 148 | Factory.init(options); 149 | 150 | return Factory; 151 | } 152 | } 153 | 154 | // 155 | // Alias to joi for defining schema for a model 156 | // 157 | Datastar.prototype.schema = jtc; 158 | 159 | // 160 | // Expose StatementCollection on the datastar instance 161 | // 162 | Datastar.prototype.StatementCollection = require('./statement-collection'); 163 | 164 | Datastar.Priam = Priam; 165 | Datastar.Understudy = Understudy; 166 | Datastar.Model = Model; 167 | Datastar.AwaitWrap = AwaitWrap; 168 | 169 | module.exports = Datastar; 170 | -------------------------------------------------------------------------------- /test/unit/model.tests.js: -------------------------------------------------------------------------------- 1 | 2 | const assume = require('assume'), 3 | uuid = require('uuid'), 4 | schemas = require('../fixtures/schemas'), 5 | sinon = require('sinon'), 6 | mocks = require('../mocks'), 7 | helpers = require('../helpers'); 8 | 9 | assume.use(require('assume-sinon')); 10 | 11 | describe('Model (unit)', () => { 12 | const datastar = helpers.connectDatastar({ mock: true }, mocks.datastar()); 13 | const id = '0c664ebb-56b4-4bf4-9e2b-509c1b5cc596'; 14 | let Artist; 15 | 16 | it('should create a model even with a missing name', () => { 17 | const Store = datastar.define('store', { 18 | schema: datastar.schema.object({ 19 | id: datastar.schema.cql.text() 20 | }).partitionKey('id') 21 | }); 22 | assume(Store.schema.name).to.equal('store'); 23 | }); 24 | 25 | it('should create a model', () => { 26 | Artist = datastar.define('artist', { 27 | schema: schemas.artist 28 | }); 29 | 30 | 
    assume(Artist.schema).is.not.an('undefined');
    assume(Artist.connection).is.not.an('undefined');
  });

  it('should create', done => {
    // INSERT INTO [schema.name] ([allFields[0], allFields[1]]) VALUES (?, ?, ...)
    const options = {
      entity: {
        id: id
      }
    };

    Artist.create(options, err => {
      assume(err).to.be.an('undefined');
      done();
    });
  });

  it('should error on create when no options are passed', done => {
    const options = {};

    Artist.create(options, err => {
      assume(err).is.instanceof(Error);
      done();
    });
  });

  //
  // TODO: This should be a new model that is created via datastar.define when
  // this is constructor based
  //
  it('should be able to define a model with ensureTables option', done => {
    const Oartist = datastar.define('artist', {
      schema: schemas.artist,
      ensureTables: true
    });

    // Either the finish event (success) or an error ends the test
    Oartist.on('ensure-tables:finish', done.bind(null, null));
    Oartist.on('error', done);
  });

  it('init() function should not call ensureTables if the ensureTables option is false', done => {
    const subject = helpers.stubModel(sinon);
    const options = {
      name: 'artist',
      ensureTables: false,
      schema: schemas.artist
    };

    subject.init(options);
    assume(subject.ensureTables).to.not.be.called();

    done();
  });

  it('init() function should call ensureTables if the ensureTables option is true', done => {
    const subject = helpers.stubModel(sinon);
    const options = {
      name: 'artist',
      ensureTables: true,
      schema: schemas.artist
    };

    subject.init(options);
    assume(subject.ensureTables).to.be.called();

    done();
  });

  //
  // TODO: We need to have the mock priam error properly for some of this testing
  //
  it.skip('should be able to define a model with ensureTables option and error');

  it('On find it should emit an error if passed bad fields and no callback', done => {
    // With no callback, find() returns a stream; validation errors are emitted
    const stream = Artist.find();
    stream.on('error', err => {
      assume(err).is.instanceof(Error);
      done();
    });
  });

  it('should callback with an error if passed bad fields and a callback', done => {
    Artist.find(null, err => {
      assume(err).is.instanceof(Error);
      done();
    });
  });

  it('should error with an improper find type', done => {
    Artist.find({ type: 'what' }, err => {
      assume(err).is.instanceof(Error);
      done();
    });
  });

  it('should find', done => {
    // SELECT [fields] FROM [table] WHERE [conditions.query[0]] AND [conditionals.query[1]] FROM [schema.name]
    //
    // We assume 'all' if no type is passed
    //
    const options = {
      fields: ['name'],
      conditions: {
        artistId: uuid.v4()
      }
    };

    //
    // Remark: Because of how priam is mocked, we cannot return a proper array here but
    // this will be tested in integration
    //
    Artist.find(options, err => {
      assume(err).is.falsey();
      done();
    });
  });

  it('should find and return a stream if no callback is passed', done => {
    const options = {
      fields: ['name'],
      conditions: {
        artistId: uuid.v4()
      }
    };
    const stream = Artist.find(options);

    stream.on('readable', function () {
      let data;
      /* eslint no-cond-assign: 0*/
      /* eslint no-invalid-this: 0*/

      while ((data = this.read()) !== null) {
        assume(data).is.an('object');
      }
    });
    stream.on('end', done);


  });

  it('should run a count query', done => {
    const options = {
      conditions: {
        artistId: uuid.v4()
      }
    };

    Artist.count(options, err => {
      assume(err).is.falsey();
      done();
    });
  });

  it('should run a findFirst query', done => {
    const options = {
      conditions: {
        artistId: uuid.v4()
      }
    };

    Artist.findFirst(options, err => {
      assume(err).is.falsey();
      done();
    });

  });

  it('should run a findOne query', done => {
    const options = {
      conditions: {
        artistId: uuid.v4()
      }
    };

    Artist.findOne(options, err => {
      assume(err).to.not.exist;
      done();
    });

  });

  it('should remove a single entity', done => {
    const entity = { artistId: uuid.v4(), createDate: new Date() };
    Artist.remove(entity, err => {
      assume(err).to.be.an('undefined');
      done();
    });
  });
});
--------------------------------------------------------------------------------
/lib/statement-builder/statements/table.js:
--------------------------------------------------------------------------------
/* eslint no-process-env: 0 */

const clone = require('clone');
const Statement = require('../statement');
const With = require('../partial-statements/with');

// Builds CREATE/DROP TABLE (or INDEX) statements from a model schema.
class TableStatement extends Statement {
  _init(options) {
    const alter = options.alter || options.with || {};
    const opts = {};

    //
    // We want to converge on everything being under alter/with option
    //
    if (options.orderBy || alter.orderBy) {
      //
      // Currently this is expected to be an object with properties
      // { key: 'createdAt', order: 'ascending' }
      //
      const orderBy = clone(options.orderBy || alter.orderBy);
      //
      // Map it to the correct string if passed in correctly if exists (its ok for
      // this to be undefined).
      //
      orderBy.order = this.schema.orderMap[orderBy.order && orderBy.order.toLowerCase()];
      //
      // Test if the key exists and returns the transformed key to use if it does,
      // otherwise returns undefined.
29 | // 30 | orderBy.key = this.schema.exists(orderBy.key); 31 | if (!this.schema.exists(orderBy.key)) { 32 | throw new Error(`${orderBy.key} does not exist for the ${this.name} schema`); 33 | } 34 | 35 | alter.orderBy = orderBy; 36 | } 37 | 38 | if (options.lookupKey) { 39 | opts.lookupKey = options.lookupKey; 40 | opts.lookupColumn = this.schema.meta[opts.lookupKey]; 41 | if (['map', 'set'].indexOf(opts.lookupColumn.type) !== -1) { 42 | throw new Error( 43 | 'Creating lookup table with type: ' 44 | + opts.lookupColumn.type); 45 | } 46 | } 47 | 48 | opts.useIndex = !!options.useIndex; 49 | 50 | // 51 | // If we are altering the table with a `with` 52 | // 53 | if (alter && Object.keys(alter).length) { 54 | const w = new With(alter); 55 | if (w.error) return w.error; 56 | opts.with = w.cql; 57 | } 58 | 59 | // 60 | // `ensure` or `drop` currently 61 | // 62 | opts.type = options.type; 63 | 64 | const env = process.env.NODE_ENV; 65 | if (['prod', 'production'].indexOf(env) !== -1 66 | && opts.type === 'drop' 67 | && !options.force) { 68 | throw new Error('Please don\'t try and drop your prod tables without being certain'); 69 | } 70 | 71 | return opts; 72 | } 73 | 74 | build(options) { 75 | const schema = options.schema || this.schema; 76 | const type = options.type; 77 | 78 | this.options = { 79 | executeAsPrepared: true, 80 | queryName: `${type}-table-${schema.name}` 81 | }; 82 | 83 | const tableName = this._computeTable(options); 84 | 85 | this.params = []; 86 | 87 | this.cql = this._compile(options, tableName, options.lookupKey || schema.primaryKeys()); 88 | 89 | return this; 90 | } 91 | 92 | _computeTable(options) { 93 | const schema = options.schema || this.schema; 94 | const type = options.type; 95 | 96 | this.options.queryName = [type, 'index', schema.name, options.lookupKey].join('-'); 97 | 98 | if (!options.lookupKey) return this.table; 99 | 100 | const table = this.schema.lookupTables[options.lookupKey]; 101 | 102 | if (table) return table; 103 | // 104 | 
    // Compute the lookupTable name based on the key and if its used as an index
    // or not
    //
    return options.useIndex
      ? `${schema.name}_${options.lookupKey}`
      : `${schema.name}_by_${options.lookupKey.replace(/_\w+$/, '')}`;
  }

  //
  // Figure out what statement will be executed: dispatch to `_ensure` or
  // `_drop`; the extra (tableName, primaryKeys) arguments are forwarded
  // via `arguments`.
  //
  _compile(options) {
    const fn = this[`_${options.type}`];
    if (fn) return fn.apply(this, arguments);

    // This shouldn't happen
    throw new Error(`Invalid type ${options.type}`);
  }

  /*
   * Drop the table or index
   * @options {Object} options passed in
   * @tableName {String} Name of table or index
   * @returns {String} cql value for statement to be executed
   */
  _drop(options, tableName) {
    return [
      'DROP',
      (options.useIndex ? 'INDEX' : 'TABLE'),
      tableName
    ].join(' ');
  }

  /*
   * Build the CREATE TABLE (or CREATE INDEX) IF NOT EXISTS statement for the
   * schema: one line per column (with collection types expanded), then the
   * PRIMARY KEY clause (composite partition key wrapped in parens, clustering
   * keys appended), then an optional WITH clause.
   */
  _ensure(options, tableName, primaryKeys) {
    const schema = options.schema || this.schema;
    const secondaryKeys = schema.secondaryKeys();

    tableName = tableName || this.table;
    primaryKeys = primaryKeys || schema.primaryKeys();

    let cql = '';

    if (options.useIndex) {
      cql += `CREATE INDEX IF NOT EXISTS ${tableName} on ${schema.name}(${primaryKeys})`;
      return cql;
    }

    cql += `CREATE TABLE IF NOT EXISTS ${tableName} (\n`;

    Object.keys(schema.meta).forEach(function (key) {
      const column = schema.meta[key];
      cql += ' ';
      //
      // Handle all the higher level types
      //
      //
      if (['map', 'set', 'list'].indexOf(column.type) !== -1) {
        const innerTypes = [].concat(column[`${column.type}Type`]).join(',');
        cql += `${key} ${column.type}<${innerTypes}>,\n`;
        return;
      }
      cql += `${key} ${column.type},\n`;
    }, this);

    //
    // Handle both compoundKeys as well as
    //
    const primaryKeyExpr = (
      schema.compositePrimary
        ? `(${primaryKeys.join(', ')})`
        : primaryKeys
    );
    cql += ` PRIMARY KEY (${primaryKeyExpr}`;

    //
    // Properly support secondary keys / clustering keys
    //
    cql += secondaryKeys && secondaryKeys.length
      ? `, ${secondaryKeys.join(', ')}`
      : '';
    //
    // Close keys paren
    //
    cql += ')\n';
    //
    // Close table statement paren
    //
    cql += ')';

    // If we have a with statement to append, lets do that here
    if (options.with) {
      cql += ` ${options.with}`;
    }

    cql += ';';
    return cql;
  }
}

module.exports = TableStatement;
--------------------------------------------------------------------------------
/lib/model.js:
--------------------------------------------------------------------------------
const
  { EventEmitter } = require('events'),
  { PassThrough, Readable } = require('stream'),
  async = require('async'),
  clone = require('clone'),
  assign = require('object-assign'),
  pick = require('./pick'),
  camelCase = require('./camel-case'),
  Schema = require('./schema'),
  StatementBuilder = require('./statement-builder'),
  StatementCollection = require('./statement-collection'),
  Attributes = require('./attributes');

//
// Types that return a single value from cassandra
//
const singleTypes = ['count', 'one', 'first'];

/*
 * Constructor function for the base Model which handles
 * all of the base CRUD logic for a given connection.
 *
 * Reserve constructor for prototype based initialization.
 * We use the functions that exist on the constructor in our specific prototype
 * functions that we define
 */
const Model = module.exports = function Model() {
};

/*
 * Initialize the model constructor: build the schema and statement builder,
 * wire up consistency defaults, proxy EventEmitter methods onto the
 * constructor, register the `update:build` hook for lookup tables, and
 * optionally kick off table creation.
 */
Model.init = function init(options) {
  this.options = options || {};

  // NOTE: For testing:
  if (options.perform) {
    this.perform = options.perform;
  }

  this.schema = new Schema(options.name, options.schema);
  this.builder = new StatementBuilder(this.schema);

  // Expose schema-derived accessors on model instances
  Object.defineProperties(this.prototype, this.schema.buildProperties());

  //
  // Should we have better defaults?
  //
  this.readConsistency = options.readConsistency || options.consistency || 'one';
  this.writeConsistency = options.writeConsistency || options.consistency || 'one';

  //
  // Remark Add an event emitter to the model constructor we are creating
  //
  this.emitter = new EventEmitter();

  //
  // Expose the EventEmitter on the constructor to proxy from our real event emitter
  //
  Object.keys(EventEmitter.prototype).forEach(k => {
    this[k] = (...args) => {
      return this.emitter[k].apply(this.emitter, args);
    };
  });

  //
  // Remark: We might not need to do the following, but we should look into the
  // dangers of what happens when a primary/secondary key is modified
  // Before we update we need to check a few things.
  // 1. If we have lookup tables, we need to ensure we have a `previous` entity
  //    with all the primary keys
  // 2. If any `previous` value(s) are provided ensure that they have the necessary
  //    keys for all lookup tables. If a `previous` value is not provided, execute
  //    a find using the main primary key before continuing the update.)
  // 3. So for this to happen, we must
  //    i. Ensure we have the main primaryKey in the entity given to us
  //    ii. Do a find query on the primary table (table that uses the
  //        main primary key) to fetch the previous value
  //    iii. We have to detect if other lookup tables primary keys changed, if
  //         so, pass in a special truthy flag for the updateStatement to pick up on
  //
  this.before('update:build', (updateOptions, callback) => {
    const entities = updateOptions.entities;
    const previous = updateOptions.previous;

    //
    // If we aren't dealing with lookup tables or we already have a previous
    // value being passed in, we dont have to do anything crazy
    //
    if (!this.schema.lookups || entities.length === previous.length) {
      return callback();
    }
    //
    // This case just cant happen
    //
    if (entities.length !== previous.length && previous.length !== 0) {
      return void callback(
        new Error('You must pass in the same number of `entities` as `previous` values for an update on multiple entities'));
    }
    //
    // If we are dealing with lookup tables and we do not have a previous value,
    // we need to fetch it.
    //
    async.map(entities, (entity, next) => {
      this.findOne({
        conditions: this.schema.filterPrimaryConditions(clone(entity))
      }, next);
    }, function (err, previous) {
      if (err) return void callback(err);
      updateOptions.previous = previous;
      callback();
    });

  });

  if (this.options.ensureTables) {
    this.ensureTables(err => {
      if (err) {
        return this.emit('error', err);
      }
      this.emit('ensure-tables:finish', this.schema);
    });
  }
};

/**
 * Execute an action on a model
 *
 * @param {Object} Options object containing previous statements, entity, etc.
 * @param {Function} Continuation callback to respond when finished
 */
['create', 'update', 'remove'].forEach(function (action) {
  Model[action] = function (options, callback) {
    options = this.validateArgs(options, callback);
    if (!options) {
      return;
    }

    const statements = options.statements = options.statements
      || (new StatementCollection(this.connection, options.strategy)
        .consistency(options.consistency || this.writeConsistency));

    //
    // Add a hook before the statement is created
    //
    this.perform(`${action}:build`, options, next => {
      try {
        //
        // We should keep this naming generic so we can remove all this
        // boilerplate in the future.
        //
        const entities = options.entities;
        //
        // Remark: Certain cases this is the previous entity that could be used
        // for update. its required for lookup-table update, otherwise we fetch it
        //
        const previous = options.previous;

        for (let e = 0; e < entities.length; e++) {
          // shallow clone
          const opts = assign({}, options);

          opts.previous = previous && previous.length
            ? options.previous[e]
            : null;

          const statement = this.builder[action](opts, entities[e]);

          statements.add(statement);
        }

        //
        // All statements built successfully; errors thrown while building
        // are routed through the catch below instead.
        //
        return void next();
      } catch (err) {
        setImmediate(next, err);
      }
    }, err => {
      if (err) {
        return void callback(err);
      } else if (!options.shouldExecute) {
        // Caller supplied its own StatementCollection; hand it back unexecuted
        return void callback(null, statements);
      }

      //
      // Add a hook before the statements are executed
      //
      this.perform(`${action}:execute`, options, function (next) {
        statements.execute(next);
      }, callback);
    });
  };
});

/*
 * Performs the base find logic for any given schema
 * and connection.
 */
/* eslint consistent-return: 0 */
Model.find = function find(options, callback) {
  try {
    options = this.validateFind(options || {}, callback);
  } catch (err) {
    // With no callback the caller expects a stream, so emit the error there
    return callback ? void callback(err) : errorStream(err);
  }

  const iterable = this.iterate(options);

  if (options.iterable) {
    return iterable;
  }

  if (options.stream) {
    return Readable.from(iterable);
  }

  const action = options.type !== 'all'
    ? `find:${options.type}`
    : 'find';

  //
  // This allows for cascading of after functions to mutate the result of this
  // perform method
  //
  this.waterfall(action, options, async next => {
    const singleResult = singleTypes.includes(options.type);

    const result = [];
    try {
      for await (const row of iterable) {
        if (singleResult) {
          return void next(null, row);
        }

        result.push(row);
      }

      return void next(null, singleResult ?
result[0] : result);
    } catch (err) {
      return void next(err);
    }
  }, callback);
};

/*
 * Async generator over find results: each raw row is converted to a model
 * instance (and optionally run through `this.transform`).
 */
Model.iterate = async function *(options) {
  options.type = options.type || 'all';

  const iterable = this.builder.find(options)
    //
    // ExtendQuery returns the priam connection query so we have access to
    // those functions
    //
    .extendQuery(this.connection.beginQuery())
    //
    // Allow configurable consistency
    //
    .consistency(options.consistency || this.readConsistency)
    .iterate();

  for await (const row of iterable) {
    const instance = this.toInstance(row);
    yield this.transform ? this.transform(instance) : instance;
  }
};

/*
 * Create the table based on the schema if it doesn't already exist.
 * Defines `ensureTables` and `dropTables` on the Model.
 */
['ensure', 'drop'].forEach(function (type) {
  const action = [type, 'tables'].join('-');
  const name = camelCase(action);

  Model[name] = function (options, callback) {
    if (!callback) {
      callback = options || function () {};
      options = this.options;
    }

    //
    // Simple validation of merged options
    //
    options = this.assessOpts(
      assign(
        pick(this.options, ['alter', 'orderBy', 'with']), options
      )
    );

    //
    // Set the type based on the prefix so we know what statement we are
    // generating
    //
    options.type = type;

    const statements = options.statements = options.statements
      || (new StatementCollection(this.connection, options.strategy));

    this.perform([action, 'build'].join(':'), options, next => {
      try {
        const statement = this.builder.table(options);
        statements.add(statement);

        if (!this.schema.lookups) {
          return void setImmediate(next);
        }

        // Also ensure/drop every lookup table defined by the schema
        Object.keys(this.schema.lookupTables).forEach(primaryKey => {
          // shallow clone
          const tableOpts = assign({}, options, { lookupKey: primaryKey });
          const lookupStatement = this.builder.table(tableOpts);
          statements.add(lookupStatement);
        });

        return void next();
      } catch (err) {
        setImmediate(next, err);
      }
    }, err => {
      if (err) {
        return void callback(err);
      } else if (!options.shouldExecute) {
        return void callback(null, statements);
      }

      this.perform([action, 'execute'].join(':'), options, function (next) {
        statements.execute(next);
      }, callback);
    });
  };
});


/*
 * Default `perform` method (no-op hook runner; replaced by Understudy
 * when hooks are registered).
 */
Model.perform = function perform(action, options, callback) {
  return setImmediate(callback, function () {
  });
};

/*
 * Define the wrapper methods for:
 *
 * - findFirst
 * - findOne
 * - count
 *
 * These methods simple wrap `Model.find` but set the
 * appropriate option for the corresponding method
 * before execution.
 */
const findTypes = {
  findFirst: 'first',
  findOne: 'one',
  count: 'count',
  findAll: 'all'
};

// Inverted set of valid find types, e.g. { first: true, one: true, ... }
const findTypesLookup = Object.keys(findTypes).reduce(function (acc, key) {
  acc[findTypes[key]] = true;
  return acc;
}, {});

Object.keys(findTypes).forEach(function (method) {
  Model[method] = function (options, callback) {
    options = options || {};
    const type = findTypes[method];
    //
    // Remark: If we are a type of function that returns a single value from cassandra
    // check to see if a simplified options object was passed
    //
    options = this.normalizeFindOpts(options, callback);
    if (!options) return;
    //
    // Assign type and pass it down
    //
    options.type = type;
    return this.find(options, callback);
  };
});

/**
 * Alias get and findOne for simpler syntax
 */
Model.get = Model.findOne;

//
// Returns a stream that emits an error that it is given. Useful for validation
// errors
//
function errorStream(error) {
  const stream = new PassThrough({ objectMode: true });
  // Defer the emit so the caller has a chance to attach an 'error' listener
  setImmediate(stream.emit.bind(stream), 'error', error);
  return stream;
}

//
// Simple validation for ensureTables so that we know when to execute
//
Model.assessOpts = function assessOpts(options) {
  const opts = assign({}, options);

  //
  // Ensure we are not executed as a batch because you cant do that when we are
  // doing a TABLE based operation
  // TODO: Get a list of operations that cant be involved in a batch
  //
  // NOTE(review): 10 appears to be the StatementCollection strategy meaning
  // "not batched" — confirm against statement-collection.js.
  //
  opts.strategy = 10;

  if (!opts.statements) {
    opts.shouldExecute = true;
  }

  return opts;
};


/**
 *
 * Allow a simpler API syntax that gets normalized into the conditions object
 * expected by the rest of the module
 * @param {Object} options - Options object
 * @param {Function} callback - Callback function
 * @returns {*} - Return varies on the different usecases of the function
 */
Model.normalizeFindOpts = function (options, callback) {
  const opts = {};

  //
  // We assume this is the primary/partition key.
We also assume that this
  // function will only be run on find methods that return a single value and
  // a callback is always passed which is a safe assumption in reality
  //
  if (this.schema.type(options) === 'string') {
    try {
      opts.conditions = this.schema.generateConditions(options);
      return opts;
    } catch (err) {
      return void callback(err);
    }
  }

  //
  // Allow the object to be defined as options when we arent explicitly passing
  // a type
  //
  if (!options.conditions) {
    opts.conditions = options;
    return opts;
  }

  return options;
};


/**
 *
 * Validate Find Queries options specifically
 * @param {Object} options - Options object
 * @param {Function} callback - Callback function
 * @returns {*} - Return varies on the different usecases of the function
 */
Model.validateFind = function validateFind(options, callback) {
  // No (function) callback means the caller wants a stream
  const stream = !callback || typeof callback !== 'function';
  const opts = assign({}, options);

  opts.type = options.type || 'all';
  if (!findTypesLookup[opts.type]) {
    throw new Error(`Improper find type. Must be ${Object.keys(findTypesLookup)}`);
  }

  if (!opts.conditions) {
    throw new Error('Conditions must be passed to execute a find query');
  }
  //
  // Set this after we have transposed options -> entity for the simple case in
  // create
  //
  opts.stream = stream;

  return opts;
};
/*
 * Parses the options object into more comprehensible and consistent
 * options that are used internally. This is to help support older
 * API surface area with minimal spanning upgrade path.
 *
 * Remark: naming here is a little obtuse since `Args` is only
 * `Args` to CRUD methods: create, remove, update.
 *
 */
Model.validateArgs = function validateArgs(options, callback) {

  //
  // Argument Validation and normalization
  //
  if (!options) {
    return void setImmediate(callback, new Error('Options or entity must be passed in'));
  }
  //
  // Remark: Be a good citizen and use a copy internally just in case with
  // a shallow clone
  //
  let opts = options.isDatastar ? { entities: [options.toJSON(true)] } : assign({}, options);

  //
  // Lets assume an entity is given if there is no entity property for the
  // simple case.
  // Remark: We don't do this when a type is passed in (ie. find)
  //
  if (!opts.entity && !opts.entities) {
    const entity = opts;
    opts = { entities: !Array.isArray(entity) ? [entity] : entity };
  }
  //
  // Since we always map entity to be an array, do the same with previous since
  // we enable ourselves to handle arrays
  //
  if (opts.previous && !Array.isArray(opts.previous)) {
    opts.previous = [opts.previous];
  }

  //
  // Default to empty array
  //
  if (!opts.previous) {
    opts.previous = [];
  }

  //
  // Allow entity to be defined in the external interface for semantics but
  // remain consistent internally. We assume arrays since we are currently
  // handling arrays for less duplication
  //
  if (opts.entity && !opts.entities) {
    opts.entities = !Array.isArray(opts.entity)
      ?
[opts.entity] 535 | : opts.entity; 536 | } 537 | 538 | if (opts.entity) { 539 | delete opts.entity; 540 | } 541 | 542 | 543 | if (!opts.statements) { 544 | opts.shouldExecute = true; 545 | } 546 | 547 | return opts; 548 | }; 549 | 550 | Model.toInstance = function toInstance(data) { 551 | if (Array.isArray(data)) { 552 | return data.map(toInstance.bind(this)); 553 | } 554 | return new this(data); 555 | }; 556 | 557 | Model.prototype.init = function init(data) { 558 | this.attributes = new Attributes(this, data || {}); 559 | }; 560 | 561 | Model.prototype.isValid = function isValid(type) { 562 | try { 563 | this.validate(type); 564 | return true; 565 | } catch (err) { 566 | return false; 567 | } 568 | }; 569 | 570 | Model.prototype.validate = function validate(type) { 571 | return this.Model.schema.validate(this.attributes.needsValidation(), type); 572 | }; 573 | 574 | Model.prototype.toJSON = function toJSON(snake) { 575 | // 576 | // Work around JSON.stringify passing in the index of the array to this 577 | // function when an instance is a part of an array being stringified 578 | // 579 | if (typeof snake === 'string') { 580 | snake = false; 581 | } 582 | return this.attributes.toJSON(snake); 583 | }; 584 | 585 | Model.prototype.save = function (fn) { 586 | if (!this.attributes.isDirty()) { 587 | return void setImmediate(fn); 588 | } 589 | 590 | try { 591 | const result = this.validate(this.id ? 
const clone = require('clone');
const assign = require('object-assign');
const CompoundStatement = require('../compound-statement');
const CreateStatement = require('./create');
const RemoveStatement = require('./remove');
const Statement = require('../statement');

// Map keys containing `--` are rejected (see mapUpdate); list indices must be
// non-negative integers (see generateListIndexStatement).
const mapKeyBadRegex = /--/;
const positiveIntRegex = /^\d+$/;

//
// Special statement used for update: a lightweight accumulator whose `cql` is
// an ARRAY of fragments (many column updates can share a single statement).
// It is normalized into a real Statement by finalizeStatement.
//
function PartialStatement() {
  this.cql = [];
  this.params = [];
  this.options = {};
}

class UpdateStatement extends CompoundStatement {
  constructor(...args) {
    super(...args);

    // Accumulates PartialStatements while build() runs; drained afterwards.
    this.buffer = [];
    //
    // Track the index of the statement we need to append to based on the type of
    // operations. Lets map it by type as that should reduce the overall number of
    // statements
    //
    this.index = {};
    this.index.set = 0;
    this.index.list = 0;
    this.index.map = 0;
    this.index.delete = 0;
  }

  //
  // Normalize raw (options, entity) into the object consumed by build().
  //
  // For lookup tables this is going to be expensive as we need to do a find
  // before this even happens unless we pass in the previous entity
  //
  _init(options, entity) {
    let
      previous,
      changed,
      key;
    const opts = {};

    entity = this.schema.fixKeys(entity);

    if (options.previous) {
      previous = this.schema.fixKeys(options.previous);
    }

    //
    // Assess whether or not one of our primaryKeys has changed (pertaining to
    // lookup tables). Previous is required to be a fully formed object that
    // contains ALL properties, otherwise we will have a very bad time with this
    // create/delete that needs to happen
    //
    if (this.schema.lookups && options.table) {
      key = this.schema._reverseLookupKeyMap[options.table];
      //
      // If the key exists in the entity that is being updated and it is not
      // equal to that of the previous entity, a primary key has changed
      //
      changed = entity[key] && previous && entity[key] !== previous[key];
    }

    //
    // Pass the changed status to the build function so we know we are going to do
    // a create/delete statement here
    //
    opts.changed = changed;

    //
    // If we have changed, validate that the previous value would be valid for
    // a create operation. This will guarantee that we have a proper previous
    // value passed in and not just a stub which would cause terrible things to
    // happen to the lookup tables as it would be wrong and out of sync
    //
    if (changed) {
      previous = this.schema.validate(this.schema.deNull(previous), 'create');
    }

    //
    // We need to generate specific conditions
    // Remark: For updating lookup tables, we need to use the previous entity in
    // order to create the conditions so the where clause in finding it is correct
    //
    opts.conditionals = this.schema.createUpdateConditions(previous || entity, options.table);

    //
    // We need a raw transformed entity
    // TODO: Proper validation on update with the different data structures that
    // can be passed for operations on higher level types.
    // In cases where we have detected a primaryKey change, we create an entity
    // that will be used in a `create` statement instead
    //
    opts.entity = this.schema.deNull(
      changed ? this._entityToReplace(previous, entity) : entity
    );

    //
    // Validate and transform the entity
    //
    opts.entity = this.schema.validate(opts.entity, 'update');

    //
    // Pass down the table if we are dealing with lookup tables
    //
    opts.table = options.table;

    //
    // Allow ttl to be passed into an update
    //
    if (options.ttl) opts.ttl = options.ttl;

    return opts;
  }
125 | // 126 | _entityToReplace(previous, entity) { 127 | // 128 | // Create a normalized entity using the previous as a reference so that we can 129 | // just do a proper shallow merge with the previous to get the entity we want 130 | // 131 | const normalizedEntity = Object.keys(entity).reduce((acc, field) => { 132 | const meta = this.schema.fieldMeta(field); 133 | let value; 134 | 135 | switch (meta.type) { 136 | // 137 | // For a map we need to just merge the values with previous 138 | // 139 | case 'map': 140 | value = assign(previous[field] || {}, entity[field]); 141 | break; 142 | // 143 | // Detect if we have a special version data structure, and take the 144 | // appropriate actions based on the `add` or `remove` 145 | // 146 | case 'set': 147 | value = this._handleSetOrList(entity[field], value); 148 | break; 149 | case 'list': 150 | value = this._handleList(entity[field], value); 151 | break; 152 | default: 153 | value = entity[field]; 154 | break; 155 | } 156 | acc[field] = value; 157 | return acc; 158 | }, {}); 159 | 160 | // 161 | // Return the shallow merged version of the entity to put into cassandra 162 | // 163 | return assign(previous, normalizedEntity); 164 | } 165 | 166 | // 167 | // Remark: We do not do any deletes in update because we are very wary of creating 168 | // tombstones. We use a specific strategy of setting `null-like` characters for 169 | // each type in order to prevent this from happening 170 | // 171 | build(options) { 172 | const conditionals = options.conditionals; 173 | const entity = options.entity; 174 | // 175 | // Remark: If we have a lookup table passed down we use it when considering the 176 | // filtering 177 | // 178 | const isLookup = !!options.table; 179 | const table = options.table || this.table; 180 | 181 | // 182 | // This is a special case where the primaryKey has changed which means an 183 | // update statement will not suffice, we need a delete and a create statement. 
184 | // We ensure that we only do these create and remove statements in the case where 185 | // 186 | if (options.changed) { 187 | return this.replaceLookupRecord(options); 188 | } 189 | 190 | this.options = { executeAsPrepared: true, queryName: `update-${table}` }; 191 | 192 | // 193 | // Create a criteria object that is used to add the where clause info needed 194 | // for each statement that gets created 195 | // 196 | this.criteria = {}; 197 | this.criteria.cql = conditionals.query.join(' AND '); 198 | this.criteria.params = conditionals.params; 199 | 200 | Object.keys(entity) 201 | // 202 | // Remark: This filtering should be disabled when dealing with updating 203 | // lookup tables with primary keys. 204 | // 205 | // Why does this filter exist? Assumption: We assume that the primary key 206 | // never changes but this does not hold true when we are dealing with lookup 207 | // tables in certain cases so we need to handle this here 208 | // 209 | .filter(function (field) { 210 | // 211 | // Filter out the primary/secondary keys 212 | // UNLESS we are dealing with lookup tables since we need to update those 213 | // keys properly 214 | // 215 | // When we are a lookup table we only filter out the primary key 216 | // associated with that table, otherwise cassandra yells at us 217 | return isLookup 218 | ? 
this.schema._reverseLookupKeyMap[table] !== field 219 | : !this.schema.isKey(field); 220 | }, this) 221 | .forEach(function (field) { 222 | // 223 | // Grab the value of the update 224 | // 225 | const value = entity[field]; 226 | // 227 | // Get the column metadata for the given field and create the right kind 228 | // of statement 229 | // 230 | const meta = this.schema.fieldMeta(field); 231 | 232 | switch (meta.type) { 233 | case 'map': 234 | this.mapUpdate(field, value, meta); 235 | break; 236 | case 'set': 237 | this.setUpdate(field, value, meta); 238 | break; 239 | case 'list': 240 | this.listUpdate(field, value, meta); 241 | break; 242 | default: 243 | this.columnUpdate(field, value, meta); 244 | break; 245 | } 246 | }, this); 247 | 248 | // 249 | // Iterate through the statements we created, assess them and build the actual 250 | // statements array if they are valid 251 | // NOTE: Partial Statements end up being a special kind of statement that has 252 | // an array as its `cql` property rather than a string to make it more easily 253 | // extensible. (many many things can be updated in a single statement). 
The 254 | // finalizeStatement takes care of normalizing these partials into a proper 255 | // full statement 256 | // 257 | for (let i = 0; i < this.buffer.length; i++) { 258 | const partialStatement = this.buffer[i]; 259 | if (partialStatement.cql.length) { 260 | // 261 | // Build the REAL statement and push it to the array 262 | // 263 | this.statements.push(this.finalizeStatement(options, table, partialStatement)); 264 | } 265 | } 266 | this.buffer.length = 0; 267 | 268 | return this; 269 | } 270 | 271 | // 272 | // This is a special function that creates a create and a remove statement based 273 | // on the options given to be executed in cases where 274 | // 275 | replaceLookupRecord(options) { 276 | this.statements.push(new RemoveStatement(this.schema).build(options)); 277 | this.statements.push(new CreateStatement(this.schema).build(options)); 278 | return this; 279 | } 280 | 281 | // 282 | // Handle map updates which receives an object with { key: value } 283 | // as the `value` param 284 | // 285 | mapUpdate(field, value, meta) { 286 | // 287 | // If the value itself is null, set it to an empty object and set the map 288 | // equal to that 289 | // 290 | if (value === null) { 291 | value = {}; 292 | } 293 | 294 | if (Array.isArray(value) || !(value && typeof value === 'object')) { 295 | throw new Error(`Tried to insert value "${value}" into map "${field}" in table "${this.table}". 
Value should be an object.`); 296 | } 297 | // 298 | // Remark: In theory this validation should happen earlier and this should simply 299 | // generate the statement (which is very simple) 300 | // 301 | Object.keys(value).forEach(mapKey => { 302 | const mapValue = value[mapKey]; 303 | // 304 | // TODO: This validation should be handled by joi schema at a higher level 305 | // 306 | if (mapKeyBadRegex.test(mapKey)) { 307 | throw new Error(`Tried to insert invalid map key "${mapKey}" into map "${field}" in table "${this.table}".`); 308 | } 309 | // 310 | // Strip any undefined values, TODO: This should be done before hand 311 | // 312 | if (typeof mapValue === 'undefined') { 313 | delete value[mapKey]; 314 | } 315 | 316 | // 317 | // Remark: Since null signifies a delete, we explicitly set the property to null. 318 | // Since we don't really want to delete (it creates tombstones) we should 319 | // set this to a value based on the map's valueType before we even get here 320 | // TODO: See if this can be done in the same statement as the full 321 | // collection statement. I feel like this value would need to be stripped in 322 | // any case or would the null be handled correctly if we do it as a standard 323 | // collection statement? 
324 | // if (mapValue === null) { 325 | // statement.cql.push(`${field}['${mapKey}'] = ?`); 326 | // statement.params.push(this.schema.valueOf(field, mapValue)); 327 | // } 328 | }); 329 | 330 | this.generateCollectionStatement({ 331 | field: field, 332 | value: value, 333 | type: meta.type, 334 | operator: '+', 335 | suffix: true 336 | }); 337 | } 338 | 339 | // 340 | // Remark: Currently we do not allow objects to be passed in for a set due to schema 341 | // validation, we should consider this for update 342 | // 343 | // value: [] or { add: [], remove: [] } 344 | // 345 | setUpdate(field, value, meta) { 346 | const type = meta.type; 347 | 348 | // 349 | // Assess the typeof the value that is being passed in for the set-type 350 | // 351 | switch (this.typeOf(value)) { 352 | // 353 | // Directly set the value when its an array 354 | // 355 | case 'array': 356 | // 357 | // We just set the array like a regular column 358 | // 359 | this.columnUpdate(field, value, meta); 360 | break; 361 | case 'object': 362 | ['add', 'remove'].forEach(function (key) { 363 | if (!value[key] || !value[key].length) return; 364 | this.generateCollectionStatement({ 365 | field: field, 366 | value: value[key], 367 | type: type, 368 | operator: key === 'remove' ? 
'-' : '+', 369 | suffix: true 370 | }); 371 | 372 | }, this); 373 | break; 374 | default: 375 | // 376 | // Validation should catch this so this case shouldnt be hit 377 | // 378 | throw new Error(`Invalid value ${value} for set update on ${field}`); 379 | } 380 | } 381 | 382 | // 383 | // List: [] or { prepend: [], append: [], remove: [], index: { idx: value } 384 | // 385 | listUpdate(field, value, meta) { 386 | const type = meta.type; 387 | 388 | switch (this.typeOf(value)) { 389 | case 'array': 390 | this.columnUpdate(field, value, meta); 391 | break; 392 | case 'object': 393 | ['prepend', 'append', 'remove'] 394 | .forEach(function (key) { 395 | // 396 | // If we don't contain the appropriate keys, do nothing 397 | // 398 | if (!value[key] || !value[key].length) return; 399 | this.generateCollectionStatement({ 400 | field: field, 401 | value: !Array.isArray(value[key]) ? [value[key]] : value[key], 402 | type: type, 403 | operator: key === 'remove' ? '-' : '+', 404 | suffix: key !== 'prepend' 405 | }); 406 | }, this); 407 | 408 | // 409 | // Index operations are a little bit more complex 410 | // 411 | if (value.index 412 | && this.typeOf(value.index) === 'object' 413 | && Object.keys(value.index).length) { 414 | this.generateListIndexStatement(field, value.index, meta); 415 | } 416 | break; 417 | default: 418 | throw new Error(`Invalid value ${value} for list update on ${field}`); 419 | } 420 | } 421 | 422 | generateListIndexStatement(field, map, meta) { 423 | const statement = this.statement(); 424 | Object.keys(map).forEach(idx => { 425 | const value = map[idx]; 426 | // 427 | // Don't allow negative indicies 428 | // Remark/TODO: This could live in actual validation for these more complex types 429 | // 430 | if (!positiveIntRegex.test(String(idx))) { 431 | throw new Error(`Tried to insert an invalid index "${idx}" into list "${field}"`); 432 | } 433 | statement.cql.push(`${field}[${idx}] = ?`); 434 | // 435 | // Remark: Override the hint associated with 
the `field` because it comes out wrong 436 | // for this list operation. We pass the listType in this case to override 437 | // the "hint" because its not an array here 438 | // 439 | statement.params.push(this.schema.valueOf(field, value, meta.listType)); 440 | }); 441 | } 442 | 443 | // 444 | // Update a standard cassandra column 445 | // 446 | columnUpdate(field, value) { 447 | const statement = this.statement(); 448 | statement.cql.push(`${field} = ?`); 449 | // 450 | // This should be the correct value returned 451 | // 452 | statement.params.push(this.schema.valueOf(field, value)); 453 | } 454 | 455 | // 456 | // Generate the more complicated collection operation statement's used here. 457 | // 458 | generateCollectionStatement(opts) { 459 | const statement = this.statement(this.index[opts.type]); 460 | // 461 | // Generate the appropriate statement based on if its a suffix or not for any 462 | // generic collection 463 | // 464 | const valueExpr = opts.suffix 465 | ? `${opts.field} ${opts.operator} ?` 466 | : `? ${opts.operator} ${opts.field}`; 467 | statement.cql.push(`${opts.field} = ${valueExpr}`); 468 | statement.params.push(this.schema.valueOf(opts.field, opts.value)); 469 | // 470 | // Remark: Only set and list operations need to ensure subsequent commands exist on 471 | // a new statement 472 | // 473 | if (['list', 'set'].indexOf(opts.type) !== -1) this.index[opts.type]++; 474 | } 475 | 476 | // 477 | // Return the current statement to modify 478 | // Question: Will index counters of different types end up returning the same statement? 479 | // 480 | statement(idx) { 481 | idx = idx || 0; 482 | while (idx >= this.buffer.length) { 483 | this.buffer.push(new PartialStatement()); 484 | } 485 | 486 | return this.buffer[idx]; 487 | } 488 | 489 | // 490 | // Finalize a statement given a partial statement 491 | // 492 | finalizeStatement(options, table, partial) { 493 | const statement = new Statement(this.schema); 494 | const ttl = options.ttl ? 
` USING TTL ${options.ttl}` : ''; 495 | if (!partial.delete) { 496 | statement.cql += `UPDATE ${table}${ttl} SET ${partial.cql.join(', ')} WHERE ${this.criteria.cql}`; 497 | } else { 498 | statement.cql += `DELETE ${partial.cql.join(', ')} FROM ${this.table} WHERE ${this.criteria.cql}`; 499 | } 500 | statement.params = partial.params.concat(clone(this.criteria.params)); 501 | statement.options = this.options; 502 | return statement; 503 | } 504 | 505 | // 506 | // Return a proper value when testing for list specific properties that does not 507 | // have shared semantics with set 508 | // 509 | _handleList(list, prev) { 510 | let value = prev || []; 511 | 512 | // 513 | // Run the functions that set and list share first 514 | // 515 | value = this._handleSetOrList(list, prev); 516 | 517 | if (list.prepend && Array.isArray(list.prepend)) { 518 | value.shift.apply(value, list.prepend); 519 | } 520 | 521 | if (list.index && this.typeOf(list.index) === 'object') { 522 | Object.keys(list.index).forEach(function (idx) { 523 | // 524 | // Don't allow any dangerous operations, and maybe error here 525 | // 526 | if (idx >= value.length) return; 527 | // 528 | // Set the value to the index value of the array 529 | // 530 | value[+idx] = list.index[idx]; 531 | }); 532 | } 533 | 534 | return value; 535 | } 536 | 537 | // 538 | // Return the proper value given a set or a list and the previous value 539 | // 540 | _handleSetOrList(sol, prev) { 541 | const value = prev || []; 542 | const add = sol.add || sol.append; 543 | 544 | if (Array.isArray(sol)) { 545 | return sol; 546 | } 547 | // 548 | // Handle the add or append case for sets or lists by pushing to the back of 549 | // the array 550 | // 551 | if (add && Array.isArray(add)) { 552 | value.push.apply(value, add); 553 | } 554 | 555 | if (sol.remove && Array.isArray(sol.remove)) { 556 | // 557 | // Iterate through the remove array and splice out the index if it 558 | // exists in the previous array. 
This simulates the Cassandra 559 | // operation 560 | // 561 | sol.remove.forEach(function (key) { 562 | const idx = value.indexOf(key); 563 | if (idx === -1) return; 564 | value.splice(idx, 1); 565 | }); 566 | } 567 | 568 | return value; 569 | } 570 | } 571 | 572 | module.exports = UpdateStatement; 573 | -------------------------------------------------------------------------------- /test/unit/statement-builder.tests.js: -------------------------------------------------------------------------------- 1 | 2 | const fs = require('fs'), 3 | path = require('path'), 4 | clone = require('clone'), 5 | uuid = require('uuid'), 6 | assert = require('assert'), 7 | assume = require('assume'), 8 | schemas = require('../fixtures/schemas'), 9 | StatementBuilder = require('../../lib/statement-builder'), 10 | statements = require('../../lib/statement-builder/statements'), 11 | Schema = require('../../lib/schema'); 12 | 13 | const fixturesDir = path.join(__dirname, '..', 'fixtures'); 14 | 15 | const artistEntity = require(path.join(fixturesDir, 'artist-entity')); 16 | // 17 | // Create a statement builder.is.the factor for the various statements in 18 | // order to correctly parse the arguments as they should. 
19 | // 20 | describe('StatementBuilder', () => { 21 | const schema = new Schema('artist', schemas.artist); 22 | const builder = new StatementBuilder(schema); 23 | const fieldList = schema.fieldString(); 24 | 25 | describe('FindStatement', () => { 26 | it('should return an find ALL statement if given an empty object or no options', () => { 27 | const statement = builder.find({ type: 'find' }, {}); 28 | 29 | assume(statement.cql).to.equal(`SELECT ${fieldList} FROM artist`); 30 | }); 31 | 32 | it('should return an find ALL statement with allow-filtering included', () => { 33 | const statement = builder.find({ type: 'find', allowFiltering: true }, {}); 34 | 35 | assume(statement.cql).to.equal(`SELECT ${fieldList} FROM artist ALLOW FILTERING`); 36 | }); 37 | 38 | it('should return an find statement with a field list if fields in options', () => { 39 | const statement = builder.find({ 40 | type: 'find', 41 | fields: ['artist_id', 'name'] 42 | }, {}); 43 | assume(statement.cql).to.equal('SELECT "artist_id", "name" FROM artist'); 44 | }); 45 | 46 | it('should return a find statement with a limit if specified in options', () => { 47 | const statement = builder.find({ 48 | type: 'all', 49 | limit: 2 50 | }, {}); 51 | 52 | assume(statement.cql).to.equal(`SELECT ${fieldList} FROM artist LIMIT 2`); 53 | }); 54 | 55 | it('should throw an error when passed conditions that get filtered (non primary keys)', () => { 56 | assume(() => builder.find({ 57 | type: 'find', 58 | conditions: { 59 | createDate: new Date() 60 | } 61 | })).throws(); 62 | }); 63 | 64 | it('should return a find statement with query params', () => { 65 | const statement = builder.find({ 66 | type: 'find', 67 | conditions: { 68 | artistId: { lte: '2345', gt: '1234' } 69 | } 70 | }); 71 | 72 | assume(statement.cql).to.equal(`SELECT ${fieldList} FROM artist WHERE artist_id <= ? 
AND artist_id > ?`); 73 | assume(statement.params.length).to.equal(2); 74 | assume(statement.params[0].value).to.equal('2345'); 75 | assume(statement.params[1].value).to.equal('1234'); 76 | }); 77 | }); 78 | 79 | describe('Lookup Tables', () => { 80 | const s = new Schema('artist', schemas.artist); 81 | const b = new StatementBuilder(s); 82 | s.setLookupKeys(['name']); 83 | let entity; 84 | 85 | beforeEach(() => { 86 | entity = clone(artistEntity); 87 | }); 88 | 89 | it('(Find test) should throw an error when passed conditions with conflicting lookup tables', () => { 90 | assume(() => b.find({ 91 | type: 'single', 92 | conditions: { 93 | artistId: uuid.v4(), 94 | name: 'whatever' 95 | } 96 | })).throws(); 97 | }); 98 | 99 | it('should return a valid statement that uses the proper lookup table for find when querying by domainName', () => { 100 | const statement = b.find({ 101 | type: 'single', 102 | conditions: { 103 | name: 'whatever' 104 | } 105 | }); 106 | // 107 | // Ensure we are querying the correct lookup table 108 | // 109 | assume(statement.cql.indexOf('artist_by_name')); 110 | assume(statement.params[0].value).to.equal('whatever'); 111 | } 112 | ); 113 | 114 | it('should return multiple statements to create for each lookup table', () => { 115 | const statement = b.create({}, entity); 116 | assume(statement.statements.length).to.equal(2); 117 | statement.statements.forEach(function (state) { 118 | assume(state.cql).is.a('string'); 119 | }); 120 | }); 121 | 122 | it('should throw a validation error when trying to create without all of the lookup tables', () => { 123 | const ent = clone(entity); 124 | delete ent.name; 125 | 126 | assume(() => b.create({}, ent)).throws(); 127 | }); 128 | 129 | function setupPrevious(entity) { 130 | entity.name = 'something-else'; 131 | return entity; 132 | } 133 | 134 | function modifySet(entity) { 135 | entity.relatedArtists = { 136 | add: entity.relatedArtists.slice(0, 2), 137 | remove: entity.relatedArtists.slice(3, 5) 138 
| }; 139 | return entity; 140 | } 141 | 142 | 143 | function setupOtherPrevious(entity) { 144 | entity.domainName = 'something-else'; 145 | return entity; 146 | } 147 | 148 | describe('update', () => { 149 | it('should create a compound statement of compound statements with remove/create for looku tables', () => { 150 | const previous = setupPrevious(clone(artistEntity)); 151 | entity = modifySet(clone(artistEntity)); 152 | const statement = b.update({ previous: previous }, entity); 153 | assume(statement.statements.length).to.equal(2); 154 | // because of add remove on a set 155 | assume(statement.statements[0].statements.length).to.equal(2); 156 | assume(statement.statements[0].statements[0].cql.indexOf('UPDATE')).to.not.equal(-1); 157 | assume(statement.statements[1].statements.length).to.equal(2); 158 | // orion_id 159 | assume(statement.statements[1].statements[0].cql.indexOf('artist_by_name')).to.not.equal(-1); 160 | assume(statement.statements[1].statements[0].cql.indexOf('DELETE')).to.not.equal(-1); 161 | assume(statement.statements[1].statements[1].cql.indexOf('INSERT')).to.not.equal(-1); 162 | }); 163 | 164 | it('should create a compound statement for lookup tables with 1 replacement for domain_name', () => { 165 | const previous = setupOtherPrevious(clone(artistEntity)); 166 | entity = modifySet(clone(artistEntity)); 167 | const statement = b.update({ previous: previous }, entity); 168 | assume(statement.statements.length).to.equal(2); 169 | // because of add remove on a set 170 | assume(statement.statements[0].statements.length).to.equal(2); 171 | assume(statement.statements[0].statements[0].cql.indexOf('UPDATE')).to.not.equal(-1); 172 | assume(statement.statements[1].statements.length).to.equal(2); 173 | // orion_id 174 | assume(statement.statements[1].statements[0].cql.indexOf('artist_by_name')).to.not.equal(-1); 175 | assume(statement.statements[1].statements[0].cql.indexOf('UPDATE')).to.not.equal(-1); 176 | 
assume(statement.statements[1].statements[1].cql.indexOf('UPDATE')).to.not.equal(-1); 177 | }); 178 | 179 | }); 180 | 181 | it('should return multiple statements to delete from each lookup table', () => { 182 | const statement = b.remove({}, entity); 183 | assume(statement.statements.length).to.equal(2); 184 | statement.statements.forEach(function (state) { 185 | assume(state.cql).is.a('string'); 186 | }); 187 | }); 188 | 189 | it('should throw an error when trying to delete with no conditions when there is a lookup table', () => { 190 | assume(() => b.remove({}, {})).throws(); 191 | }); 192 | 193 | it('should throw an error FOR NOW when trying to delete with insufficient conditions', () => { 194 | assume(() => b.remove({}, { id: uuid.v4() })).throws(); 195 | }); 196 | }); 197 | 198 | // 199 | // This is also tested in cases within TableStatement 200 | // 201 | describe('AlterStatement', () => { 202 | it('should throw an error when given a bad type', () => { 203 | assume(() => builder.alter({ type: 'NANANANA' })).throws(); 204 | }); 205 | 206 | it('should throw an error when given an unknown arg type', () => { 207 | assume(() => builder.alter({ 208 | type: 'table', 209 | actions: { 210 | something: new RegExp() 211 | } 212 | })).throws(); 213 | }); 214 | }); 215 | 216 | describe('TableStatment { schema } valid', () => { 217 | let entity; 218 | it('build()', done => { 219 | const schema = new Schema('artist', schemas.artist); 220 | const builder = new StatementBuilder(schema); 221 | const statement = builder.table({ type: 'ensure' }); 222 | fs.readFile(path.join(fixturesDir, 'tables', 'artist.cql'), 'utf8', (err, data) => { 223 | assert(!err); 224 | assume(statement.cql.trim()).to.equal(data.trim()); 225 | done(); 226 | }); 227 | }); 228 | 229 | function compileTable(options, entity) { 230 | /* eslint-disable-next-line */ 231 | const statement = new statements.table(schema); 232 | const opts = statement.init(options, entity); 233 | let tableName; 234 | 
assume(opts).to.not.be.instanceof(Error); 235 | 236 | if (options.lookupKey) { 237 | assume(options.lookupColumn.type).to.not.equal('map'); 238 | assume(options.lookupColumn.type).to.not.equal('set'); 239 | if (options.useIndex) { 240 | tableName = `${schema.name}_${options.lookupKey}`; 241 | } else { 242 | tableName = `${schema.name}_by_${options.lookupKey.replace(/_\w+$/, '')}`; 243 | } 244 | } 245 | 246 | return statement._compile(opts, tableName, options.lookupKey); 247 | } 248 | 249 | 250 | it('should return a proper TableStatement with orderBy option', () => { 251 | const statement = builder.table({ 252 | type: 'ensure', 253 | orderBy: { key: 'createDate', order: 'DESC' } 254 | }); 255 | 256 | assume(statement.cql).is.a('string'); 257 | assume(statement.cql.indexOf('WITH CLUSTERING ORDER BY (create_date DESC);')).is.above(0); 258 | }); 259 | 260 | it('should return a proper TableStatement with orderBy option without order', () => { 261 | const statement = builder.table({ 262 | type: 'ensure', 263 | orderBy: { key: 'createDate' } 264 | }); 265 | assume(statement.cql).is.a('string'); 266 | assume(statement.cql.indexOf('WITH CLUSTERING ORDER BY (create_date);')).is.above(0); 267 | }); 268 | 269 | it('should throw an error when given a bad key to orderBy', () => { 270 | assume(() => builder.table({ 271 | type: 'ensure', 272 | orderBy: { key: 'createdAt' } 273 | })).throws(); 274 | }); 275 | 276 | it('should return a table statement with proper alterations', () => { 277 | const statement = builder.table({ 278 | type: 'ensure', 279 | with: { 280 | compaction: { 281 | class: 'LeveledCompactionStrategy', 282 | enabled: true, 283 | sstableSizeInMb: 160, 284 | tombstoneCompactionInterval: '86400' 285 | }, 286 | gcGraceSeconds: 86400, 287 | someOtherThing: 'LoLoLoL' 288 | } 289 | }); 290 | 291 | assume(statement.cql.indexOf('WITH compaction = ')).is.above(0); 292 | }); 293 | 294 | it('should return a proper table statement with a schema that uses composite partition 
keys', () => { 295 | const s = new Schema('cat', schemas.cat); 296 | const b = new StatementBuilder(s); 297 | const statement = b.table({ type: 'ensure' }); 298 | 299 | assume(statement).to.not.be.instanceof(Error); 300 | assume(statement.cql).contains('PRIMARY KEY ((cat_id, hash))'); 301 | }); 302 | 303 | it('should return a proper table statement with a schema that uses a secondary or clustering key', () => { 304 | const s = new Schema('album', schemas.album); 305 | const b = new StatementBuilder(s); 306 | const statement = b.table({ type: 'ensure' }); 307 | 308 | assume(statement).is.not.instanceof(Error); 309 | assume(statement.cql).contains('PRIMARY KEY (artist_id, album_id)'); 310 | }); 311 | 312 | it('should return a proper TableStatement from _compile()', () => { 313 | entity = {}; 314 | const options = { 315 | type: 'ensure', 316 | useIndex: false, 317 | lookupKey: null, 318 | lookupColumn: null 319 | }; 320 | const cql = compileTable(options, entity); 321 | assume(cql).is.a('string'); 322 | assume(cql.indexOf('CREATE TABLE IF NOT EXISTS')).is.above(-1); 323 | // NOTE: Not a lookup table: 324 | assume(cql.indexOf('PRIMARY KEY (artist_id)')).is.above(-1); 325 | }); 326 | 327 | it('should return a proper index TableStatement from _compile()', () => { 328 | entity = {}; 329 | const options = { 330 | type: 'ensure', 331 | useIndex: true, 332 | lookupKey: null, 333 | lookupColumn: null 334 | }; 335 | const cql = compileTable(options, entity); 336 | assume(cql).is.a('string'); 337 | assume(cql.indexOf('CREATE INDEX IF NOT EXISTS')).is.above(-1); 338 | // NOTE: Not a lookup table: 339 | assume(cql.indexOf(`on ${schema.name}(artist_id)`)).is.above(-1); 340 | }); 341 | 342 | it('should return a proper TableStatement from build()', () => { 343 | const statement = builder.table({ 344 | type: 'ensure', 345 | useIndex: false, 346 | lookupKey: null, 347 | lookupColumn: null 348 | }); 349 | 350 | assume(statement).to.not.be.instanceof(Error); 351 | 
assume(statement.cql).is.a('string'); 352 | assume(statement.cql.indexOf('CREATE TABLE IF NOT EXISTS')).is.above(-1); 353 | // NOTE: Not a lookup table: 354 | assume(statement.cql.indexOf('PRIMARY KEY (artist_id)')).is.above(-1); 355 | }); 356 | 357 | it('should return a proper index TableStatement from build()', () => { 358 | const statement = builder.table({ 359 | type: 'ensure', 360 | useIndex: true, 361 | lookupKey: null, 362 | lookupColumn: null 363 | }); 364 | 365 | assume(statement).to.not.be.instanceof(Error); 366 | assume(statement.cql).is.a('string'); 367 | assume(statement.cql.indexOf('CREATE INDEX IF NOT EXISTS')).is.above(-1); 368 | // NOTE: Not a lookup table: 369 | assume(statement.cql.indexOf(`on ${schema.name}(artist_id)`)).is.above(-1); 370 | }); 371 | 372 | it('should return a proper TableStatement (lookup) from _compile()', () => { 373 | entity = {}; 374 | const options = { 375 | type: 'ensure', 376 | useIndex: false, 377 | lookupKey: 'name', 378 | lookupColumn: { type: 'text' } 379 | }; 380 | const cql = compileTable(options, entity); 381 | assume(cql).is.a('string'); 382 | assume(cql.indexOf('CREATE TABLE IF NOT EXISTS')).is.above(-1); 383 | assume(cql.indexOf(`PRIMARY KEY (${options.lookupKey})`)).is.above(-1); 384 | }); 385 | 386 | it('should return a proper index TableStatement (lookup) from _compile()', () => { 387 | entity = {}; 388 | const options = { 389 | type: 'ensure', 390 | useIndex: true, 391 | lookupKey: 'name', 392 | lookupColumn: { type: 'text' } 393 | }; 394 | const cql = compileTable(options, entity); 395 | assume(cql).is.a('string'); 396 | assume(cql.indexOf('CREATE INDEX IF NOT EXISTS')).is.above(-1); 397 | assume(cql.indexOf(`on ${schema.name}(${options.lookupKey})`)).is.above(-1); 398 | }); 399 | 400 | it('should return a proper TableStatement (lookup) from build()', () => { 401 | const options = { 402 | type: 'ensure', 403 | useIndex: false, 404 | lookupKey: 'name', 405 | lookupColumn: { type: 'text' } 406 | }; 407 | 408 
| const statement = builder.table(options); 409 | assume(statement).to.not.be.instanceof(Error); 410 | assume(statement.cql).is.a('string'); 411 | assume(statement.cql.indexOf('CREATE TABLE IF NOT EXISTS')).is.above(-1); 412 | assume(statement.cql.indexOf(`PRIMARY KEY (${options.lookupKey})`)).is.above(-1); 413 | }); 414 | 415 | it('should return a proper TableStatement for dropping the table from build()', () => { 416 | const options = { 417 | type: 'drop' 418 | }; 419 | 420 | const statement = builder.table(options); 421 | assume(statement).to.not.be.instanceof(Error); 422 | assume(statement.cql).is.a('string'); 423 | assume(statement.cql.indexOf('DROP TABLE')).is.above(-1); 424 | }); 425 | 426 | it('should return a proper TableStatement for dropping an index from build', () => { 427 | const options = { 428 | type: 'drop', 429 | useIndex: true 430 | }; 431 | 432 | const statement = builder.table(options); 433 | assume(statement).to.not.be.instanceof(Error); 434 | assume(statement.cql).is.a('string'); 435 | assume(statement.cql.indexOf('DROP INDEX')).is.above(-1); 436 | }); 437 | 438 | it('should return a proper index TableStatement (lookup) from build()', () => { 439 | const options = { 440 | type: 'ensure', 441 | useIndex: true, 442 | lookupKey: 'name', 443 | lookupColumn: { type: 'text' } 444 | }; 445 | const statement = builder.table(options); 446 | assume(statement).is.not.instanceof(Error); 447 | assume(statement.cql).is.a('string'); 448 | assume(statement.cql.indexOf('CREATE INDEX IF NOT EXISTS')).is.above(-1); 449 | assume(statement.cql.indexOf(`on ${schema.name}(${options.lookupKey})`)).is.above(-1); 450 | }); 451 | }); 452 | 453 | describe('RemoveStatement', () => { 454 | it('should return a proper RemoveStatement passed a single entity', () => { 455 | const id = uuid.v4(); 456 | // 457 | // Pass in an entity to remove to build the statement 458 | // 459 | const statement = builder.remove({}, { id: id, createDate: new Date() }); 460 | 461 | 
assume(statement.cql).to.equal('DELETE FROM artist WHERE artist_id = ?'); 462 | assume(statement.params[0].value).to.equal(id); 463 | }); 464 | 465 | it('should throw an error when passed an entity without a primary key', () => { 466 | assume(() => builder.remove({}, { createDate: new Date() })).throws(); 467 | }); 468 | 469 | it('should throw an error when passed empty conditions ', () => { 470 | assume(() => builder.remove({}, {})).throws(); 471 | }); 472 | 473 | it('should return a proper RemoveStatement when passed a set of conditions', () => { 474 | const id = uuid.v4(); 475 | const statement = builder.remove({ 476 | conditions: { 477 | artistId: id 478 | } 479 | }); 480 | 481 | assume(statement.cql).to.equal('DELETE FROM artist WHERE artist_id = ?'); 482 | assume(statement.params[0].value).to.equal(id); 483 | }); 484 | }); 485 | 486 | describe('UpdateStatement', () => { 487 | const entity = clone(artistEntity); 488 | // 489 | // More complex case 490 | // 491 | it('should return a proper update statement when given a artist object', () => { 492 | const statement = builder.update({}, entity); 493 | assume(statement.statements.length).to.equal(1); 494 | assume(statement.statements[0].params.length).to.equal(6); 495 | 496 | }); 497 | 498 | it('should return a proper statement with USING TTL if ttl options are passed', () => { 499 | const statement = builder.update({ ttl: 8643462 }, entity); 500 | 501 | assume(statement.statements.length).to.equal(1); 502 | assume(statement.statements[0].cql).contains('USING TTL 8643462'); 503 | }); 504 | 505 | it('should properly generate multiple statements when updating a set with add and remove', () => { 506 | const next = clone(entity); 507 | next.relatedArtists = { 508 | add: next.relatedArtists.slice(0, 2), 509 | remove: next.relatedArtists.slice(3, 5) 510 | }; 511 | const statement = builder.update({}, next); 512 | 513 | assume(statement.statements.length).to.equal(2); 514 | 515 | }); 516 | }); 517 | 518 | 
describe('CreateStatement', () => { 519 | const entity = clone(artistEntity); 520 | it('should return a proper create statement when given a artist object', () => { 521 | 522 | const statement = builder.create({}, entity); 523 | // 524 | // TODO: better assumes for the specific entity we are dealing with to 525 | // ensure we are accurate in how we convert things 526 | // 527 | assume(statement.table).eqls('artist'); 528 | assume(statement.cql).is.a('string'); 529 | assume(statement.params).is.an('array'); 530 | }); 531 | 532 | it('should append USING TTL to the statement if it is passed as an option', () => { 533 | const statement = builder.create({ ttl: 864342 }, entity); 534 | assume(statement.cql).contains('USING TTL 864342'); 535 | }); 536 | 537 | it('should throw an error when given an improper entity', () => { 538 | const ent = clone(entity); 539 | delete ent.artistId; 540 | assume(() => builder.create({}, ent)).throws(); 541 | }); 542 | }); 543 | }); 544 | -------------------------------------------------------------------------------- /test/integration/model.tests.js: -------------------------------------------------------------------------------- 1 | /* jshint camelcase: false */ 2 | 3 | const 4 | { Stream } = require('stream'), 5 | assume = require('assume'), 6 | uuid = require('uuid'), 7 | async = require('async'), 8 | clone = require('clone'), 9 | schemas = require('../fixtures/schemas'), 10 | Datastar = require('../..'), 11 | helpers = require('../helpers'); 12 | 13 | /* eslint no-invalid-this: 1*/ 14 | describe('Model', function () { 15 | this.timeout(60000); 16 | let datastar, Artist; 17 | 18 | before(done => { 19 | helpers.load((err, data) => { 20 | assume(err).to.equal(null); 21 | datastar = helpers.connectDatastar({ config: data.cassandra }, Datastar, done); 22 | }); 23 | }); 24 | 25 | describe('Artist', () => { 26 | after(done => { 27 | if (Artist) return Artist.dropTables(done); 28 | done(); 29 | }); 30 | 31 | it('should create a model', () => 
{ 32 | Artist = datastar.define('artist', { 33 | schema: schemas.artist 34 | }); 35 | 36 | Artist.before('create:build', (options, next) => { 37 | assume(options.statements).to.be.instanceof(datastar.StatementCollection); 38 | next(); 39 | }); 40 | 41 | assume(Artist.schema).to.not.be.an('undefined'); 42 | assume(Artist.connection).to.not.be.an('undefined'); 43 | }); 44 | 45 | it('should create tables', done => { 46 | Artist.ensureTables(done); 47 | }); 48 | 49 | it('should create', done => { 50 | // INSERT INTO [schema.name] ([allFields[0], allFields[1]]) VALUES (?, ?, ...) 51 | const options = { 52 | entity: { 53 | artistId: '00000000-0000-0000-0000-000000000002', 54 | name: 'hello there' 55 | } 56 | }; 57 | 58 | Artist.create(options, done); 59 | }); 60 | 61 | it('should update', done => { 62 | const entity = { 63 | id: '00000000-0000-0000-0000-000000000003', 64 | name: 'nirvana', 65 | createDate: new Date(), 66 | metadata: { 67 | randomKey: 'hello' 68 | } 69 | }; 70 | 71 | const update = { 72 | id: entity.id, 73 | members: ['Kurt Cobain'], 74 | metadata: { 75 | hello: 'world', 76 | please: 'helpMe', 77 | what: 'can i do' 78 | }, 79 | relatedArtists: [uuid.v4(), uuid.v4()] 80 | }; 81 | 82 | 83 | // 84 | // First run a create, then do an update 85 | // 86 | /* eslint max-nested-callbacks: 1*/ 87 | Artist.create(entity, err => { 88 | assume(err).is.falsey(); 89 | 90 | // 91 | // Now run a find and confirm we are where we assume 92 | // 93 | Artist.get(entity.id, (_, result) => { 94 | assume(result.id).to.equal(entity.id); 95 | assume(result.name).to.equal(entity.name); 96 | assume(result.metadata).to.be.an('object'); 97 | assume(result.metadata.randomKey).to.equal(entity.metadata.randomKey); 98 | 99 | // 100 | // Now update to this same entity 101 | // 102 | Artist.update(update, err => { 103 | assume(err).is.falsey(); 104 | find(Artist, entity.id, (_, result) => { 105 | assume(result.members).to.deep.equal(update.members); 106 | 
assume(result.metadata.hello).to.equal(update.metadata.hello); 107 | assume(result.metadata.please).to.equal(update.metadata.please); 108 | assume(result.metadata.what).to.equal(update.metadata.what); 109 | assume(result.relatedArtists.sort()).to.deep.equal(update.relatedArtists.sort()); 110 | 111 | // 112 | // By passing a null we set the map value to null 113 | // 114 | Artist.update({ 115 | id: entity.id, 116 | metadata: { 117 | hello: null 118 | } 119 | }, err => { 120 | assume(err).is.falsey(); 121 | find(Artist, entity.id, (_, result) => { 122 | assume(result.metadata.hello).is.falsey(); 123 | 124 | Artist.remove(entity, done); 125 | }); 126 | }); 127 | }); 128 | }); 129 | }); 130 | }); 131 | }); 132 | 133 | it('should find', done => { 134 | // SELECT [fields] FROM [table] WHERE [conditions.query[0]] AND [conditionals.query[1]] FROM [schema.name] 135 | const options = { 136 | type: 'all', 137 | conditions: { 138 | artistId: '00000000-0000-0000-0000-000000000002' 139 | } 140 | }; 141 | 142 | Artist.find(options, (err, results) => { 143 | assume(err).to.equal(null); 144 | assume(results).to.be.an('array'); 145 | assume(results.length).equals(1); 146 | 147 | const result = results[0]; 148 | assume(result.id).to.be.a('string'); 149 | assume(result.name).to.be.a('string'); 150 | done(); 151 | }); 152 | }); 153 | 154 | // 155 | // Do a sequence of create, find, remove, find to validate remove worked 156 | // properly. 
We may want to put delays in here depending on how cassandra 157 | // behaves 158 | // 159 | it('should remove', done => { 160 | const id = '00000000-0000-0000-0000-000000000001'; 161 | const entity = { 162 | id: id, 163 | name: 'meatloaf' 164 | }; 165 | 166 | const findOptions = { 167 | type: 'all', 168 | conditions: { 169 | artistId: id 170 | } 171 | }; 172 | 173 | Artist.create(entity, err => { 174 | if (err) { 175 | return done(err); 176 | } 177 | async.waterfall([ 178 | Artist.find.bind(Artist, findOptions), 179 | (result, next) => { 180 | const res = result[0]; 181 | assume(result.length).to.equal(1); 182 | assume(res.id).to.be.a('string'); 183 | assume(res.name).to.be.a('string'); 184 | next(null, res); 185 | }, 186 | Artist.remove.bind(Artist), 187 | (_, next) => { 188 | setTimeout(() => { 189 | Artist.find(findOptions, next); 190 | }, 1000); 191 | } 192 | ], (err, last) => { 193 | if (err) { 194 | return done(err); 195 | } 196 | assume(last.length).to.equal(0); 197 | done(); 198 | }); 199 | }); 200 | }); 201 | }); 202 | 203 | describe('update on List type', () => { 204 | let Person; 205 | 206 | const entity = { 207 | id: 'a2fc6faa-ca90-4b00-bfc7-e3a3dcb05be3', 208 | name: 'Geoff', 209 | characteristics: ['egotistical', 'opinionated', 'proud'] 210 | }; 211 | 212 | after(done => { 213 | if (Person) return Person.dropTables(done); 214 | done(); 215 | }); 216 | 217 | it('create a person model', done => { 218 | Person = datastar.define('person', { 219 | ensureTables: true, 220 | schema: schemas.person 221 | }); 222 | 223 | Person.on('ensure-tables:finish', done.bind(null, null)); 224 | Person.on('error', done); 225 | }); 226 | 227 | it('should create a person model with a list', done => { 228 | Person.create(entity, done); 229 | }); 230 | 231 | it('should handle an update operation that prepends a value on the list', done => { 232 | Person.update({ 233 | id: entity.id, 234 | characteristics: { 235 | prepend: ['nosey'] 236 | } 237 | }, err => { 238 | 
assume(err).is.falsey();
        // Re-read after a delay before asserting the prepend landed —
        // presumably to let the write settle; TODO confirm this delay is
        // still required for the test cluster's consistency level.
        setTimeout(() => {
          Person.findOne({
            conditions: { id: entity.id }
          }, (err, result) => {
            assume(err).is.falsey();
            // Prepend puts the new value at the head of the list column.
            assume(result.characteristics[0]).to.equal('nosey');
            done();
          });
        }, 1000);
      });
    });

    it('should handle an update for an append operation', done => {
      // { append: [...] } should add the value at the tail of the list.
      Person.update({
        id: entity.id,
        characteristics: {
          append: ['insecure']
        }
      }, err => {
        assume(err).is.falsey();
        Person.findOne({
          conditions: { id: entity.id }
        }, (err, result) => {
          assume(err).is.falsey();
          assume(result.characteristics[result.characteristics.length - 1]).to.equal('insecure');
          done();
        });
      });
    });

    it('should handle an update for a remove operation', done => {
      // { remove: [...] } should delete the matching value from the list.
      Person.update({
        id: entity.id,
        characteristics: {
          remove: ['egotistical']
        }
      }, err => {
        assume(err).is.falsey();
        Person.findOne({
          conditions: { id: entity.id }
        }, (err, result) => {
          assume(err).is.falsey();
          assume(result.characteristics.indexOf('egotistical')).to.equal(-1);
          done();
        });
      });
    });

    it('should handle an update for an index operation', done => {
      // { index: { position: value } } should overwrite the element at that
      // zero-based position in the list.
      Person.update({
        id: entity.id,
        characteristics: {
          index: { 1: 'ego-driven' }
        }
      }, err => {
        assume(err).is.falsey();
        Person.findOne({
          conditions: { id: entity.id }
        }, (err, result) => {
          assume(err).is.falsey();
          assume(result.characteristics[1]).to.equal('ego-driven');
          done();
        });
      });
    });

    it('should handle an update that replaces the list', done => {
      // Passing a plain array (no add/remove/index wrapper) replaces the
      // whole list column.
      const newChars = ['friendly', 'humble', 'present'];
      Person.update({
        id: entity.id,
        characteristics: newChars
      }, err => {
        assume(err).is.falsey();
        Person.findOne({
          conditions: { id:
entity.id } 314 | }, (err, result) => { 315 | assume(err).is.falsey(); 316 | assume(result.characteristics).to.deep.equal(newChars); 317 | done(); 318 | }); 319 | }); 320 | }); 321 | 322 | it('should fetch a person with the simpler find syntax (object)', done => { 323 | Person.findOne({ 324 | id: entity.id 325 | }, (err, res) => { 326 | assume(err).is.falsey(); 327 | assume(res).to.be.an('object'); 328 | done(); 329 | }); 330 | 331 | }); 332 | 333 | it('should fetch a person with the simpler find syntax (string)', done => { 334 | Person.get(entity.id, (err, res) => { 335 | assume(err).is.falsey(); 336 | assume(res).to.be.an('object'); 337 | done(); 338 | }); 339 | }); 340 | 341 | it('should completely remove the entity', done => { 342 | Person.remove({ 343 | id: entity.id 344 | }, done); 345 | }); 346 | }); 347 | 348 | describe('Composite Partition Keys', () => { 349 | let Cat; 350 | const id = 'c000a7a7-372a-482c-96be-e06050933725'; 351 | const hash = 6; 352 | 353 | after(done => { 354 | if (Cat) return Cat.dropTables(done); 355 | done(); 356 | }); 357 | 358 | it('should create the table when defining the model with composite partition key', done => { 359 | Cat = datastar.define('cat', { 360 | ensureTables: true, 361 | schema: schemas.cat 362 | }); 363 | 364 | Cat.on('ensure-tables:finish', done.bind(null, null)); 365 | Cat.on('error', done); 366 | 367 | assume(Cat.schema).to.not.be.an('undefined'); 368 | assume(Cat.connection).to.not.be.an('undefined'); 369 | }); 370 | 371 | it('should be able to create', done => { 372 | Cat.create({ 373 | id: id, 374 | hash: hash, 375 | name: 'Hector' 376 | }, err => { 377 | assume(err).is.falsey(); 378 | done(); 379 | }); 380 | }); 381 | 382 | it('should be able to update that same record', done => { 383 | Cat.update({ 384 | id: id, 385 | hash: hash, 386 | createDate: new Date() 387 | }, err => { 388 | assume(err).is.falsey(); 389 | done(); 390 | }); 391 | }); 392 | 393 | it('should error with simpler find syntax with more 
complicated key (string)', done => { 394 | Cat.get(id, err => { 395 | assume(err).to.be.instanceof(Error); 396 | done(); 397 | }); 398 | }); 399 | 400 | it('should be able to find the record', done => { 401 | Cat.findOne({ 402 | conditions: { 403 | id: id, 404 | hash: hash 405 | } 406 | }, (err, res) => { 407 | assume(err).is.falsey(); 408 | assume(res).to.be.an('object'); 409 | done(); 410 | }); 411 | }); 412 | 413 | it('should error when updating without the hash in the composite key', done => { 414 | Cat.update({ 415 | id: id, 416 | createDate: new Date() 417 | }, err => { 418 | assume(err).to.be.instanceof(Error); 419 | done(); 420 | }); 421 | }); 422 | 423 | it('should error when we pass in a key that doesnt exist when running update', done => { 424 | Cat.update({ 425 | id: id, 426 | hash: hash, 427 | whatAReYOuDOing: 'hello' 428 | }, err => { 429 | assume(err).to.be.instanceof(Error); 430 | done(); 431 | }); 432 | }); 433 | 434 | it('should error when we pass in a key that doesnt exist when running create', done => { 435 | Cat.create({ 436 | id: id, 437 | hash: hash, 438 | whatAReYOuDOing: 'hello' 439 | }, err => { 440 | assume(err).to.be.instanceof(Error); 441 | done(); 442 | }); 443 | }); 444 | 445 | it('should remove the record', done => { 446 | Cat.remove({ 447 | id: id, 448 | hash: hash 449 | }, err => { 450 | assume(err).is.falsey(); 451 | done(); 452 | }); 453 | }); 454 | }); 455 | 456 | describe('Clustering Keys', () => { 457 | let Album; 458 | const id = '9adc5c0e-6de5-4cf2-9b96-143f82caba63'; 459 | const artistId = 'd416d385-c57d-4db9-9e37-ca04cb9fceb9'; 460 | 461 | after(done => { 462 | if (Album) return Album.dropTables(done); 463 | done(); 464 | }); 465 | 466 | it('should create a table with secondary/clustering keys', done => { 467 | Album = datastar.define('album', { 468 | ensureTables: true, 469 | schema: schemas.album 470 | }); 471 | 472 | Album.on('ensure-tables:finish', done.bind(null, null)); 473 | Album.on('error', done); 474 | 475 | // 
476 | // Work with the literal object on every findOne because Why not? 477 | // 478 | Album.after('find:one', (result, next) => { 479 | next(null, result.toJSON()); 480 | }); 481 | // 482 | // AND THEN LETS CHANGE IT BACK 483 | // 484 | Album.after('find:one', (result, next) => { 485 | next(null, new Album(result)); 486 | }); 487 | }); 488 | 489 | it('should create an album with proper IDs', done => { 490 | Album.create({ 491 | id: id, 492 | artistId: artistId, 493 | name: 'hello', 494 | releaseDate: new Date() 495 | }, err => { 496 | assume(err).is.falsey(); 497 | done(); 498 | }); 499 | }); 500 | 501 | it('should update an album', done => { 502 | Album.update({ 503 | id: id, 504 | artistId: artistId, 505 | trackList: ['whatever whatever whatever', 'you dont know whats coming'] 506 | }, err => { 507 | assume(err).is.falsey(); 508 | done(); 509 | }); 510 | }); 511 | 512 | it('should find an album', done => { 513 | Album.findOne({ 514 | conditions: { 515 | id: id, 516 | artistId: artistId 517 | } 518 | }, (err, res) => { 519 | assume(err).is.falsey(); 520 | assume(res).to.be.an('object'); 521 | assume(res).to.be.instanceof(Album); 522 | done(); 523 | }); 524 | }); 525 | 526 | it('should error when updating without the artistId', done => { 527 | Album.update({ 528 | id: id 529 | }, err => { 530 | assume(err).to.be.instanceof(Error); 531 | done(); 532 | }); 533 | }); 534 | 535 | it('should remove an Album', done => { 536 | Album.remove({ 537 | id: id, 538 | artistId: artistId 539 | }, err => { 540 | assume(err).is.falsey(); 541 | done(); 542 | }); 543 | }); 544 | }); 545 | 546 | describe('Lookup Tables', () => { 547 | let Song; 548 | const uniqueId = '9adc5c0e-6de5-4cf2-9b96-143f82caba63'; 549 | const otherId = 'd416d385-c57d-4db9-9e37-ca04cb9fceb9'; 550 | const id = 'a5fbcd74-12e1-4860-b625-db2c472ba1fa'; 551 | let newOtherId = 'b7c49590-6b37-45c6-9d9b-2a82759c52a8'; 552 | 553 | function findOneAll(ids, callback) { 554 | if (typeof ids === 'function') { 555 | 
callback = ids;
        ids = {};
      }
      // Fetch the same song through each lookup key (and the base id) in
      // parallel; callers can override any key via the optional `ids` map.
      async.parallel({
        otherId: Song.findOne.bind(Song, { conditions: { otherId: ids.otherId || otherId } }),
        uniqueId: Song.findOne.bind(Song, { conditions: { uniqueId: ids.uniqueId || uniqueId } }),
        id: Song.findOne.bind(Song, { conditions: { id: ids.id || id } })
      }, callback);
    }

    // Drop the song tables once the suite finishes (only if the model was
    // ever defined).
    after(done => {
      if (Song) return Song.dropTables(done);
      done();
    });

    it('should be created when defining a model with lookupKeys and ensureTables is true', done => {
      Song = datastar.define('song', {
        ensureTables: true,
        schema: schemas.song.lookupKeys(['otherId', 'uniqueId'])
      });

      // 'ensure-tables:finish' signals table setup completed; any 'error'
      // event fails the test directly.
      Song.on('ensure-tables:finish', done.bind(null, null));
      Song.on('error', done);

      assume(Song.schema).to.not.be.an('undefined');
      assume(Song.connection).to.not.be.an('undefined');
    });

    it('should be able to write all lookup tables', done => {
      Song.create({
        id: id,
        otherId: otherId,
        uniqueId: uniqueId
      }, done);
    });

    // Shared fixtures for the update tests below: `previous` mirrors the row
    // created above, `update` keeps the same keys but adds new columns.
    const previous = {
      id: id,
      otherId: otherId,
      uniqueId: uniqueId
    };

    const update = {
      id: id,
      otherId: otherId,
      uniqueId: uniqueId,
      length: '3:21',
      artists: [uuid.v4(), uuid.v4()]
    };

    it('should be able to update all lookup tables when not changing the primary keys', done => {
      Song.update({
        previous: previous,
        entity: update
      }, done);
    });

    it('should be able to find by all the `primaryKeys` and return the same value', done => {
      // Every lookup table should resolve to a deep-equal copy of the row.
      findOneAll((err, result) => {
        assume(err).is.falsey();
        assume(result.id).to.deep.equal(result.uniqueId);
        assume(result.uniqueId).to.deep.equal(result.otherId);
        assume(result.id).to.deep.equal(result.otherId);
        done();
      });
    });
    //
    // Setup second update
    //
    const
up = clone(update); 625 | const newArtist = uuid.v4(); 626 | up.artists.push(newArtist); 627 | up.otherId = newOtherId; 628 | 629 | it('should be able to update to all lookup tables when changing a primaryKey for a lookup table and ensure the old primaryKey reference was deleted', done => { 630 | Song.update({ 631 | previous: [previous], 632 | entities: [up] 633 | }, err => { 634 | assume(err).is.falsey(); 635 | // 636 | // slight delay 637 | // 638 | setTimeout(() => { 639 | Song.findOne({ 640 | conditions: { 641 | otherId: update.otherId 642 | } 643 | }, (err, res) => { 644 | assume(err).is.falsey(); 645 | assume(res).is.falsey(); 646 | done(); 647 | }); 648 | }, 500); 649 | }); 650 | }); 651 | 652 | it('should find all by all primaryKeys, specifically the new one and have equal values', done => { 653 | findOneAll({ 654 | otherId: newOtherId 655 | }, (err, result) => { 656 | assume(err).is.falsey(); 657 | const id = result.id.toJSON(); 658 | const uniqueId = result.uniqueId.toJSON(); 659 | const otherId = result.otherId.toJSON(); 660 | assume(id).to.deep.equal(uniqueId); 661 | assume(id).to.deep.equal(otherId); 662 | assume(uniqueId).to.deep.equal(otherId); 663 | done(); 664 | }); 665 | }); 666 | 667 | it('should properly update when not passing a previous value and doing a find operation to fetch previous state', done => { 668 | Song.update({ 669 | id: id, 670 | artists: { 671 | add: [uuid.v4()] 672 | } 673 | }, done); 674 | }); 675 | 676 | it('should fail to update when calling save on the model if no columns changed directly', done => { 677 | Song.findOne({ id: id }, (err, song) => { 678 | assume(err).is.falsey(); 679 | 680 | song.artists.push(uuid.v4()); 681 | 682 | song.save(() => { 683 | Song.findOne({ id: id }, (err, result) => { 684 | assume(err).is.falsey(); 685 | assume(result.toJSON()).not.to.deep.equal(song.toJSON()); 686 | done(); 687 | }); 688 | }); 689 | }); 690 | }); 691 | 692 | it('should update when calling save on the model and not call an 
extra findOne since we have the previous state', done => { 693 | const old = Song.findOne; 694 | let called = 0; 695 | 696 | Song.findOne = function () { 697 | called++; 698 | old.apply(Song, arguments); 699 | }; 700 | 701 | Song.findOne({ id: id }, (err, song) => { 702 | assume(err).is.falsey(); 703 | 704 | newOtherId = uuid.v4(); 705 | song.otherId = newOtherId; 706 | 707 | song.save(err => { 708 | assume(err).is.falsey(); 709 | Song.findOne({ id: id }, (err, result) => { 710 | assume(err).is.falsey(); 711 | assume(result.toJSON()).to.deep.equal(song.toJSON()); 712 | assume(called).to.equal(2); 713 | Song.findOne = old; 714 | done(); 715 | }); 716 | }); 717 | }); 718 | }); 719 | 720 | it('should remove from all lookup tables', done => { 721 | Song.remove({ 722 | id: id, 723 | otherId: newOtherId, 724 | uniqueId: uniqueId 725 | }, err => { 726 | assume(err).is.falsey(); 727 | 728 | findOneAll({ otherId: newOtherId }, (err, result) => { 729 | assume(err).is.falsey(); 730 | Object.keys(result).forEach(key => { 731 | assume(result[key]).is.falsey(); 732 | }); 733 | done(); 734 | }); 735 | }); 736 | }); 737 | 738 | it('should error when trying to remove without required attributes', done => { 739 | Song.remove({ 740 | id: id, 741 | otherId: otherId 742 | }, err => { 743 | assume(err).to.be.instanceof(Error); 744 | done(); 745 | }); 746 | }); 747 | }); 748 | 749 | describe('foo', () => { 750 | const zeros = '00000000-0000-0000-0000-000000000000'; 751 | const one = uuid.v4(); 752 | const two = uuid.v4(); 753 | const three = uuid.v4(); 754 | const four = uuid.v4(); 755 | const five = uuid.v4(); 756 | const six = uuid.v4(); 757 | const seven = uuid.v4(); 758 | const eight = uuid.v4(); 759 | const nine = uuid.v4(); 760 | 761 | let Foo; 762 | 763 | after(done => { 764 | if (Foo) return Foo.dropTables(done); 765 | done(); 766 | }); 767 | 768 | it('should create a table with an alter statement', done => { 769 | Foo = datastar.define('foo', { 770 | schema: schemas.foo, 771 | 
with: { 772 | compaction: { 773 | class: 'LeveledCompactionStrategy' 774 | } 775 | } 776 | }); 777 | 778 | Foo.ensureTables(err => { 779 | if (err) { 780 | console.error(err); 781 | return done(err); 782 | } 783 | done(); 784 | }); 785 | }); 786 | 787 | it('should create multiple records in the database', done => { 788 | const next = assume.wait(2, 2, done); 789 | 790 | Foo.create({ fooId: one, secondaryId: one, nullableId: two }, err => { 791 | assume(err).is.falsey(); 792 | next(); 793 | }); 794 | 795 | Foo.create({ fooId: two, secondaryId: one, nullableId: two }, err => { 796 | assume(err).is.falsey(); 797 | next(); 798 | }); 799 | 800 | }); 801 | 802 | it('should create a record in the database that will properly expire with given ttl', done => { 803 | Foo.create({ entity: { 804 | fooId: three, 805 | secondaryId: zeros, 806 | nullableId: zeros 807 | }, ttl: 1 }, err => { 808 | assume(err).is.falsey(); 809 | 810 | Foo.findOne({ fooId: three, secondaryId: zeros }, (error, res) => { 811 | assume(error).is.falsey(); 812 | assume(res); 813 | 814 | setTimeout(() => { 815 | Foo.findOne({ fooId: three, secondaryId: zeros }, (er, result) => { 816 | assume(er).is.falsey(); 817 | assume(result).is.falsey(); 818 | done(); 819 | }); 820 | }, 1100); 821 | }); 822 | }); 823 | }); 824 | 825 | it('should create a record in the database that will expire but still be found before it reaches given ttl', done => { 826 | Foo.create({ entity: { fooId: four, secondaryId: one }, ttl: 7 }, err => { 827 | assume(err).is.falsey(); 828 | 829 | Foo.findOne({ fooId: four, secondaryId: one }, (err, res) => { 830 | assume(err).is.falsey(); 831 | assume(res); 832 | assume(res.fooId).equals(four); 833 | 834 | setTimeout(() => { 835 | Foo.findOne({ fooId: four, secondaryId: one }, (err, result) => { 836 | assume(err).is.falsey(); 837 | assume(result); 838 | assume(result.fooId).equals(four); 839 | done(); 840 | }); 841 | }, 100); 842 | }); 843 | }); 844 | }); 845 | 846 | it('should update a 
record in the database that will expire with given ttl', done => { 847 | Foo.update({ entity: { fooId: five, secondaryId: one, something: 'foo' }, ttl: 2 }, err => { 848 | assume(err).is.falsey(); 849 | 850 | setTimeout(() => { 851 | Foo.findOne({ fooId: five, secondaryId: one }, (er, res) => { 852 | assume(er).is.falsey(); 853 | assume(res).is.falsey(); 854 | done(); 855 | }); 856 | }, 2000); 857 | }); 858 | }); 859 | 860 | it('should update a record in the database that can be found before it reaches ttl', done => { 861 | Foo.update({ entity: { fooId: six, secondaryId: one, something: 'foo' }, ttl: 5 }, err => { 862 | assume(err).is.falsey(); 863 | 864 | Foo.findOne({ fooId: six, secondaryId: one }, (error, result) => { 865 | assume(error).is.falsey(); 866 | assume(result); 867 | assume(result.fooId).equals(six); 868 | 869 | setTimeout(() => { 870 | Foo.findOne({ fooId: six, secondaryId: one }, (er, res) => { 871 | assume(er).is.falsey(); 872 | assume(res); 873 | assume(res.fooId).equals(six); 874 | done(); 875 | }); 876 | }, 2000); 877 | }); 878 | }); 879 | }); 880 | 881 | it('should update a record in the database with an updated reset ttl and can be found before it reaches the updated ttl', done => { 882 | Foo.update({ entity: { fooId: seven, secondaryId: one, something: 'boo' }, ttl: 1 }, err => { 883 | assume(err).is.falsey(); 884 | 885 | Foo.findOne({ fooId: seven, secondaryId: one }, (error, result) => { 886 | assume(error).is.falsey(); 887 | assume(result); 888 | assume(result.fooId).equals(seven); 889 | 890 | Foo.update({ entity: { fooId: seven, secondaryId: one, something: 'foo' }, ttl: 10 }, error => { 891 | assume(error).is.falsey(); 892 | 893 | setTimeout(() => { 894 | Foo.findOne({ fooId: seven, secondaryId: one }, (er, res) => { 895 | assume(er).is.falsey(); 896 | assume(res); 897 | assume(res.fooId).equals(seven); 898 | done(); 899 | }); 900 | }, 1100); 901 | }); 902 | }); 903 | }); 904 | }); 905 | 906 | it('should update a record in the database 
with an updated reset ttl and expire after it reaches the updated ttl', done => { 907 | Foo.update({ entity: { fooId: eight, secondaryId: one, something: 'boo' }, ttl: 1 }, err => { 908 | assume(err).is.falsey(); 909 | 910 | Foo.findOne({ fooId: eight, secondaryId: one }, (error, result) => { 911 | assume(error).is.falsey(); 912 | assume(result).is.not.falsey(); 913 | assume(typeof result).equals('object'); 914 | assume(result.fooId).equals(eight); 915 | 916 | Foo.update({ entity: { fooId: eight, secondaryId: one, something: 'foo' }, ttl: 1 }, error => { 917 | assume(error).is.falsey(); 918 | 919 | setTimeout(() => { 920 | Foo.findOne({ fooId: eight, secondaryId: one }, (er, res) => { 921 | assume(er).is.falsey(); 922 | assume(res).is.falsey(); 923 | done(); 924 | }); 925 | }, 1100); 926 | }); 927 | }); 928 | }); 929 | }); 930 | 931 | it('handles nullable fields properly', done => { 932 | Foo.create({ entity: { 933 | fooId: nine, 934 | secondaryId: zeros, 935 | nullableId: zeros 936 | }, ttl: 1 }, err => { 937 | assume(err).is.falsey(); 938 | 939 | Foo.findOne({ fooId: nine, secondaryId: zeros }, (error, res) => { 940 | assume(error).is.falsey(); 941 | assume(res); 942 | assume(res.fooId).equals(nine); 943 | assume(res.secondaryId).equals(zeros); 944 | assume(res.nonNullableId).equals(zeros); 945 | assume(res.nullableId).equals(null); 946 | 947 | const resAsJson = res.toJSON(); 948 | assume(resAsJson.fooId).equals(nine); 949 | assume(resAsJson.secondaryId).equals(zeros); 950 | assume(resAsJson.nonNullableId).equals(zeros); 951 | assume(resAsJson.nullableId).equals(null); 952 | 953 | done(); 954 | }); 955 | }); 956 | }); 957 | 958 | it('should run a find query with a limit of 1 and return 1 record', done => { 959 | Foo.findAll({ conditions: {}, limit: 1 }, (err, recs) => { 960 | assume(err).is.falsey(); 961 | assume(recs.length).equals(1); 962 | done(); 963 | }); 964 | }); 965 | 966 | it('should remove entities', done => { 967 | const next = assume.wait(2, 2, done); 
968 | Foo.remove({ fooId: one, secondaryId: one }, err => { 969 | assume(err).is.falsey(); 970 | next(); 971 | }); 972 | 973 | Foo.remove({ fooId: two, secondaryId: one }, err => { 974 | assume(err).is.falsey(); 975 | next(); 976 | }); 977 | }); 978 | }); 979 | 980 | describe('async iterable functionality', () => { 981 | let artistId, Album; 982 | const YEAR = 365 * 24 * 60 * 60 * 1000; 983 | 984 | before(done => { 985 | artistId = uuid(); 986 | Album = datastar.define('album', { schema: schemas.album }); 987 | 988 | async.auto({ 989 | tableCreated: next => Album.ensureTables(next), 990 | createRows: ['tableCreated', (results, next) => { 991 | async.parallel([ 992 | Album.create.bind(Album, { 993 | id: uuid(), 994 | artistId, 995 | trackList: ['a', 'b'], 996 | releaseDate: new Date(Date.now() - 2 * YEAR) 997 | }), 998 | Album.create.bind(Album, { 999 | id: uuid(), 1000 | artistId, 1001 | trackList: ['c', 'd'], 1002 | releaseDate: new Date(Date.now() - 120 * 24 * 60 * 60 * 1000) 1003 | }) 1004 | ], next); 1005 | }] 1006 | }, done); 1007 | }); 1008 | 1009 | after(done => { 1010 | Album.dropTables(done); 1011 | }); 1012 | 1013 | it('can be invoked through an `iterable` flag sent to `find`', async () => { 1014 | await testIterable(() => Album.findAll({ 1015 | conditions: { artistId }, 1016 | iterable: true 1017 | })); 1018 | }); 1019 | 1020 | it('can be invoked through an `iterate` method', async () => { 1021 | await testIterable(() => Album.iterate({ conditions: { artistId } })); 1022 | }); 1023 | 1024 | it('applies any transform function', async () => { 1025 | Album.transform = before => ({ 1026 | ...before, 1027 | newlyReleased: before.releaseDate.getTime() > (Date.now() - YEAR) 1028 | }); 1029 | 1030 | let newReleaseCount = 0; 1031 | for await (const album of Album.iterate({ conditions: { artistId } })) { 1032 | if (album.newlyReleased) { 1033 | newReleaseCount++; 1034 | } 1035 | } 1036 | assume(newReleaseCount).equals(1); 1037 | }); 1038 | 1039 | async function 
testIterable(iterateFn) { 1040 | const iterable = iterateFn(); 1041 | assume(iterable).not.instanceof(Stream); 1042 | let allTracks = []; 1043 | for await (const album of iterable) { 1044 | allTracks = allTracks.concat(album.trackList); 1045 | } 1046 | const trackSet = new Set(allTracks); 1047 | assume(trackSet.has('a')).equals(true); 1048 | assume(trackSet.has('b')).equals(true); 1049 | assume(trackSet.has('c')).equals(true); 1050 | assume(trackSet.has('d')).equals(true); 1051 | } 1052 | }); 1053 | 1054 | function find(Entity, id, callback) { 1055 | Entity.findOne({ 1056 | conditions: { 1057 | id: id 1058 | } 1059 | }, (err, res) => { 1060 | assume(err).is.falsey(); 1061 | callback(null, res); 1062 | }); 1063 | } 1064 | 1065 | after(done => { 1066 | datastar.close(done); 1067 | }); 1068 | }); 1069 | -------------------------------------------------------------------------------- /lib/schema.js: -------------------------------------------------------------------------------- 1 | const 2 | uuid = require('uuid'), 3 | clone = require('clone'), 4 | assign = require('object-assign'), 5 | priam = require('priam'), 6 | joi = require('joi-of-cql'), 7 | snakeCase = require('./snake-case'), 8 | camelCase = require('./camel-case'); 9 | 10 | const 11 | dataTypes = priam.dataTypes, 12 | TimeUuid = priam.valueTypes.TimeUuid; 13 | 14 | const invalidChar = /\W/; 15 | 16 | class Schema { 17 | 18 | /** 19 | * In this method we are going to create a denormalized structure from the 20 | * schema representation that we type out as JSON. This structure should be easy 21 | * to ask questions to and lookup various properties, etc. 
22 | * 23 | * @param {string} name - Schema name 24 | * @param {Object} schema - Schema object 25 | * @param {String} schema.name - name of the table 26 | * @param {Array} schema.keys - Primary and secondary keys of a schema 27 | * @param {Object} schema.columns - An object of column names to their type 28 | * @param {Object} schema.maps - An object of the column names that are a special map type with their type as value 29 | * @param {Object} schema.sets - An object of the column names that are a special set type with their type as value 30 | * @constructor 31 | */ 32 | constructor(name, schema) { 33 | if (!this) return new Schema(name, schema); 34 | 35 | if (!name || invalidChar.test(name)) 36 | throw new Error(`Invalid character in schema name ${name}, use snake_case`); 37 | 38 | // 39 | // Remark: We lowercase this by default for easier consistency 40 | // 41 | this.name = name.toLowerCase(); 42 | // 43 | // Special operators used for timuuids along with associated functions 44 | // when building queries 45 | // 46 | this.operators = { 47 | gt: '>', 48 | gte: '>=', 49 | lt: '<', 50 | lte: '<=' 51 | }; 52 | 53 | // 54 | // A mapping for possible keys that can be passed in for defining order 55 | // 56 | this.orderMap = { 57 | ascending: 'ASC', 58 | asc: 'ASC', 59 | desc: 'DESC', 60 | descending: 'DESC' 61 | }; 62 | 63 | this.cqlFunctions = { 64 | timeuuid: { 65 | gt: time('min'), 66 | gte: time('min'), 67 | lt: time('max'), 68 | lte: time('max') 69 | } 70 | }; 71 | 72 | // 73 | // Keys used when defaulting values to generate a number 74 | // 75 | this.generateKeysLookup = ['uuid_v4', 'uuid_empty', 'date_now'] 76 | .reduce(function (acc, type) { 77 | acc[type] = defaultValue(type); 78 | return acc; 79 | }, {}); 80 | 81 | // 82 | // Store a reference to the original joi schema thats passed in 83 | // 84 | this.joi = schema; 85 | 86 | // 87 | // We default to having different validators based on the `type` 88 | // 89 | this.validator = { 90 | create: schema, 91 | 
update: schema 92 | }; 93 | this._columns = this.meta = schema.toCql(); 94 | this._aliases = schema.aliases() || {}; 95 | let keys = schema.clusteringKey(); 96 | if (!Array.isArray(keys)) { 97 | keys = [keys]; 98 | } 99 | 100 | const pKey = schema.partitionKey(); 101 | // 102 | // If there is no partitionKey, throw an error because the schema is not valid 103 | // 104 | if (!pKey || !pKey.length) throw new Error('You must define a partitionKey on your schema'); 105 | 106 | keys.unshift(pKey); 107 | this._originalKeys = keys; 108 | 109 | // 110 | // Set the primary and secondary keys 111 | // 112 | this._primaryKeys = this._originalKeys[0]; 113 | this._secondaryKeys = this._originalKeys.slice(1); 114 | 115 | // 116 | // The flattened array of all the necessary keys that are required 117 | // 118 | this._keys = this.primaryKeys().concat(this._secondaryKeys); 119 | // 120 | // Indication that we have a compound primary/partition key 121 | // 122 | this.compositePrimary = this.primaryKeys().length >= 2; 123 | 124 | // 125 | // Primary or secondary key lookup table 126 | // 127 | this._keysLookup = createLookup(this._keys); 128 | 129 | // 130 | // Lookup for primaryKeys 131 | // 132 | this._primaryKeysLookup = createLookup(this.primaryKeys()); 133 | 134 | // 135 | // Secondary Keys lookup. 136 | // Remark: Not sure if there can be multiple of these but seems possible? 137 | // 138 | this._secondaryKeysLookup = createLookup(this._secondaryKeys); 139 | 140 | // Set our list of keys as "columnKeys" 141 | // 142 | this._columnKeys = Object.keys(this.meta); 143 | 144 | // 145 | // We realize that we store aliases in a way that is backwards when 146 | // considering it as a lookup table to the type of key actually listed in the 147 | // schema, so lets reverse it to be a proper lookup table for the case that we 148 | // use. Keep around the original as well. 
149 | // 150 | // Example: { id: artist_id } is the standard schema.aliases 151 | // We use it for proper lookups as { artist_id: id } in mappedFields 152 | // 153 | // This means that any object passed in with a key `id` will be converted to 154 | // the real value `artist_id` when being inserted. This CAN also be 155 | // considered when retransforming on the way out. 156 | // 157 | // 158 | this._aliasesReverse = Object.keys(this._aliases) 159 | .reduce(function (acc, key) { 160 | const newKey = this._aliases[key]; 161 | acc[newKey] = key; 162 | return acc; 163 | }.bind(this), {}); 164 | 165 | // 166 | // If lookupKeys exist in the schema, setup the proper properties to handle 167 | // those cases 168 | // 169 | const badLookups = this.setLookupKeys(schema.lookupKeys()); 170 | 171 | if (badLookups) { 172 | throw badLookups; 173 | } 174 | } 175 | 176 | // 177 | // Require some set of keys to generate another joi schema 178 | // 179 | requireKeys(keys) { 180 | return keys.reduce((memo, key) => { 181 | if (!this.meta[key] || !this.meta[key].default) { 182 | memo[key] = joi.any().required(); 183 | } 184 | return memo; 185 | }, {}); 186 | } 187 | 188 | // 189 | // Create a separate Lookup table JUST for Lookup tables. Yes confusing I know, 190 | // Object lookup for actual cassandra lookup tables. 
This should default to 191 | // lookupKeys/lookupTables if it is an object 192 | // 193 | setLookupKeys(lookupKeys) { 194 | // 195 | // Return an error to be thrown if we are a compositePrimary key and we are 196 | // given lookupKeys as that is something we do not support 197 | // 198 | if (this.compositePrimary && lookupKeys && 199 | (this.type(lookupKeys) === 'array' && lookupKeys.length !== 0) 200 | || (this.type(lookupKeys) === 'object' && Object.keys(lookupKeys).length !== 0) 201 | ) 202 | throw new Error('You cannot create a lookup table with a compound key'); 203 | 204 | lookupKeys = this.fixKeys(lookupKeys) || {}; 205 | this.lookupTables = this.type(lookupKeys) === 'object' 206 | ? lookupKeys 207 | : lookupKeys.reduce((acc, key) => { 208 | acc[key] = `${this.name}_by_${key}`; 209 | return acc; 210 | }, {}); 211 | 212 | lookupKeys = Object.keys(this.lookupTables); 213 | 214 | // 215 | // If there are any lookup keys that do not exist on this 216 | // Schema then return an error accordingly 217 | // 218 | const missingLookupKeys = lookupKeys.filter(key => { 219 | return !this.exists(key); 220 | }); 221 | 222 | if (missingLookupKeys.length) { 223 | throw new Error(`Invalid lookup keys: ${missingLookupKeys.join(', ')}`); 224 | } 225 | 226 | // 227 | // Reverse lookup of key -> tableName to tableName -> key. e.g. 228 | // 229 | // { 230 | // "model_by_prop1": "prop1", 231 | // "model_by_prop2": "prop2" 232 | // } 233 | // 234 | this._reverseLookupKeyMap = lookupKeys 235 | .reduce((acc, key) => { 236 | const table = this.lookupTables[key]; 237 | acc[table] = key; 238 | return acc; 239 | }, {}); 240 | 241 | // 242 | // Set a property on the schema that tells us if we have lookup tables we need 243 | // to write to. 244 | // 245 | this.lookups = !!lookupKeys.length; 246 | 247 | // 248 | // Setup the requiredKeys lookup. 
When we are dealing with lookup tables we 249 | // need to require all the primarykeys associated 250 | // 251 | this._requiredKeysLookup = createLookup(lookupKeys.concat(this.keys())); 252 | this._requiredKeys = Object.keys(this._requiredKeysLookup); 253 | // 254 | // Attach any extra restrictions for the create schema 255 | // 256 | if (this._requiredKeys.length) { 257 | this.validator.create = this.validator.create.concat( 258 | joi.object( 259 | this.requireKeys(this._requiredKeys) 260 | ) 261 | ); 262 | } 263 | } 264 | 265 | // 266 | // Validate and default things 267 | // 268 | validate(entity, type) { 269 | type = type || 'update'; 270 | const { error, value } = joi.validate(entity, this.validator[type], { context: { operation: type } }); 271 | if (error) { 272 | throw error; 273 | } else { 274 | return value; 275 | } 276 | } 277 | 278 | // 279 | // Test if the key exists and returns the transformed key to use if it does, 280 | // otherwise returns undefined. This requires us 281 | // to transform the key to snake_case as well as remap any aliases so we can 282 | // specify the key as a standard camelCase key when passing in any options. 283 | // 284 | exists(key) { 285 | const transformed = this.fixKeys(key); 286 | 287 | return !this.meta[transformed] ? 
null : transformed; 288 | } 289 | 290 | // 291 | // Transform an entity key to the proper key that cassandra expects (snake_case, unalias) 292 | // 293 | entityKeyToColumnName(key) { 294 | const mappedKey = snakeCase(key); 295 | const alias = this._aliases[mappedKey]; 296 | return alias || mappedKey; 297 | } 298 | 299 | // 300 | // Transform an entity, an object of conditions or an array of fields to have the proper 301 | // keys that cassandra expects (snake_case, unalias) 302 | // 303 | fixKeys(entity) { 304 | entity = entity || {}; 305 | 306 | if (entity.isDatastar) { 307 | entity = entity.attributes.data; 308 | } 309 | 310 | if (this.type(entity) === 'object') { 311 | return Object.keys(entity).reduce((acc, key) => { 312 | // 313 | // If we have an alias, check it and convert it to what we expect in C* 314 | // 315 | const mappedKey = this.entityKeyToColumnName(key); 316 | acc[mappedKey] = entity[key]; 317 | 318 | return acc; 319 | }, {}); 320 | } 321 | 322 | // 323 | // If we have an array, this is an array of fields for doing "selects" 324 | // 325 | if (Array.isArray(entity)) { 326 | return entity.map(this.fixKeys, this); 327 | } 328 | 329 | // 330 | // IDK why this would happen but this is an easy case 331 | // 332 | if (this.type(entity) === 'string') { 333 | const mapped = snakeCase(entity); 334 | return this._aliases[mapped] 335 | ? this._aliases[mapped] 336 | : mapped || entity; 337 | } 338 | 339 | // 340 | // If we meet 0 conditions we just return what we got, this maybe should be an 341 | // error? 
Idk, this is just a weird thing in general 342 | // 343 | return entity; 344 | } 345 | 346 | // 347 | // Transform in the opposite direction of transform by remapping snakeCase back 348 | // to camelCase 349 | // 350 | toCamelCase(entity) { 351 | entity = entity || {}; 352 | 353 | if (this.type(entity) === 'object') { 354 | return Object.keys(entity).reduce((acc, key) => { 355 | // 356 | // If we have an alias, check it and convert it to what we 357 | const mappedKey = camelCase(this._aliasesReverse[key] || key); 358 | 359 | acc[mappedKey] = entity[key]; 360 | 361 | return acc; 362 | }, {}); 363 | } 364 | 365 | // 366 | // If we have an array, this is an array of fields for doing "selects" 367 | // 368 | if (Array.isArray(entity)) { 369 | return entity.map(function (field) { 370 | return camelCase(this._aliasesReverse[field] || field); 371 | }, this); 372 | } 373 | 374 | // 375 | // IDK why this would happen but this is an easy case 376 | // 377 | if (this.type(entity) === 'string') { 378 | return camelCase(this._aliasesReverse[entity] || entity); 379 | } 380 | 381 | // 382 | // If we meet 0 conditions we just return what we got, this maybe should be an 383 | // error? Idk, this is just a weird thing in general 384 | // 385 | return entity; 386 | } 387 | 388 | // 389 | // Generate a conditions object given a value assumed to be the primary key 390 | // 391 | generateConditions(value) { 392 | const primaries = this.primaryKeys(); 393 | 394 | if (primaries.length > 1) { 395 | throw new Error(`More conditions required ${primaries.join(', ')}`); 396 | } 397 | 398 | // 399 | // Return an object with the single primaryKey with the correct case assigned 400 | // to the value passed in. 
 Allows us to support passing a string for findOne
    //
    return primaries.reduce((acc, key) => {
      acc[this.toCamelCase(key)] = value;
      return acc;
    }, {});
  }

  //
  // Return both primary and secondary keys
  //
  keys() {
    return this._keys;
  }

  //
  // Returns whether or not it is a primary or secondary key
  //
  isKey(key) {
    return !!this._keysLookup[key];
  }

  //
  // Return the column metadata (type info) for the given field
  //
  fieldMeta(field) {
    return this.meta[field];
  }

  //
  // Fix keys and run each column's `deserialize` converter, preparing a raw
  // row for consumer use.
  //
  prepareForUse(data) {
    return this.convert(this.fixKeys(data), 'deserialize');
  }

  // unknown use case
  prepareForSerialization(data) {
    return this.convert(this.fixKeys(data), 'serialize');
  }

  //
  // Run the named converter ('serialize'/'deserialize') from each column's
  // metadata over the matching fields of `data`, mutating it in place.
  //
  convert(data, converter) {
    const meta = this.meta;
    Object.keys(meta).forEach(function (key) {
      if (meta[key][converter]) {
        try {
          data[key] = meta[key][converter](data[key]);
        } catch (e) {
          // ignored on purpose
          // we should log this invalid data
        }
      }
    });
    return data;
  }

  //
  // Return the primaryKey based on what type it is which is probably an array.
  // Handle the other case as well (a single key gets wrapped in an array).
  //
  primaryKeys() {
    return Array.isArray(this._primaryKeys) && this._primaryKeys.length
      ? this._primaryKeys
      : [this._primaryKeys];
  }

  //
  // Return the clustering (secondary) keys
  //
  secondaryKeys() {
    return this._secondaryKeys;
  }

  //
  // Return every column name of the schema
  //
  fields() {
    return this._columnKeys;
  }

  //
  // Build a quoted, comma-separated column list for a CQL SELECT; defaults to
  // all columns when no list (or an empty list) is given.
  //
  fieldString(fieldList) {
    if (!Array.isArray(fieldList) || !fieldList.length) {
      fieldList = this.fields();
    }
    return fieldList
      .map(function (fieldName) {
        return fieldName && (`"${fieldName}"`);
      })
      .join(', ');
  }

  //
  // Return all fields, we are going to default to dealing with this as camelCase
  // (memoized on first call).
  //
  mappedFields() {
    if (!this._mappedFields) {
      this._mappedFields = this._columnKeys.map(key => {
        //
        // CamelCase and replace alias with actual key name if it exists
        //
        return camelCase(this._aliasesReverse[key] || key);
      });
    }

    return this._mappedFields;
  }

  //
  // Appropriate typeof checking: returns the lowercase [[Class]] tag, e.g.
  // 'array', 'object', 'string', 'null'.
  //
  type(of) {
    return Object.prototype.toString.call(of).slice(8, -1).toLowerCase();
  }

  //
  // Remark: Create conditions that are meant to be directed at the primary table if there
  // is a lookup table situtation. We filter based on the remove logic and do not
  // parse into conditionals as this gets passed directly to find
  //
  filterPrimaryConditions(conditions) {
    return this.toCamelCase(this.filterRemoveConditions(this.fixKeys(conditions)));
  }

  //
  // Evaluate if we have sufficient conditions for the remove we are executing and
  // return them
  //
  createRemoveConditions(conditions, table) {
    const transformed = this.fixKeys(conditions);
    //
    // If we are a lookup table and insufficient conditions are passed to execute
    // the queries to ALL the lookup tables, just error for simplicity now.
523 | // 524 | if (!this.sufficientRemoveConditions(transformed)) { 525 | throw new Error('Must pass in all primary keys when using lookup tables'); 526 | } 527 | 528 | conditions = this.filterRemoveConditions(transformed, table); 529 | 530 | const conditionals = this.parseConditions(conditions); 531 | conditionals.table = table; 532 | 533 | return conditionals; 534 | } 535 | 536 | // 537 | // Evaluate if we have sufficient conditions for the remove we are executing and 538 | // return them 539 | // 540 | createUpdateConditions(conditions, table) { 541 | const transformed = this.fixKeys(conditions); 542 | // 543 | // If we are a lookup table and insufficient conditions are passed to execute 544 | // the queries to ALL the lookup tables, just error for simplicity now. Also 545 | // handle the case where we do not have sufficient keys for a query, (need all 546 | // primary keys or both secondary and primary) 547 | // 548 | if (!this.sufficientUpdateConditions(transformed)) { 549 | throw new Error(`All necessary primary keys must be passed in, given: ${JSON.stringify(conditions)}`); 550 | } 551 | 552 | conditions = this.filterRemoveConditions(transformed, table); 553 | 554 | const conditionals = this.parseConditions(conditions); 555 | conditionals.table = table; 556 | 557 | return conditionals; 558 | } 559 | 560 | // 561 | // Ensure we have sufficient keys to do an update operation 562 | // 563 | sufficientUpdateConditions(conditions) { 564 | const keys = this.lookups ? this.keys().concat(Object.keys(this.lookupTables)) : this.keys(); 565 | return keys.every(function (key) { 566 | return !!conditions[key]; 567 | }); 568 | } 569 | 570 | // 571 | // DE-Null the entity, meaning translate known types into our defined null 572 | // equivalents. We expect to receive a fully transformed object with snake case 573 | // keys here. 
We use a for loop since we do too many iterations over the object 574 | // in this process 575 | // 576 | deNull(entity) { 577 | const keys = Object.keys(entity); 578 | for (let i = 0; i < keys.length; i++) { 579 | const key = keys[i]; 580 | const value = entity[key]; 581 | const meta = this.fieldMeta(key); 582 | if (!meta) { 583 | throw new Error(`${key} is not found in the schema`); 584 | } 585 | 586 | entity[key] = this.nullToValue(meta, value); 587 | } 588 | 589 | return entity; 590 | } 591 | 592 | hasAllRequiredKeys(entity, previous) { 593 | if (!entity) { 594 | return false; 595 | } 596 | 597 | try { 598 | this.validate(assign(clone(entity), previous || {}), 'update'); 599 | return true; 600 | } catch (err) { 601 | return false; 602 | } 603 | } 604 | 605 | // 606 | // Adjust detected values that are `null` and map them to a `null-like` value. 607 | // TODO: Should we iterate through maps and sets and adjust accordingly as well? 608 | // 609 | nullToValue(meta, value) { 610 | const type = meta.type; 611 | 612 | if ((type === 'text' || type === 'ascii') && value === null) { 613 | // null text values will create tombstones in Cassandra 614 | // We will write a null string instead. 615 | return '\x00'; 616 | } 617 | if ((type === 'uuid' || type === 'timeuuid') && isBadUuid(value)) { 618 | // null uuid values will create tombstones in Cassandra 619 | // We will write a zeroed uuid instead. 620 | return this.generateKeysLookup.uuid_empty(); 621 | } 622 | if (type === 'timestamp' && value === null) { 623 | // null timestamp values will create tombstones in Cassandra 624 | // We will write a zero time instead. 
625 | return new Date(0); 626 | } 627 | if (type === 'map') { 628 | return Object.keys(value).reduce((memo, key) => { 629 | memo[key] = this.nullToValue({ type: meta.mapType[1] }, value[key]); 630 | return memo; 631 | }, {}); 632 | } 633 | if (type === 'set') { 634 | // Sets are an odd edge case here, it can be an array or an object who's 635 | // values are sit in an add and/or remove property. This means we need to 636 | // a bit more work updating this data structure. 637 | if (this.type(value) === 'object') { 638 | ['add', 'remove'].forEach(method => { 639 | if (method in value) value[method] = value[method].map(value => { 640 | return this.nullToValue({ type: meta.setType }, value); 641 | }); 642 | }); 643 | 644 | return value; 645 | } 646 | return value.map(value => { 647 | return this.nullToValue({ type: meta.setType }, value); 648 | }); 649 | 650 | } 651 | if (type === 'list') { 652 | if (this.type(value) === 'object') { 653 | ['prepend', 'append', 'remove'].forEach(method => { 654 | if (method in value) value[method] = value[method].map(value => { 655 | return this.nullToValue({ type: meta.listType }, value); 656 | }); 657 | }); 658 | 659 | if (value.index && this.type(value.index) === 'object') { 660 | value.index = Object.keys(value.index).reduce((acc, idx) => { 661 | acc[idx] = this.nullToValue({ type: meta.listType }, value.index[idx]); 662 | return acc; 663 | }, {}); 664 | } 665 | } else { 666 | return value.map(value => { 667 | return this.nullToValue({ type: meta.setType }, value); 668 | }); 669 | } 670 | } 671 | 672 | return value; 673 | } 674 | 675 | // 676 | // RE-Null the entity. This translates the defined null equivalents 677 | // into an actual null value for the consumer to use. 
  //
  reNull(entity) {
    const keys = Object.keys(entity);

    for (let i = 0; i < keys.length; i++) {
      const key = keys[i];
      if (this.requiresNullConversion(key)) {
        entity[key] = this.valueToNull(entity[key]);
      } else if (!this.isKey(key)) {
        // Columns opted out of null conversion still get their null-like
        // placeholders re-applied (non-key columns only).
        entity[key] = this.nullToValue(this.fieldMeta(key), entity[key]);
      }
    }

    return entity;
  }

  //
  // Whether the given column (entity key or column name) should have its
  // null-like placeholders converted back to real nulls. Keys never convert;
  // columns without an explicit boolean `nullConversion` default to true.
  //
  requiresNullConversion(columnName) {
    if (!this.meta[columnName]) {
      columnName = this.entityKeyToColumnName(columnName);
    }
    const metaData = this.fieldMeta(columnName);
    const isKey = this.isKey(columnName);
    if (isKey) { return false; }
    if (!metaData || typeof metaData.nullConversion !== 'boolean') { return true; }
    return metaData.nullConversion;
  }

  //
  // Detect our `null-like` values and return null if applicable
  // (delegates to valueToNullImpl, defined elsewhere in this module).
  //
  valueToNull(value) {
    return valueToNullImpl(value, this.type.bind(this), new WeakSet());
  }

  //
  // Assess if we have sufficient conditions during our pre-remove check from
  // a table with a lookup table. This will let our user know if they are trying
  // to do something they can't do based on how they defined lookup tables
  //
  sufficientRemoveConditions(conditions) {
    const keys = this.lookups
      ? Object.keys(this.lookupTables).concat(this.primaryKeys())
      : this.primaryKeys();

    return keys.every(function (key) {
      return !!conditions[key];
    });
  }

  //
  //
  // These are conditions specific for the remove functionality in the case where
  // we are removing from a bunch of lookup tables. Also handles the generic case
  //
  filterRemoveConditions(conditions, table) {
    //
    // Filter the conditions and pluck the appropriate primary key and secondary
    // keys based on the table
    //
    return Object.keys(conditions)
      .filter(key => {
        //
        // Only allow secondary keys or the appropriate primary key. If a table is
        // passed, we check the lookup table keys as well
        //
        return (table
          ? this._reverseLookupKeyMap[table] === key
          : this._primaryKeysLookup[key])
          || this._secondaryKeysLookup[key];
      })
      .reduce(function (acc, key) {
        acc[key] = conditions[key];
        return acc;
      }, {});
  }

  //
  // Remark: Transform the keys and then filter out any keys that are not the
  // primary/secondary keys that are used as conditions to query on (creating
  // the where clause)
  //
  filterConditions(conditions) {
    let table;
    const primaries = [];

    // NOTE(review): `table` is reassigned on every iteration, so it reflects
    // the last condition key examined — presumably only one lookup-table key
    // is ever present at a time; verify against callers.
    const filtered = Object.keys(conditions)
      .filter(key => {
        //
        // If it exists as a primary or secondary key, we keep it and dont filter
        //
        const exists = !!this._keysLookup[key];
        if (this._primaryKeysLookup[key]) primaries.push(key);
        //
        // Check if its part of a lookup table
        //
        table = this.lookupTables[key];
        if (table) primaries.push(key);

        return exists || !!table;
      })
      .reduce(function (acc, key) {
        acc[key] = conditions[key];
        return acc;
      }, {});

    //
    // Throw an error if there are more than one primary key being used,
    // meaning we have conflicting lookup tables. Technically we dont need to
    // error, we just filter out or delete one of the keys from the filtered
    // object
    //
    if (primaries.length > 1 && !this.compositePrimary) {
      throw new Error(`There can only be 1 primary key in a query, found ${primaries.length} ${primaries}`);
    }

    return { table: table, conditions: filtered };
  }

  //
  // Create conditions based on an entity or conditions. Optional type paremeter
  // can be passed as there is one case we don't want lookup table primary keys to be
  // considered valid conditions (remove);
  //
  createConditions(conditions) {
    const opts = this.filterConditions(this.fixKeys(conditions));
    const conditionals = this.parseConditions(opts.conditions);
    //
    // Pass back the table so we can override the standard table after we have
    // parsed the conditions
    //
    conditionals.table = opts.table;
    return conditionals;
  }

  //
  // Parse the conditions into array objects to be used later on
  //
  parseConditions(conditions) {
    //
    // Create a data structure
    //
    const conditionals = {
      //
      // The keys that get mapped into the where clause
      //
      query: [],
      //
      // Hints based on parameters
      //
      hints: [],
      //
      // Actual parameter values associated with the query
      //
      params: [],
      //
      // Special routing indexes for parameters that are primaryKeys
      //
      routingIndexes: [],
      //
      // A simple array of field names (i.e. key names) for
      // bookkeeping / logging purposes.
      //
      fields: []
    };

    //
    // Build the `where` fragments (query) and matching parameter objects
    // for each condition field.
    //
    Object.keys(conditions).forEach(field => {
      const value = conditions[field];
      conditionals.fields.push(field);
      conditionals.query.push(this._getQuery(field, value));

      //
      // Do valueOf on the params to get the value expected by priam.
      // Whats returned by this._getParams is actually the proper value for the
      // query
      //
      let params = this._getParams(field, value);
      params = Array.isArray(params) ? params : [params];
      params.forEach(function (param) {
        conditionals.params.push(this.valueOf(field, param));
      }, this);

    });

    return conditionals;
  }

  //
  // Return the params based on the given entity
  //
  getValues(entity, fields) {
    fields = fields || this.fields();

    //
    // Populate all fields (i.e. columns) with
    // any values from the entity. If a value for
    // a particular column is not present we set
    // it EXPLICITLY to `null`.
    //
    return fields.map(field => {
      let value = null;
      if (Object.prototype.hasOwnProperty.call(entity, field)) {
        value = entity[field];
      }

      return this.valueOf(field, value);
    });
  }

  //
  // Bit of a hack that returns the data structure expected by priam
  //
  valueOf(field, value, type) {
    return {
      value: value,
      hint: this._mapFieldHint(
        type ?
type : this._getFieldHint(field) 898 | ), 899 | isRoutingKey: this.primaryKeys().indexOf(field) !== -1 900 | }; 901 | } 902 | 903 | // 904 | // Add the column names and aliases from the schema definition as 905 | // property getters/setters for the data being modeled by this object 906 | // 907 | buildProperties() { 908 | const columns = Object.keys(this.meta); 909 | const aliasesOf = this._aliasesReverse; 910 | 911 | const definitions = columns.reduce(function (memo, name) { 912 | name = camelCase(aliasesOf[name] || name); 913 | memo[name] = { 914 | get() { 915 | return this.attributes.get(name); 916 | }, 917 | set(value) { 918 | return this.attributes.set(name, value); 919 | }, 920 | enumerable: true, 921 | configurable: true 922 | }; 923 | return memo; 924 | }, {}); 925 | 926 | return definitions; 927 | } 928 | 929 | // 930 | // 931 | _getQuery(field, values) { 932 | let value; 933 | if (Array.isArray(values)) { 934 | if (values.length > 1) { 935 | return `${field} IN (?${new Array(values.length).join(', ?')})`; 936 | } 937 | value = values[0]; 938 | } else if (this.type(values) === 'object') { 939 | value = Object.keys(values) 940 | .map(name => { 941 | const op = this.operators[name]; 942 | 943 | return op 944 | ? `${field} ${op} ?` 945 | : null; 946 | }) 947 | .filter(Boolean) 948 | .join(' AND '); 949 | 950 | return value || null; 951 | } else { 952 | value = values; 953 | } 954 | 955 | return this.type(value) === 'string' || this.type(value) === 'number' 956 | ? 
`${field} = ?` 957 | : null; 958 | } 959 | 960 | // 961 | // Transform parameters based on the field passed in and the value associated 962 | // with the field 963 | // 964 | _getParams(field, values) { 965 | let value; 966 | 967 | if (Array.isArray(values)) { 968 | values = values.slice(0); 969 | if (values.length > 1) { 970 | return values; 971 | } 972 | value = values[0]; 973 | } else if (this.type(values) === 'object') { 974 | value = Object.keys(values) 975 | .map(function (name) { 976 | const op = this.operators[name]; 977 | if (!op) { 978 | return null; 979 | } 980 | 981 | const type = this.meta[field].type; 982 | return convertRangeType(this.cqlFunctions[type], values[name], name); 983 | }, this) 984 | .filter(Boolean); 985 | 986 | if (value.length) { 987 | return value; 988 | } 989 | } else { 990 | value = values; 991 | } 992 | 993 | return this.type(value) === 'string' || this.type(value) === 'number' ? value : null; 994 | } 995 | 996 | // 997 | // Get the proper hint code from the internal cassandra driver to pass in 998 | // 999 | _mapFieldHint(hint) { 1000 | const hintType = dataTypes[hint] ? dataTypes[hint] : hint; 1001 | return this.type(hintType) === 'string' 1002 | ? dataTypes.getByName(hintType) 1003 | : hintType; 1004 | } 1005 | 1006 | _getFieldHint(field) { 1007 | const meta = this.meta[field]; 1008 | if (!meta || !this._isString(meta.type)) return null; 1009 | 1010 | // 1011 | // Validate and return hints for various types 1012 | // 1013 | if (meta.type === 'map') { 1014 | return Array.isArray(meta.mapType) 1015 | && meta.mapType.length === 2 1016 | && meta.mapType.every(this._isString, this) 1017 | ? `map<${meta.mapType[0]},${meta.mapType[1]}>` 1018 | : null; 1019 | } 1020 | 1021 | // 1022 | // Handle set and lists which are formatted the same 1023 | // 1024 | if (['set', 'list'].indexOf(meta.type) !== -1) { 1025 | const cType = meta[`${meta.type}Type`]; 1026 | return this._isString(cType) 1027 | ? 
`${meta.type}<${cType}>` 1028 | : null; 1029 | } 1030 | 1031 | return meta.type; 1032 | } 1033 | 1034 | // 1035 | // Helper function for the above 1036 | // 1037 | _isString(type) { 1038 | return this.type(type) === 'string'; 1039 | } 1040 | 1041 | } 1042 | 1043 | // 1044 | // detect both empty string and null as a bad uuid value since cassandra will 1045 | // give us weird errors if we try and insert an empty string 1046 | // 1047 | function isBadUuid(value) { 1048 | return value === null || (typeof value === 'string' && value.length === 0); 1049 | } 1050 | 1051 | /* 1052 | * 1053 | * Performs any coercion for types that have 1054 | * different C* representations in range queries. 1055 | */ 1056 | function convertRangeType(converter, value, name) { 1057 | return converter 1058 | ? converter[name](value) 1059 | : value; 1060 | } 1061 | 1062 | /** 1063 | * 1064 | * Return an object from a given array with values set to true for a simple 1065 | * lookup table 1066 | * @param {Object} set - Set object 1067 | * @returns {Object} - Returns the reduced value 1068 | */ 1069 | function createLookup(set) { 1070 | return set.reduce(function (acc, key) { 1071 | acc[key] = true; 1072 | return acc; 1073 | }, {}); 1074 | } 1075 | 1076 | /** 1077 | * Simple function used to get the correct timeuuids for rangeQueries 1078 | * 1079 | * @param {Object} type - type of the time 1080 | * @returns {TimeUuid} timeUuid - TimeUuid object 1081 | */ 1082 | function time(type) { 1083 | return function (timeuuid) { 1084 | const precision = TimeUuid.fromString(timeuuid).getDatePrecision(); 1085 | return TimeUuid[type](precision.date, precision.ticks); 1086 | }; 1087 | } 1088 | 1089 | // 1090 | // Function used to default values based 1091 | // 1092 | function defaultValue(type) { 1093 | return function () { 1094 | let value; 1095 | switch (type) { 1096 | case 'uuid_v4': 1097 | value = uuid(); 1098 | break; 1099 | case 'uuid_empty': 1100 | value = '00000000-0000-0000-0000-000000000000'; 1101 | 
break; 1102 | case 'date_now': 1103 | value = new Date(); 1104 | break; 1105 | default: 1106 | break; 1107 | } 1108 | return value; 1109 | }; 1110 | } 1111 | 1112 | 1113 | // 1114 | // Detect our `null-like` values and return null if applicable. 1115 | // Implements recursion for `valueToNull` prototype function. 1116 | // 1117 | function valueToNullImpl(value, getType, visited) { 1118 | if (value === '\x00') { 1119 | return null; 1120 | } 1121 | if (value === '00000000-0000-0000-0000-000000000000') { 1122 | return null; 1123 | } 1124 | 1125 | const type = getType(value); 1126 | 1127 | if (type === 'date' && value.getTime() === 0) { 1128 | return null; 1129 | } 1130 | 1131 | if (value === null || typeof value === 'undefined') { 1132 | return value; 1133 | } 1134 | 1135 | if (isObject(value)) { 1136 | // Prevent cyclic structures from being re-evaluated 1137 | if (visited.has(value)) { 1138 | return value; 1139 | } 1140 | visited.add(value); 1141 | 1142 | if (type === 'array') { 1143 | for (let i = 0; i < value.length; i++) { 1144 | const arrValue = value[i]; 1145 | if (!isObject(arrValue) || !visited.has(arrValue)) { 1146 | value[i] = valueToNullImpl(arrValue, getType, visited); 1147 | } 1148 | } 1149 | } else if (type === 'object') { 1150 | const keys = Object.keys(value); 1151 | for (let i = 0; i < keys.length; i++) { 1152 | const keyValue = value[keys[i]]; 1153 | if (!isObject(keyValue) || !visited.has(keyValue)) { 1154 | value[keys[i]] = valueToNullImpl(keyValue, getType, visited); 1155 | } 1156 | } 1157 | } 1158 | } 1159 | 1160 | return value; 1161 | } 1162 | 1163 | function isObject(value) { 1164 | return typeof value === 'object' && value !== null; 1165 | } 1166 | 1167 | module.exports = Schema; 1168 | --------------------------------------------------------------------------------