├── deploySite.sh ├── index.js ├── test ├── helpers │ └── chai-helper.js ├── integration │ ├── mysql-integration-spec.js │ ├── sqlite-integration-spec.js │ ├── postgres-integration-spec.js │ ├── maria-integration-spec.js │ └── integration-specs.js └── unit │ ├── unescape-spec.js │ ├── fixture-generator-spec.js │ ├── generate-spec-ids-spec.js │ ├── is-postgres-spec.js │ ├── resolve-dependencies-spec.js │ └── prioritize-spec.js ├── lib ├── util.js ├── is-sqlite.js ├── unescape.js ├── is-postgres.js ├── execute-fk-queries.js ├── resolve-dependencies.js ├── generate-spec-ids.js ├── prioritize.js ├── fixture-generator.js └── insert-records.js ├── Dockerfile ├── docker-compose.yml ├── .gitignore ├── LICENSE ├── package.json ├── gulpfile.js ├── integration_tests.md ├── .idea └── workspace.xml ├── README.md ├── site ├── css │ └── docs.css └── index.html └── CHANGELOG /deploySite.sh: -------------------------------------------------------------------------------- 1 | git subtree push --prefix site origin gh-pages 2 | 3 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | 2 | module.exports = require('./lib/fixture-generator'); 3 | -------------------------------------------------------------------------------- /test/helpers/chai-helper.js: -------------------------------------------------------------------------------- 1 | var chai = require('chai'); 2 | var chaiAsPromised = require("chai-as-promised"); 3 | 4 | chai.use(chaiAsPromised); 5 | 6 | global.expect = chai.expect; 7 | -------------------------------------------------------------------------------- /lib/util.js: -------------------------------------------------------------------------------- 1 | var _ = require("lodash"); 2 | 3 | module.exports = { 4 | asArray: function asArray(a) { 5 | if (_.isArray(a)) { 6 | return a; 7 | } 8 | return _.compact([a]); 9 | } 10 | }; 11 | -------------------------------------------------------------------------------- /lib/is-sqlite.js: -------------------------------------------------------------------------------- 1 | /* 2 | * A helper function to determine if a knex client is connected to a sqlite database 3 | */ 4 | 5 | var _ = require("lodash"); 6 | 7 | module.exports = function isSqlite(knex) { 8 | if (!knex.client) { 9 | return false; 10 | } 11 | 12 | return _.includes(knex.client.dialect, "sqlite"); 13 | }; 14 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:0.12 2 | 3 | ENV LAST_UPDATED "2014-11-16-2:45" 4 | 5 | RUN mkdir /sql-fixtures 6 | 7 | ADD README.md /sql-fixtures/ 8 | ADD package.json /sql-fixtures/ 9 | ADD gulpfile.js /sql-fixtures/ 10 | ADD lib/ /sql-fixtures/lib/ 11 | ADD test/ /sql-fixtures/test/ 12 | 13 | WORKDIR /sql-fixtures 14 | 15 | RUN npm install 16 | RUN npm install -g gulp 17 | RUN npm install -g node-inspector 18 | -------------------------------------------------------------------------------- /test/integration/mysql-integration-spec.js: -------------------------------------------------------------------------------- 1 | var specs = require('./integration-specs'); 2 | 3 | describe("mysql integration tests", function() { 4 | specs({ 5 | client: 'mysql', 6 | connection: { 7 | host: process.env.DOCKER_IP || 'mysql', 8 | user: 'testdb', 9 | password: 'password', 10 | database: 'testdb', 11 | port:
Number(process.env.DOCKER_PORT || 3306) 12 | } 13 | }); 14 | }); 15 | -------------------------------------------------------------------------------- /test/unit/unescape-spec.js: -------------------------------------------------------------------------------- 1 | var unescape = require('../../lib/unescape'); 2 | 3 | describe('unescape', function() { 4 | it('should unescape colons', function() { 5 | var toBeUnescaped = { 6 | Users: [{ 7 | foo: 'http:://google.com', 8 | bar: 'buz::baz::bee' 9 | }] 10 | }; 11 | 12 | var unescaped = unescape(toBeUnescaped); 13 | expect(unescaped).to.eql({ 14 | Users: [{ 15 | foo: 'http://google.com', 16 | bar: 'buz:baz:bee' 17 | }] 18 | }); 19 | }); 20 | }); 21 | -------------------------------------------------------------------------------- /lib/unescape.js: -------------------------------------------------------------------------------- 1 | var _ = require('lodash'); 2 | 3 | function unescape(record) { 4 | _.forIn(record, function(value, key, record) { 5 | if (_.isString(value)) { 6 | record[key] = value.replace(/::/g, ':'); 7 | } 8 | }); 9 | 10 | return record; 11 | } 12 | 13 | module.exports = function(toBeUnescaped) { 14 | toBeUnescaped = _.clone(toBeUnescaped); 15 | 16 | _.forIn(toBeUnescaped, function(records, table, toBeUnescaped) { 17 | toBeUnescaped[table] = records.map(unescape); 18 | }); 19 | 20 | return toBeUnescaped; 21 | }; 22 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3' 2 | services: 3 | pg: 4 | image: postgres:9.4 5 | environment: 6 | - POSTGRES_USER=testdb 7 | - POSTGRES_PASSWORD=password 8 | ports: 9 | - "15432:5432" 10 | 11 | mysql: 12 | image: mysql:5.7.7 13 | environment: 14 | - MYSQL_ROOT_PASSWORD=password 15 | - MYSQL_USER=testdb 16 | - MYSQL_PASSWORD=password 17 | - MYSQL_DATABASE=testdb 18 | ports: 19 | - "13306:3306" 20 | 21 | maria: 22 | image: mariadb:10.1.2 23 | environment: 24 | - MYSQL_ROOT_PASSWORD=password 25 | - MYSQL_USER=testdb 26 | - MYSQL_PASSWORD=password 27 | - MYSQL_DATABASE=testdb 28 | ports: 29 | - "13307:3306" 30 | -------------------------------------------------------------------------------- /lib/is-postgres.js: -------------------------------------------------------------------------------- 1 | /* 2 | * A helper function to determine if a knex client is connected to a postgres database 3 | */ 4 | 5 | var _ = require("lodash"); 6 | 7 | module.exports = function isPostgres(knex) { 8 | if (!knex.client || !knex.client.connectionSettings) { 9 | return false; 10 | } 11 | 12 | if (knex.client.dialect) { 13 | // dialect is very accurate, prefer it when present 14 | return _.includes(knex.client.dialect, "pg") || _.includes(knex.client.dialect, "postg"); 15 | } 16 | 17 | // fall back on digging into the connection, which can result in false negatives 18 | var host = knex.client.connectionSettings.host || knex.client.connectionSettings; 19 | return _.includes(host, "pg") || _.includes(host, "postg"); 20 | }; 21 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | 5 | # Runtime data 6 | pids 7 | *.pid 8 | *.seed 9 | 10 | # Directory for instrumented libs generated by jscoverage/JSCover 11 | lib-cov 12 | 13 | # Coverage directory used by tools like istanbul 14 | coverage 15 | 16 | # Grunt intermediate storage 
(http://gruntjs.com/creating-plugins#storing-task-files) 17 | .grunt 18 | 19 | # Compiled binary addons (http://nodejs.org/api/addons.html) 20 | build/Release 21 | 22 | # Dependency directory 23 | # Commenting this out is preferred by some people, see 24 | # https://www.npmjs.org/doc/misc/npm-faq.html#should-i-check-my-node_modules-folder-into-git- 25 | node_modules 26 | 27 | # Users Environment Variables 28 | .lock-wscript 29 | .DS_Store 30 | README.html 31 | .vagrant 32 | sqlite-integration-spec.db 33 | .npmrc 34 | -------------------------------------------------------------------------------- /lib/execute-fk-queries.js: -------------------------------------------------------------------------------- 1 | var _ = require('lodash'); 2 | var bluebird = require('bluebird'); 3 | var util = require('./util'); 4 | var knex; 5 | 6 | module.exports = function resolveQueryObjects(config, knexInst) { 7 | var pendingQueries = []; 8 | knex = knexInst; 9 | 10 | _.forEach(config, function handleTable(entries) { 11 | util.asArray(entries).forEach(function (entry) { 12 | var promises = _.map(entry, resolveSingleValue); 13 | pendingQueries = pendingQueries.concat(promises); 14 | }); 15 | }); 16 | return pendingQueries; 17 | }; 18 | 19 | function resolveSingleValue(colValue, colName, entry) { 20 | if (isQueryObject(colValue)) { 21 | return knex(colValue.from) 22 | .where(colValue.where) 23 | .then(function(result) { 24 | if (result.length > 1) { 25 | var where = JSON.stringify(colValue); 26 | throw new Error(where + ' matches >1 possible FK!'); 27 | } else { 28 | entry[colName] = result[0][colValue.column || 'id']; 29 | } 30 | }); 31 | } 32 | } 33 | 34 | function isQueryObject(value) { 35 | return _(value).has('from', 'where'); 36 | } 37 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2014 Matt Greer 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. 
22 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "sql-fixtures", 3 | "version": "1.0.4", 4 | "description": "Populate a SQL database with fixture data", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "fig up" 8 | }, 9 | "files": [ 10 | "index.js", 11 | "lib", 12 | "README.md", 13 | "LICENSE", 14 | "CHANGELOG" 15 | ], 16 | "keywords": [ 17 | "fixtures", 18 | "sql", 19 | "postgres", 20 | "sqlite", 21 | "mysql", 22 | "testing", 23 | "dummy", 24 | "data" 25 | ], 26 | "author": "Matt Greer (http://www.mattgreer.org/)", 27 | "contributors": [ 28 | "Blaine Bublitz (http://iceddev.com/)" 29 | ], 30 | "license": "MIT", 31 | "dependencies": { 32 | "bluebird": "3.2.2", 33 | "knex": "^0.20.3", 34 | "lodash": "^4.17.19" 35 | }, 36 | "repository": { 37 | "type": "git", 38 | "url": "git://github.com/city41/node-sql-fixtures.git" 39 | }, 40 | "homepage": "https://github.com/city41/node-sql-fixtures", 41 | "devDependencies": { 42 | "chai": "^1.9.1", 43 | "chai-as-promised": "^4.1.1", 44 | "gulp": "^4.0.2", 45 | "gulp-jshint": "^2.1.0", 46 | "gulp-mocha": "^7.0.2", 47 | "gulp-shell": "^0.7.1", 48 | "jshint": "^2.10.3", 49 | "mocha": "^6.2.2", 50 | "mysql": "^2.5.2", 51 | "pg": "^4.4.4", 52 | "sqlite3": "^4.1.0" 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /test/unit/fixture-generator-spec.js: -------------------------------------------------------------------------------- 1 | // NOTE: fixtureGenerator is mostly tested in the integration specs 2 | 3 | var _ = require('lodash'); 4 | var fixtureGenerator = require('../../lib/fixture-generator'); 5 | 6 | var dbConfig = { 7 | client: 'pg', 8 | connection: { 9 | host: 'localhost', 10 | user: 'testdb', 11 | password: 'password', 12 | database: 'testdb', 13 | port: 5432 14 | } 15 | }; 16 | 17 | describe('fixtureGenerator', function() { 18 | describe('error handling', function() { 19 | var impossible = { 20 | Users: { 21 | fooId: "DontExist:0" 22 | } 23 | }; 24 | 25 | it('should reject invalid database config', function(done) { 26 | fixtureGenerator.create({ client: 'sqlite3' }, impossible) 27 | .catch(function(err) { 28 | expect(err).to.be.an.instanceOf(Error); 29 | expect(err.toString()).to.contain('DontExist'); 30 | done(); 31 | }); 32 | }); 33 | 34 | it('should reject the promise', function(done) { 35 | fixtureGenerator.create(dbConfig, impossible).then(function(result) { 36 | throw new Error("Should not have returned a result"); 37 | }, function(err) { 38 | expect(err).to.be.an.instanceOf(Error); 39 | done(); 40 | }); 41 | }); 42 | }); 43 | 44 | describe('disconnect alias', function() { 45 | it('should be an alias to destroy', function() { 46 | expect(fixtureGenerator.disconnect === fixtureGenerator.destroy).to.be.true; 47 | }); 48 | }); 49 | }); 50 | -------------------------------------------------------------------------------- /gulpfile.js: -------------------------------------------------------------------------------- 1 | var gulp = require('gulp'); 2 | var mocha = require('gulp-mocha'); 3 | var shell = require('gulp-shell'); 4 | var jshint = require('gulp-jshint'); 5 | 6 | function runSpecs(path) { 7 | return gulp.src([ 8 | './test/helpers/*.js', 9 | path 10 | ]) 11 | .pipe(mocha()); 12 | } 13 | 14 | gulp.task('lint', function() { 15 | return gulp.src(['./lib/**/*.js', './test/**/*.js']) 16 | .pipe(jshint({ loopfunc: true, expr: true })) 17 | 
.pipe(jshint.reporter('default')) 18 | .pipe(jshint.reporter('fail')); 19 | }); 20 | 21 | gulp.task('test:unit', gulp.series('lint', function() { 22 | return runSpecs('./test/unit/*.js'); 23 | })); 24 | 25 | gulp.task('test:integration:postgres', function() { 26 | return runSpecs('./test/integration/postgres*.js'); 27 | }); 28 | 29 | gulp.task('test:integration:mysql', function() { 30 | return runSpecs('./test/integration/mysql*.js'); 31 | }); 32 | 33 | gulp.task('test:integration:maria', function() { 34 | return runSpecs('./test/integration/maria*.js'); 35 | }); 36 | 37 | gulp.task('delete:sqlite', shell.task(['rm -f ./sqlite-integration-spec.db'])); 38 | 39 | gulp.task('test:integration:sqlite', gulp.series('delete:sqlite', function() { 40 | return runSpecs('./test/integration/sqlite*.js'); 41 | })); 42 | 43 | gulp.task('test:integration', gulp.series( 44 | 'test:integration:sqlite', 45 | 'test:integration:postgres', 46 | 'test:integration:mysql', 47 | 'test:integration:maria' 48 | )) 49 | 50 | gulp.task('test', gulp.series( 51 | 'lint', 52 | 'test:unit', 53 | 'test:integration:sqlite' 54 | )); 55 | -------------------------------------------------------------------------------- /lib/resolve-dependencies.js: -------------------------------------------------------------------------------- 1 | /* 2 | * resolve-dependencies 3 | * ==================== 4 | * Given an array of fixture specs and the already resolved fixtures 5 | * from previous runs, resolves the current specs if it can. 6 | * 7 | * So if a current spec has "userId: 'Users:0'", and the available prereqs 8 | * contains a User, then userId will get resolved to the actual id 9 | */ 10 | 11 | var _ = require('lodash'); 12 | 13 | function resolve(value, availablePreReqs) { 14 | if (!_.isString(value) || !_.includes(value, ':') || _.includes(value, '::')) { 15 | return value; 16 | } 17 | 18 | if (value.indexOf('{') > -1 && value.indexOf('}') > -1) { 19 | return value.replace(/{([^}]+)}/g, function(match, value) { 20 | return resolve(value, availablePreReqs); 21 | }); 22 | } 23 | 24 | var split = value.split(':'); 25 | 26 | var tableName = split[0]; 27 | var specId = split[1]; 28 | var property = split[2] || 'id'; 29 | 30 | var records = availablePreReqs[tableName] || []; 31 | var resolvedDep = _.find(_.compact(records), { specId: specId }); 32 | return resolvedDep && resolvedDep[property] || value; 33 | } 34 | 35 | module.exports = function resolveDependencies(availablePreReqs, toBeResolved) { 36 | toBeResolved = _.clone(toBeResolved); 37 | 38 | _.forIn(toBeResolved, function(entries, table) { 39 | toBeResolved[table] = entries.map(function(entry) { 40 | if(_.isString(entry)) { 41 | return resolve(entry, availablePreReqs); 42 | } 43 | else if(_.isObject(entry)) { 44 | _.forIn(entry, function(value, property, entry) { 45 | // entry[property] = resolve(value, availablePreReqs); 46 | if (_.isArray(value)) { 47 | entry[property] = _.map(value, function(arrayValue) { 48 | return resolve(arrayValue, availablePreReqs); 49 | }); 50 | } else { 51 | entry[property] = resolve(value, availablePreReqs); 52 | } 53 | }); 54 | 55 | return entry; 56 | } 57 | }); 58 | }); 59 | 60 | return toBeResolved; 61 | }; 62 | -------------------------------------------------------------------------------- /integration_tests.md: -------------------------------------------------------------------------------- 1 | # Integration Tests 2 | 3 | There is a simple integration test suite that runs against Postgres, MySQL, Maria and sqlite. 
4 | 5 | It is located at `test/integration`. The actual specs live in `integration-specs.js`, and each of the database specific suites invokes these specs, passing in its database specific knex config. 6 | 7 | Each database specific spec file can also have its own specs; postgres does this to test out array support, for example. 8 | 9 | ## Need a quick smoke test? 10 | 11 | sqlite is your friend: `gulp test:integration:sqlite` is a quick, easy way to run tests that hit an actual database. It requires no setup; just call it. 12 | 13 | ## Setting up the environment 14 | 15 | Docker containers provide the database engines. Getting these running is not too bad, but it will take a while the first time. 16 | 17 | 1. [Install docker-compose](https://docs.docker.com/compose/install/) -- these instructions also show how to install Docker, which you will also need. 18 | 1. From the root of sql-fixtures, run `docker-compose up -d` 19 | * go get some coffee, this will take a long time the first time 20 | 21 | 22 | ## Running the tests 23 | 24 | Once the environment is up, you can run the tests with the commands below, where DOCKER_IP is `localhost` on Linux or whatever `boot2docker ip` tells you on OS X (I've never tried any of this on Windows). 25 | 26 | ### postgres 27 | DOCKER_IP= DOCKER_PORT=15432 gulp test:integration:postgres 28 | 29 | ### mysql 30 | DOCKER_IP= DOCKER_PORT=13306 gulp test:integration:mysql 31 | 32 | ### maria 33 | DOCKER_IP= DOCKER_PORT=13307 gulp test:integration:maria 34 | 35 | ## Cleaning things up 36 | 37 | To clean up the containers docker-compose is managing: `docker-compose stop && docker-compose rm` 38 | 39 | To clean up all Docker containers: `docker rm $(docker ps -q -a)`. You may need to add the `-f` flag to force the clean up if some containers are still running. 40 | 41 | ## Debugging with node-inspector 42 | 43 | 1. docker-compose up -d 44 | 1. `node-inspector` 45 | 1. `DOCKER_IP= DOCKER_PORT= node --debug-brk $(which gulp) test:integration:` 46 | * see above section for database specific values; a filled-in example follows below 47 | 1. Head to node-inspector and debug as usual.
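For example, on Linux against the Postgres container from `docker-compose.yml`, the filled-in command is:

    DOCKER_IP=localhost DOCKER_PORT=15432 node --debug-brk $(which gulp) test:integration:postgres

Substitute the mysql/maria ports and task names from the Running the tests section as needed.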
48 | -------------------------------------------------------------------------------- /.idea/workspace.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 12 | 13 | 15 | 16 | 17 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 1610487760362 32 | 37 | 38 | 39 | 40 | 42 | -------------------------------------------------------------------------------- /test/integration/sqlite-integration-spec.js: -------------------------------------------------------------------------------- 1 | var _ = require('lodash'); 2 | var bluebird = require('bluebird'); 3 | var FixtureGenerator = require('../../lib/fixture-generator'); 4 | var specs = require('./integration-specs'); 5 | 6 | var dbConfig = { 7 | client: 'sqlite3', 8 | connection: { 9 | filename: './sqlite-integration-spec.db' 10 | } 11 | }; 12 | 13 | describe("sqlite intregation tests", function() { 14 | specs(dbConfig); 15 | 16 | // sqlite specific specs below 17 | 18 | describe("using rowids", function() { 19 | before(function() { 20 | this.fixtureGenerator = new FixtureGenerator(dbConfig); 21 | this.knex = this.fixtureGenerator.knex; 22 | }); 23 | 24 | after(function(done) { 25 | this.fixtureGenerator.destroy(done); 26 | }); 27 | 28 | beforeEach(function(done) { 29 | var knex = this.knex; 30 | 31 | var dropPromises = [ 32 | knex.schema.dropTableIfExists('has_no_id_and_timestamps') 33 | ]; 34 | 35 | bluebird.all(dropPromises).then(function() { 36 | knex.schema.createTable('has_no_id_and_timestamps', function(table) { 37 | table.integer('integer_column'); 38 | table.timestamp('timestamp_column').defaultTo(knex.fn.now()); 39 | }).then(function() { 40 | done(); 41 | }); 42 | }); 43 | }); 44 | 45 | it('should select the correct row', function(done) { 46 | this.timeout(4000); 47 | var me = this; 48 | 49 | var firstConfig = { 50 | has_no_id_and_timestamps: [ 51 | { integer_column: 2, timestamp_column: null }, 52 | { integer_column: 2, timestamp_column: null }, 53 | { integer_column: 2, timestamp_column: null }, 54 | { integer_column: 2, timestamp_column: null }, 55 | { integer_column: 2, timestamp_column: null } 56 | ] 57 | }; 58 | 59 | this.fixtureGenerator.create(firstConfig).then(function(firstResults) { 60 | setTimeout(function() { 61 | var dataConfig = { 62 | has_no_id_and_timestamps: { integer_column: 2, timestamp_column: null } 63 | }; 64 | 65 | me.fixtureGenerator.create(dataConfig).then(function(nextResults) { 66 | // if we got the correct row, it will have a timestamp not found in the previous set 67 | var previousTimestamps = _.map(firstResults.has_no_id_and_timestamps, 'timestamp_column'); 68 | expect(previousTimestamps).to.not.contain(nextResults.has_no_id_and_timestamps[0].timestamp_column); 69 | done(); 70 | }); 71 | }, 2000); 72 | }); 73 | }); 74 | }); 75 | }); 76 | -------------------------------------------------------------------------------- /test/unit/generate-spec-ids-spec.js: -------------------------------------------------------------------------------- 1 | var generateSpecIds = require('../../lib/generate-spec-ids'); 2 | 3 | describe('generate-spec-ids', function() { 4 | describe('generating spec ids', function() { 5 | it('should generate ids for a simple case', function() { 6 | var config = { 7 | Users: { 8 | username: 'bob' 9 | } 10 | }; 11 | 12 | var generated = generateSpecIds(config); 13 | 14 | expect(generated).not.to.equal(config); 15 | expect(generated.Users.username).to.eql('bob'); 16 | expect(generated.Users.specId).to.be.a('string'); 17 | }); 18 | 
19 | it('should generate ids for a dependency case', function() { 20 | var config = { 21 | Users: { 22 | username: 'bob' 23 | }, 24 | Items: [{ 25 | userId: "Users:0", 26 | name: "my item" 27 | }, { 28 | name: "Items:0:name" 29 | }] 30 | }; 31 | 32 | var generated = generateSpecIds(config); 33 | 34 | expect(generated).not.to.equal(config); 35 | expect(generated.Users.username).to.eql('bob'); 36 | expect(generated.Items[0].name).to.eql('my item'); 37 | expect(generated.Users.specId).to.be.a('string'); 38 | 39 | var expectedUserId = "Users:" + generated.Users.specId; 40 | expect(generated.Items[0].userId).to.eql(expectedUserId); 41 | 42 | 43 | expect(generated.Items[0].specId).to.be.a('string'); 44 | var expectedNameId = "Items:" + generated.Items[0].specId + ":name"; 45 | expect(generated.Items[1].name).to.eql(expectedNameId); 46 | }); 47 | 48 | it('should generate ids for sql strings', function() { 49 | var config = { 50 | Users: [{ 51 | username: 'bob' 52 | }, { 53 | username: 'sally' 54 | }], 55 | sql: 'foo {Users:0} {Users:0} {Users:1}' 56 | }; 57 | 58 | var generated = generateSpecIds(config); 59 | var user0SpecId = generated.Users[0].specId; 60 | var user1SpecId = generated.Users[1].specId; 61 | var expectedSql = 'foo {Users:' + user0SpecId +'} {Users:' + user0SpecId +'} {Users:' + user1SpecId + '}'; 62 | expect(generated.sql).to.eql(expectedSql); 63 | }); 64 | 65 | it('should generate ids for array values', function() { 66 | var config = { 67 | has_integer: [{ 68 | integer: 9 69 | }], 70 | needs_integer: [{ 71 | integers: ['has_integer:0:integer', 4] 72 | }] 73 | }; 74 | 75 | var generated = generateSpecIds(config); 76 | 77 | var expectedSpecId = generated.has_integer[0].specId; 78 | expect(generated.needs_integer[0].integers).to.eql([ 79 | 'has_integer:' + expectedSpecId + ':integer', 80 | 4 81 | ]); 82 | }); 83 | }); 84 | }); 85 | -------------------------------------------------------------------------------- /lib/generate-spec-ids.js: -------------------------------------------------------------------------------- 1 | /** 2 | * generate-spec-ids 3 | * given a config, all records in the config are given a spec id 4 | * then all index references are updated to use spec ids instead 5 | * 6 | * this allows prioritize to be much simpler and stable. 7 | */ 8 | 9 | var _ = require('lodash'); 10 | var util = require('./util'); 11 | 12 | var seed = -1; 13 | 14 | function generateId() { 15 | seed += 1; 16 | return "__genned_specId_" + seed; 17 | } 18 | 19 | function assignId(record) { 20 | record.specId = record.specId || generateId(); 21 | } 22 | 23 | function assignIds(config) { 24 | _.forIn(config, function(records, table) { 25 | _.each(util.asArray(records), assignId); 26 | }); 27 | 28 | return config; 29 | } 30 | 31 | function findSpecId(table, index, config) { 32 | var records = util.asArray(config[table]); 33 | return records[index].specId; 34 | } 35 | 36 | function substituteSpecId(value, config) { 37 | var split = value.split(":"); 38 | 39 | // did we just find : ? 
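// (i.e. is this value a "Table:index" style reference whose numeric index should be swapped for that record's generated specId?)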
40 | if (config[split[0]] && !isNaN(split[1])) { 41 | split[1] = findSpecId(split[0], split[1], config); 42 | value = split.join(":"); 43 | } 44 | return value; 45 | } 46 | 47 | function updateRefsForString(str, config) { 48 | return str.replace(/{([^}]+)}/g, function(match, value) { 49 | return "{" + substituteSpecId(value, config) + "}"; 50 | }); 51 | } 52 | 53 | function updateRefsForRecord(record, config) { 54 | _.forIn(record, function(value, key, record) { 55 | if (_.isString(value)) { 56 | record[key] = substituteSpecId(value, config); 57 | } 58 | if (_.isArray(value)) { 59 | record[key] = _.map(value, function(arrayValue) { 60 | if (_.isString(arrayValue)) { 61 | return substituteSpecId(arrayValue, config); 62 | } else { 63 | return arrayValue; 64 | } 65 | }); 66 | } 67 | }); 68 | 69 | return config; 70 | } 71 | 72 | function updateSql(sql, config) { 73 | if (_.isString(sql)) { 74 | return updateRefsForString(sql, config); 75 | } else { 76 | return _.map(sql, function(sqlEntry) { 77 | return updateRefsForString(sqlEntry, config); 78 | }); 79 | } 80 | } 81 | 82 | function updateRefs(config) { 83 | _.forIn(config, function(records, table) { 84 | if (table === 'sql') { 85 | config.sql = updateSql(records, config); 86 | } else { 87 | records = util.asArray(records); 88 | _.each(records, function(record) { 89 | updateRefsForRecord(record, config); 90 | }); 91 | } 92 | }); 93 | 94 | return config; 95 | } 96 | 97 | module.exports = function(config) { 98 | var transformFn = _.flowRight(updateRefs, assignIds); 99 | return transformFn(_.clone(config)); 100 | }; 101 | -------------------------------------------------------------------------------- /test/unit/is-postgres-spec.js: -------------------------------------------------------------------------------- 1 | var isPostgres = require('../../lib/is-postgres'); 2 | 3 | describe('is-postgres', function() { 4 | describe('not a knex object', function() { 5 | it('should say it is not postgres', function() { 6 | expect(isPostgres({})).to.be.false; 7 | 8 | var notKnex = { 9 | client: {} 10 | }; 11 | 12 | expect(isPostgres(notKnex)).to.be.false; 13 | }); 14 | }); 15 | 16 | describe('dialect', function() { 17 | it('should indicate it is postgres if the dialect says so', function() { 18 | var knex = { 19 | client: { 20 | connectionSettings: true, 21 | dialect: "postgresql" 22 | } 23 | }; 24 | 25 | expect(isPostgres(knex)).to.be.true; 26 | }); 27 | 28 | it('should not say it is postgres if the dialect is something else', function() { 29 | var knex = { 30 | client: { 31 | connectionSettings: true, 32 | dialect: "mysql" 33 | } 34 | }; 35 | 36 | expect(isPostgres(knex)).to.be.false; 37 | }); 38 | 39 | it('should indicate it is postgres if the dialect says so but other things dont', function() { 40 | var knex = { 41 | client: { 42 | connectionSettings: "somethingelse", 43 | dialect: "postgresql" 44 | } 45 | }; 46 | 47 | expect(isPostgres(knex)).to.be.true; 48 | }); 49 | }); 50 | 51 | describe('host object', function() { 52 | it('should indicate it is postgres if found in the host', function() { 53 | var knex = { 54 | client: { 55 | connectionSettings: { 56 | host: "postgres:foobar" 57 | } 58 | } 59 | }; 60 | 61 | expect(isPostgres(knex)).to.be.true; 62 | }); 63 | 64 | it('should not indicate it is postgres if not found in the host', function() { 65 | var knex = { 66 | client: { 67 | connectionSettings: { 68 | host: "somethingelse" 69 | } 70 | } 71 | }; 72 | 73 | expect(isPostgres(knex)).to.be.false; 74 | }); 75 | }); 76 | 77 | describe('string 
connectionSettings', function() { 78 | it('should indicate it is postgres if it is found in the connection string', function() { 79 | var knex = { 80 | client: { 81 | connectionSettings: "postgres:foobar" 82 | } 83 | }; 84 | 85 | expect(isPostgres(knex)).to.be.true; 86 | }); 87 | 88 | it('should not indicate it is postgres if not found in the connection string', function() { 89 | var knex = { 90 | client: { 91 | connectionSettings: "mysql:foobar" 92 | } 93 | }; 94 | 95 | expect(isPostgres(knex)).to.be.false; 96 | }); 97 | }); 98 | }); 99 | 100 | -------------------------------------------------------------------------------- /test/integration/postgres-integration-spec.js: -------------------------------------------------------------------------------- 1 | var bluebird = require('bluebird'); 2 | var FixtureGenerator = require('../../lib/fixture-generator'); 3 | var specs = require('./integration-specs'); 4 | 5 | var dbConfig = { 6 | client: 'pg', 7 | connection: { 8 | host: process.env.DOCKER_IP || 'pg', 9 | user: 'testdb', 10 | password: 'password', 11 | database: 'testdb', 12 | port: Number(process.env.DOCKER_PORT || 5432) 13 | } 14 | }; 15 | 16 | describe("postgres intregation tests", function() { 17 | specs(dbConfig); 18 | 19 | // postgres specific specs below 20 | 21 | describe("array data type", function() { 22 | this.timeout(6000); 23 | 24 | before(function() { 25 | this.fixtureGenerator = new FixtureGenerator(dbConfig); 26 | this.knex = this.fixtureGenerator.knex; 27 | }); 28 | 29 | after(function(done) { 30 | this.fixtureGenerator.destroy(done); 31 | }); 32 | 33 | beforeEach(function(done) { 34 | var knex = this.knex; 35 | 36 | var dropPromises = [ 37 | knex.schema.dropTableIfExists('simple_array_table'), 38 | knex.schema.dropTableIfExists('has_array_column') 39 | ]; 40 | 41 | bluebird.all(dropPromises).then(function() { 42 | knex.schema.createTable('simple_array_table', function(table) { 43 | table.increments('id').primary(); 44 | table.integer('integer_column'); 45 | }).then(function() { 46 | knex.raw("create table has_array_column(integers integer[])").then(function() { 47 | done(); 48 | }); 49 | }); 50 | }); 51 | }); 52 | 53 | it('should insert hard coded arrays', function(done) { 54 | var dataConfig = { 55 | has_array_column: { 56 | integers: [4,5,6] 57 | } 58 | }; 59 | 60 | var knex = this.knex; 61 | 62 | this.fixtureGenerator.create(dataConfig).then(function(results) { 63 | expect(results.has_array_column[0].integers).to.eql([4,5,6]); 64 | 65 | knex('has_array_column').then(function(result) { 66 | expect(result[0].integers).to.eql([4,5,6]); 67 | done(); 68 | }); 69 | }); 70 | }); 71 | 72 | it('should resolve array values', function(done) { 73 | var dataConfig = { 74 | simple_array_table: { 75 | integer_column: 8 76 | }, 77 | has_array_column: { 78 | integers: [4,5, 'simple_array_table:0:integer_column'] 79 | } 80 | }; 81 | 82 | var knex = this.knex; 83 | 84 | this.fixtureGenerator.create(dataConfig).then(function(results) { 85 | expect(results.has_array_column[0].integers).to.eql([4,5,8]); 86 | 87 | knex('has_array_column').then(function(result) { 88 | expect(result[0].integers).to.eql([4,5,8]); 89 | done(); 90 | }); 91 | }); 92 | }); 93 | }); 94 | }); 95 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # No longer supported 2 | 3 | This module is now archived and no longer being updated 4 | 5 | # sql-fixtures 6 | 7 | 8 | Easily generate data 
that gets saved in the database, with foreign key dependencies automatically resolved. Ideal for integration tests and generating dummy data. Uses [knex](http://knexjs.org) internally. 9 | 10 | # Supported Databases 11 | 12 | | Database | Works? | As of version | Integration Tests? | Usage | 13 | |-----------|----------|---------------|--------------------|--------------------------------------------| 14 | | Postgres | Yes | 0.0.0 | [Yes](https://github.com/city41/node-sql-fixtures/blob/master/test/integration/postgres-integration-spec.js) | several projects known | 15 | | MySQL | Yes* | 0.4.0 | [Yes](https://github.com/city41/node-sql-fixtures/blob/master/test/integration/mysql-integration-spec.js) | several projects known | 16 | | sqlite3 | Yes* | 0.3.0 | [Yes](https://github.com/city41/node-sql-fixtures/blob/master/test/integration/sqlite-integration-spec.js) | one known project | 17 | | MariaDB | Yes** | 0.7.0 | [Yes](https://github.com/city41/node-sql-fixtures/blob/master/test/integration/maria-integration-spec.js) | no known projects :( | 18 | 19 | *For MySQL and Maria you can run into issues for [tables that lack a singular primary key](http://city41.github.io/node-sql-fixtures/#no-primary-key-warning). 20 | 21 | **For sqlite, you can hit the same issue as MySQL and Maria above if you create your tables using "without rowid" 22 | 23 | # Install 24 | 25 | `npm install sql-fixtures` 26 | 27 | # Simple Example 28 | 29 | ```javascript 30 | var sqlFixtures = require('sql-fixtures'); 31 | 32 | // depending on which database engine you are using 33 | // this is a typical PostgreSQL config for the pg driver 34 | var dbConfig = { 35 | client: 'pg', 36 | connection: { 37 | host: 'localhost', 38 | user: 'testdb', 39 | password: 'password', 40 | database: 'testdb', 41 | port: 15432 42 | } 43 | }; 44 | 45 | var dataSpec = { 46 | users: { 47 | username: 'Bob', 48 | email: 'bob@example.com' 49 | } 50 | }; 51 | 52 | sqlFixtures.create(dbConfig, dataSpec, function(err, result) { 53 | // at this point a row has been added to the users table 54 | console.log(result.users[0].username); 55 | }); 56 | ``` 57 | 58 | # Documentation and Examples 59 | 60 | Are available at the [sql-fixtures website](http://city41.github.io/node-sql-fixtures) 61 | 62 | # Contributing 63 | **NOTE:** I am hesitant to add more features at this point. I feel sql-fixtures is feature complete now and I want to keep it a small, focused module. If you have an idea for a feature you want to implement, please contact me first. 64 | 65 | Please fork and send pull requests in the typical fashion. 66 | 67 | There are both unit and integration tests. The unit tests are invoked with `gulp test:unit`. 68 | 69 | Checkout integration_tests.md for the scoop on the integration tests. 
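Beyond the single-row example above, the main feature is resolving foreign keys between rows of the same data spec via `Table:index` strings (this is what the integration specs exercise). A minimal sketch, reusing `dbConfig` from the simple example; the `users`/`items` schema and column names here are illustrative, not part of the library:

```javascript
var sqlFixtures = require('sql-fixtures');

var dataSpec = {
  users: {
    username: 'Bob',
    email: 'bob@example.com'
  },
  items: {
    name: "Bob's item",
    // 'users:0' is resolved to the id the database assigns to the first users row above
    user_id: 'users:0'
  }
};

// create() also returns a promise, so the node-style callback can be omitted
sqlFixtures.create(dbConfig, dataSpec).then(function(result) {
  // the resolved foreign key matches the generated users id
  console.log(result.items[0].user_id === result.users[0].id);
});
```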
70 | -------------------------------------------------------------------------------- /test/unit/resolve-dependencies-spec.js: -------------------------------------------------------------------------------- 1 | var resolveDependencies = require('../../lib/resolve-dependencies'); 2 | 3 | describe('resolve-dependencies', function() { 4 | it('should leave a non-dependency alone', function() { 5 | var toBeResolved = { 6 | Users: [{ 7 | foo: 'bar' 8 | }] 9 | }; 10 | 11 | var previouslyResolved = {}; 12 | 13 | var resolved = resolveDependencies(previouslyResolved, toBeResolved); 14 | expect(resolved).to.eql(toBeResolved); 15 | }); 16 | 17 | it('should resolve the dependency', function() { 18 | var toBeResolved = { 19 | Challenges: [{ 20 | foo: 'Users:u0' 21 | }] 22 | }; 23 | 24 | var previouslyResolved = { 25 | Users: [{ 26 | id: 4, 27 | specId: 'u0' 28 | }] 29 | }; 30 | 31 | var resolved = resolveDependencies(previouslyResolved, toBeResolved); 32 | expect(resolved).to.eql({ 33 | Challenges: [{ 34 | foo: 4 35 | }] 36 | }); 37 | }); 38 | 39 | it('should resolve a non-default property', function() { 40 | var toBeResolved = { 41 | Challenges: [{ 42 | foo: 'Users:u0:bar' 43 | }] 44 | }; 45 | 46 | var previouslyResolved = { 47 | Users: [{ 48 | bar: 'baz', 49 | specId: 'u0' 50 | }] 51 | }; 52 | 53 | var resolved = resolveDependencies(previouslyResolved, toBeResolved); 54 | expect(resolved).to.eql({ 55 | Challenges: [{ 56 | foo: 'baz' 57 | }] 58 | }); 59 | }); 60 | 61 | it('should leave the value alone if no resolution can be found', function() { 62 | var toBeResolved = { 63 | Challenges: [{ 64 | foo: 'Users:u1' 65 | }] 66 | }; 67 | 68 | var previouslyResolved = { 69 | Users: [{ 70 | id: 6, 71 | specId: 'u0' 72 | }] 73 | }; 74 | 75 | var resolved = resolveDependencies(previouslyResolved, toBeResolved); 76 | expect(resolved).to.eql(toBeResolved); 77 | }); 78 | 79 | it('should resolve sql strings', function() { 80 | var toBeResolved = { 81 | sql: ['foo {Users:u0} {Users:u0}'] 82 | }; 83 | 84 | var previouslyResolved = { 85 | Users: [{ 86 | id: 6, 87 | specId: 'u0' 88 | }] 89 | }; 90 | 91 | var resolved = resolveDependencies(previouslyResolved, toBeResolved); 92 | expect(resolved.sql[0]).to.eql('foo 6 6'); 93 | }); 94 | 95 | it('should resolve arrays', function() { 96 | var toBeResolved = { 97 | needs_integer: [{ 98 | integers: ['has_integer:hi0:integer', 5] 99 | }] 100 | }; 101 | 102 | var previouslyResolved = { 103 | has_integer: [{ 104 | id: 6, 105 | specId: 'hi0', 106 | integer: 7 107 | }] 108 | }; 109 | 110 | var resolved = resolveDependencies(previouslyResolved, toBeResolved); 111 | expect(resolved.needs_integer[0].integers).to.eql([7, 5]); 112 | }); 113 | 114 | describe('spec ids', function() { 115 | it('should resolve the spec id', function() { 116 | var toBeResolved = { 117 | Challenges: [{ 118 | foo: 'Users:myId' 119 | }] 120 | }; 121 | 122 | var previouslyResolved = { 123 | Users: [{ 124 | id: 8, 125 | specId: 'myId' 126 | }] 127 | }; 128 | 129 | var resolved = resolveDependencies(previouslyResolved, toBeResolved); 130 | expect(resolved).to.eql({ 131 | Challenges: [{ 132 | foo: 8 133 | }] 134 | }); 135 | }); 136 | }); 137 | }); 138 | -------------------------------------------------------------------------------- /site/css/docs.css: -------------------------------------------------------------------------------- 1 | body { 2 | font-size: 14px; 3 | line-height: 22px; 4 | background: #f4f4f4; 5 | color: #000; 6 | font-family: Helvetica Neue, Helvetica, Arial; 7 | } 8 | .logo { 9 | font-size: 
38.5px; 10 | font-weight: bold; 11 | } 12 | .interface font-family: "Lucida Grande", 13 | "Lucida Sans Unicode", 14 | Helvetica, 15 | Arial, 16 | sans-serif !important, 17 | .doc_disclaimer { 18 | font-style: italic; 19 | font-size: 0.8em; 20 | } 21 | .highlight { 22 | background-color: #ff0; 23 | padding: 2px; 24 | } 25 | div#sidebar { 26 | background: #fff; 27 | position: fixed; 28 | top: 0; 29 | left: 0; 30 | bottom: 0; 31 | width: 240px; 32 | overflow-y: auto; 33 | overflow-x: hidden; 34 | -webkit-overflow-scrolling: touch; 35 | padding: 15px 0 30px 30px; 36 | border-right: 1px solid #bbb; 37 | box-shadow: 0 0 20px #ccc; 38 | -webkit-box-shadow: 0 0 20px #ccc; 39 | -moz-box-shadow: 0 0 20px #ccc; 40 | } 41 | a.toc_title, 42 | a.toc_title:visited { 43 | display: block; 44 | color: #000; 45 | font-weight: bold; 46 | margin-top: 15px; 47 | } 48 | a.toc_title:hover { 49 | text-decoration: underline; 50 | } 51 | #sidebar .version { 52 | font-size: 10px; 53 | font-weight: normal; 54 | } 55 | ul.toc_section { 56 | font-size: 11px; 57 | line-height: 14px; 58 | margin: 5px 0 0 0; 59 | padding-left: 0px; 60 | list-style-type: none; 61 | font-family: Lucida Grande; 62 | } 63 | .toc_section li { 64 | cursor: pointer; 65 | margin: 0 0 3px 0; 66 | } 67 | .toc_section li a { 68 | text-decoration: none; 69 | color: #000; 70 | } 71 | .toc_section li a:hover { 72 | text-decoration: underline; 73 | } 74 | div.container { 75 | width: 550px; 76 | margin: 40px 0 50px 300px; 77 | } 78 | img#logo { 79 | width: 396px; 80 | height: 69px; 81 | } 82 | div.warning { 83 | margin-top: 15px; 84 | font: bold 11px Arial; 85 | color: #700; 86 | } 87 | p { 88 | margin: 20px 0; 89 | width: 550px; 90 | } 91 | a:visited { 92 | color: #444; 93 | } 94 | a:active, 95 | a:hover { 96 | color: #000; 97 | } 98 | h1, 99 | h2, 100 | h3, 101 | h4, 102 | h5, 103 | h6 { 104 | padding-top: 20px; 105 | } 106 | h2 { 107 | font-size: 20px; 108 | } 109 | b.header { 110 | font-size: 16px; 111 | line-height: 30px; 112 | } 113 | span.alias { 114 | font-size: 14px; 115 | font-style: italic; 116 | margin-left: 20px; 117 | } 118 | table, 119 | tr, 120 | td { 121 | margin: 0; 122 | padding: 0; 123 | } 124 | td { 125 | padding: 2px 12px 2px 0; 126 | } 127 | table .rule { 128 | height: 1px; 129 | background: #ccc; 130 | margin: 5px 0; 131 | } 132 | ul { 133 | list-style-type: circle; 134 | padding: 0 0 0 20px; 135 | } 136 | li { 137 | width: 500px; 138 | margin-bottom: 10px; 139 | } 140 | code, 141 | pre, 142 | tt { 143 | font-family: Monaco, Consolas, "Lucida Console", monospace; 144 | font-size: 12px; 145 | line-height: 18px; 146 | font-style: normal; 147 | } 148 | tt { 149 | padding: 0px 3px; 150 | background: #fff; 151 | border: 1px solid #ddd; 152 | zoom: 1; 153 | } 154 | code { 155 | margin-left: 20px; 156 | } 157 | pre { 158 | font-size: 12px; 159 | padding: 2px 0 2px 15px; 160 | border-left: 5px solid #bbb; 161 | margin: 0px 0 30px; 162 | } 163 | @media only screen and (-webkit-min-device-pixel-ratio: 1.5) and (max-width: 640px), only screen and (-o-min-device-pixel-ratio: 3/2) and (max-width: 640px), only screen and (min-device-pixel-ratio: 1.5) and (max-width: 640px) { 164 | img { 165 | max-width: 100%; 166 | } 167 | div#sidebar { 168 | -webkit-overflow-scrolling: initial; 169 | position: relative; 170 | width: 90%; 171 | height: 120px; 172 | left: 0; 173 | top: -7px; 174 | padding: 10px 0 10px 30px; 175 | border: 0; 176 | } 177 | img#logo { 178 | width: auto; 179 | height: auto; 180 | } 181 | div.container { 182 | margin: 0; 183 | width: 
100%; 184 | } 185 | p, 186 | div.container ul { 187 | max-width: 98%; 188 | overflow-x: scroll; 189 | } 190 | pre { 191 | overflow: scroll; 192 | } 193 | } 194 | -------------------------------------------------------------------------------- /test/integration/maria-integration-spec.js: -------------------------------------------------------------------------------- 1 | var bluebird = require('bluebird'); 2 | var FixtureGenerator = require('../../lib/fixture-generator'); 3 | var specs = require('./integration-specs'); 4 | 5 | var dbConfig = { 6 | client: 'mysql', 7 | connection: { 8 | host: process.env.DOCKER_IP || 'maria', 9 | user: 'testdb', 10 | password: 'password', 11 | database: 'testdb', 12 | port: Number(process.env.DOCKER_PORT || 3306) 13 | } 14 | }; 15 | 16 | describe("maria integration tests", function() { 17 | this.timeout(0); 18 | 19 | specs(dbConfig); 20 | 21 | // issue 31 was reported against Maria, may not be Maria specific 22 | // TODO: make this a general spec for all databases 23 | 24 | describe("issue 31", function() { 25 | before(function() { 26 | this.fixtureGenerator = new FixtureGenerator(dbConfig); 27 | this.knex = this.fixtureGenerator.knex; 28 | }); 29 | 30 | after(function(done) { 31 | this.fixtureGenerator.destroy(done); 32 | }); 33 | 34 | beforeEach(function(done) { 35 | var knex = this.knex; 36 | 37 | var dropPromises = [ 38 | knex.schema.dropTableIfExists('C'), 39 | knex.schema.dropTableIfExists('B'), 40 | knex.schema.dropTableIfExists('A') 41 | ]; 42 | 43 | bluebird.all(dropPromises).then(function() { 44 | knex.raw([ 45 | 'create table A(', 46 | 'id tinyint unsigned auto_increment primary key,', 47 | 'title varchar(30) not null,', 48 | 'unique(title)', 49 | ')' 50 | ].join(' ')).then(function() { 51 | knex.raw([ 52 | 'create table B(', 53 | 'id smallint unsigned auto_increment primary key,', 54 | 'title varchar(30) not null,', 55 | 'description varchar(500) not null,', 56 | 'created datetime not null,', 57 | 'a_id tinyint unsigned not null,', 58 | 'unique(title),', 59 | 'constraint B_A_a_id_id', 60 | 'foreign key (a_id)', 61 | 'references A (id)', 62 | 'on delete cascade', 63 | 'on update restrict', 64 | ')' 65 | ].join(' ')).then(function() { 66 | knex.raw([ 67 | 'create table C(', 68 | 'id smallint unsigned not null,', 69 | 'b_id smallint unsigned not null,', 70 | 'data varchar(1000) not null,', 71 | 'primary key (id, b_id),', 72 | 'constraint C_B_b_id_id', 73 | 'foreign key (b_id)', 74 | 'references B (id)', 75 | 'on delete cascade', 76 | 'on update restrict', 77 | ')' 78 | ].join(' ')).then(function() { 79 | done(); 80 | }); 81 | }); 82 | }); 83 | }); 84 | }); 85 | 86 | it('should insert the data', function(done) { 87 | var dataSpec = { 88 | A: [ 89 | { title: 'A1' }, 90 | { title: 'A2' }, 91 | { title: 'A3' } 92 | ], 93 | B: [ 94 | { title: 'B1', description: 'D1', created: new Date(), a_id: 'A:0' }, 95 | { title: 'B2', description: 'D2', created: new Date(), a_id: 'A:0' }, 96 | { title: 'B3', description: 'D3', created: new Date(), a_id: 'A:1' }, 97 | { title: 'B4', description: 'D4', created: new Date(), a_id: 'A:1' }, 98 | { title: 'B5', description: 'D5', created: new Date(), a_id: 'A:2' } 99 | ], 100 | C: [ 101 | { id: 1, b_id: 'B:0', data: 'Test 1' }, 102 | { id: 2, b_id: 'B:0', data: 'Test 2' }, 103 | { id: 3, b_id: 'B:0', data: 'Test 3' }, 104 | { id: 4, b_id: 'B:0', data: 'Test 4' }, 105 | { id: 5, b_id: 'B:0', data: 'Test 5' } 106 | ] 107 | }; 108 | 109 | this.fixtureGenerator.create(dataSpec).bind(this).then(function(results) { 110 | 
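// fetch the inserted C row back and verify its b_id matches the id the database generated for B:0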
this.knex('C').where('id', results.C[0].id).then(function(result) { 111 | expect(result[0].id).to.eql(1); 112 | expect(result[0].b_id).to.eql(results.B[0].id); 113 | expect(result[0].data).to.eql('Test 1'); 114 | 115 | expect(results.B[2].title).to.eql('B3'); 116 | expect(results.B[2].description).to.eql('D3'); 117 | expect(results.B[2].a_id).to.eql(results.A[1].id); 118 | done(); 119 | }); 120 | }); 121 | }); 122 | }); 123 | }); 124 | -------------------------------------------------------------------------------- /CHANGELOG: -------------------------------------------------------------------------------- 1 | 1.0.4 - Aug 3, 2020 2 | -- Upgrade Lodash to avoid CVE-2020-8203 3 | 4 | 1.0.3 - Aug 3, 2020 5 | -- Don't fail if the passed in data spec is empty (ie, a noop) 6 | 7 | 1.0.2 - March 9, 2020 8 | -- Fix to destroy() to enable using latest version of Knex (thanks woyuen) 9 | 10 | 1.0.1 - Nov 30, 2019 11 | -- updated all dependencies to resolve known vulnerabilities 12 | -- changed from fig to docker-compose for integration tests 13 | 14 | 1.0.0 - Feb 21, 2016 15 | -- sqlite now uses rowids to avoid "achilles heel" problem 16 | -- fixed bug where people using callbacks would not be informed of errors 17 | -- populating from existing data now allows specifying column_name 18 | -- bumped to 1.0.0 due to deciding this module is feature complete and stable 19 | 20 | 0.13.0 - Feb 9, 2016 21 | -- Upgraded all dependent modules 22 | 23 | 0.12.0 - Jan 30, 2016 24 | -- Added ability to resolve foreign keys from existing data 25 | 26 | 0.11.0 - June 14th, 2015 27 | -- Added ability to pass an existing knex instance into sql-fixtures 28 | 29 | 0.10.2 - June 14th, 2015 30 | -- fix for issue #31 where tables with date columns were not being retrieved 31 | properly in insertRecordsSerially() 32 | 33 | 0.10.1 - April 26, 2015 34 | -- more accurate detection of postgres. Some postgres users were falsely 35 | detected, causing them to go into serial mode when it wasn't needed 36 | 37 | 0.10.0 - April 26, 2015 38 | -- Since everything got confusing with 0.9.0 and 0.9.3, just bumping a new 39 | release to get past the confusion. 40 | 41 | 0.9.0 - March 29, 2015 42 | -- added showWarning option to allow suppressing the no primary key warning. 43 | 44 | 0.8.3 - Feb 3, 2015 45 | -- Fix for isPostgres(). It wasn't accounting for connection strings versus objects. 46 | 47 | 0.8.2 - Jan 31, 2015 48 | -- hopefully more robust fix for the MySQL bug 0.8.1 tried to address (#22) 49 | 50 | 0.8.1 - Jan 29, 2015 51 | -- added "order by id" for retrieving inserted records for mysql, fixes issue #22 52 | 53 | 0.8.0 - Jan 7, 2015 54 | -- added documentation website 55 | 56 | 0.7.0 - Dec 26, 2014 57 | -- added integration tests for MariaDB 58 | 59 | 0.6.0 - Dec 10, 2014 60 | -- added experimental support for "unique" (see README) 61 | 62 | 0.5.0 - Dec 1, 2014 63 | -- added support for Postgres array columns 64 | 65 | 0.4.2 - Dec 1, 2014 66 | -- Postgres now inserts in bulk again for a nice speed up. MySQL and Sqlite also insert in bulk now, but they must then serially query the database to retrieve what was inserted. 67 | -- clean up: integration tests are cleaner, and use more generic/test appropriate database tables 68 | 69 | 0.4.1 - Oct 31, 2014 70 | -- Before MySQL and sqlite worked (< 0.3.0), inserts were done in bulk, and that meant primary keys ascended in a predictable manner. 71 | From 0.3.0 on, inserts were done one at a time and in parallel, removing the predictable id ordering. 
0.4.1 changes it to 72 | insert records serially to bring back that predictable ordering at the cost of sql-fixtures being slightly slower. 73 | 74 | 0.4.0 - Oct 30, 2014 75 | -- Added integration tests for MySQL 76 | 77 | 0.3.1 - Oct 27, 2014 78 | -- Tiny branch depending on db dialect, keeps postgres rock solid and does the best it can with other dbs 79 | (basically postgres will return what got inserted, mysql/sqlite/etc do not, so have to make a slight guess with those dbs) 80 | 81 | 0.3.0 - Oct 27, 2014 82 | -- Added integration tests for sqlite, feeling pretty good about sqlite support 83 | 84 | 0.2.2 - Oct 23, 2014 85 | -- bug fix: allowing more than one instance of sqlFixtures to get generated 86 | 87 | 0.2.1 - Oct 22, 2014 88 | -- bug fix: only cloning what is being mutated, so knex raw objects don't get wiped out 89 | 90 | 0.2.0 - Oct 21, 2014 91 | -- the only columns returned in the result are those that are specified 92 | 93 | 0.1.10 - Oct 20, 2014 94 | -- Added alias for disconnect over to destroy, to maintain backwards compatibility 95 | 0.1.9 lacked disconnect, it got renamed to destroy. 96 | 97 | 0.1.9 - Oct 20, 2014 98 | -- sql-fixtures can now be instantiated if need be, and also used same as before (see README for more info) 99 | 100 | 0.1.8 - Oct 16, 2014 101 | -- The inserts into the database now return all columns, enabling many-to-many and auto-populated columns to work 102 | 103 | 0.1.7 - Oct 16, 2014 104 | -- Fixed some minor bugs that creeped in during 0.1.6 105 | 106 | 0.1.6 - Oct 16, 2014 107 | -- fixed issue #5, where complex dependencies could result in records changing position in the final result 108 | 109 | 0.1.5 - Oct 11, 2014 110 | -- Cleaned up the npm package, removing all tests and vagrant stuff 111 | 112 | 0.1.4 - Oct 10, 2014 113 | -- escaping colons with '::' 114 | 115 | 0.1.3 - Oct 3, 2014 116 | -- Bug fix for same types at different priorities not resolving 117 | 118 | 0.1.2 - Oct 2, 2014 119 | -- Better error output if a dedendency cannot be resolved 120 | 121 | 0.1.1 - Sept 30, 2014 122 | -- Fixed the README for specIds 123 | 124 | 0.1.0 - Sept 30, 2014 125 | -- support for specIds 126 | 127 | 0.0.2 - Sept 29, 2014 128 | -- Improved README 129 | 130 | 0.0.1 - Sept 26, 2014 131 | -- bug fix: Cloning the passed in data spec so as to not alter the original 132 | 133 | 0.0.0 - Sept 26, 2014 134 | -- original release 135 | -------------------------------------------------------------------------------- /lib/prioritize.js: -------------------------------------------------------------------------------- 1 | /* 2 | * prioritize 3 | * ========== 4 | * 5 | * Given a fixture spec, it returns the spec in prioritized order. 6 | * This allows a fixture spec to contain dependencies inside of it, 7 | * and is what really gives sql-fixtures its power. 8 | * 9 | * The fixture spec is assumed to have specIds on every records. This 10 | * is accomplished by calling into generate-spec-ids before prioritize 11 | * 12 | * Example: 13 | * 14 | * fixtureSpec = { 15 | * Users: { 16 | * username: "bob", 17 | * specId: "u0" 18 | * }, 19 | * Items: { 20 | * name: "bob's item", 21 | * userId: "Users:u0", 22 | * specId: "i0" 23 | * } 24 | * }; 25 | * 26 | * In the above spec, the item has a foreign key dependency on the user. 27 | * Since the userId isn't known yet, the spec just says "put the user's id here". 28 | * Most databases won't allow item to be created unless that foreign key is 29 | * satisfied. 
So when actually generating the fixtures, the user needs to get 30 | * created first. prioritize takes the above and returns: 31 | * 32 | * [{ 33 | * Users: [{ 34 | * username: "bob", 35 | * specId: "u0" 36 | * }] 37 | * }, { 38 | * Items: [{ 39 | * name: "bob's item", 40 | * userId: "Users:u0", 41 | * specId: "i0" 42 | * }] 43 | * }] 44 | * 45 | * Allowing the user to get created first. 46 | * 47 | * Errors 48 | * ------ 49 | * If a dependency cannot be resolved, then prioritize returns an Error 50 | */ 51 | 52 | 53 | var _ = require('lodash'); 54 | var util = require('./util'); 55 | 56 | /* 57 | * Given an entry, digs into it and finds its dependencies. 58 | * The dependencies are returned as a simple nested array, ie 59 | * 60 | * [['Users', '0'], ['Items', '1', 'bar']] 61 | */ 62 | function getNeeds(entry) { 63 | if(_.isString(entry)) { 64 | return getNeedsString(entry); 65 | } else { 66 | return getNeedsObject(entry); 67 | } 68 | } 69 | 70 | function getNeedsString(str) { 71 | var match; 72 | var needs = []; 73 | var regex = /{([^}]+)}/g; 74 | while((match = regex.exec(str)) !== null) { 75 | needs.push(match[1].split(':')); 76 | } 77 | return needs; 78 | } 79 | 80 | function getNeedsObject(obj) { 81 | var values = _.reduce(_.values(obj), function(result, value) { 82 | if (_.isArray(value)) { 83 | result = result.concat(value); 84 | } else { 85 | result.push(value); 86 | } 87 | return result; 88 | 89 | }, []); 90 | 91 | return values.filter(function(property) { 92 | return _.isString(property) && property.indexOf(':') > -1 && property.indexOf('::') === -1; 93 | }).map(function(property) { 94 | return property.split(':'); 95 | }); 96 | } 97 | 98 | function getRemainingNeeds(obj) { 99 | var remainingNeeds = []; 100 | 101 | _.forIn(obj, function(entries, table) { 102 | entries.forEach(function (entry) { 103 | remainingNeeds = remainingNeeds.concat(getNeeds(entry)); 104 | }); 105 | }); 106 | 107 | return remainingNeeds.map(function(need) { 108 | return need.join(":"); 109 | }); 110 | } 111 | 112 | /* 113 | * given an entry and a set of prerequisites 114 | * determines if the entry's dependencies can be satisfied 115 | * from the prerequisites 116 | */ 117 | function satisfied(entry, availablePreReqs) { 118 | var needs = getNeeds(entry); 119 | 120 | return _.every(needs, function(need) { 121 | var preReq = availablePreReqs[need[0]]; 122 | if (preReq) { 123 | return _.includes(preReq, need[1]); 124 | } 125 | }); 126 | } 127 | 128 | function addNewPreReqs(newReqs, currentReqs) { 129 | _.forIn(newReqs, function(value, table) { 130 | if (!currentReqs[table]) { 131 | currentReqs[table] = newReqs[table]; 132 | } else { 133 | currentReqs[table] = currentReqs[table].concat(newReqs[table]); 134 | } 135 | }); 136 | 137 | return currentReqs; 138 | } 139 | 140 | /* 141 | * Continually walks over the given fixture spec (config) 142 | * and pulls out entries as their dependencies get satisfied 143 | * into a prioritized array 144 | */ 145 | module.exports = function prioritize(config) { 146 | config = _.clone(config); 147 | var prioritized = []; 148 | var availablePreReqs = {}; 149 | 150 | while (Object.keys(config).length) { 151 | var levelEntries = {}; 152 | var upcomingPreReqs = {}; 153 | 154 | _.forIn(config, function(entries, table) { 155 | entries = config[table] = util.asArray(entries); 156 | 157 | entries.forEach(function(entry, index) { 158 | if (satisfied(entry, availablePreReqs)) { 159 | levelEntries[table] = levelEntries[table] || []; 160 | levelEntries[table].push(entry); 161 | 162 | 
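// also record this entry's specId so later passes can satisfy references that depend on it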
upcomingPreReqs[table] = upcomingPreReqs[table] || []; 163 | upcomingPreReqs[table].push(entry.specId); 164 | } 165 | }); 166 | 167 | config[table] = _.difference(entries, levelEntries[table]); 168 | if (_.isEmpty(config[table])) { 169 | delete config[table]; 170 | } 171 | }); 172 | 173 | // every pass over the spec should resolve at least one set of entries 174 | // if nothing could be resolved, there are impossible dependencies in the spec 175 | // TODO: would be nice to tell the user what dependency doesn't exist 176 | if (_.isEmpty(levelEntries)) { 177 | return new Error("Non-existant dependency. Remaining needs: " + getRemainingNeeds(config).join("\n")); 178 | } 179 | 180 | prioritized.push(levelEntries); 181 | availablePreReqs = addNewPreReqs(availablePreReqs, upcomingPreReqs); 182 | } 183 | 184 | return prioritized; 185 | }; 186 | -------------------------------------------------------------------------------- /lib/fixture-generator.js: -------------------------------------------------------------------------------- 1 | var _ = require("lodash"); 2 | var knex = require("knex"); 3 | var bluebird = require("bluebird"); 4 | 5 | var util = require("./util"); 6 | var executeFkQueries = require("./execute-fk-queries"); 7 | var generateSpecIds = require("./generate-spec-ids"); 8 | var prioritize = require("./prioritize"); 9 | var insertRecords = require("./insert-records"); 10 | var resolveDependencies = require("./resolve-dependencies"); 11 | var unescape = require("./unescape"); 12 | 13 | function addToFinalResult(finalResult, levelResults, originalConfig) { 14 | levelResults.forEach(function (levelResult) { 15 | _.forIn(levelResult, function (records, table, levelResult) { 16 | finalResult[table] = finalResult[table] || []; 17 | _.each(records, function (record) { 18 | var index = _.findIndex(util.asArray(originalConfig[table]), { 19 | specId: record.specId, 20 | }); 21 | finalResult[table][index] = record; 22 | }); 23 | }); 24 | }); 25 | 26 | return finalResult; 27 | } 28 | 29 | function fail(error, callback) { 30 | return bluebird.reject(error).nodeify(callback); 31 | } 32 | 33 | function stripSpecIds(result) { 34 | _.forIn(result, function (records, table) { 35 | _.each(records, function (record) { 36 | delete record.specId; 37 | }); 38 | }); 39 | return result; 40 | } 41 | 42 | function clone(object) { 43 | return _.mapValues(object, function (value) { 44 | if (_.isArray(value)) { 45 | return _.map(value, _.clone); 46 | } 47 | 48 | return _.clone(value); 49 | }); 50 | } 51 | 52 | function isKnexInstance(obj) { 53 | return ( 54 | obj && 55 | _.isFunction(obj.where) && 56 | _.isFunction(obj.andWhere) && 57 | _.isFunction(obj.insert) 58 | ); 59 | } 60 | 61 | function isAllTablesEmpty(dataConfig) { 62 | return _.every(Object.keys(dataConfig), function (key) { 63 | return dataConfig[key].length === 0; 64 | }); 65 | } 66 | 67 | function FixtureGenerator(knexInstanceOrConnectionConfig) { 68 | if (isKnexInstance(knexInstanceOrConnectionConfig)) { 69 | this.knex = knexInstanceOrConnectionConfig; 70 | } else { 71 | this._connectionConfig = knexInstanceOrConnectionConfig; 72 | this.knex = knex(knexInstanceOrConnectionConfig); 73 | } 74 | } 75 | 76 | FixtureGenerator.prototype.create = function resolveQueriesAndCreateRecords( 77 | dataConfig, 78 | options, 79 | callback 80 | ) { 81 | dataConfig = clone(dataConfig); 82 | 83 | if (_.isFunction(options)) { 84 | callback = options; 85 | } 86 | options = options || {}; 87 | 88 | if (isAllTablesEmpty(dataConfig)) { 89 | // user is asking us to insert 
no rows. Which is a strange, but valid, request. 90 | // Rather than talk to the db at all, we can immediately return in this scenario. 91 | return bluebird.resolve(dataConfig).nodeify(callback); 92 | } 93 | 94 | var withSpecIds = generateSpecIds(dataConfig); 95 | var prioritized = prioritize(withSpecIds); 96 | 97 | if (prioritized instanceof Error) { 98 | return fail(prioritized, callback); 99 | } 100 | 101 | var knexInst = (this.knex = this.knex || knex(this._connectionConfig)); 102 | var fkQueries = executeFkQueries(dataConfig, knexInst); 103 | 104 | var mainPromise = bluebird.all(fkQueries).then(function createRecords() { 105 | return bluebird 106 | .reduce( 107 | prioritized, 108 | function (buildingFinalResult, priorityLevel) { 109 | priorityLevel = resolveDependencies( 110 | buildingFinalResult, 111 | priorityLevel 112 | ); 113 | priorityLevel = unescape(priorityLevel); 114 | var priorityLevelPromises = insertRecords( 115 | knexInst, 116 | priorityLevel, 117 | options.unique, 118 | options.showWarning 119 | ); 120 | return bluebird 121 | .all(priorityLevelPromises) 122 | .then(function (levelResults) { 123 | return addToFinalResult( 124 | buildingFinalResult, 125 | levelResults, 126 | withSpecIds 127 | ); 128 | }); 129 | }, 130 | {} 131 | ) 132 | .then(function (finalResult) { 133 | return stripSpecIds(finalResult); 134 | }); 135 | }); 136 | 137 | return mainPromise.nodeify(callback); 138 | }; 139 | 140 | FixtureGenerator.prototype.destroy = function destroy(callback) { 141 | if (this.knex) { 142 | return bluebird 143 | .resolve() 144 | .bind(this) 145 | .then(function () { 146 | return this.knex.destroy(); 147 | }) 148 | .tap(function () { 149 | this.knex = null; 150 | }) 151 | .nodeify(callback); 152 | } else { 153 | return bluebird.resolve().nodeify(callback); 154 | } 155 | }; 156 | 157 | var singleton; 158 | 159 | FixtureGenerator.create = function staticCreateRecords( 160 | connectionConfig, 161 | dataConfig, 162 | options, 163 | callback 164 | ) { 165 | if (!singleton) { 166 | singleton = new FixtureGenerator(connectionConfig); 167 | } 168 | 169 | return singleton.create(dataConfig, options, callback); 170 | }; 171 | 172 | FixtureGenerator.destroy = function staticDestroy(callback) { 173 | if (!singleton) { 174 | return bluebird.resolve().nodeify(callback); 175 | } 176 | 177 | // TODO: look into using bluebird's disposer pattern 178 | return bluebird 179 | .resolve() 180 | .then(function () { 181 | return singleton.destroy(); 182 | }) 183 | .nodeify(callback) 184 | .finally(function () { 185 | singleton = null; 186 | }); 187 | }; 188 | 189 | FixtureGenerator.disconnect = FixtureGenerator.destroy; 190 | 191 | module.exports = FixtureGenerator; 192 | -------------------------------------------------------------------------------- /test/unit/prioritize-spec.js: -------------------------------------------------------------------------------- 1 | var prioritize = require('../../lib/prioritize'); 2 | 3 | describe('prioritize', function() { 4 | describe('prioritizing', function() { 5 | it('should prioritize a simple case', function() { 6 | var config = { 7 | Users: { 8 | username: 'bob', 9 | specId: 'u0' 10 | } 11 | }; 12 | 13 | expect(prioritize(config)).to.eql([{ 14 | Users: [{ 15 | username: 'bob', 16 | specId: 'u0' 17 | }] 18 | }]); 19 | }); 20 | 21 | it('should prioritize with one dependency', function() { 22 | var config = { 23 | Users: { 24 | username: 'bob', 25 | specId: 'u0' 26 | }, 27 | Challenges: [{ 28 | createdById: 'Users:u0', 29 | name: 'my challenge', 30 | specId: 
'c0' 31 | }] 32 | }; 33 | 34 | expect(prioritize(config)).to.eql([{ 35 | Users: [{ 36 | username: 'bob', 37 | specId: 'u0' 38 | }] 39 | }, { 40 | Challenges: [{ 41 | name: 'my challenge', 42 | createdById: 'Users:u0', 43 | specId: 'c0' 44 | }] 45 | }]); 46 | }); 47 | 48 | it('should prioritize later dependencies correctly', function() { 49 | var config = { 50 | Users: [{ 51 | username: 'bob', 52 | specId: 'u0' 53 | }, { 54 | username: 'Challenges:c0:name', 55 | specId: 'u1' 56 | }], 57 | Challenges: [{ 58 | createdById: 'Users:u0', 59 | name: 'my challenge', 60 | specId: 'c0' 61 | }] 62 | }; 63 | 64 | expect(prioritize(config)).to.eql([{ 65 | Users: [{ 66 | username: 'bob', 67 | specId: 'u0' 68 | }] 69 | }, { 70 | Challenges: [{ 71 | name: 'my challenge', 72 | createdById: 'Users:u0', 73 | specId: 'c0' 74 | }] 75 | }, { 76 | Users: [{ 77 | username: 'Challenges:c0:name', 78 | specId: 'u1' 79 | }] 80 | }]); 81 | }); 82 | 83 | it('should prioritize sql dependencies correctly', function() { 84 | var config = { 85 | Users: { 86 | username: 'bob', 87 | specId: 'u0' 88 | }, 89 | Items: { 90 | name: 'my item', 91 | userId: 'Users:u0', 92 | specId: 'i0' 93 | }, 94 | sql: 'foo {Users:u0} {Items:i0}' 95 | }; 96 | 97 | expect(prioritize(config)).to.eql([{ 98 | Users: [{ 99 | username: 'bob', 100 | specId: 'u0' 101 | }] 102 | }, { 103 | Items: [{ 104 | name: 'my item', 105 | userId: 'Users:u0', 106 | specId: 'i0' 107 | }] 108 | }, { 109 | sql: ['foo {Users:u0} {Items:i0}'] 110 | }]); 111 | }); 112 | 113 | it('should prioritize a more advanced case', function() { 114 | var config = { 115 | Users: [{ 116 | username: "bob", 117 | specId: 'u0' 118 | }], 119 | Comments: [{ 120 | comment: 'comment 1', 121 | createdById: "Users:u0", 122 | userId: "Users:u0", 123 | specId: 'c0' 124 | }, { 125 | comment: 'child of 1', 126 | createdById: "Users:u0", 127 | userId: "Users:u0", 128 | parentId: "Comments:c0", 129 | specId: 'c1' 130 | }], 131 | LikeVotes: [{ 132 | commentId: "Comments:c0", 133 | createdById: "Users:u0", 134 | specId: 'lv0' 135 | }, { 136 | commentId: "Comments:c1", 137 | createdById: "Users:u0", 138 | specId: 'lv1' 139 | }] 140 | }; 141 | 142 | expect(prioritize(config)).to.eql([ 143 | { 144 | Users: [{ 145 | username: "bob", 146 | specId: 'u0' 147 | }], 148 | }, 149 | { 150 | Comments: [{ 151 | comment: 'comment 1', 152 | createdById: "Users:u0", 153 | userId: "Users:u0", 154 | specId: 'c0' 155 | }] 156 | }, 157 | { 158 | Comments: [{ 159 | comment: 'child of 1', 160 | createdById: "Users:u0", 161 | userId: "Users:u0", 162 | parentId: "Comments:c0", 163 | specId: 'c1' 164 | }], 165 | LikeVotes: [{ 166 | commentId: "Comments:c0", 167 | createdById: "Users:u0", 168 | specId: 'lv0' 169 | }] 170 | }, 171 | { 172 | LikeVotes: [{ 173 | commentId: "Comments:c1", 174 | createdById: "Users:u0", 175 | specId: 'lv1' 176 | }] 177 | }]); 178 | }); 179 | }); 180 | 181 | describe('spec ids', function() { 182 | it('should utilize spec ids when prioritizing', function() { 183 | var config = { 184 | Users: [{ 185 | username: 'bob', 186 | specId: 'myId' 187 | }, { 188 | username: 'Challenges:c0:name', 189 | specId: 'u1' 190 | }], 191 | Challenges: [{ 192 | createdById: 'Users:myId', 193 | name: 'my challenge', 194 | specId: 'c0' 195 | }] 196 | }; 197 | 198 | expect(prioritize(config)).to.eql([{ 199 | Users: [{ 200 | username: 'bob', 201 | specId: 'myId' 202 | }] 203 | }, { 204 | Challenges: [{ 205 | name: 'my challenge', 206 | createdById: 'Users:myId', 207 | specId: 'c0' 208 | }] 209 | }, { 210 | Users: [{ 211 | 
username: 'Challenges:c0:name', 212 | specId: 'u1' 213 | }] 214 | }]); 215 | }); 216 | }); 217 | 218 | describe('arrays', function() { 219 | it('should take arrays into account when prioritizing', function() { 220 | var config = { 221 | has_integer: { 222 | integer: 9, 223 | specId: 'hi0' 224 | }, 225 | needs_integer: [{ 226 | integers: ['has_integer:hi0:integer', 4], 227 | specId: 'ni0' 228 | }] 229 | }; 230 | 231 | expect(prioritize(config)).to.eql([{ 232 | has_integer: [{ 233 | integer: 9, 234 | specId: 'hi0' 235 | }] 236 | }, { 237 | needs_integer: [{ 238 | integers: ['has_integer:hi0:integer', 4], 239 | specId: 'ni0' 240 | }] 241 | }]); 242 | }); 243 | }); 244 | 245 | describe('errors', function() { 246 | it('should return an error if a dependency does not exist', function() { 247 | var config = { 248 | Users: { 249 | username: 'bob', 250 | specId: 'u0' 251 | }, 252 | Challenges: [{ 253 | createdById: 'Tasks:t0', 254 | name: 'my challenge', 255 | specId: 'c0' 256 | }] 257 | }; 258 | 259 | var result = prioritize(config); 260 | expect(result).to.be.an.instanceOf(Error); 261 | expect(result.toString()).to.contain("Tasks:t0"); 262 | }); 263 | 264 | it('should return an error if a dependency is out of bounds', function() { 265 | var config = { 266 | Users: { 267 | username: 'bob', 268 | specId: 'u0' 269 | }, 270 | Challenges: [{ 271 | createdById: 'Users:u1', 272 | name: 'my challenge', 273 | specId: 'c0' 274 | }] 275 | }; 276 | 277 | expect(prioritize(config)).to.be.an.instanceOf(Error); 278 | }); 279 | }); 280 | }); 281 | -------------------------------------------------------------------------------- /lib/insert-records.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Calls into knex to actually insert the records into the database 3 | * and returns an array of promises that knex generated 4 | * 5 | * Also massages the result of knex's insert into entire hydrated records. 6 | * 7 | * Database differences 8 | * ==================== 9 | * The insertion happens differently for postgres versus all the other supported 10 | * databases. The return result of an insert in Postgres can be the actual records 11 | * that were inserted. sql-fixtures takes advantage of that in order to return to 12 | * the user their data. For MySQL et al, this is not true. The best you get is 13 | * the id (primary key) of the last record that got inserted. 14 | * 15 | * So for non-postgres dbs, the insertions happen serially, and after each insert 16 | * a select is done to grab the inserted record. 17 | * 18 | * This also means for non-postgres, if a table lacks a primary key, then it can 19 | * lead into undefined behavior. 
Also see 20 | * http://city41.github.io/node-sql-fixtures/#no-primary-key-warning 21 | */ 22 | var _ = require('lodash'); 23 | var bluebird = require('bluebird'); 24 | 25 | var isPostgres = require('./is-postgres'); 26 | var isSqlite = require('./is-sqlite'); 27 | 28 | function removeExtraKeys(trimmedRecord, value, key) { 29 | if (key !== 'specId' && value !== null && typeof value !== 'undefined') { 30 | trimmedRecord[key] = value; 31 | } 32 | } 33 | 34 | function buildRawSqlPromises(knex, sqls) { 35 | return sqls.map(function(rawSql) { 36 | var sqlPromise = knex.raw(rawSql).then(function(result) { 37 | return {}; 38 | }); 39 | return sqlPromise; 40 | }); 41 | } 42 | 43 | function getInsertableRecords(knex, tableName, candidateRecords, unique) { 44 | if (unique) { 45 | var checkIfEquivalentRecordExistsPromises = _.map(candidateRecords, function(candidateRecord) { 46 | return knex(tableName).where(candidateRecord).then(function(result) { 47 | if (result.length === 0) { 48 | return candidateRecord; 49 | } 50 | }); 51 | }); 52 | 53 | return bluebird.all(checkIfEquivalentRecordExistsPromises).then(function(results) { 54 | return _.compact(results); 55 | }); 56 | 57 | } else { 58 | return bluebird.resolve(candidateRecords); 59 | } 60 | } 61 | 62 | function getAllKeys(records) { 63 | var keys = _.reduce(records, function(keysResult, record) { 64 | keysResult = keysResult.concat(_.keys(record)); 65 | return keysResult; 66 | }, []); 67 | return _.compact(_.uniq(keys)); 68 | } 69 | 70 | /** 71 | * insertRecordsSerially, the "Achilles heel" of sql-fixtures 72 | * 73 | * TL;DR: sql-fixtures was originally created for postgres and takes advantage 74 | * of a postgres specific feature. Expanding to mysql, maria and sqlite has been 75 | * a problem because they lack that feature. This method *mostly* addresses the problem 76 | * 77 | * The problem: After inserting a record sql-fixtures needs to retrieve the entire 78 | * record in order to resolve downstream dependencies. If nothing else, the record's 79 | * ID is needed, to resolve downstream relation dependencies, but autogenerated columns 80 | * also need to be retrieved. With Postgres, the return value of an insert is 81 | * the record that was created, which is awesome and works perfecty. Thank you Postgres! 82 | * No other database does this (booooo). MySQL and Maria instead offer LAST_INSERT_ID() 83 | * which you can then use to select the inserted record, but only if the table has a 84 | * primary key. sqlite also has this feature using rowids. The problem emerges for tables 85 | * that lack a singular primary key column 86 | * 87 | * MySQL/Maria with a singular ID column 88 | * ----------- 89 | * if they have a singular ID column, this function works perfectly. We are forced 90 | * to insert records serially, but we get expected results and it's solid 91 | * 92 | * MySQL/Maria without a singular ID column 93 | * ----------- 94 | * trouble can brew here. Since there is no ID column, LAST_INSERT_ID() does not work. 95 | * We fall back to doing a select using the record's spec and hope we get the right row. 96 | * we *usually* get the right row and things are *usually* fine, but datetime rows 97 | * can mess this up. 98 | * 99 | * Sqlite with rowids, still TODO 100 | * ----------- 101 | * Sqlite has rowids, a secret primary key column that is added by default. It allows 102 | * us to avoid this problem and get perfect results. If someone creates a sqlite table 103 | * and specifies "without rowid", then sql-fixtures will not support that case. 
104 |  * TODO: using rowids is still todo 105 |  */ 106 | function insertRecordsSerially(knex, tableName, insertRecords, showWarning) { 107 |   var insertedRecords = []; 108 | 109 |   function onInsertedResult(index, result, showWarning) { 110 |     if (showWarning && (!result || result.length === 0)) { 111 |       // this happens if our fallback failed. Warn the user and move on, not 112 |       // much else we can do :-/ 113 |       console.warn("Failed to retrieve the most recently inserted record for table " + tableName + 114 |         ", you will probably get unexpected results. " + 115 |         "See: http://city41.github.io/node-sql-fixtures/#no-primary-key-warning"); 116 |     } 117 | 118 |     insertedRecords[index] = result[0]; 119 |     return insertRecordAt(index + 1); 120 |   } 121 | 122 |   function insertRecordAt(index) { 123 |     if (index < insertRecords.length) { 124 |       return knex(tableName).insert(insertRecords[index]).then(function(insertResult) { 125 |         var selectPromise; 126 |         if (!insertResult || insertResult.length === 0 || insertResult[0] < 1) { 127 |           // table lacks an ID column, we are most likely in mysql/maria 128 |           // this noop promise allows us to go into the fallback (see below) 129 |           selectPromise = bluebird.resolve(); 130 |         } else { 131 |           // we have a good ID column, awesome, all databases work well if we get here 132 | 133 |           // if sqlite, there is a hidden rowid column, which is what knex returned 134 |           // it's possible to get here without an id column, but not a rowid column, 135 |           // this makes sqlite 100% accurate, as long as the user does not use "without rowid" 136 |           // when creating their tables 137 |           var idColumn = isSqlite(knex) ? 'rowid' : 'id'; 138 | 139 |           var idQuery = {}; 140 |           idQuery[idColumn] = insertResult[0]; 141 |           selectPromise = knex(tableName).where(idQuery); 142 |         } 143 | 144 |         return selectPromise.then(function(retrievedRecordResult) { 145 |           if (!retrievedRecordResult || retrievedRecordResult.length === 0) { 146 |             // fallback: no id column, failed to get the row, we will try with a 147 |             // generic where using the spec itself as the where clause. This 148 |             // *usually* works, but can fail in certain scenarios. 
149 | return knex(tableName).where(insertRecords[index]).limit(1).then(function(finalResult) { 150 | return onInsertedResult(index, finalResult, showWarning); 151 | }); 152 | } else { 153 | return onInsertedResult(index, retrievedRecordResult, false); 154 | } 155 | }); 156 | }); 157 | } else { 158 | return bluebird.resolve(insertedRecords); 159 | } 160 | } 161 | 162 | return insertRecordAt(0); 163 | } 164 | 165 | function buildInsertPromise(knex, tableName, records, unique, showWarning) { 166 | var insertRecords = _.map(records, function(record) { 167 | return _.transform(record, removeExtraKeys); 168 | }); 169 | 170 | if (unique) { 171 | insertRecords = _.uniqBy(insertRecords, getAllKeys(insertRecords)); 172 | } 173 | 174 | function assembleFinalResult(insertedRecords) { 175 | var finalResult = {}; 176 | 177 | finalResult[tableName] = _.map(insertedRecords, function(insertedRecord, i) { 178 | // only attach keys that were passed with the data 179 | var recordResult = _.pick(insertedRecord, _.union(_.keys(records[i]), ['id'])); 180 | return _.extend(records[i], recordResult); 181 | }); 182 | 183 | return finalResult; 184 | } 185 | 186 | return getInsertableRecords(knex, tableName, insertRecords, unique).then(function(insertableRecords) { 187 | insertRecords = insertableRecords; 188 | 189 | if (insertRecords.length === 0) { 190 | return assembleFinalResult(insertRecords); 191 | } 192 | 193 | if (isPostgres(knex)) { 194 | return knex(tableName).returning('*').insert(insertRecords).then(function(insertResults) { 195 | return assembleFinalResult(insertResults); 196 | }); 197 | } else { 198 | return insertRecordsSerially(knex, tableName, insertRecords, showWarning).then(function(insertResults) { 199 | return assembleFinalResult(insertResults); 200 | }); 201 | } 202 | }); 203 | } 204 | 205 | module.exports = function insertRecords(knex, configs, unique, showWarning) { 206 | var promises = []; 207 | 208 | _.forIn(configs, function(records, table) { 209 | if (table === 'sql') { 210 | var sqlPromises = buildRawSqlPromises(knex, records); 211 | promises = promises.concat(sqlPromises); 212 | } else { 213 | var insertPromise = buildInsertPromise(knex, table, records, unique, showWarning); 214 | promises.push(insertPromise); 215 | } 216 | }); 217 | 218 | return promises; 219 | }; 220 | -------------------------------------------------------------------------------- /site/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | docs - sql-fixtures 12 | 13 | 14 | 15 | 43 |
44 |

45 |

sql-fixtures (for Node.js)

46 |

47 |

 48 | sql-fixtures is a Node.js module that generates and inserts fixture data into SQL databases. It is ideal for integration tests and dummy data. It supports PostgreSQL, MySQL, MariaDB and SQLite, and is easy to use. It uses knex under the hood. 49 |

50 |
51 |

installation

52 |
npm install --save-dev sql-fixtures
53 | 54 |

 55 | You will also need to install a driver for whatever database you are using, for example pg for PostgreSQL. 56 |

57 |
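For instance, to use sql-fixtures with PostgreSQL via the pg driver (the driver assumed in the examples below):

npm install --save-dev pg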

Basic Usage

58 |
var sqlFixtures = require('sql-fixtures');
 59 | 
 60 | // depending on which database engine you are using
 61 | // this is a typical PostgreSQL config for the pg driver
 62 | var dbConfig = {
 63 |   client: 'pg',
 64 |   connection: {
 65 |     host: 'localhost',
 66 |     user: 'testdb',
 67 |     password: 'password',
 68 |     database: 'testdb',
 69 |     port: 15432
 70 |   }
 71 | };
 72 | 
 73 | var dataSpec = {
 74 |   users: {
 75 |     username: 'Bob',
 76 |     email: 'bob@example.com'
 77 |   }
 78 | };
 79 | 
 80 | sqlFixtures.create(dbConfig, dataSpec, function(err, result) {
 81 |   // at this point a row has been added to the users table
 82 |   console.log(result.users[0].username);
 83 | });
84 | 85 |

Callbacks and Promises

86 |

Both callbacks and promises are supported by sqlFixtures.create(). 87 | You can either do 88 |

89 | 90 |

91 | sqlFixtures.create(dbConfig, dataSpec, callback) 92 |

93 | 94 |

or

95 | 96 |

97 | sqlFixtures.create(dbConfig, dataSpec).then(callback, errback). 98 |

99 |

When using callbacks, the signature is the Node standard function(err, result), with err being falsy if there is no error. 100 |

101 | 102 |
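As a minimal sketch of the promise form (reusing the dbConfig and dataSpec objects from the Basic Usage example above):

sqlFixtures.create(dbConfig, dataSpec)
  .then(function(result) {
    // same result object the callback form receives
    console.log(result.users[0].username);
  }, function(err) {
    // same error the callback form would receive
    console.error(err);
  });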

The dataSpec Object

103 |

This object indicates what data should get added to the database. The root keys of the object must match your table names exactly. The keys of the objects under the table must match column names exactly.

104 | 105 |
var dataSpec = {
106 |   // this must match your table's name exactly
107 |   users: {
108 |     // this must match your column name exactly
109 |     username: 'Bob'
110 |   }
111 | };
112 | 113 |

If you need to insert more than one record, use an array instead

114 | 115 |
var dataSpec = {
116 |   users: [
117 |     { username: 'Bob' },
118 |     { username: 'Sally'}
119 |   ]
120 | };
121 | 122 |

Resolving Foreign Relations

123 |

sql-fixtures will resolve foreign relation columns automatically for you. This feature is the real power behind the module. To do this, reference the desired foreign relation from another table using a string:

124 | 125 |
var dataSpec = {
126 |   users: [
127 |     { username: 'Bob' },
128 |     { username: 'Sally'}
129 |   ],
130 | 
131 |   items: {
132 |     name: 'book',
133 |     // at database insertion time, this will be Bob's id
134 |     user_id: 'users:0'
135 |   }
136 | };
137 | 
138 | sqlFixtures.create(dbConfig, dataSpec, function(err, result) {
139 |   console.log(result.items[0].user_id == result.users[0].id); // true
140 | });
141 | 
142 | 143 |

Foreign Relations From Existing Data

144 |

As of version 0.12.0, you can resolve foreign keys against data already in the database.

145 | 146 |
var dataSpec = {
147 |   items: {
148 |     name: 'book',
149 |     user_id: {from: 'users', where: {'name': 'Bob'}}
150 |   }
151 | };
152 | 
153 | 154 |

In the above, sql-fixtures will look for a user whose name is set to Bob. The found row's id will be set as the user_id before inserting the item row.

155 |

from must indicate a table name, and where can be any knex where expression. The lookup must match exactly one row, otherwise an error will be thrown.

156 | 157 | 158 |
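Because where is handed to knex, it can match on more than one column. A sketch (the first_name and last_name columns here are purely illustrative):

var dataSpec = {
  items: {
    name: 'book',
    // resolves to the id of the single user matching both conditions
    user_id: {from: 'users', where: {first_name: 'Bob', last_name: 'Smith'}}
  }
};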

Resolving Other Columns

159 |

You can resolve other columns by appending their name to the resolution string. If the column name is left off, id is assumed.

160 | 161 |
var dataSpec = {
162 |   users: [
163 |     { username: 'bob' },
164 |     { username: 'users:0:username' }
165 |   ]
166 | };
167 | 168 |

Both of the above users will have their username set to "bob"

169 |

As of version 1.0.0, you can resolve other columns from existing data:

170 | 171 |
var dataSpec = {
172 |   items: {
173 |     name: {from: 'users', column: 'middle_name', where: {'name': 'Bob'}}
174 |   }
175 | };
176 | 
177 | 178 |

In the above, a query for Bob's middle name will be performed, and the result will be set as the item's name.

179 | 180 |

Resolving Using Spec IDs

181 |

If you are generating a lot of data, indices can get tedious. You can use a specId instead.

182 | 183 |
var dataSpec = {
184 |   users: {
185 |     username: 'bob',
186 |     specId: 'mySpecialUser'
187 |   },
188 |   items: {
189 |     // this resolves to bob's id
190 |     // at creation time
191 |     user_id: 'users:mySpecialUser',
192 |     name: 'book'
193 |   }
194 | };
195 | 
196 | var dbConfig = {...}; // see below
197 | sqlFixtures.create(dbConfig, dataSpec, function(err, result) {
198 |   console.log(result.items[0].user_id === result.users[0].id); // true
199 | });
200 | 201 |

String Values With Colons In Them

202 |

If you need a literal : in a string column, double it up to escape it

203 | 204 |
var dataSpec = {
205 |   websites: {
206 |     // https://github.com is what gets written to the database
207 |     url: 'https:://github.com'
208 |   }
209 | };
210 | 211 |

Arbitrary SQL

212 |

sql-fixtures can run arbitrary SQL via the sql key on the dataSpec object. This SQL must conform to your database. After any resolution has been done, the string is passed as-is to the database.

213 | 214 |
var dataSpec = {
215 |   users: {
216 |     username: 'bob'
217 |   },
218 |   // notice the sql key also supports resolution
219 |   sql: 'insert into items ("name", "user_id") values (\'book\', {users:0})'
220 | };
221 | 222 |

NOTE: the result object that is returned will contain nothing related to the arbitrary SQL. As far as that object is concerned, the arbitrary SQL does not exist.

223 | 224 |

Also: the sql key can be an array of SQL statements. The statements will be executed in parallel with no guarantee of order.

225 | 226 |
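For example (a sketch; the items table and its columns are the same illustrative ones used earlier):

var dataSpec = {
  users: {
    username: 'bob'
  },
  // both statements support resolution; they run in parallel
  sql: [
    'insert into items ("name", "user_id") values (\'book\', {users:0})',
    'insert into items ("name", "user_id") values (\'hat\', {users:0})'
  ]
};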

Array Columns (PostgreSQL Only)

227 |

When using Postgres, you can specify array columns. They support resolution as well.

228 | 229 |

var dataSpec = {
230 |   has_integer: {
231 |     integer: 7
232 |   },
233 |   needs_integer: {
234 |     // the last value will be 7 at insertion time
235 |     integer_array: [1, 2, 'has_integer:0:integer']
236 |   }
237 | };
238 | 239 |

Additional Options to create()

240 |

You can pass in an options object to create() to further control the data generation.

241 | 242 |
var options = {
243 |   unique: true,
244 |   showWarning: false
245 | };
246 | sqlFixtures.create(dbConfig, dataSpec, options, function(err, result) {
247 |   ...
248 | });
249 | 250 |

unique: if true, sqlFixtures will attempt to avoid inserting a new row that is identical to existing rows in the table.

251 | 252 |
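As a sketch of what unique does (using the users table from the earlier examples):

var dataSpec = {
  users: [
    { username: 'bob' },
    { username: 'bob' }
  ]
};

sqlFixtures.create(dbConfig, dataSpec, { unique: true }, function(err, result) {
  // identical entries collapse into one, and rows that already exist
  // in the table are not inserted again
  console.log(result.users.length); // 1
});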

NOTE: unique is slightly experimental and might change or go away in the future.

253 | 254 |

showWarning: if false, suppresses the warning about missing primary key columns; see the No Primary Key Warning section for more details.

255 | 256 |

Connecting to the Database

257 |

The dbConfig object that you pass to sqlFixtures is handed off as-is to Knex. See Knex's documentation on clients for more info on what needs to be in this object.

258 | 259 |
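For instance, a MySQL configuration might look like this (the host, credentials and database name are placeholders):

var dbConfig = {
  client: 'mysql',
  connection: {
    host: 'localhost',
    user: 'testdb',
    password: 'password',
    database: 'testdb'
  }
};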

Disconnecting from the Database

260 |

Calling sqlFixtures.destroy(callback) will close the connection to the database. destroy() can both take a callback and return a promise to indicate when the connection has been closed or if there was an error.

261 | 262 |
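For example, using the promise form (a minimal sketch):

sqlFixtures.destroy().then(function() {
  // the connection is closed; a later create() will reconnect
});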

Creating an instance of sql-fixtures

263 |

Generally you work with sql-fixtures by calling the static create() method. You can also create an instance of sql-fixtures if you like. This has the advantage of not having to pass the database config to create() every time, and also gives you access to the knex object that sql-fixtures creates right away.

264 | 265 |
var sqlFixtures = require('sql-fixtures');
266 | var fixtureCreator = new sqlFixtures(dbConfig);
267 | 
268 | // fixtureCreator.knex is now available and ready to go
269 | // when calling create(), omit the dbConfig parameter
270 | fixtureCreator.create(dataSpec, callback);
271 | 272 |

Using an existing knex instance

273 |

If you already have your own instance of knex, you can have sql-fixtures use it by creating an instance of sql-fixtures and passing in the knex instance.

274 | 275 |
var sqlFixtures = require('sql-fixtures');
276 | var myKnex = knex(myDbConfig);
277 | var fixtureCreator = new sqlFixtures(myKnex);
278 | 
279 | // your instance of knex is now used to create data
280 | fixtureCreator.create(dataSpec, callback);
281 | 282 |

If you are using your own instance of knex, then sqlFixtures.destroy() is not supported; you must destroy knex yourself.

283 | 284 |

Assumptions and Limitations

285 |
    286 |
  • sql-fixtures assumes your primary key columns are all called "id". Any other primary key naming scheme will not work. In theory this is a temporary limitation, but honestly I'm not sure when it will get resolved, as I've seen no demand to change this.
  • 287 |
  • sql-fixtures offers no means of deleting data. You can use sqlFixtures.knex to delete data if you wish.
  • 288 |
  • sql-fixtures makes no attempt whatsoever to be secure. Not intended for use in a production environment!
  • 289 |
290 | 291 |

No Primary Key Warning

292 |

 293 | If you are using MySQL, Maria, or SQLite with "without rowid" tables, and you want to insert records 294 | using sql-fixtures into a table that lacks a primary key, you may get unexpected results. Postgres 295 | does not have this limitation. 296 |

297 |

SQLite note: SQLite only has this limitation if you created your tables without rowids. If you don't know what rowids are, then you don't have this limitation.

298 |

This is due to how these databases perform inserts. After an insert, sql-fixtures then needs to retrieve the 299 | entire inserted row from the database, so that the data needed to resolve other dependencies in the fixtures can be obtained. 300 |

301 |

The only way I know of to do this in non-postgres databases is to make a SELECT query using the id of the most recently inserted row. 302 | For example, in MySQL/Maria this can be done with SELECT * from table where id = LAST_INSERT_ID(). But if your table lacks a primary key 303 | column, then there's no way to do this. In this scenario, sql-fixtures will still do a SELECT and hope the row that comes back is the right one. 304 |

305 |
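Roughly, the non-postgres flow looks like this (a simplified sketch in knex terms, with my_table standing in for your table):

knex('my_table').insert(record).then(function(insertResult) {
  // insertResult[0] is the new row's id (the rowid for SQLite)
  return knex('my_table').where({ id: insertResult[0] });
});
// with no primary key, the fallback is knex('my_table').where(record),
// which simply hopes the right row comes back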

Most of the time this won't be a problem, but it can be in more advanced/unusual scenarios. 306 |

307 |

Postgres avoids this problem because you can have Postgres return the inserted rows as the result of the insert. 308 | This also means sql-fixtures is more performant with Postgres, as it can insert rows in bulk, whereas with the other databases 309 | records must be inserted one at a time. 310 |

311 |

Suppress the Warning

312 |

You can suppress this warning by passing {showWarning: false} in the options object to create().

313 |
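For example (a minimal sketch):

sqlFixtures.create(dbConfig, dataSpec, { showWarning: false }, function(err, result) {
  // no warning will be logged, even if a table lacks a primary key
});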
314 | 315 | 316 | -------------------------------------------------------------------------------- /test/integration/integration-specs.js: -------------------------------------------------------------------------------- 1 | var _ = require("lodash"); 2 | var knex = require("knex"); 3 | var bluebird = require("bluebird"); 4 | var FixtureGenerator = require("../../lib/fixture-generator"); 5 | 6 | module.exports = function (dbConfig) { 7 | describe("FixtureGenerator", function () { 8 | this.enableTimeouts(false); 9 | 10 | before(function () { 11 | this.fixtureGenerator = new FixtureGenerator(dbConfig); 12 | this.knex = this.fixtureGenerator.knex; 13 | }); 14 | 15 | beforeEach(function (done) { 16 | // NOTE: these test tables don't actually have foreign key constraints. 17 | // This is to make clearing the data between tests easier. The tests are 18 | // still asserting that cross-table dependencies get resolved correctly 19 | // so the lack of a true foreign key doesn't affect what is being tested. 20 | 21 | // snake case is used to keep everything lowercase, avoiding case-sensitivity issues, 22 | // which makes running these common specs against the different database 23 | // engines easier (especially the arbitrary sql specs) 24 | 25 | var knex = this.knex; 26 | var dropPromises = [ 27 | knex.schema.dropTableIfExists("simple_table"), 28 | knex.schema.dropTableIfExists("has_foreign_key"), 29 | knex.schema.dropTableIfExists("has_foreign_key_to_itself"), 30 | knex.schema.dropTableIfExists("has_two_foreign_keys"), 31 | knex.schema.dropTableIfExists("has_no_id_column"), 32 | ]; 33 | 34 | bluebird 35 | .all(dropPromises) 36 | .then(function () { 37 | return knex.schema.createTable("simple_table", function (table) { 38 | table.increments("id").primary(); 39 | table.string("string_column"); 40 | table 41 | .string("auto_populated_column") 42 | .notNullable() 43 | .defaultTo("autopopulated"); 44 | }); 45 | }) 46 | .then(function () { 47 | return knex.schema.createTable("has_foreign_key", function (table) { 48 | table.increments("id").primary(); 49 | table.string("string_column"); 50 | table.integer("simple_table_id"); 51 | }); 52 | }) 53 | .then(function () { 54 | return knex.schema.createTable("has_foreign_key_to_itself", function ( 55 | table 56 | ) { 57 | table.increments("id").primary(); 58 | table.string("string_column"); 59 | table.integer("parent_id"); 60 | }); 61 | }) 62 | .then(function () { 63 | return knex.schema.createTable("has_two_foreign_keys", function ( 64 | table 65 | ) { 66 | table.increments("id").primary(); 67 | table.integer("has_foreign_key_to_itself_id"); 68 | table.integer("simple_table_id"); 69 | table.string("string_column"); 70 | }); 71 | }) 72 | .then(function () { 73 | return knex.schema.createTable("has_no_id_column", function (table) { 74 | table.integer("foreign_a_id"); 75 | table.integer("foreign_b_id"); 76 | table 77 | .string("auto_populated_column") 78 | .notNullable() 79 | .defaultTo("autopopulated"); 80 | }); 81 | }) 82 | .then(function () { 83 | done(); 84 | }); 85 | }); 86 | 87 | after(function (done) { 88 | this.fixtureGenerator.destroy(done); 89 | }); 90 | 91 | describe("selecting pre-existing data with unknown ID:s", function () { 92 | beforeEach(function createTableWithUnknownIds(done) { 93 | var wantedData = "unique row"; 94 | this.knex("simple_table") 95 | .insert([ 96 | { 97 | string_column: "non-unique row", 98 | }, 99 | { 100 | string_column: wantedData, 101 | }, 102 | { 103 | string_column: "non-unique row", 104 | }, 105 | ]) 106 | .then(function 
(result) { 107 | done(); 108 | }); 109 | 110 | this.dataConfig = { 111 | has_foreign_key: [ 112 | { 113 | string_column: "first row with external reference", 114 | simple_table_id: { 115 | from: "simple_table", 116 | where: { string_column: wantedData }, 117 | }, 118 | }, 119 | { 120 | string_column: "second row with external reference", 121 | simple_table_id: { 122 | from: "simple_table", 123 | where: { string_column: wantedData }, 124 | }, 125 | }, 126 | ], 127 | }; 128 | }); 129 | 130 | it("should replace a single query object with its result, corresponding to a FK", function (done) { 131 | var knex = this.knex; 132 | this.fixtureGenerator 133 | .create(this.dataConfig) 134 | .then(function checkSimpleTableIdMatches(fixtures) { 135 | expect( 136 | fixtures.has_foreign_key[0].simple_table_id, 137 | "resolved FK" 138 | ).to.be.gt(0); 139 | 140 | knex("simple_table") 141 | .first() 142 | .where("id", fixtures.has_foreign_key[0].simple_table_id) 143 | .then(function (rowIdentifiedByFk) { 144 | expect(rowIdentifiedByFk, "rowIdentifiedByFk").to.have.property( 145 | "string_column" 146 | ); 147 | expect(rowIdentifiedByFk.string_column).to.eql("unique row"); 148 | done(); 149 | }); 150 | }) 151 | .catch(function (err) { 152 | done(err); 153 | }); 154 | }); 155 | 156 | it("should replace multiple query objects with their corresponding FK:s", function (done) { 157 | var knex = this.knex; 158 | this.fixtureGenerator 159 | .create(this.dataConfig) 160 | .then(function checkSimpleTableIdMatches(fixtures) { 161 | expect( 162 | fixtures.has_foreign_key[0].simple_table_id, 163 | "resolved FK" 164 | ).to.be.gt(0); 165 | 166 | knex("simple_table") 167 | .first() 168 | .where("id", fixtures.has_foreign_key[0].simple_table_id) 169 | .then(function (rowIdentifiedByFk) { 170 | expect(rowIdentifiedByFk, "rowIdentifiedByFk").to.have.property( 171 | "string_column" 172 | ); 173 | expect(rowIdentifiedByFk.string_column).to.eql("unique row"); 174 | done(); 175 | }); 176 | }) 177 | .catch(function (err) { 178 | done(err); 179 | }); 180 | }); 181 | 182 | it("should fail early for errors during FK lookup", function (done) { 183 | var dataConfig = { 184 | has_foreign_key: { 185 | simple_table_id: { from: "invalid_table", where: {} }, 186 | }, 187 | }; 188 | 189 | this.fixtureGenerator 190 | .create(dataConfig) 191 | .then(function (res) { 192 | done(new Error("promise should have been rejected")); 193 | }) 194 | .catch(function (err) { 195 | done(); 196 | }); 197 | }); 198 | 199 | it("should fail early if FK lookup finds multiple rows", function (done) { 200 | this.dataConfig.has_foreign_key[0].simple_table_id.where = { 201 | string_column: "non-unique row", 202 | }; 203 | 204 | this.fixtureGenerator 205 | .create(this.dataConfig) 206 | .then(function (res) { 207 | done(new Error("promise should have been rejected")); 208 | }) 209 | .catch(function (err) { 210 | expect(err.message).to.contain(">1 possible"); 211 | done(); 212 | }); 213 | }); 214 | 215 | it("should allow getting a column other than id from existing data", function (done) { 216 | var knex = this.knex; 217 | 218 | var wantedData = "unique row"; 219 | var dataConfig = { 220 | has_foreign_key: [ 221 | { 222 | string_column: { 223 | from: "simple_table", 224 | column: "string_column", 225 | where: { string_column: wantedData }, 226 | }, 227 | simple_table_id: { 228 | from: "simple_table", 229 | where: { string_column: wantedData }, 230 | }, 231 | }, 232 | ], 233 | }; 234 | 235 | this.fixtureGenerator 236 | .create(dataConfig) 237 | .then(function 
(fixtures) { 238 | expect(fixtures.has_foreign_key[0].string_column).to.equal( 239 | "unique row" 240 | ); 241 | done(); 242 | }) 243 | .catch(function (err) { 244 | done(err); 245 | }); 246 | }); 247 | }); 248 | 249 | describe("generating fixtures", function () { 250 | it("should create a fixture with dependencies resolved", function (done) { 251 | var dataConfig = { 252 | simple_table: { 253 | string_column: "value1", 254 | }, 255 | has_foreign_key: { 256 | string_column: "value2", 257 | simple_table_id: "simple_table:0", 258 | }, 259 | }; 260 | 261 | var knex = this.knex; 262 | 263 | this.fixtureGenerator.create(dataConfig).then(function (results) { 264 | expect(results.simple_table[0].string_column).to.eql("value1"); 265 | expect(results.simple_table[0]).to.not.have.property("specId"); 266 | 267 | expect(results.has_foreign_key[0].simple_table_id).to.eql( 268 | results.simple_table[0].id 269 | ); 270 | expect(results.has_foreign_key[0].string_column).to.eql("value2"); 271 | expect(results.has_foreign_key[0]).to.not.have.property("specId"); 272 | 273 | // verify the data made it into the database 274 | knex("simple_table") 275 | .where("id", results.simple_table[0].id) 276 | .then(function (result) { 277 | expect(result[0].string_column).to.eql("value1"); 278 | done(); 279 | }); 280 | }); 281 | }); 282 | 283 | it("should leave the passed in data spec alone", function (done) { 284 | var dataConfig = { 285 | simple_table: { 286 | string_column: "value1", 287 | }, 288 | has_foreign_key: { 289 | string_column: "value2", 290 | simple_table_id: "simple_table:0", 291 | }, 292 | }; 293 | 294 | var originalConfig = _.cloneDeep(dataConfig); 295 | 296 | this.fixtureGenerator.create(dataConfig).then(function (results) { 297 | expect(results.simple_table[0].string_column).to.eql("value1"); 298 | expect(originalConfig).to.eql(dataConfig); 299 | done(); 300 | }); 301 | }); 302 | 303 | it("should properly deal with same types at different priorities", function (done) { 304 | var dataConfig = { 305 | simple_table: [ 306 | { 307 | string_column: "value1", 308 | }, 309 | { 310 | string_column: "has_foreign_key:0:string_column", 311 | }, 312 | ], 313 | has_foreign_key: { 314 | string_column: "value2", 315 | simple_table_id: "simple_table:0", 316 | }, 317 | }; 318 | 319 | var knex = this.knex; 320 | this.fixtureGenerator.create(dataConfig).then(function (results) { 321 | expect(results.simple_table[0].id).to.be.a("number"); 322 | expect(results.simple_table[1].id).to.be.a("number"); 323 | expect(results.has_foreign_key[0].id).to.be.a("number"); 324 | 325 | expect(results.simple_table[0].string_column).to.eql("value1"); 326 | expect(results.simple_table[1].string_column).to.eql("value2"); 327 | expect(results.has_foreign_key[0].string_column).to.eql("value2"); 328 | expect(results.has_foreign_key[0].simple_table_id).to.eql( 329 | results.simple_table[0].id 330 | ); 331 | 332 | // verify the data made it into the database 333 | knex("simple_table") 334 | .whereIn("id", [ 335 | results.simple_table[0].id, 336 | results.simple_table[1].id, 337 | ]) 338 | .then(function (result) { 339 | expect(result[0].string_column).to.eql("value1"); 340 | expect(result[1].string_column).to.eql("value2"); 341 | 342 | knex("has_foreign_key") 343 | .where("id", results.has_foreign_key[0].id) 344 | .then(function (result) { 345 | expect(result[0].string_column).to.eql("value2"); 346 | done(); 347 | }); 348 | }); 349 | }); 350 | }); 351 | 352 | it("should properly resolve same types at different priorities", function (done) { 353 
| var dataConfig = { 354 | simple_table: { 355 | string_column: "value1", 356 | }, 357 | has_foreign_key_to_itself: [ 358 | { 359 | string_column: "value2", 360 | }, 361 | { 362 | string_column: "value3", 363 | parent_id: "has_foreign_key_to_itself:0", 364 | }, 365 | ], 366 | has_two_foreign_keys: [ 367 | { 368 | has_foreign_key_to_itself_id: "has_foreign_key_to_itself:0", 369 | simple_table_id: "simple_table:0", 370 | }, 371 | { 372 | has_foreign_key_to_itself_id: "has_foreign_key_to_itself:1", 373 | simple_table_id: "simple_table:0", 374 | }, 375 | ], 376 | }; 377 | 378 | var knex = this.knex; 379 | this.fixtureGenerator.create(dataConfig).then(function (results) { 380 | expect(results.has_foreign_key_to_itself[1].parent_id).to.be.a( 381 | "number" 382 | ); 383 | expect(results.has_foreign_key_to_itself[1].parent_id).to.eql( 384 | results.has_foreign_key_to_itself[0].id 385 | ); 386 | 387 | expect( 388 | results.has_two_foreign_keys[1].has_foreign_key_to_itself_id 389 | ).to.be.a("number"); 390 | expect( 391 | results.has_two_foreign_keys[1].has_foreign_key_to_itself_id 392 | ).to.eql(results.has_foreign_key_to_itself[1].id); 393 | 394 | expect(results.has_two_foreign_keys[1].simple_table_id).to.be.a( 395 | "number" 396 | ); 397 | expect(results.has_two_foreign_keys[1].simple_table_id).to.eql( 398 | results.simple_table[0].id 399 | ); 400 | 401 | knex("has_two_foreign_keys") 402 | .where("id", results.has_two_foreign_keys[1].id) 403 | .then(function (result) { 404 | expect(result[0].simple_table_id).to.eql( 405 | results.simple_table[0].id 406 | ); 407 | done(); 408 | }); 409 | }); 410 | }); 411 | 412 | // this spec asserts that issue #5 is resolved 413 | // in the below, the prioritization ends up moving the has_two_foreign_keys around 414 | // and so before the fix, results.has_two_foreign_keys[0] is what the user expected to 415 | // find at results.has_two_foreign_keys[1] 416 | it("should properly resolve dependencies that might move around", function (done) { 417 | // the below config will end up as these priority levels 418 | // notice the "twos" switched places 419 | // 420 | // 1 -> simpletable 0 and 1, itself 0 421 | // 2 -> itself 1, two 1 422 | // 3 -> two 0 423 | 424 | var dataConfig = { 425 | simple_table: [ 426 | { 427 | string_column: "value1", 428 | }, 429 | { 430 | string_column: "value2", 431 | }, 432 | ], 433 | has_foreign_key_to_itself: [ 434 | { 435 | string_column: "value3", 436 | }, 437 | { 438 | string_column: "value4", 439 | parent_id: "has_foreign_key_to_itself:0", 440 | }, 441 | ], 442 | has_two_foreign_keys: [ 443 | { 444 | simple_table_id: "simple_table:0", 445 | has_foreign_key_to_itself_id: "has_foreign_key_to_itself:0", 446 | string_column: "value5", 447 | }, 448 | { 449 | simple_table_id: "simple_table:1", 450 | string_column: "value6", 451 | }, 452 | ], 453 | }; 454 | 455 | this.fixtureGenerator.create(dataConfig).then(function (results) { 456 | expect(results.has_two_foreign_keys[0].string_column).to.eql( 457 | "value5" 458 | ); 459 | expect(results.has_two_foreign_keys[1].string_column).to.eql( 460 | "value6" 461 | ); 462 | expect(results.has_two_foreign_keys[1].simple_table_id).to.eql( 463 | results.simple_table[1].id 464 | ); 465 | done(); 466 | }); 467 | }); 468 | 469 | it("should insert records in the same order as defined in the spec", function (done) { 470 | var dataConfig = { 471 | simple_table: [], 472 | }; 473 | for (var i = 0; i < 20; ++i) { 474 | dataConfig.simple_table.push({ string_column: "value" + i }); 475 | } 476 | 477 | 
this.fixtureGenerator.create(dataConfig).then(function (results) { 478 | for (var i = 1; i < results.simple_table.length; ++i) { 479 | expect(results.simple_table[i].id).to.be.greaterThan( 480 | results.simple_table[i - 1].id 481 | ); 482 | } 483 | done(); 484 | }); 485 | }); 486 | 487 | it("should properly treat an empty spec as a noop", function (done) { 488 | var dataConfig = { 489 | simple_table: [], 490 | has_foreign_key_to_itself: [], 491 | }; 492 | 493 | var self = this; 494 | 495 | this.fixtureGenerator.create(dataConfig).then(function (results) { 496 | expect(results.simple_table.length).to.equal(0); 497 | expect(results.has_foreign_key_to_itself.length).to.equal(0); 498 | 499 | self.fixtureGenerator.create({}).then(function (results) { 500 | expect(results).to.deep.equal({}); 501 | done(); 502 | }); 503 | }); 504 | }); 505 | 506 | describe("when inserting multiple times (issue #22)", function () { 507 | it("should resolve foreign dependencies correctly", function (done) { 508 | var dataConfig = { 509 | simple_table: [ 510 | { 511 | string_column: "sc1", 512 | }, 513 | { 514 | string_column: "sc2", 515 | }, 516 | ], 517 | has_foreign_key: [ 518 | { 519 | string_column: "hfk1", 520 | simple_table_id: "simple_table:0", 521 | }, 522 | { 523 | string_column: "hfk1", 524 | simple_table_id: "simple_table:1", 525 | }, 526 | ], 527 | }; 528 | 529 | this.fixtureGenerator 530 | .create(dataConfig) 531 | .bind(this) 532 | .then(function (firstResult) { 533 | this.fixtureGenerator 534 | .create(dataConfig) 535 | .then(function (secondResult) { 536 | expect( 537 | secondResult.has_foreign_key[0].simple_table_id 538 | ).to.not.equal(firstResult.simple_table[0].id); 539 | expect( 540 | secondResult.has_foreign_key[0].simple_table_id 541 | ).to.not.equal(firstResult.simple_table[1].id); 542 | 543 | expect( 544 | secondResult.has_foreign_key[1].simple_table_id 545 | ).to.not.equal(firstResult.simple_table[0].id); 546 | expect( 547 | secondResult.has_foreign_key[1].simple_table_id 548 | ).to.not.equal(firstResult.simple_table[1].id); 549 | 550 | expect( 551 | secondResult.has_foreign_key[1].simple_table_id 552 | ).to.equal(secondResult.simple_table[1].id); 553 | 554 | expect(firstResult.simple_table[0].id).to.not.equal( 555 | secondResult.simple_table[0].id 556 | ); 557 | expect(firstResult.simple_table[0].id).to.not.equal( 558 | secondResult.simple_table[1].id 559 | ); 560 | expect(firstResult.simple_table[1].id).to.not.equal( 561 | secondResult.simple_table[0].id 562 | ); 563 | expect(firstResult.simple_table[1].id).to.not.equal( 564 | secondResult.simple_table[1].id 565 | ); 566 | done(); 567 | }); 568 | }); 569 | }); 570 | }); 571 | 572 | describe("tables without an id column", function () { 573 | it("should create rows for tables without an id column", function (done) { 574 | var dataConfig = { 575 | has_no_id_column: [ 576 | { 577 | foreign_a_id: 2, 578 | foreign_b_id: 3, 579 | auto_populated_column: null, 580 | }, 581 | { 582 | foreign_a_id: 5, 583 | foreign_b_id: 6, 584 | }, 585 | ], 586 | }; 587 | 588 | var knex = this.knex; 589 | 590 | this.fixtureGenerator.create(dataConfig).then(function (results) { 591 | expect(results.has_no_id_column[0].foreign_a_id).to.eql(2); 592 | expect(results.has_no_id_column[0].foreign_b_id).to.eql(3); 593 | expect(results.has_no_id_column[0].auto_populated_column).to.exist; 594 | 595 | expect(results.has_no_id_column[1].foreign_a_id).to.eql(5); 596 | expect(results.has_no_id_column[1].foreign_b_id).to.eql(6); 597 | 
expect(results.has_no_id_column[1]).to.not.have.property( 598 | "auto_populated_column" 599 | ); 600 | 601 | knex("has_no_id_column") 602 | .where("foreign_a_id", 2) 603 | .then(function (knexResult) { 604 | expect(knexResult[0].foreign_b_id).to.eql(3); 605 | expect(knexResult[0].auto_populated_column).to.exist; 606 | done(); 607 | }); 608 | }); 609 | }); 610 | }); 611 | 612 | describe("invoking raw sql", function () { 613 | it("should invoke raw sql", function (done) { 614 | var dataConfig = { 615 | simple_table: [ 616 | { 617 | string_column: "value1", 618 | }, 619 | { 620 | string_column: "value2", 621 | }, 622 | ], 623 | sql: [ 624 | "insert into has_foreign_key (string_column, simple_table_id) values ('rawsql0', {simple_table:0})", 625 | "insert into has_foreign_key (string_column, simple_table_id) values ('rawsql1', {simple_table:1})", 626 | ], 627 | }; 628 | 629 | var knex = this.knex; 630 | this.fixtureGenerator.create(dataConfig).then(function (results) { 631 | expect(results.simple_table.length).to.eql(2); 632 | expect(results).to.not.have.property("has_foreign_key"); 633 | knex("has_foreign_key") 634 | .where("string_column", "rawsql0") 635 | .then(function (knexResult) { 636 | expect(knexResult[0].simple_table_id).to.eql( 637 | results.simple_table[0].id 638 | ); 639 | knex("has_foreign_key") 640 | .where("string_column", "rawsql1") 641 | .then(function (knexResult) { 642 | expect(knexResult[0].simple_table_id).to.eql( 643 | results.simple_table[1].id 644 | ); 645 | done(); 646 | }); 647 | }); 648 | }); 649 | }); 650 | 651 | it("should allow raw sql in a column", function (done) { 652 | var knex = this.fixtureGenerator.knex; 653 | 654 | var dataConfig = { 655 | simple_table: [ 656 | { 657 | string_column: knex.raw("?", ["value1"]), 658 | }, 659 | ], 660 | }; 661 | 662 | this.fixtureGenerator.create(dataConfig).then(function (results) { 663 | expect(results.simple_table[0].string_column).to.equal("value1"); 664 | done(); 665 | }); 666 | }); 667 | }); 668 | 669 | describe("auto populated columns", function () { 670 | it("should return the auto populated columns in the result if they are in the config", function (done) { 671 | var dataConfig = { 672 | simple_table: [ 673 | { 674 | string_column: "value1", 675 | auto_populated_column: null, 676 | }, 677 | { 678 | string_column: "value2", 679 | }, 680 | ], 681 | }; 682 | 683 | this.fixtureGenerator.create(dataConfig).then(function (results) { 684 | expect(results.simple_table[0].auto_populated_column).to.eql( 685 | "autopopulated" 686 | ); 687 | expect(results.simple_table[1]).to.not.have.property( 688 | "auto_populated_column" 689 | ); 690 | done(); 691 | }); 692 | }); 693 | }); 694 | }); 695 | 696 | describe("spec ids", function () { 697 | it("should resolved spec ids", function (done) { 698 | var dataConfig = { 699 | simple_table: { 700 | specId: "mySimpleTableRow", 701 | string_column: "value1", 702 | }, 703 | has_foreign_key: { 704 | string_column: "value2", 705 | simple_table_id: "simple_table:mySimpleTableRow", 706 | }, 707 | }; 708 | 709 | var knex = this.knex; 710 | this.fixtureGenerator.create(dataConfig).then(function (results) { 711 | expect(results.simple_table[0].string_column).to.eql("value1"); 712 | expect(results.has_foreign_key[0].simple_table_id).to.eql( 713 | results.simple_table[0].id 714 | ); 715 | 716 | // verify the data made it into the database 717 | knex("simple_table") 718 | .where("id", results.simple_table[0].id) 719 | .then(function (result) { 720 | expect(result[0].string_column).to.eql("value1"); 721 
| done(); 722 | }); 723 | }); 724 | }); 725 | }); 726 | 727 | describe("escaping", function () { 728 | it("should unescape colons", function (done) { 729 | var dataConfig = { 730 | simple_table: { 731 | string_column: "foo::bar", 732 | }, 733 | }; 734 | 735 | var knex = this.knex; 736 | this.fixtureGenerator.create(dataConfig).then(function (results) { 737 | expect(results.simple_table[0].string_column).to.eql("foo:bar"); 738 | 739 | // verify the data made it into the database 740 | knex("simple_table") 741 | .where("id", results.simple_table[0].id) 742 | .then(function (result) { 743 | expect(result[0].string_column).to.eql("foo:bar"); 744 | done(); 745 | }); 746 | }); 747 | }); 748 | }); 749 | 750 | describe("unique rows", function () { 751 | it("should not insert the same data more than once", function (done) { 752 | var dataConfig = { 753 | simple_table: [ 754 | { string_column: "same value" }, 755 | { string_column: "same value" }, 756 | { string_column: "same value" }, 757 | ], 758 | }; 759 | var knex = this.knex; 760 | var fg = this.fixtureGenerator; 761 | 762 | fg.create(dataConfig, { unique: true }).then(function (results) { 763 | expect(results.simple_table.length).to.equal(1); 764 | knex("simple_table").then(function (result) { 765 | expect(result.length).to.equal(1); 766 | 767 | fg.create(dataConfig, { unique: true }).then(function (results) { 768 | expect(results.simple_table.length).to.equal(0); 769 | 770 | knex("simple_table").then(function (result) { 771 | expect(result.length).to.equal(1); 772 | done(); 773 | }); 774 | }); 775 | }); 776 | }); 777 | }); 778 | }); 779 | 780 | describe("errors", function (done) { 781 | it("should reject the promise if knex fails", function (done) { 782 | var dataConfig = { 783 | non_existant_table: { 784 | foo: "bar", 785 | }, 786 | }; 787 | 788 | this.fixtureGenerator.create(dataConfig).then( 789 | function (result) { 790 | done(new Error("should not have succeeded")); 791 | }, 792 | function (err) { 793 | expect(err).to.exist; 794 | expect(err.toString()).to.contain("non_existant"); 795 | done(); 796 | } 797 | ); 798 | }); 799 | 800 | it("should call the callback with an error if knex fails", function (done) { 801 | var dataConfig = { 802 | non_existant_table: { 803 | foo: "bar", 804 | }, 805 | }; 806 | 807 | this.fixtureGenerator.create(dataConfig, function (err, result) { 808 | expect(err).to.exist; 809 | expect(err.toString()).to.contain("non_existant"); 810 | done(); 811 | }); 812 | }); 813 | }); 814 | 815 | describe("calling the callback", function () { 816 | it("should call the callback if provided", function (done) { 817 | var dataConfig = { 818 | simple_table: { 819 | string_column: "a value", 820 | }, 821 | }; 822 | 823 | this.fixtureGenerator.create(dataConfig, function (err, results) { 824 | expect(err).to.not.exist; 825 | expect(results.simple_table[0].string_column).to.eql("a value"); 826 | done(); 827 | }); 828 | }); 829 | }); 830 | 831 | describe("providing a knex instance", function () { 832 | it("should use the provided knex instance", function (done) { 833 | var myKnex = knex(dbConfig); 834 | var fixtureGenerator = new FixtureGenerator(myKnex); 835 | 836 | expect(fixtureGenerator.knex).to.equal(myKnex); 837 | 838 | var dataConfig = { 839 | simple_table: { 840 | string_column: "a value", 841 | }, 842 | }; 843 | 844 | fixtureGenerator.create(dataConfig, function (err, results) { 845 | expect(err).to.not.exist; 846 | expect(results.simple_table[0].string_column).to.eql("a value"); 847 | 848 | bluebird 849 | .resolve() 850 | 
.then(function () { 851 | return myKnex.destroy(); 852 | }) 853 | .nodeify(done); 854 | }); 855 | }); 856 | }); 857 | 858 | describe("reusing the instance", function () { 859 | it("should reconnect after a destroy", function (done) { 860 | var dataConfig = { 861 | simple_table: { 862 | string_column: "a value", 863 | }, 864 | }; 865 | 866 | var fixtureGenerator = this.fixtureGenerator; 867 | 868 | fixtureGenerator.create(dataConfig, function (err, results) { 869 | expect(err).to.not.exist; 870 | expect(results.simple_table[0].string_column).to.eql("a value"); 871 | 872 | fixtureGenerator.destroy(function () { 873 | fixtureGenerator.create(dataConfig, function (err, results) { 874 | expect(err).to.not.exist; 875 | expect(results.simple_table[0].string_column).to.eql("a value"); 876 | done(); 877 | }); 878 | }); 879 | }); 880 | }); 881 | }); 882 | }); 883 | }; 884 | --------------------------------------------------------------------------------