├── .prettierignore ├── .gitignore ├── tests ├── expected │ └── get-sqlite-router │ │ ├── index.js │ │ ├── delete.js │ │ ├── post.js │ │ ├── post_update.js │ │ ├── head.js │ │ ├── unsupported.js │ │ └── get.js ├── resources │ ├── beer.sqlite3 │ ├── big.beer.sqlite3 │ ├── sqlite-to-rest-config.json │ ├── schema.out │ └── dump.out ├── helpers │ └── common.js └── get-sqlite-router.js ├── lib ├── index.js ├── commands │ ├── resources │ │ └── skeleton.tpl │ └── generate-skeleton.js ├── common.js ├── api │ ├── services │ │ ├── db-schema.js │ │ └── config.js │ ├── helpers │ │ ├── route-builders │ │ │ ├── handle-head.js │ │ │ ├── delete.js │ │ │ ├── err-ids.js │ │ │ ├── handle-update.js │ │ │ ├── post.js │ │ │ ├── common.js │ │ │ └── get.js │ │ └── get-schema.js │ └── get-sqlite-router.js ├── services │ └── state.js └── utils.js ├── license.txt ├── bin └── sqlite-to-rest.js ├── cli └── commands │ └── generate-skeleton.js ├── TODO.md ├── package.json ├── docs └── tutorial.md └── README.md /.prettierignore: -------------------------------------------------------------------------------- 1 | /tests/expected/get-sqlite-router/big-result.json 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /node_modules 2 | /test.js 3 | /notes.txt 4 | /test.sql 5 | /tests/tmp/ 6 | -------------------------------------------------------------------------------- /tests/expected/get-sqlite-router/index.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | module.exports = require('require-dir')('./') 4 | -------------------------------------------------------------------------------- /tests/resources/beer.sqlite3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/olsonpm/sqlite-to-rest/HEAD/tests/resources/beer.sqlite3 
-------------------------------------------------------------------------------- /tests/resources/big.beer.sqlite3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/olsonpm/sqlite-to-rest/HEAD/tests/resources/big.beer.sqlite3 -------------------------------------------------------------------------------- /tests/resources/sqlite-to-rest-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "allTablesAndViews": { 3 | "flags": ["sendContentRangeInHEAD"] 4 | }, 5 | "tablesAndViews": { 6 | "beer": { 7 | "maxRange": 5 8 | } 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /lib/index.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | //---------// 4 | // Imports // 5 | //---------// 6 | 7 | const generateSkeleton = require('./commands/generate-skeleton'), 8 | getSqliteRouter = require('./api/get-sqlite-router') 9 | 10 | //---------// 11 | // Exports // 12 | //---------// 13 | 14 | module.exports = { 15 | getSqliteRouter: getSqliteRouter, 16 | generateSkeleton: generateSkeleton.mFn, 17 | } 18 | -------------------------------------------------------------------------------- /tests/expected/get-sqlite-router/delete.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | //------// 4 | // Main // 5 | //------// 6 | 7 | const res = { 8 | success: getSuccess(), 9 | } 10 | 11 | //-------------// 12 | // Helper Fxns // 13 | //-------------// 14 | 15 | function getSuccess() { 16 | return { 17 | statusCode: 204, 18 | } 19 | } 20 | 21 | //---------// 22 | // Exports // 23 | //---------// 24 | 25 | module.exports = res 26 | -------------------------------------------------------------------------------- /lib/commands/resources/skeleton.tpl: 
-------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | //---------// 4 | // Imports // 5 | //---------// 6 | 7 | const Koa = require("koa"), 8 | sqliteToRest = require("sqlite-to-rest"); 9 | 10 | //------// 11 | // Init // 12 | //------// 13 | 14 | const dbPath = "<%= dbPath %>", 15 | getSqliteRouter = sqliteToRest.getSqliteRouter, 16 | PORT = 8085; 17 | 18 | //------// 19 | // Main // 20 | //------// 21 | 22 | const app = new Koa(); 23 | 24 | getSqliteRouter({ dbPath }).then((router) => { 25 | app.use(router.routes()).use(router.allowedMethods()).listen(PORT); 26 | 27 | console.log(`Listening on port: ${PORT}`); 28 | }); 29 | -------------------------------------------------------------------------------- /license.txt: -------------------------------------------------------------------------------- 1 | DO WHAT THE FUCK YOU WANT TO BUT IT'S NOT MY FAULT PUBLIC LICENSE 2 | Version 1, October 2013 3 | 4 | Copyright © 2013 Ben McGinnes 5 | 6 | Everyone is permitted to copy and distribute verbatim or modified copies of this license document, and changing it is allowed as long as the name is changed. 7 | 8 | DO WHAT THE FUCK YOU WANT TO BUT IT'S NOT MY FAULT PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 9 | 10 | 0. You just DO WHAT THE FUCK YOU WANT TO. 11 | 12 | 1. Do not hold the author(s), creator(s), developer(s) or distributor(s) liable for anything that happens or goes wrong with your use of the work. 
13 | -------------------------------------------------------------------------------- /tests/resources/schema.out: -------------------------------------------------------------------------------- 1 | CREATE TABLE city ( 2 | state, city_name, PRIMARY KEY (state, city_name) 3 | ); 4 | CREATE TABLE brewery( 5 | id integer primary key 6 | , state 7 | , city_name 8 | , name 9 | , foreign key (state, city_name) references city(state, city_name) 10 | ); 11 | CREATE VIEW beer_per_brewery( 12 | beer_id, beer_name, brewery_state, brewery_city, brewery_name 13 | ) 14 | as 15 | select beer.id 16 | , beer.name 17 | , state 18 | , city_name 19 | , brewery.name 20 | from beer 21 | join brewery on beer.brewery_id = brewery.id; 22 | CREATE TABLE beer( 23 | id integer primary key 24 | , brewery_id references brewery (id) 25 | , description 26 | , name 27 | ); 28 | -------------------------------------------------------------------------------- /tests/expected/get-sqlite-router/post.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | //------// 4 | // Main // 5 | //------// 6 | 7 | const res = { 8 | eauClaireSuccess: getEauClaireSuccess(), 9 | } 10 | 11 | //-------------// 12 | // Helper Fxns // 13 | //-------------// 14 | 15 | function getEauClaireSuccess() { 16 | return { 17 | statusCode: 201, 18 | body: { 19 | state: 'WI', 20 | city_name: 'Eau Claire', 21 | }, 22 | headers: { 23 | 'content-type': 'application/json; charset=utf-8', 24 | location: '/city?state=WI&city_name=Eau Claire', 25 | 'content-length': '39', 26 | }, 27 | } 28 | } 29 | 30 | //---------// 31 | // Exports // 32 | //---------// 33 | 34 | module.exports = res 35 | -------------------------------------------------------------------------------- /bin/sqlite-to-rest.js: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env node 2 | 'use strict' 3 | 4 | //---------// 5 | // Imports // 6 | //---------// 7 | 8 | const structuredCli = require('structured-cli'), 9 | fp = require('lodash/fp'), 10 | requireDir = require('require-dir'), 11 | state = require('../lib/services/state'), 12 | pjson = require('../package.json') 13 | 14 | //------// 15 | // Init // 16 | //------// 17 | 18 | state.setIsCli(true) 19 | 20 | //------// 21 | // Main // 22 | //------// 23 | 24 | structuredCli.create({ 25 | description: 26 | 'A collection of tools exposing the sqlite-to-rest functionality' + 27 | ' via cli. All commands here are also exposed on the required object.', 28 | commands: fp.values(requireDir('../cli/commands')), 29 | version: pjson.version, 30 | }) 31 | -------------------------------------------------------------------------------- /tests/expected/get-sqlite-router/post_update.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | //------// 4 | // Main // 5 | //------// 6 | 7 | const res = { 8 | thaiSuccess: getThaiSuccess(), 9 | } 10 | 11 | //-------------// 12 | // Helper Fxns // 13 | //-------------// 14 | 15 | function getThaiSuccess() { 16 | return { 17 | statusCode: 201, 18 | body: { 19 | brewery_id: 2, 20 | description: 'The old description was just too long', 21 | id: 5, 22 | name: 'Thai Style White IPA', 23 | }, 24 | headers: { 25 | 'content-length': '107', 26 | 'content-location': '/beer?id=5', 27 | 'content-type': 'application/json; charset=utf-8', 28 | }, 29 | } 30 | } 31 | 32 | //---------// 33 | // Exports // 34 | //---------// 35 | 36 | module.exports = res 37 | -------------------------------------------------------------------------------- /lib/common.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | //------// 4 | // Main // 5 | //------// 6 | 7 | const fs = require('fs') 8 | 9 | //------// 10 | // Main // 11 | //------// 12 | 13 | const 
isSqlite3FileSync = (fpath) => { 14 | let res 15 | try { 16 | const fd = fs.openSync(fpath, 'r') 17 | const b = Buffer.alloc(16) 18 | fs.readSync(fd, b, 0, 16, 0) 19 | res = 20 | b.toString().toLowerCase() === 21 | 'sqlite format 3' + String.fromCharCode('0x00') 22 | } catch (e) { 23 | res = false 24 | } 25 | return res 26 | } 27 | 28 | const isDirectorySync = (fpath) => { 29 | return fs.statSync(fpath).isDirectory() 30 | } 31 | 32 | //---------// 33 | // Exports // 34 | //---------// 35 | 36 | module.exports = { 37 | isDirectorySync, 38 | isSqlite3FileSync, 39 | } 40 | -------------------------------------------------------------------------------- /tests/expected/get-sqlite-router/head.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | //------// 4 | // Main // 5 | //------// 6 | 7 | const res = { 8 | beer: getBeer(), 9 | brewery: getBrewery(), 10 | } 11 | 12 | //-------------// 13 | // Helper Fxns // 14 | //-------------// 15 | 16 | function getBeer() { 17 | return { 18 | statusCode: 200, 19 | headers: { 20 | 'accept-order': 'id,brewery_id,description,name', 21 | 'accept-ranges': 'rows', 22 | 'content-range': 'rows */16', 23 | 'max-range': '5', 24 | }, 25 | } 26 | } 27 | function getBrewery() { 28 | return { 29 | statusCode: 200, 30 | headers: { 31 | 'accept-order': 'id,state,city_name,name', 32 | 'accept-ranges': 'rows', 33 | 'content-range': 'rows */5', 34 | 'max-range': '1000', 35 | }, 36 | } 37 | } 38 | 39 | //---------// 40 | // Exports // 41 | //---------// 42 | 43 | module.exports = res 44 | -------------------------------------------------------------------------------- /tests/expected/get-sqlite-router/unsupported.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | //------// 4 | // Main // 5 | //------// 6 | 7 | const res = { 8 | beer_per_brewery: getBeer_per_brewery(), 9 | beer: getBeer(), 10 | } 11 | 12 | //-------------// 13 | // 
Helper Fxns // 14 | //-------------// 15 | 16 | function getBeer_per_brewery() { 17 | return { 18 | statusCode: 405, 19 | body: 'Method Not Allowed', 20 | headers: { 21 | allow: 'HEAD, GET', 22 | 'content-length': '18', 23 | 'content-type': 'text/plain; charset=utf-8', 24 | }, 25 | } 26 | } 27 | 28 | function getBeer() { 29 | return { 30 | statusCode: 405, 31 | body: 'Method Not Allowed', 32 | headers: { 33 | allow: 'HEAD, GET, POST, DELETE', 34 | 'content-length': '18', 35 | 'content-type': 'text/plain; charset=utf-8', 36 | }, 37 | } 38 | } 39 | 40 | //---------// 41 | // Exports // 42 | //---------// 43 | 44 | module.exports = res 45 | -------------------------------------------------------------------------------- /lib/api/services/db-schema.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | //---------// 4 | // Imports // 5 | //---------// 6 | 7 | const deepFreezeStrict = require('deep-freeze-strict'), 8 | fp = require('lodash/fp') 9 | 10 | //------// 11 | // Init // 12 | //------// 13 | 14 | let dbSchemaObj 15 | 16 | //------// 17 | // Main // 18 | //------// 19 | 20 | const getDbSchema = () => dbSchemaObj 21 | 22 | function setDbSchema({ tables, views }) { 23 | if (arguments.length !== 1) { 24 | throw new Error( 25 | 'Invalid Input: This function requires exactly ' + 'one argument' 26 | ) 27 | } 28 | 29 | const aDbSchemaObj = fp.cloneDeep({ tables, views }) 30 | 31 | // no errors - good to go 32 | 33 | aDbSchemaObj.tablesAndViews = fp.assign(tables, views) 34 | dbSchemaObj = deepFreezeStrict(aDbSchemaObj) 35 | 36 | return dbSchemaObj 37 | } 38 | 39 | //---------// 40 | // Exports // 41 | //---------// 42 | 43 | module.exports = { 44 | get: getDbSchema, 45 | set: setDbSchema, 46 | } 47 | -------------------------------------------------------------------------------- /lib/services/state.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | 
//---------// 4 | // Imports // 5 | //---------// 6 | 7 | const fp = require('lodash/fp') 8 | 9 | //------// 10 | // Init // 11 | //------// 12 | 13 | // can't grab from utils because that would be a circular reference 14 | const mutableSet = fp.set.convert({ immutable: false }), 15 | setState = getSetState(), 16 | state = {} 17 | 18 | //------// 19 | // Main // 20 | //------// 21 | 22 | const res = buildRes(['isCli']) 23 | 24 | //-------------// 25 | // Helper Fxns // 26 | //-------------// 27 | 28 | function getSetState() { 29 | return fp.curry((path, state, val) => mutableSet(path, val, state)) 30 | } 31 | 32 | function buildRes(props) { 33 | return fp.reduce( 34 | (res, cur) => 35 | fp.flow( 36 | mutableSet('set' + fp.upperFirst(cur), setState(cur, state)), 37 | mutableSet(cur, () => state[cur]) 38 | )(res), 39 | {}, 40 | props 41 | ) 42 | } 43 | 44 | //---------// 45 | // Exports // 46 | //---------// 47 | 48 | module.exports = res 49 | -------------------------------------------------------------------------------- /cli/commands/generate-skeleton.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | //---------// 4 | // Imports // 5 | //---------// 6 | 7 | const common = require('../../lib/common'), 8 | generateSkeleton = require('../../lib/commands/generate-skeleton') 9 | //------// 10 | // Init // 11 | //------// 12 | 13 | const isSqlite3FileSync = common.isSqlite3FileSync 14 | 15 | //------// 16 | // Main // 17 | //------// 18 | 19 | const command = { 20 | name: 'generate-skeleton', 21 | fn: callGenerateSkeleton, 22 | desc: 'Creates a bare-minimum koa server in your working directory.', 23 | marg: { 24 | dbPath: { 25 | flags: ['require'], 26 | custom: { isSqlite3File: isSqlite3FileSync }, 27 | }, 28 | }, 29 | args: [ 30 | { 31 | name: 'dbPath', 32 | desc: 33 | 'Path to sqlite database required to create the routing' + 34 | ' and validation', 35 | example: '', 36 | flags: ['require'], 37 | type: 
'string', 38 | }, 39 | ], 40 | } 41 | 42 | //-------------// 43 | // Helper Fxns // 44 | //-------------// 45 | 46 | function callGenerateSkeleton({ dbPath }) { 47 | return generateSkeleton.fn({ 48 | dbPath, 49 | dir: process.cwd(), 50 | }) 51 | } 52 | 53 | //---------// 54 | // Exports // 55 | //---------// 56 | 57 | module.exports = command 58 | -------------------------------------------------------------------------------- /lib/api/helpers/route-builders/handle-head.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | //---------// 4 | // Imports // 5 | //---------// 6 | 7 | const bPromise = require('bluebird'), 8 | common = require('./common'), 9 | fp = require('lodash/fp') 10 | 11 | //------// 12 | // Init // 13 | //------// 14 | 15 | const { getQuery, getParams } = common 16 | 17 | //------// 18 | // Main // 19 | //------// 20 | 21 | const handleHead = ( 22 | ctx, 23 | next, 24 | tableConfig, 25 | connections, 26 | columnNames, 27 | parsed, 28 | name 29 | ) => { 30 | ctx.set({ 31 | 'accept-ranges': 'rows', 32 | 'accept-order': columnNames.join(','), 33 | }) 34 | let bRes = bPromise.resolve() 35 | 36 | if (tableConfig.maxRange) { 37 | ctx.set('max-range', tableConfig.maxRange) 38 | } 39 | 40 | if (fp.contains('sendContentRangeInHEAD', tableConfig.flags)) { 41 | const selectCount = 'SELECT COUNT(*) as count FROM ' + name, 42 | query = getQuery({ parsed: parsed, queryStart: selectCount }), 43 | params = getParams(parsed) 44 | bRes = bRes 45 | .then(() => { 46 | return connections.readOnly.getAsync(query, params) 47 | }) 48 | .then((res) => { 49 | ctx.set('content-range', 'rows */' + res.count) 50 | }) 51 | } 52 | return bRes.then(() => { 53 | ctx.status = 200 54 | return next() 55 | }) 56 | } 57 | 58 | //---------// 59 | // Exports // 60 | //---------// 61 | 62 | module.exports = handleHead 63 | -------------------------------------------------------------------------------- /TODO.md: 
-------------------------------------------------------------------------------- 1 | ### An unordered list of items I may tend to in the future 2 | 3 | - 'accept-order' response header should be sent upon an invalid order 4 | request header 5 | - Rows must be one indexed to handle case of no rows in table 6 | - Test for invalid query cases and ensure friendly errors are returned 7 | - Remove generateInfo since that should just happen upon server startup. 8 | - extract cli (generateSkeleton) to its own module so that installing globally 9 | doesn't take so long 10 | - Enforce primary keys upon info generation 11 | - Allow for table validation prior to insert 12 | - db_info.json should only hold column names for views 13 | - Look into performance 14 | - Add error ids for each invalid range case and test them 15 | - Implement transactions in testing. Currently state is ensured to be stable 16 | by copying the database file, then renaming the copy to the original file 17 | after the unsafe operations are done. The database connection then needs to 18 | be renewed which is most easily done by restarting the server itself. All 19 | this overhead is small now but will grow linearly with the number of unsafe 20 | method tests. 21 | - Test all positive range scenarios as well (including when max range 22 | isn't specified) 23 | - Write tests for cli and those same commands exposed via js api 24 | - Move shared functionality and validation out into separate middleware. 
25 | - Implement PUT and add caution about lack of validation when using 26 | partial update via POST with a querystring 27 | -------------------------------------------------------------------------------- /lib/api/helpers/route-builders/delete.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | //---------// 4 | // Imports // 5 | //---------// 6 | 7 | const common = require('./common'), 8 | errIds = require('./err-ids'), 9 | fp = require('lodash/fp') 10 | 11 | //------// 12 | // Init // 13 | //------// 14 | 15 | const { 16 | attachError, 17 | bRunQuery, 18 | getQuery, 19 | getParams, 20 | getPkColumnNames, 21 | parseQueryForPkColumns, 22 | } = common, 23 | qsErrIds = errIds.delete.queryString 24 | 25 | //------// 26 | // Main // 27 | //------// 28 | 29 | const buildDelete = (name, columns, connections, router) => { 30 | const deleteRow = 'DELETE FROM ' + name, 31 | pkColumnNames = getPkColumnNames(columns) 32 | 33 | router.delete('/' + name, (ctx, next) => { 34 | const parsed = parseQueryForPkColumns( 35 | ctx.decodedQuerystring, 36 | pkColumnNames, 37 | qsErrIds 38 | ) 39 | 40 | if (fp.invoke('hasErr', parsed)) { 41 | return attachError(ctx, parsed) 42 | } 43 | 44 | const query = getQuery({ parsed: parsed, queryStart: deleteRow }), 45 | params = getParams(parsed) 46 | return bRunQuery(connections.readWrite, query, params) 47 | .then(({ changes }) => { 48 | ctx.status = changes ? 
204 : 404 49 | return next() 50 | }) 51 | .catch((err) => { 52 | ctx.status = 500 53 | console.error(err) 54 | }) 55 | }) 56 | } 57 | 58 | //---------// 59 | // Exports // 60 | //---------// 61 | 62 | module.exports = buildDelete 63 | -------------------------------------------------------------------------------- /lib/api/helpers/route-builders/err-ids.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | //---------// 4 | // Imports // 5 | //---------// 6 | 7 | const utils = require('../../../utils') 8 | 9 | //------// 10 | // Main // 11 | //------// 12 | 13 | const errIds = { 14 | get: { 15 | queryString: { 16 | ampersandRequired: 'ampersand-required', 17 | columnRequired: 'column-required', 18 | operatorRequired: 'operator-required', 19 | openingQuoteRequired: 'opening-quote-required', 20 | closingQuoteRequired: 'closing-quote-required', 21 | }, 22 | invalidRange: 'invalid-range', 23 | invalidOrder: 'invalid-order', 24 | }, 25 | delete: { 26 | queryString: { 27 | duplicatePkColumnsNotAllowed: 'duplicate-pk-columns-not-allowed', 28 | equalsRequired: 'equals-required', 29 | missingPkColumns: 'missing-pk-columns', 30 | pkColumnRequired: 'pk-column-required', 31 | }, 32 | }, 33 | post: { 34 | requestBody: { 35 | invalidColumns: 'invalid-columns', 36 | missingRequiredColumns: 'missing-required-columns', 37 | }, 38 | }, 39 | update: { 40 | queryString: { 41 | duplicatePkColumnsNotAllowed: 'duplicate-pk-columns-not-allowed', 42 | equalsRequired: 'equals-required', 43 | missingPkColumns: 'missing-pk-columns', 44 | pkColumnRequired: 'pk-column-required', 45 | }, 46 | requestBody: { 47 | mustBeNonEmpty: 'must-be-non-empty', 48 | invalidColumns: 'invalid-columns', 49 | }, 50 | }, 51 | } 52 | 53 | //---------// 54 | // Exports // 55 | //---------// 56 | 57 | module.exports = utils.mapErrIdPropToString(errIds) 58 | -------------------------------------------------------------------------------- 
/tests/helpers/common.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | //---------// 4 | // Imports // 5 | //---------// 6 | 7 | const bPromise = require('bluebird'), 8 | fp = require('lodash/fp'), 9 | Koa = require('koa'), 10 | { bodyParser: koaBodyparser } = require('@koa/bodyparser'), 11 | path = require('path'), 12 | portfinder = require('portfinder'), 13 | sqliteToRest = require('../../lib'), 14 | sqliteToRestConfig = require('../resources/sqlite-to-rest-config') 15 | 16 | //------// 17 | // Init // 18 | //------// 19 | 20 | const defaultDbPath = path.resolve( 21 | path.join(__dirname, '../resources/beer.sqlite3') 22 | ), 23 | getSqliteRouter = sqliteToRest.getSqliteRouter, 24 | getPortAsync = bPromise.promisify(portfinder.getPort) 25 | let server 26 | 27 | //------// 28 | // Main // 29 | //------// 30 | 31 | const startServer = ({ dbPath = defaultDbPath, configOverrides = {} } = {}) => { 32 | const app = new Koa() 33 | app.use(koaBodyparser()) 34 | 35 | return bPromise 36 | .props({ 37 | router: getSqliteRouter({ 38 | dbPath: dbPath, 39 | config: fp.assign(sqliteToRestConfig, configOverrides), 40 | }), 41 | port: getPortAsync(), 42 | }) 43 | .then(({ router, port }) => 44 | bPromise 45 | .fromCallback((cb) => { 46 | server = app 47 | .use(router.routes()) 48 | .use(router.allowedMethods()) 49 | .listen(port, cb) 50 | }) 51 | .thenReturn(port) 52 | ) 53 | } 54 | 55 | const stopServer = () => bPromise.fromCallback((cb) => server.close(cb)) 56 | 57 | //---------// 58 | // Exports // 59 | //---------// 60 | 61 | module.exports = { 62 | startServer: startServer, 63 | stopServer: stopServer, 64 | } 65 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "sqlite-to-rest", 3 | "version": "0.3.0", 4 | "description": "Create a rest api from an existing sqlite3 
database", 5 | "bin": { 6 | "sqlite-to-rest": "bin/sqlite-to-rest.js" 7 | }, 8 | "main": "lib/index.js", 9 | "scripts": { 10 | "test": "mocha tests", 11 | "is-pretty": "prettier --check \"**/*.{js,md}\"", 12 | "prettify": "prettier --write \"**/*.{js,md}\"", 13 | "lint": "eslint \"**/*.js\"", 14 | "verify": "npm run is-pretty && npm run lint && npm test" 15 | }, 16 | "repository": { 17 | "type": "git", 18 | "url": "git+https://github.com/olsonpm/sqlite-to-rest.git" 19 | }, 20 | "author": "phil olson", 21 | "license": "SEE LICENSE IN license.txt", 22 | "bugs": { 23 | "url": "https://github.com/olsonpm/sqlite-to-rest/issues" 24 | }, 25 | "homepage": "https://github.com/olsonpm/sqlite-to-rest#readme", 26 | "dependencies": { 27 | "@koa/bodyparser": "^5.1.1", 28 | "bluebird": "^3.4.0", 29 | "deep-freeze-strict": "^1.1.1", 30 | "JSONStream": "^1.1.1", 31 | "koa": "^2.15.3", 32 | "koa-decoded-querystring": "0.1.1", 33 | "koa-router": "^12.0.1", 34 | "lodash": "^4.17.15", 35 | "madonna-fp": "github:olsonpm/madonna-fp", 36 | "madonna-function": "github:olsonpm/madonna-function", 37 | "ncp": "^2.0.0", 38 | "portfinder": "^1.0.3", 39 | "require-dir": "^0.3.0", 40 | "sqlite3": "^5.0.8", 41 | "structured-cli": "github:olsonpm/structured-cli#d299cf432dfdf34393fa5349f42f30cea79a32a3", 42 | "through2": "^2.0.1" 43 | }, 44 | "devDependencies": { 45 | "@olsonpm/eslint-config-personal": "github:olsonpm/eslint-config-personal", 46 | "chai": "^4.5.0", 47 | "chai-as-promised": "^7.1.2", 48 | "chai-subset": "^1.6.0", 49 | "del": "^4.1.1", 50 | "eslint": "^8.57.1", 51 | "filecompare": "^1.0.4", 52 | "make-dir": "^3.0.0", 53 | "mocha": "^9.2.2", 54 | "prettier": "^3.3.3", 55 | "request": "^2.88.2", 56 | "request-promise": "^4.2.6" 57 | }, 58 | "eslintConfig": { 59 | "extends": "@olsonpm/personal" 60 | }, 61 | "prettier": { 62 | "singleQuote": true, 63 | "trailingComma": "es5", 64 | "semi": false 65 | }, 66 | "engines": { 67 | "node": ">=16" 68 | } 69 | } 70 | 
-------------------------------------------------------------------------------- /lib/commands/generate-skeleton.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | //---------// 4 | // Imports // 5 | //---------// 6 | 7 | const bPromise = require('bluebird'), 8 | bFs = bPromise.promisifyAll(require('fs')), 9 | child_process = require('child_process'), 10 | common = require('../common'), 11 | fp = require('lodash/fp'), 12 | madonnaFunction = require('madonna-function'), 13 | path = require('path'), 14 | utils = require('../utils') 15 | 16 | //------// 17 | // Init // 18 | //------// 19 | 20 | const cliLog = utils.cliLog, 21 | createMadonnaFn = madonnaFunction.create, 22 | execAsync = bPromise.promisify(child_process.exec), 23 | isSqlite3FileSync = common.isSqlite3FileSync, 24 | marg = getGenerateSkeletonMarg() 25 | 26 | //------// 27 | // Main // 28 | //------// 29 | 30 | const mGenerateSkeleton = createMadonnaFn({ 31 | marg: marg, 32 | fn: generateSkeleton, 33 | }) 34 | 35 | function generateSkeleton({ dir, dbPath }) { 36 | const installDeps = hasFileInDirectory('package.json', dir) 37 | .then(function (hasPJson) { 38 | if (hasPJson) { 39 | cliLog('package.json found in working directory.') 40 | } else { 41 | cliLog( 42 | 'package.json not found in working directory. Running `npm init -f`.' 
43 | ) 44 | return execAsync('npm init -f', { cwd: dir }) 45 | } 46 | }) 47 | .then(function () { 48 | cliLog('Installing dependencies') 49 | return execAsync('npm i --save koa@^2 olsonpm/sqlite-to-rest', { 50 | cwd: dir, 51 | }) 52 | }) 53 | 54 | const writeTpl = bPromise 55 | .props({ 56 | tpl: bFs.readFileAsync( 57 | path.join(__dirname, 'resources/skeleton.tpl'), 58 | 'utf8' 59 | ), 60 | skeletonFName: getSkeletonFName(dir), 61 | }) 62 | .then(({ tpl, skeletonFName }) => { 63 | // deactivates es6 delimiter 64 | // https://github.com/lodash/lodash/issues/399 65 | fp.templateSettings.interpolate = /<%=([\s\S]+?)%>/g 66 | 67 | const contents = fp.template(tpl)({ dbPath }) 68 | cliLog('Writing the skeleton server to: ' + skeletonFName) 69 | return bFs.writeFileAsync(path.join(dir, skeletonFName), contents) 70 | }) 71 | 72 | return bPromise.all([writeTpl, installDeps]).then(() => { 73 | cliLog('Finished!') 74 | }) 75 | } 76 | 77 | //-------------// 78 | // Helper Fxns // 79 | //-------------// 80 | 81 | function getGenerateSkeletonMarg() { 82 | return { 83 | dir: { 84 | custom: { isDirectory: common.isDirectorySync }, 85 | }, 86 | dbPath: { 87 | flags: ['require'], 88 | custom: { isSqlite3File: isSqlite3FileSync }, 89 | }, 90 | } 91 | } 92 | 93 | function hasFileInDirectory(fname, dir) { 94 | return bFs.readdirAsync(dir).then(fp.includes(fname)) 95 | } 96 | 97 | function getSkeletonFName(dir) { 98 | return bFs.readdirAsync(dir).then((fileNames) => { 99 | if (!fp.includes('skeleton.js', fileNames)) return 'skeleton.js' 100 | else return getNextSkeletonFName({ fileNames }) 101 | }) 102 | } 103 | 104 | function getNextSkeletonFName({ fileNames, i = 1 }) { 105 | const fname = 'skeleton.' 
+ i + '.js' 106 | if (!fp.includes(fname, fileNames)) return fname 107 | else return getNextSkeletonFName({ fileNames, i: i + 1 }) 108 | } 109 | 110 | //---------// 111 | // Exports // 112 | //---------// 113 | 114 | module.exports = { 115 | fn: generateSkeleton, 116 | mFn: mGenerateSkeleton, 117 | marg: marg, 118 | } 119 | -------------------------------------------------------------------------------- /lib/api/get-sqlite-router.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | //---------// 4 | // Imports // 5 | //---------// 6 | 7 | const bPromise = require('bluebird'), 8 | common = require('../common'), 9 | configService = require('./services/config'), 10 | dbSchema = require('./services/db-schema'), 11 | fp = require('lodash/fp'), 12 | getSchema = require('./helpers/get-schema'), 13 | { bodyParser: koaBodyparser } = require('@koa/bodyparser'), 14 | koaDecodedQuerystring = require('koa-decoded-querystring'), 15 | KoaRouter = require('koa-router'), 16 | madonnaFn = require('madonna-function'), 17 | requireDir = require('require-dir'), 18 | sqlite3 = require('sqlite3'), 19 | utils = require('../utils') 20 | 21 | //------// 22 | // Init // 23 | //------// 24 | 25 | bPromise.promisifyAll(sqlite3.Database.prototype) 26 | 27 | const createMadonnaFunction = madonnaFn.create, 28 | forEachWithKey = utils.forEachWithKey, 29 | isSqlite3FileSync = common.isSqlite3FileSync, 30 | routeBuilders = requireDir('./helpers/route-builders', { recurse: true }) 31 | 32 | //------// 33 | // Main // 34 | //------// 35 | 36 | const mGetSqliteRouter = createMadonnaFunction({ 37 | marg: { 38 | schema: { 39 | dbPath: { 40 | flags: ['require'], 41 | custom: { isSqlite3File: isSqlite3FileSync }, 42 | }, 43 | config: ['isLadenPlainObject'], 44 | }, 45 | }, 46 | fn: getSqliteRouter, 47 | }) 48 | 49 | function getSqliteRouter({ dbPath, config = {} }) { 50 | return getSchema(dbPath) 51 | .then((schema) => { 52 | dbSchema.set(schema) 53 
| configService.set(config) 54 | 55 | const router = new KoaRouter({ prefix: configService.get().prefix }) 56 | 57 | router.use(koaBodyparser()).use(koaDecodedQuerystring()) 58 | 59 | return bPromise.props({ 60 | readOnly: bGetConnection(dbPath, sqlite3.OPEN_READONLY), 61 | readWrite: bGetConnection(dbPath, sqlite3.OPEN_READWRITE), 62 | router: router, 63 | }) 64 | }) 65 | .then(({ readOnly, readWrite, router }) => { 66 | const connections = { 67 | readOnly: readOnly, 68 | readWrite: readWrite, 69 | }, 70 | structTypes = { 71 | table: { 72 | tabularItem: dbSchema.get().tables, 73 | methods: ['get', 'post', 'delete'], 74 | }, 75 | view: { 76 | tabularItem: dbSchema.get().views, 77 | methods: ['get'], 78 | }, 79 | } 80 | 81 | // a whole bunch of routing side effects happen here! 82 | fp.each(getBuildRoutes(router, connections), structTypes) 83 | 84 | return router 85 | }) 86 | } 87 | 88 | //-------------// 89 | // Helper Fxns // 90 | //-------------// 91 | 92 | function getBuildRoutes(router, connections) { 93 | return (aStructType) => { 94 | forEachWithKey((columns, name) => { 95 | fp.each((aTableMethod) => { 96 | columns = fp.keyBy('name', columns) 97 | routeBuilders[aTableMethod](name, columns, connections, router) 98 | }, aStructType.methods) 99 | }, aStructType.tabularItem) 100 | } 101 | } 102 | 103 | function bGetConnection(dbPath, mode) { 104 | return new bPromise((resolve, reject) => { 105 | const db = new sqlite3.Database(dbPath, mode, (err) => { 106 | return err ? 
reject(err) : resolve(db) 107 | }) 108 | }) 109 | } 110 | 111 | //---------// 112 | // Exports // 113 | //---------// 114 | 115 | module.exports = mGetSqliteRouter 116 | -------------------------------------------------------------------------------- /lib/utils.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | //---------// 4 | // Imports // 5 | //---------// 6 | 7 | const fp = require('lodash/fp'), 8 | state = require('./services/state'), 9 | vanillaReduce = require('lodash/reduce') 10 | 11 | //------// 12 | // Init // 13 | //------// 14 | 15 | const capIteratee = getCapIteratee(), 16 | transformWithKey = capIteratee(3, fp.transform.convert({ cap: false })) 17 | 18 | //------// 19 | // Main // 20 | //------// 21 | 22 | // 23 | // Flipped fp fxns. These could be done via convert, but I prefer the 24 | // explicit representation. 25 | // 26 | const concat = fp.curry((a, b) => fp.concat(b, a)), 27 | get = fp.curry((b, a) => fp.get(a, b)), 28 | gt = fp.curry((a, b) => fp.gt(b, a)), 29 | includes = fp.curry((b, a) => fp.includes(a, b)), 30 | set = fp.curry((b, c, a) => fp.set(a, b, c)), 31 | startsWith = fp.curry((b, a) => fp.startsWith(a, b)) 32 | // 33 | // End of flipped fxns 34 | // 35 | 36 | // aliases 37 | const append = concat 38 | const prepend = fp.concat 39 | // end aliases 40 | 41 | const cliLog = (str) => { 42 | if (state.isCli()) console.log(str) 43 | } 44 | 45 | const forEachWithKey = capIteratee(2, fp.forEach.convert({ cap: false })) 46 | 47 | const hasAll = (strArr) => fp.allPass(fp.map((str) => fp.has(str), strArr)) 48 | 49 | const hasFileExtension = (path) => !!path.match(/\/?[^/]*\.[^/]*$/) 50 | 51 | const isDefined = fp.negate(fp.isUndefined) 52 | 53 | const jstring = (toStr) => JSON.stringify(toStr, null, 2) 54 | 55 | const mapErrIdPropToString = transformWithKey(getErrIdsToString([]), {}) 56 | 57 | const mapValuesWithKey = capIteratee(2, fp.mapValues.convert({ cap: false })) 58 | 59 | 
const mapWithKey = capIteratee(2, fp.map.convert({ cap: false })) 60 | 61 | const mutableAssign = fp.assign.convert({ immutable: false }) 62 | 63 | const mutableSet = fp.set.convert({ immutable: false }) 64 | 65 | const reduceFirst = fp.curry((a, b) => vanillaReduce(b, a)) 66 | 67 | const reduceWithKey = capIteratee(3, fp.reduce.convert({ cap: false })) 68 | 69 | const reduceWithKeyAndObj = fp.reduce.convert({ cap: false }) 70 | 71 | const takeWhileWithIndexAndArr = fp.takeWhile.convert({ cap: false }) 72 | 73 | const tee = (val) => { 74 | console.log(jstring(val)) 75 | return val 76 | } 77 | 78 | const teep = fp.curry((first, val) => { 79 | console.log(first) 80 | console.log(jstring(val)) 81 | return val 82 | }) 83 | 84 | const transformWithKeyAndObj = fp.transform.convert({ cap: false }) 85 | 86 | //-------------// 87 | // Helper Fxns // 88 | //-------------// 89 | 90 | function getErrIdsToString(path) { 91 | return function errIdsToString(res, val, key) { 92 | // val should only ever be a string or a plain object 93 | if (fp.isPlainObject(val)) { 94 | path.push(key) 95 | res[key] = transformWithKey(getErrIdsToString(path), {}, val) 96 | path.pop() 97 | } else { 98 | res[key] = fp.reduce((res, val) => res + val + '_', '', path) + val 99 | } 100 | } 101 | } 102 | 103 | function getCapIteratee() { 104 | return fp.curry((cap, fn) => 105 | fp.curryN(fn.length, (iteratee, ...args) => 106 | fn.apply(null, [fp.ary(cap, iteratee)].concat(args)) 107 | ) 108 | ) 109 | } 110 | 111 | //---------// 112 | // Exports // 113 | //---------// 114 | 115 | module.exports = { 116 | append: append, 117 | cliLog: cliLog, 118 | forEachWithKey: forEachWithKey, 119 | get: get, 120 | gt: gt, 121 | hasAll: hasAll, 122 | hasFileExtension: hasFileExtension, 123 | includes: includes, 124 | isDefined: isDefined, 125 | jstring: jstring, 126 | mapErrIdPropToString: mapErrIdPropToString, 127 | mapValuesWithKey: mapValuesWithKey, 128 | mapWithKey: mapWithKey, 129 | mutableAssign: mutableAssign, 
130 | mutableSet: mutableSet, 131 | prepend: prepend, 132 | reduceFirst: reduceFirst, 133 | reduceWithKey: reduceWithKey, 134 | reduceWithKeyAndObj: reduceWithKeyAndObj, 135 | set: set, 136 | startsWith: startsWith, 137 | takeWhileWithIndexAndArr: takeWhileWithIndexAndArr, 138 | tee: tee, 139 | teep: teep, 140 | transformWithKey: transformWithKey, 141 | transformWithKeyAndObj: transformWithKeyAndObj, 142 | } 143 | -------------------------------------------------------------------------------- /lib/api/helpers/route-builders/handle-update.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | //---------// 4 | // Imports // 5 | //---------// 6 | 7 | const common = require('./common'), 8 | errIds = require('./err-ids'), 9 | fp = require('lodash/fp') 10 | 11 | //------// 12 | // Init // 13 | //------// 14 | 15 | const { 16 | attachError, 17 | bRunQuery, 18 | getPkColumnNames, 19 | hasFlag, 20 | parseQueryForPkColumns, 21 | } = common, 22 | qsErrIds = errIds.update.queryString, 23 | rbErrIds = errIds.update.requestBody 24 | 25 | //------// 26 | // Main // 27 | //------// 28 | 29 | const handleHead = (ctx, next, name, columns, connections) => { 30 | const pkColumnNames = getPkColumnNames(columns), 31 | updateRow = `UPDATE ${name}`, 32 | // validate querystring to ensure all pks were passed and nothing else 33 | parsedQuery = parseQueryForPkColumns( 34 | ctx.decodedQuerystring, 35 | pkColumnNames, 36 | qsErrIds 37 | ) 38 | 39 | if (fp.invoke('hasErr', parsedQuery)) { 40 | return attachError(ctx, parsedQuery) 41 | } 42 | 43 | // now validate the payload to ensure valid non-pk columns were passed 44 | 45 | const reqBody = ctx.request.body, 46 | err = getRequestBodyError(reqBody, columns) 47 | 48 | if (err) { 49 | return attachError(ctx, err) 50 | } 51 | 52 | // so far so good - movin' on 53 | 54 | let { query, params } = getUpdatequery(ctx.query, reqBody, updateRow), 55 | where 56 | 57 | return 
bRunQuery(connections.readWrite, query, params) 58 | .then((res) => { 59 | if (!res.changes) { 60 | ctx.status = 404 61 | return next() 62 | } 63 | 64 | ctx.status = 201 65 | ;({ where, params } = getWhereClause(ctx.query)) 66 | 67 | query = `SELECT * FROM ${name} WHERE ` + where 68 | 69 | return connections.readOnly.getAsync(query, params).then((row) => { 70 | ctx.body = row 71 | ctx.set('content-location', `${ctx.path}?${ctx.decodedQuerystring}`) 72 | return next() 73 | }) 74 | }) 75 | .catch((err) => { 76 | ctx.status = 500 77 | console.error(err) 78 | }) 79 | } 80 | 81 | //-------------// 82 | // Helper Fxns // 83 | //-------------// 84 | 85 | function getRequestBodyError(reqBody, columns) { 86 | const allowedColumns = getNonPkColumnNames(columns), 87 | requestedColumns = fp.keys(reqBody), 88 | parseErr = 'Error while parsing the request body: ' 89 | 90 | if (!fp.size(reqBody)) { 91 | return { 92 | msg: parseErr + 'Update requires a non-empty request body', 93 | id: rbErrIds.mustBeNonEmpty, 94 | } 95 | } 96 | 97 | const invalidColumns = fp.without(allowedColumns, requestedColumns) 98 | if (fp.size(invalidColumns)) { 99 | return { 100 | msg: 101 | parseErr + 102 | 'Invalid columns passed.\n' + 103 | 'invalid columns: ' + 104 | invalidColumns.join(', ') + 105 | '\navailable columns: ' + 106 | allowedColumns.join(', '), 107 | id: rbErrIds.invalidColumns, 108 | } 109 | } 110 | } 111 | 112 | function getNonPkColumnNames(columns) { 113 | return fp.flow( 114 | fp.values, 115 | fp.reject(hasFlag('isPrimaryKey')), 116 | fp.map('name') 117 | )(columns) 118 | } 119 | 120 | function getUpdatequery(query, reqBody, queryStart) { 121 | const setClause = getSetClause(reqBody), 122 | whereClause = getWhereClause(query) 123 | 124 | return { 125 | query: queryStart + setClause.set + ' WHERE ' + whereClause.where, 126 | params: setClause.params.concat(whereClause.params), 127 | } 128 | } 129 | 130 | function getSetClause(reqBody) { 131 | return { 132 | set: 133 | ' SET ' + 134 | 
fp.flow( 135 | fp.keys, 136 | fp.map((col) => col + ' = ?'), 137 | fp.join(', ') 138 | )(reqBody), 139 | params: fp.values(reqBody), 140 | } 141 | } 142 | 143 | function getWhereClause(query) { 144 | return { 145 | where: fp.flow( 146 | fp.keys, 147 | fp.map((col) => col + ' = ?'), 148 | fp.join(' AND ') 149 | )(query), 150 | params: fp.values(query), 151 | } 152 | } 153 | 154 | //---------// 155 | // Exports // 156 | //---------// 157 | 158 | module.exports = handleHead 159 | -------------------------------------------------------------------------------- /lib/api/services/config.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | //---------// 4 | // Imports // 5 | //---------// 6 | 7 | const deepFreezeStrict = require('deep-freeze-strict'), 8 | dbSchema = require('./db-schema'), 9 | fp = require('lodash/fp'), 10 | madonna = require('madonna-fp/es6'), 11 | utils = require('../../utils') 12 | 13 | //------// 14 | // Init // 15 | //------// 16 | 17 | let configObj 18 | 19 | const { reduceWithKeyAndObj } = utils 20 | 21 | //------// 22 | // Main // 23 | //------// 24 | 25 | function setConfig(aConfigObj) { 26 | const dbSchemaObj = dbSchema.get() 27 | 28 | if (arguments.length !== 1) { 29 | throw new Error( 30 | 'Invalid Input: This function requires exactly ' + 'one argument' 31 | ) 32 | } 33 | validateConfig(aConfigObj, dbSchemaObj) 34 | 35 | // no errors - good to go 36 | const appConfigDefaults = getApplicationConfigDefaults(dbSchemaObj) 37 | 38 | // First we merge the passed config over the app defaults 39 | // Then we merge each table and view over the 40 | // 'allTablesAndViews' configuration 41 | // Finally we add the previously lost opts property 42 | const prefix = aConfigObj.prefix || '' 43 | aConfigObj = fp.omit('prefix', aConfigObj) 44 | const mergedTablesAndViews = fp.flow( 45 | fp.mergeWith(configCustomizer, appConfigDefaults), 46 | reduceWithKeyAndObj(mergeEachTableAndView, {}) 47 | 
)(aConfigObj) 48 | 49 | configObj = { 50 | opts: aConfigObj.opts, 51 | tablesAndViews: mergedTablesAndViews, 52 | } 53 | if (prefix) configObj.prefix = prefix 54 | 55 | configObj = deepFreezeStrict(configObj) 56 | 57 | return configObj 58 | } 59 | 60 | const getConfig = () => configObj 61 | 62 | //-------------// 63 | // Helper Fxns // 64 | //-------------// 65 | 66 | function getVConfig(dbSchemaObj) { 67 | return madonna.createValidator({ 68 | schema: getConfigSchema(dbSchemaObj), 69 | opts: { name: 'vConfig' }, 70 | }) 71 | } 72 | 73 | const vItem = getVItem() 74 | 75 | function mergeEachTableAndView(res, val, key, obj) { 76 | switch (key) { 77 | case 'allTablesAndViews': 78 | return res 79 | case 'tablesAndViews': 80 | return fp.mapValues( 81 | fp.mergeWith(configCustomizer, obj.allTablesAndViews), 82 | val 83 | ) 84 | default: 85 | return fp.assign(res, val) 86 | } 87 | } 88 | 89 | function getConfigSchema(dbSchemaObj) { 90 | const vTablesAndViews = getVTablesAndViews(dbSchemaObj) 91 | return { 92 | allTablesAndViews: { passTo: vItem }, 93 | tablesAndViews: { passTo: vTablesAndViews }, 94 | prefix: ['isLadenString'], 95 | } 96 | } 97 | 98 | function validateConfig(dirtyObj, dbSchemaObj) { 99 | return madonna.validateSternly(getConfigSchema(dbSchemaObj), dirtyObj) 100 | } 101 | 102 | function getVTablesAndViews(dbSchemaObj) { 103 | const tablesAndViewsMarg = fp.flow( 104 | fp.assign(dbSchemaObj.views), 105 | fp.mapValues(() => ({ passTo: vItem })) 106 | )(dbSchemaObj.tables) 107 | 108 | return madonna.createSternValidator({ 109 | schema: tablesAndViewsMarg, 110 | opts: { 111 | name: 'vTablesAndViews', 112 | }, 113 | }) 114 | } 115 | 116 | function getVItem() { 117 | return madonna.createSternValidator({ 118 | schema: { 119 | maxRange: ['isPositiveNumber'], 120 | flags: { 121 | allContainedIn: ['sendContentRangeInHEAD'], 122 | }, 123 | }, 124 | opts: { 125 | name: 'vItem', 126 | }, 127 | }) 128 | } 129 | 130 | function getApplicationConfigDefaults(dbSchemaObj) { 
131 | const emptyObjPerTable = fp.mapValues(() => ({}), dbSchemaObj.tablesAndViews) 132 | 133 | return { 134 | allTablesAndViews: { 135 | maxRange: 1000, 136 | }, 137 | tablesAndViews: emptyObjPerTable, 138 | } 139 | } 140 | 141 | function configCustomizer(objVal, srcVal) { 142 | if (fp.isArray(objVal)) { 143 | // src will also be an array then 144 | let res = fp.union(objVal, srcVal) 145 | const removeThese = fp.flow( 146 | fp.filter(fp.startsWith('-')), 147 | fp.map((str) => str.slice(1)) 148 | )(res) 149 | 150 | res = fp.flow( 151 | fp.reject(fp.startsWith('-')), 152 | fp.without(fp, removeThese), 153 | fp.sortBy(fp.identity) 154 | )(res) 155 | 156 | return res 157 | } 158 | } 159 | 160 | //---------// 161 | // Exports // 162 | //---------// 163 | 164 | module.exports = { 165 | get: getConfig, 166 | set: setConfig, 167 | getVConfig: getVConfig, 168 | } 169 | -------------------------------------------------------------------------------- /lib/api/helpers/get-schema.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | //---------// 4 | // Imports // 5 | //---------// 6 | 7 | const bPromise = require('bluebird'), 8 | fp = require('lodash/fp'), 9 | path = require('path'), 10 | sqlite3 = require('sqlite3'), 11 | utils = require('../../utils') 12 | 13 | //------// 14 | // Init // 15 | //------// 16 | 17 | sqlite3.verbose() 18 | bPromise.promisifyAll(sqlite3.Database.prototype) 19 | 20 | const queries = getQueries(), 21 | reduceWithKey = utils.reduceWithKey 22 | let db // set in `bGetDb`. Needs to have file-wide scope 23 | 24 | //------// 25 | // Main // 26 | //------// 27 | 28 | function getSchema(dbPath) { 29 | return bGetDb(dbPath) 30 | .then(() => db.allAsync(queries.allTablesAndViews)) 31 | .then((rows) => { 32 | const tableAndViewNames = fp.flow( 33 | fp.partition(['type', 'table']), 34 | reduceWithKey((res, aPartition, index) => { 35 | return index === 0 36 | ? 
fp.set('tables', fp.map('name', aPartition), res) 37 | : fp.set('views', fp.map('name', aPartition), res) 38 | }, {}) 39 | )(rows) 40 | 41 | // ugly I know, but I'm trying to pass tableAndViewNames without conflicting 42 | // with the actual table and view names 43 | return bPromise.props({ 44 | partitioned: tableAndViewNames, 45 | flat: bPromise.props( 46 | fp.reduce( 47 | (res, name) => 48 | fp.set(name, db.allAsync(queries.getPragmaTable(name)), res), 49 | {}, 50 | fp.flatMap(fp.identity, tableAndViewNames) 51 | ) 52 | ), 53 | }) 54 | }) 55 | .then(({ flat, partitioned }) => { 56 | const tableAndViewToPragma = fp.mapValues( 57 | fp.flow(fp.sortBy('cid'), fp.map(modifyColumnProperties)), 58 | flat 59 | ) 60 | 61 | let schema = { 62 | dbPath: path.resolve(dbPath), 63 | } 64 | 65 | schema = fp.assign( 66 | schema, 67 | fp.mapValues( 68 | fp.reduce( 69 | (res, name) => fp.set(name, tableAndViewToPragma[name], res), 70 | {} 71 | ), 72 | partitioned 73 | ) 74 | ) 75 | 76 | return schema 77 | }) 78 | } 79 | 80 | //-------------// 81 | // Helper Fxns // 82 | //-------------// 83 | 84 | // Slim down and reword some of the column properties so we have something 85 | // sensible to work with 86 | function modifyColumnProperties(cols) { 87 | return fp.flow( 88 | fp.omit('cid'), 89 | fp.omitBy(isUnnecessary), 90 | fp.mapKeys(renamePragmaProperties), 91 | booleansToFlags 92 | )(cols) 93 | } 94 | 95 | // by the time this is called, falsey booleans will have been omitted 96 | function booleansToFlags(val) { 97 | const possibleFlags = ['isNullable', 'isPrimaryKey'], 98 | flags = fp.intersection(possibleFlags, fp.keys(val)) 99 | 100 | if (flags.length) { 101 | // unsure why notnull can be set to 0 while also pk set to 1 and type set to 102 | // INTEGER. Those are supposed to mean the value is set to rowid (and 103 | // thus never null). Let's set it to just isPrimaryKey for now. 
104 | let filteredFlags = flags 105 | if (flags.length === 2) filteredFlags = ['isPrimaryKey'] 106 | 107 | val.flags = filteredFlags 108 | val = fp.omitAll(flags, val) 109 | } 110 | 111 | return val 112 | } 113 | 114 | // just make some of the property names more readable. I understand this may 115 | // have implications in debugging - but I first want to get this program 116 | // working prior to dealing with badly named properties. 117 | function renamePragmaProperties(key) { 118 | switch (key) { 119 | case 'notnull': 120 | return 'isNullable' 121 | case 'dflt_value': 122 | return 'default' 123 | case 'pk': 124 | return 'isPrimaryKey' 125 | default: 126 | return key 127 | } 128 | } 129 | 130 | // omits properties where defaults can be assumed. This reduces verbosity. 131 | function isUnnecessary(val, key) { 132 | switch (key) { 133 | case 'type': 134 | return !val 135 | case 'notnull': 136 | return val 137 | case 'dflt_value': 138 | return !val 139 | case 'pk': 140 | return !val 141 | } 142 | } 143 | 144 | function bGetDb(dbPath) { 145 | return new bPromise((resolve, reject) => { 146 | db = new sqlite3.Database(dbPath, (err) => (err ? 
reject(err) : resolve())) 147 | }) 148 | } 149 | 150 | function getQueries() { 151 | return { 152 | allTablesAndViews: 153 | "select name, type from sqlite_master where type='table' or type='view';", 154 | getPragmaTable: (tbl) => `pragma table_info(${tbl})`, 155 | } 156 | } 157 | 158 | //---------// 159 | // Exports // 160 | //---------// 161 | 162 | module.exports = getSchema 163 | -------------------------------------------------------------------------------- /tests/resources/dump.out: -------------------------------------------------------------------------------- 1 | PRAGMA foreign_keys=OFF; 2 | BEGIN TRANSACTION; 3 | CREATE TABLE city ( 4 | state, city_name, PRIMARY KEY (state, city_name) 5 | ); 6 | INSERT INTO "city" VALUES('CO','Boulder'); 7 | INSERT INTO "city" VALUES('WI','Madison'); 8 | INSERT INTO "city" VALUES('WI','New Glarus'); 9 | INSERT INTO "city" VALUES('WI','Milwaukee'); 10 | CREATE TABLE brewery( 11 | id integer primary key 12 | , state 13 | , city_name 14 | , name 15 | , foreign key (state, city_name) references city(state, city_name) 16 | ); 17 | INSERT INTO "brewery" VALUES(1,'CO','Boulder','Avery'); 18 | INSERT INTO "brewery" VALUES(2,'CO','Boulder','Upslope'); 19 | INSERT INTO "brewery" VALUES(3,'WI','New Glarus','New Glarus'); 20 | INSERT INTO "brewery" VALUES(4,'WI','Madison','One Barrel'); 21 | INSERT INTO "brewery" VALUES(5,'WI','Madison','n/a'); 22 | CREATE TABLE beer( 23 | id integer primary key 24 | , brewery_id references brewery (id) 25 | , description 26 | , name 27 | ); 28 | INSERT INTO "beer" VALUES(1,1,'An authentic Belgian style white ale, this Rascal is unfiltered and cleverly spiced with coriander and Curaçao orange peel producing a refreshingly zesty classic ale.','White Rascal'); 29 | INSERT INTO "beer" VALUES(2,1,'Avery IPA features a citrusy, floral bouquet and a rich, malty finish.','Avery IPA'); 30 | INSERT INTO "beer" VALUES(3,1,'Chocolate malt gives this beer a brown sugar maltiness with hints of vanilla and nuts, 
while subtle hopping gives it an overall drinkability that’s second to none.','Ellie''s Brown Ale'); 31 | INSERT INTO "beer" VALUES(4,1,'A contemporary rendition of a classic style, Joe''s is hopped with purpose: beautifully bitter and dry with an abundance of floral, Noble German hops.','Joe''s Pils'); 32 | INSERT INTO "beer" VALUES(5,2,'Drawing inspiration from all over the globe, our Thai Style White IPA is an artful combination of uncommon, yet carefully chosen ingredients. Brewed using Belgian Wit yeast, hopped like an American IPA and infused with seven Asian-inspired spices, this beer is far from traditional. The sharp, juicy citrus notes of the hops interplay with the unique Thai spice blend to create an unexpected, one of a kind refreshment.','Thai Style White IPA'); 33 | INSERT INTO "beer" VALUES(6,2,'This clear, crisp, dry Pale Ale is our signature beer–the one that started it all! It’s refreshing characteristics, and signature dry finish, blend smooth malt flavors with a unique spicy hop bitterness. Sure to be the “go-to” beer on any camping trip, fishing excursion or music festival.','Pale Ale'); 34 | INSERT INTO "beer" VALUES(7,2,'Blood oranges and Mandarina Bavaria hops seamlessly complement one another in this saison providing aromatic notes of citrus and bright, juicy flavors. The addition of pink peppercorns amplifies the spicy notes imparted by the saison yeast while balancing the beer’s natural sweetness and dry finish.','Blood Orange Saison'); 35 | INSERT INTO "beer" VALUES(8,2,'Adding organic baby bear pumpkins to malt and hops makes this truly a local farm to brewhouse collaboration. A custom blend of six spices rounds out the flavors in this highly anticipated beer.','Pumpkin Ale'); 36 | INSERT INTO "beer" VALUES(9,3,'Expect this ale to be fun, fruity and satisfying. 
You know you''re in Wisconsin when you see the Spotted Cow.','Spotted Cow'); 37 | INSERT INTO "beer" VALUES(10,3,'A session beer with a bright bold blend of five hops that flirt obligingly with the smooth malty backside.','Moon Man'); 38 | INSERT INTO "beer" VALUES(11,3,'The collaboration of two Craft companies both led by women, New Glarus Brewing and Weyermann Malting, is unique. You hold the result “Two Women” a Classic Country Lager.','Two Women'); 39 | INSERT INTO "beer" VALUES(12,3,'You hold the happy accident of Wisconsin’s favorite fruit aged in oak with an almost magical wild fermentation.','Serendipity'); 40 | INSERT INTO "beer" VALUES(13,4,'A good example of the American twist on the English classic. It is more of a deep copper than pale gold, as is common for the style, and has a unique malt profile due in large part to the addition of 20% American Wheat Malt. A strong American hop structure provides an enticing aroma of tangerine and grapefruit and smooth finish that will keep you coming back for more.','Penguin Pale Ale'); 41 | INSERT INTO "beer" VALUES(14,4,'This beer is black in color but very smooth and does not have a bitter malt flavor found in many stouts and porters. It is aggressively hopped, including a dry hop addition that gives Banjo Cat a strong citrus aroma and vibrant flavor resulting in a well-balanced black ale.','Banjo Cat'); 42 | INSERT INTO "beer" VALUES(15,4,'This is very true to the style that originated in Koln (Cologne), Germany. Our Kolsch is unfiltered and cold-conditioned. The result is a beer that is clean and refreshing, with a crisp finish. 
It is agreeable without being boring; the beer lover’s session ale.','Commuter Kolsch'); 43 | INSERT INTO "beer" VALUES(16,5,'Belgian',NULL); 44 | CREATE VIEW beer_per_brewery( 45 | beer_id, beer_name, brewery_state, brewery_city, brewery_name 46 | ) 47 | as 48 | select beer.id 49 | , beer.name 50 | , state 51 | , city_name 52 | , brewery.name 53 | from beer 54 | join brewery on beer.brewery_id = brewery.id; 55 | COMMIT; 56 | -------------------------------------------------------------------------------- /lib/api/helpers/route-builders/post.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | //---------// 4 | // Imports // 5 | //---------// 6 | 7 | const common = require('./common'), 8 | errIds = require('./err-ids'), 9 | fp = require('lodash/fp'), 10 | handleUpdate = require('./handle-update'), 11 | utils = require('../../../utils') 12 | 13 | //------// 14 | // Init // 15 | //------// 16 | 17 | const { bRunQuery, attachError, hasFlag } = common, 18 | rbErrIds = errIds.post.requestBody 19 | 20 | //------// 21 | // Main // 22 | //------// 23 | 24 | const buildPost = (name, columns, connections, router) => { 25 | const insertRow = 'INSERT INTO ' + name 26 | 27 | router.post('/' + name, (ctx, next) => { 28 | if (ctx.decodedQuerystring) { 29 | return handleUpdate(ctx, next, name, columns, connections) 30 | } 31 | 32 | const reqBody = ctx.request.body, 33 | err = getRequestBodyError(reqBody, columns) 34 | 35 | if (err) { 36 | return attachError(ctx, err) 37 | } 38 | 39 | let query = getInsertQuery(reqBody, insertRow), 40 | params = fp.values(reqBody), 41 | where 42 | 43 | return bRunQuery(connections.readWrite, query, params) 44 | .then((res) => { 45 | ctx.status = 201 46 | query = `SELECT * FROM ${name} WHERE ` 47 | if (res.lastID) { 48 | query += `rowid = ${res.lastID}` 49 | params = [] 50 | } else { 51 | // if lastID is falsey, then that means we have a without rowid table 52 | ;({ where, params } = 
getPostInsertWhere(columns, reqBody)) 53 | query += where 54 | } 55 | 56 | return connections.readOnly.getAsync(query, params) 57 | }) 58 | .then((row) => { 59 | ctx.body = row 60 | const pkProps = fp.flow( 61 | fp.values, 62 | fp.filter(hasFlag('isPrimaryKey')), 63 | fp.map('name') 64 | )(columns) 65 | 66 | const locationQuery = fp.flow( 67 | fp.pick(pkProps), 68 | fp.toPairs, 69 | fp.map((keyVal) => keyVal.join('=')), 70 | fp.join('&') 71 | )(row) 72 | 73 | ctx.set('location', `/${name}?` + locationQuery) 74 | return next() 75 | }) 76 | .catch((err) => { 77 | ctx.status = 500 78 | console.error(err) 79 | }) 80 | }) 81 | } 82 | 83 | //-------------// 84 | // Helper Fxns // 85 | //-------------// 86 | 87 | function getRequestBodyError(reqBody, columns) { 88 | const columnNames = fp.keys(columns), 89 | requiredColumns = getRequiredColumns(columnNames, columns), 90 | requestedColumns = fp.keys(reqBody), 91 | parseErr = 'Error while parsing request body: ' 92 | 93 | const invalidColumns = fp.without(columnNames, requestedColumns) 94 | if (fp.size(invalidColumns)) { 95 | return { 96 | msg: 97 | parseErr + 98 | 'Invalid columns passed.\n' + 99 | 'invalid columns: ' + 100 | invalidColumns.join(', ') + 101 | '\navailable columns: ' + 102 | columnNames.join(', '), 103 | id: rbErrIds.invalidColumns, 104 | } 105 | } 106 | 107 | const requiredColumnsMissing = fp.without(requestedColumns, requiredColumns) 108 | if (fp.size(requiredColumnsMissing)) { 109 | return { 110 | msg: 111 | parseErr + 112 | 'All non-nullable and non INTEGER PRIMARY KEY columns ' + 113 | 'must be passed.\n' + 114 | 'missing columns: ' + 115 | requiredColumnsMissing.join(', ') + 116 | '\npost request body: ' + 117 | utils.jstring(reqBody), 118 | id: rbErrIds.missingRequiredColumns, 119 | } 120 | } 121 | } 122 | 123 | function getPostInsertWhere(columns, reqBody) { 124 | const pkReqProperties = fp.pick(getPrimaryKeyColumns(columns), reqBody) 125 | 126 | const params = fp.flow(fp.values, 
fp.flatten)(pkReqProperties) 127 | 128 | const where = fp.flow( 129 | fp.keys, 130 | fp.map((col) => col + ' = ?'), 131 | fp.join(' AND ') 132 | )(pkReqProperties) 133 | 134 | return { 135 | where: where, 136 | params: params, 137 | } 138 | } 139 | 140 | function getPrimaryKeyColumns(columns) { 141 | return fp.flow(fp.pickBy(hasFlag('isPrimaryKey')), fp.get('name'))(columns) 142 | } 143 | 144 | function getRequiredColumns(columnNames, columns) { 145 | const isNullable = (cname) => fp.includes('isNullable', columns[cname].flags) 146 | const isIntegerPk = (cname) => { 147 | return ( 148 | fp.includes('isPrimaryKey', columns[cname].flags) && 149 | fp.getOr('', cname + '.type', columns).toLowerCase() === 'integer' 150 | ) 151 | } 152 | 153 | return fp.reject(fp.anyPass([isNullable, isIntegerPk]), columnNames) 154 | } 155 | 156 | function getInsertQuery(reqBody, queryStart) { 157 | let res = queryStart 158 | const cols = fp.keys(reqBody) 159 | if (fp.size(reqBody)) { 160 | res += 161 | ' (' + 162 | cols.join(', ') + 163 | ') VALUES (' + 164 | fp.map(fp.constant('?'), cols).join(', ') + 165 | ')' 166 | } else { 167 | res += ' DEFAULT VALUES' 168 | } 169 | 170 | return res 171 | } 172 | 173 | //---------// 174 | // Exports // 175 | //---------// 176 | 177 | module.exports = buildPost 178 | -------------------------------------------------------------------------------- /docs/tutorial.md: -------------------------------------------------------------------------------- 1 | # Use sqlite-to-rest 2 | 3 | This tutorial will initially walk you through creating a simple sqlite database 4 | to avoid gotchas particular to your existing database. [Feel free to skip 5 | those steps](#end-of-database-steps) 6 | 7 | 1. Before you do anything you need a sqlite database to consume. 
If you 8 | don't already have sqlite3 installed (test via `which sqlite3`), then either 9 | [download a precompiled binary](https://www.sqlite.org/download.html) with the 10 | cli tools built-in, or more preferably use your package manager to install 11 | it for you. 12 | 13 | 2. Now that you have the sqlite3 command line tool available, you can create 14 | your very own beer database. 15 | 16 | ```sh 17 | $ cd 18 | $ sqlite3 beer.sqlite3 19 | ``` 20 | 21 | 3. Create your brewery and beer tables. 22 | 23 | _Just a friendly reminder to [enable foreign key support](https://www.sqlite.org/foreignkeys.html#fk_enable) 24 | if that sort of thing matters to you_ 25 | 26 | ```sql 27 | CREATE TABLE brewery( 28 | id integer primary key 29 | , state 30 | , city_name 31 | , name 32 | ); 33 | 34 | CREATE TABLE beer( 35 | id integer primary key 36 | , brewery_id references brewery (id) 37 | , description 38 | , name 39 | ); 40 | ``` 41 | 42 | 4. And insert some data 43 | 44 | ```sql 45 | -- breweries 46 | INSERT INTO "brewery" (state, city_name, name) 47 | VALUES ('CO','Boulder','Avery') 48 | , ('WI','New Glarus','New Glarus'); 49 | 50 | -- beers 51 | INSERT INTO "beer" (brewery_id, description, name) 52 | VALUES(1,'An authentic Belgian style white ale, this Rascal is unfiltered and cleverly spiced with coriander and Curaçao orange peel producing a refreshingly zesty classic ale.','White Rascal') 53 | , (1,'Avery IPA features a citrusy, floral bouquet and a rich, malty finish.','Avery IPA') 54 | , (1,'Chocolate malt gives this beer a brown sugar maltiness with hints of vanilla and nuts, while subtle hopping gives it an overall drinkability that’s second to none.','Ellie''s Brown Ale') 55 | , (2,'Expect this ale to be fun, fruity and satisfying. 
You know you''re in Wisconsin when you see the Spotted Cow.','Spotted Cow') 56 | , (2,'A session beer with a bright bold blend of five hops that flirt obligingly with the smooth malty backside.','Moon Man') 57 | , (2,'The collaboration of two Craft companies both led by women, New Glarus Brewing and Weyermann Malting, is unique. You hold the result “Two Women” a Classic Country Lager.','Two Women'); 58 | ``` 59 | 60 | 5. Phew, all that copy pasta. Go ahead and exit. 61 | ```sh 62 | sqlite> .exit 63 | ``` 64 | 65 | ##### End of database steps 66 | 67 | 1. Time to install sqlite-to-rest globally via npm to gain its cli 68 | 69 | ```sh 70 | $ npm install --global olsonpm/sqlite-to-rest#dev 71 | ``` 72 | 73 | _The cli is very friendly and easy to explore via `sqlite-to-rest --help`_ 74 | 75 | 2. And generate a bare-bones koa server to test against. 76 | 77 | ```sh 78 | $ sqlite-to-rest generate-skeleton --db-path ./beer.sqlite3 79 | package.json not found in working directory. Running `npm init -f`. 80 | Writing the skeleton server to: /home/phil/garbage/skeleton.js 81 | Installing dependencies 82 | Finished! 83 | ``` 84 | 85 | 3. Finally run the server 86 | 87 | ```sh 88 | $ node skeleton.js 89 | Listening on port: 8085 90 | ``` 91 | 92 | 4. And consume! 
93 | 94 | _I have [jq](https://stedolan.github.io/jq/) installed for formatting, though 95 | the unformatted output isn't terrible_ 96 | 97 | ```sh 98 | # get all breweries 99 | $ curl -s http://localhost:8085/brewery | jq 100 | # outputs 101 | [ 102 | { 103 | "id": 1, 104 | "state": "CO", 105 | "city_name": "Boulder", 106 | "name": "Avery" 107 | }, 108 | { 109 | "id": 2, 110 | "state": "WI", 111 | "city_name": "New Glarus", 112 | "name": "New Glarus" 113 | } 114 | ] 115 | 116 | # get the first three beers 117 | $ curl -s -H 'range: rows=0-2' http://localhost:8085/beer | jq 118 | # outputs 119 | [ 120 | { 121 | "id": 1, 122 | "brewery_id": 1, 123 | "description": "An authentic Belgian style white ale, this Rascal is unfiltered and cleverly spiced with coriander and Curaçao orange peel producing a refreshingly zesty classic ale.", 124 | "name": "White Rascal" 125 | }, 126 | { 127 | "id": 2, 128 | "brewery_id": 1, 129 | "description": "Avery IPA features a citrusy, floral bouquet and a rich, malty finish.", 130 | "name": "Avery IPA" 131 | }, 132 | { 133 | "id": 3, 134 | "brewery_id": 1, 135 | "description": "Chocolate malt gives this beer a brown sugar maltiness with hints of vanilla and nuts, while subtle hopping gives it an overall drinkability that’s second to none.", 136 | "name": "Ellie's Brown Ale" 137 | } 138 | ] 139 | 140 | # create another brewery 141 | $ curl -s -H "Content-Type: application/json" \ 142 | -d '{"state":"WI", "city_name":"Madison", "name": "One Barrel"}' \ 143 | http://localhost:8085/brewery | jq 144 | # outputs 145 | { 146 | "id": 3, 147 | "state": "WI", 148 | "city_name": "Madison", 149 | "name": "One Barrel" 150 | } 151 | 152 | # delete that brewery 153 | $ curl -X DELETE http://localhost:8085/brewery?id=3 154 | 155 | # update an existing brewery 156 | $ curl -s -H "Content-Type: application/json" \ 157 | -d '{"name": "New Glarus Brewing"}' \ 158 | http://localhost:8085/brewery?id=2 | jq 159 | # outputs 160 | { 161 | "id": 2, 162 | 
"state": "WI", 163 | "city_name": "New Glarus", 164 | "name": "New Glarus Brewing" 165 | } 166 | ``` 167 | 168 | You're done son! 169 | -------------------------------------------------------------------------------- /lib/api/helpers/route-builders/common.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | //---------// 4 | // Imports // 5 | //---------// 6 | 7 | const bPromise = require('bluebird'), 8 | fp = require('lodash/fp'), 9 | stream = require('stream'), 10 | utils = require('../../../utils') 11 | 12 | //------// 13 | // Init // 14 | //------// 15 | 16 | const { isDefined, startsWith } = utils, 17 | removeLeading = getRemoveLeading() 18 | 19 | //------// 20 | // Main // 21 | //------// 22 | 23 | const appendMidParsed = (str) => '\nquery string (mid-parsed): ' + str 24 | 25 | const attachError = (ctx, err) => { 26 | ctx.status = 400 27 | ctx.body = { 28 | msg: err.msg, 29 | id: err.id, 30 | } 31 | } 32 | 33 | const bRunQuery = (conn, query, params) => { 34 | return new bPromise((resolve, reject) => { 35 | try { 36 | conn.run(query, params, function (err) { 37 | if (err) reject(err) 38 | else resolve(this) 39 | }) 40 | } catch (err) { 41 | reject(err) 42 | } 43 | }) 44 | } 45 | 46 | const bStreamFinished = (aStream) => { 47 | return new bPromise((resolve, reject) => { 48 | try { 49 | stream.finished(aStream, (err) => { 50 | if (err) reject(err) 51 | else resolve(aStream) 52 | }) 53 | } catch (err) { 54 | reject(err) 55 | } 56 | }) 57 | } 58 | 59 | const doesNotHaveFlag = (flag) => fp.negate(hasFlag(flag)) 60 | 61 | const getParams = fp.flow( 62 | fp.values, 63 | fp.flatten, 64 | fp.filter(fp.has('val')), 65 | fp.map('val') 66 | ) 67 | 68 | const getPkColumnNames = fp.flow(fp.pickBy(isPrimaryKey), fp.map('name')) 69 | 70 | const getQuery = ({ parsed, queryStart, limit, offset, order }) => { 71 | const parsedPairs = fp.flow(fp.toPairs, fp.reduce(flattenParsed, []))(parsed) 72 | 73 | let res = queryStart 
74 | if (fp.size(parsed)) { 75 | res += 76 | ' WHERE ' + 77 | fp.reduce( 78 | genWhereClause, 79 | pairToCondition(parsedPairs[0]), 80 | parsedPairs.slice(1) 81 | ) 82 | } 83 | if (isDefined(order)) res += ' ORDER BY ' + order 84 | if (isDefined(limit)) res += ' LIMIT ' + limit 85 | if (offset) res += ' OFFSET ' + offset 86 | 87 | return res 88 | } 89 | 90 | const hasFlag = (flagVal) => fp.flow(fp.get('flags'), fp.includes(flagVal)) 91 | 92 | const isAdjacentToSingleQuote = (i, str) => { 93 | const quoteBefore = i !== 0 && str[i - 1] === "'", 94 | quoteAfter = i < str.length - 1 && str[i + 1] === "'" 95 | 96 | return quoteBefore || quoteAfter 97 | } 98 | 99 | function parseQueryForPkColumns(str, pkColumnNames, qsErrIds) { 100 | const res = {}, 101 | originalQueryStr = str, 102 | parseErr = 'Error while parsing query string: ' 103 | 104 | let col, val 105 | 106 | const pkColumnCounts = fp.reduce( 107 | (res, val) => fp.set(val, 0, res), 108 | {}, 109 | pkColumnNames 110 | ) 111 | 112 | while (str.length) { 113 | // loop goes through three steps. Extract column -> operator -> and value 114 | // if applicable 115 | 116 | col = fp.find(startsWith(str), pkColumnNames) 117 | if (!col) { 118 | const midParsed = str === originalQueryStr ? 
'' : '(mid-parsed)' 119 | 120 | return { 121 | msg: 122 | parseErr + 123 | 'PK Column name required.\n' + 124 | 'query string ' + 125 | midParsed + 126 | ': ' + 127 | str + 128 | '\navailable columns: ' + 129 | pkColumnNames.join(', '), 130 | hasErr: fp.constant(true), 131 | id: qsErrIds.pkColumnRequired, 132 | } 133 | } else if (pkColumnCounts[col] === 1) { 134 | return { 135 | msg: 136 | parseErr + 137 | 'Duplicate pk columns not allowed.\n' + 138 | 'duplicate column: ' + 139 | col + 140 | appendMidParsed(str) + 141 | '\noriginal query: ' + 142 | originalQueryStr, 143 | hasErr: fp.constant(true), 144 | id: qsErrIds.duplicatePkColumnsNotAllowed, 145 | } 146 | } 147 | 148 | pkColumnCounts[col] += 1 149 | 150 | str = removeLeading(str, col) 151 | if (!startsWith(str, '=')) { 152 | return { 153 | msg: parseErr + "Operator '=' required." + appendMidParsed(str), 154 | hasErr: fp.constant(true), 155 | id: qsErrIds.equalsRequired, 156 | } 157 | } 158 | 159 | str = str.slice(1) 160 | val = fp.takeWhile(fp.negate(fp.eq('&')), str).join('') 161 | 162 | str = removeLeading(str, val) 163 | 164 | if (str.length) str = str.slice(1) 165 | 166 | // so far so good - append operator + value to result column 167 | res[col] = (res[col] || []).concat({ 168 | op: '=', 169 | val: val, 170 | }) 171 | } 172 | 173 | const pkColumnsMissing = fp.flow(fp.pickBy(fp.eq(0)), fp.keys)(pkColumnCounts) 174 | 175 | if (fp.size(pkColumnsMissing)) { 176 | return { 177 | msg: 178 | parseErr + 179 | 'All pk columns must be passed.\n' + 180 | 'query: ' + 181 | originalQueryStr + 182 | '\nmissing pk columns: ' + 183 | pkColumnsMissing.join(', '), 184 | hasErr: fp.constant(true), 185 | id: qsErrIds.missingPkColumns, 186 | } 187 | } 188 | 189 | return res 190 | } 191 | 192 | const quoteNotAdjacentToAnother = (char, i, str) => { 193 | return char !== "'" || isAdjacentToSingleQuote(i, str) 194 | } 195 | 196 | // assumes valid input 197 | function getRemoveLeading() { 198 | return (str, startsWith) => 
str.slice(startsWith.length) 199 | } 200 | 201 | //-------------// 202 | // Helper Fxns // 203 | //-------------// 204 | 205 | function pairToCondition(aPair) { 206 | // _ISNULL and _NOTNULL don't have corresponding values 207 | const valPlaceholder = fp.has('val', aPair[1]) ? ' ?' : '' 208 | 209 | return aPair[0] + ' ' + aPair[1].op + valPlaceholder 210 | } 211 | function genWhereClause(res, aPair) { 212 | return res + ' AND ' + pairToCondition(aPair) 213 | } 214 | function flattenParsed(res, aPair) { 215 | return res.concat(fp.map((parsedVal) => [aPair[0], parsedVal], aPair[1])) 216 | } 217 | function isPrimaryKey(aColumn) { 218 | return fp.flow(fp.get('flags'), fp.contains('isPrimaryKey'))(aColumn) 219 | } 220 | 221 | function getInvalidOrderElements(order, columnNames) { 222 | const orderRegex = new RegExp( 223 | '^(?:' + columnNames.join('|') + ')(?: (?:asc|desc))?$' 224 | ) 225 | return fp.flow( 226 | fp.invokeArgs('split', [',']), 227 | fp.reject(fp.invokeArgs('match', [orderRegex])) 228 | )(order) 229 | } 230 | 231 | //---------// 232 | // Exports // 233 | //---------// 234 | 235 | module.exports = { 236 | appendMidParsed: appendMidParsed, 237 | attachError: attachError, 238 | bRunQuery: bRunQuery, 239 | bStreamFinished: bStreamFinished, 240 | doesNotHaveFlag: doesNotHaveFlag, 241 | getInvalidOrderElements: getInvalidOrderElements, 242 | getParams: getParams, 243 | getPkColumnNames: getPkColumnNames, 244 | getQuery: getQuery, 245 | hasFlag: hasFlag, 246 | parseQueryForPkColumns: parseQueryForPkColumns, 247 | quoteNotAdjacentToAnother: quoteNotAdjacentToAnother, 248 | removeLeading: removeLeading, 249 | } 250 | -------------------------------------------------------------------------------- /tests/expected/get-sqlite-router/get.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | //------// 4 | // Init // 5 | //------// 6 | 7 | const { 8 | averyIPA, 9 | banjoCat, 10 | commuterKolsch, 11 | elliesBrown, 
12 | serendipity, 13 | thaiStyleWhiteIPA, 14 | whiteRascal, 15 | } = getCommonBeer() 16 | 17 | //------// 18 | // Main // 19 | //------// 20 | 21 | const res = { 22 | firstFiveRows: getFirstFiveRows(), 23 | firstFiveRowsByNameAsc: getFirstFiveRowsByNameAsc(), 24 | firstFiveRowsByNameDesc: getFirstFiveRowsByNameDesc(), 25 | firstRow: getFirstRow(), 26 | lastFiveRows: getLastFiveRows(), 27 | latterFourRows: getLatterFourRows(), 28 | namedBelgians: getNamedBelgians(), 29 | nullName: getNullName(), 30 | } 31 | 32 | //-------------// 33 | // Helper Fxns // 34 | //-------------// 35 | 36 | function getFirstFiveRows() { 37 | return { 38 | statusCode: 206, 39 | body: [whiteRascal].concat(getLatterFourBodyRows()), 40 | headers: { 41 | 'content-range': 'rows 0-4/15', 42 | 'content-type': 'application/octet-stream', 43 | 'transfer-encoding': 'chunked', 44 | }, 45 | } 46 | } 47 | 48 | function getFirstFiveRowsByNameAsc() { 49 | return { 50 | statusCode: 206, 51 | body: getFirstFiveBodyRowsByNameAsc(), 52 | headers: { 53 | 'content-range': 'rows 0-4/14', 54 | 'content-type': 'application/octet-stream', 55 | 'transfer-encoding': 'chunked', 56 | }, 57 | } 58 | } 59 | 60 | function getFirstFiveRowsByNameDesc() { 61 | return { 62 | statusCode: 206, 63 | body: getFirstFiveBodyRowsByNameDesc(), 64 | headers: { 65 | 'content-range': 'rows 0-4/14', 66 | 'content-type': 'application/octet-stream', 67 | 'transfer-encoding': 'chunked', 68 | }, 69 | } 70 | } 71 | 72 | function getLastFiveRows() { 73 | return { 74 | statusCode: 206, 75 | body: getLastFiveBodyRows(), 76 | headers: { 77 | 'content-range': 'rows 11-15/15', 78 | 'content-type': 'application/octet-stream', 79 | 'transfer-encoding': 'chunked', 80 | }, 81 | } 82 | } 83 | 84 | function getLatterFourRows() { 85 | return { 86 | statusCode: 200, 87 | body: getLatterFourBodyRows(), 88 | headers: { 89 | 'content-range': 'rows 0-3/3', 90 | 'content-type': 'application/octet-stream', 91 | 'transfer-encoding': 'chunked', 92 | }, 93 | } 94 
| } 95 | 96 | function getFirstRow() { 97 | return { 98 | statusCode: 200, 99 | body: [whiteRascal], 100 | headers: { 101 | 'content-range': 'rows 0-0/0', 102 | 'content-type': 'application/octet-stream', 103 | 'transfer-encoding': 'chunked', 104 | }, 105 | } 106 | } 107 | 108 | function getNullName() { 109 | return { 110 | statusCode: 200, 111 | body: [getBodyNullName()], 112 | headers: { 113 | 'content-range': 'rows 0-0/0', 114 | 'content-type': 'application/octet-stream', 115 | 'transfer-encoding': 'chunked', 116 | }, 117 | } 118 | } 119 | 120 | function getFirstFiveBodyRowsByNameAsc() { 121 | return [ 122 | averyIPA, 123 | banjoCat, 124 | { 125 | id: 7, 126 | brewery_id: 2, 127 | description: 128 | 'Blood oranges and Mandarina Bavaria hops seamlessly complement one another in this saison providing aromatic notes of citrus and bright, juicy flavors. The addition of pink peppercorns amplifies the spicy notes imparted by the saison yeast while balancing the beer’s natural sweetness and dry finish.', 129 | name: 'Blood Orange Saison', 130 | }, 131 | commuterKolsch, 132 | elliesBrown, 133 | ] 134 | } 135 | 136 | function getFirstFiveBodyRowsByNameDesc() { 137 | return [ 138 | whiteRascal, 139 | { 140 | id: 11, 141 | brewery_id: 3, 142 | description: 143 | 'The collaboration of two Craft companies both led by women, New Glarus Brewing and Weyermann Malting, is unique. You hold the result “Two Women” a Classic Country Lager.', 144 | name: 'Two Women', 145 | }, 146 | thaiStyleWhiteIPA, 147 | { 148 | id: 9, 149 | brewery_id: 3, 150 | description: 151 | "Expect this ale to be fun, fruity and satisfying. 
You know you're in Wisconsin when you see the Spotted Cow.", 152 | name: 'Spotted Cow', 153 | }, 154 | serendipity, 155 | ] 156 | } 157 | 158 | function getBodyNullName() { 159 | return { 160 | id: 16, 161 | brewery_id: 5, 162 | description: 'Belgian', 163 | name: null, 164 | } 165 | } 166 | 167 | function getNamedBelgians() { 168 | return { 169 | statusCode: 200, 170 | body: [whiteRascal, thaiStyleWhiteIPA], 171 | headers: { 172 | 'content-range': 'rows 0-1/1', 173 | 'content-type': 'application/octet-stream', 174 | 'transfer-encoding': 'chunked', 175 | }, 176 | } 177 | } 178 | 179 | function getLatterFourBodyRows() { 180 | return [ 181 | averyIPA, 182 | elliesBrown, 183 | { 184 | id: 4, 185 | brewery_id: 1, 186 | description: 187 | "A contemporary rendition of a classic style, Joe's is hopped with purpose: beautifully bitter and dry with an abundance of floral, Noble German hops.", 188 | name: "Joe's Pils", 189 | }, 190 | thaiStyleWhiteIPA, 191 | ] 192 | } 193 | 194 | function getLastFiveBodyRows() { 195 | return [ 196 | serendipity, 197 | { 198 | id: 13, 199 | brewery_id: 4, 200 | description: 201 | 'A good example of the American twist on the English classic. It is more of a deep copper than pale gold, as is common for the style, and has a unique malt profile due in large part to the addition of 20% American Wheat Malt. 
A strong American hop structure provides an enticing aroma of tangerine and grapefruit and smooth finish that will keep you coming back for more.', 202 | name: 'Penguin Pale Ale', 203 | }, 204 | banjoCat, 205 | commuterKolsch, 206 | getBodyNullName(), 207 | ] 208 | } 209 | 210 | function getCommonBeer() { 211 | return { 212 | averyIPA: { 213 | id: 2, 214 | brewery_id: 1, 215 | description: 216 | 'Avery IPA features a citrusy, floral bouquet and a rich, malty finish.', 217 | name: 'Avery IPA', 218 | }, 219 | banjoCat: { 220 | id: 14, 221 | brewery_id: 4, 222 | description: 223 | 'This beer is black in color but very smooth and does not have a bitter malt flavor found in many stouts and porters. It is aggressively hopped, including a dry hop addition that gives Banjo Cat a strong citrus aroma and vibrant flavor resulting in a well-balanced black ale.', 224 | name: 'Banjo Cat', 225 | }, 226 | elliesBrown: { 227 | id: 3, 228 | brewery_id: 1, 229 | description: 230 | 'Chocolate malt gives this beer a brown sugar maltiness with hints of vanilla and nuts, while subtle hopping gives it an overall drinkability that’s second to none.', 231 | name: "Ellie's Brown Ale", 232 | }, 233 | serendipity: { 234 | id: 12, 235 | brewery_id: 3, 236 | description: 237 | 'You hold the happy accident of Wisconsin’s favorite fruit aged in oak with an almost magical wild fermentation.', 238 | name: 'Serendipity', 239 | }, 240 | thaiStyleWhiteIPA: { 241 | id: 5, 242 | brewery_id: 2, 243 | description: 244 | 'Drawing inspiration from all over the globe, our Thai Style White IPA is an artful combination of uncommon, yet carefully chosen ingredients. Brewed using Belgian Wit yeast, hopped like an American IPA and infused with seven Asian-inspired spices, this beer is far from traditional. 
The sharp, juicy citrus notes of the hops interplay with the unique Thai spice blend to create an unexpected, one of a kind refreshment.', 245 | name: 'Thai Style White IPA', 246 | }, 247 | whiteRascal: { 248 | id: 1, 249 | brewery_id: 1, 250 | description: 251 | 'An authentic Belgian style white ale, this Rascal is unfiltered and cleverly spiced with coriander and Curaçao orange peel producing a refreshingly zesty classic ale.', 252 | name: 'White Rascal', 253 | }, 254 | commuterKolsch: { 255 | id: 15, 256 | brewery_id: 4, 257 | description: 258 | 'This is very true to the style that originated in Koln (Cologne), Germany. Our Kolsch is unfiltered and cold-conditioned. The result is a beer that is clean and refreshing, with a crisp finish. It is agreeable without being boring; the beer lover’s session ale.', 259 | name: 'Commuter Kolsch', 260 | }, 261 | } 262 | } 263 | 264 | //---------// 265 | // Exports // 266 | //---------// 267 | 268 | module.exports = res 269 | -------------------------------------------------------------------------------- /lib/api/helpers/route-builders/get.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | //---------// 4 | // Imports // 5 | //---------// 6 | 7 | const bPromise = require('bluebird'), 8 | common = require('./common'), 9 | config = require('../../services/config'), 10 | errIds = require('./err-ids'), 11 | fp = require('lodash/fp'), 12 | handleHead = require('./handle-head'), 13 | jsonStream = require('JSONStream'), 14 | madonna = require('madonna-fp/es6'), 15 | utils = require('../../../utils') 16 | 17 | //------// 18 | // Init // 19 | //------// 20 | 21 | const { 22 | appendMidParsed, 23 | attachError, 24 | getInvalidOrderElements, 25 | getParams, 26 | getQuery, 27 | quoteNotAdjacentToAnother, 28 | removeLeading, 29 | } = common, 30 | { isDefined, startsWith, takeWhileWithIndexAndArr } = utils, 31 | outsideE = madonna.CRITERION_FNS.outsideE, 32 | qsErrIds = 
errIds.get.queryString 33 | 34 | //------// 35 | // Main // 36 | //------// 37 | 38 | const buildGet = (name, columns, connections, router) => { 39 | const columnNames = fp.keys(columns), 40 | configObj = config.get(), 41 | selectAll = 'SELECT ' + columnNames.join(', ') + ' FROM ' + name, 42 | selectCount = 'SELECT COUNT(*) as count FROM ' + name, 43 | tableConfig = configObj.tablesAndViews[name] 44 | router.get('/' + name, (ctx, next) => { 45 | if (ctx.headers.order) { 46 | const invalidOrderElements = getInvalidOrderElements( 47 | ctx.headers.order, 48 | columnNames 49 | ) 50 | if (fp.size(invalidOrderElements)) { 51 | return attachError(ctx, { 52 | msg: 53 | 'Invalid order. The order header must contain comma-delimited' + 54 | " column names each optionally followed by a space and 'asc'" + 55 | " or 'desc'. The following comma-separated strings found in" + 56 | ' order are invalid\n' + 57 | invalidOrderElements.join('\n'), 58 | id: errIds.get.invalidOrder, 59 | }) 60 | } 61 | } 62 | 63 | const parsed = parseQuery(ctx.decodedQuerystring, columnNames) 64 | 65 | if (fp.invoke('hasErr', parsed)) { 66 | return attachError(ctx, parsed) 67 | } 68 | // query string is good - movin' on 69 | 70 | if (ctx.method === 'HEAD') { 71 | return handleHead( 72 | ctx, 73 | next, 74 | tableConfig, 75 | connections, 76 | columnNames, 77 | parsed, 78 | name 79 | ) 80 | } 81 | 82 | // else method is GET 83 | 84 | // handle the case of no range request header first since it's the easiest 85 | if (!ctx.headers.range) { 86 | return handleNoRangeRequest( 87 | parsed, 88 | selectAll, 89 | selectCount, 90 | connections, 91 | tableConfig, 92 | ctx, 93 | next 94 | ) 95 | } 96 | 97 | // otherwise a range request header was passed, verify it 98 | 99 | let rangeHeader = ctx.headers.range.replace(/^\s*/g, ''), 100 | reason 101 | 102 | const rangeIsValid = 103 | !!rangeHeader.match(/^rows=([0-9]+)?-([0-9]+)?$/) && 104 | rangeHeader.replace !== 'rows=-', 105 | range = {} 106 | 107 | if 
(!rangeIsValid) { 108 | reason = 109 | 'Range must have the syntax ' + 110 | "'rows=-', where either A or B" + 111 | ' must be specified.' 112 | return attachRangeNotValid(reason, ctx) 113 | } 114 | 115 | rangeHeader = rangeHeader.slice(5) 116 | if (rangeHeader[0] !== '-') { 117 | range.start = fp.toNumber(rangeHeader.match(/^[0-9]+/)[0]) 118 | if (fp.last(rangeHeader) !== '-') { 119 | range.end = fp.toNumber(rangeHeader.match(/[0-9]+$/)[0]) 120 | 121 | if (range.start === range.end) { 122 | reason = 123 | 'Range start cannot equal range end.' + 124 | '\nrange -> start: ' + 125 | range.start + 126 | '\nrange -> end: ' + 127 | range.end 128 | return attachRangeNotValid(reason, ctx) 129 | } else if (range.start > range.end) { 130 | ;[range.start, range.end] = [range.end, range.start] 131 | } 132 | } 133 | } 134 | 135 | // range is valid thus far - woo woo 136 | 137 | let query = getQuery({ 138 | order: ctx.headers.order, 139 | parsed: parsed, 140 | queryStart: selectCount, 141 | }) 142 | 143 | const params = getParams(parsed) 144 | // We must know the size of the result set prior to deciding whether 145 | // the range is satisfiable. 146 | return connections.readOnly 147 | .getAsync(query, params) 148 | .then(({ count }) => { 149 | if (count === 0) { 150 | ctx.status = 404 151 | return next() 152 | } 153 | 154 | // finalize the range object 155 | if (fp.isUndefined(range.start)) { 156 | range.start = count - fp.toNumber(rangeHeader.slice(1)) 157 | // need to make sure the outsideE check below doesn't include a 158 | // negative range 159 | range.end = fp.max([count - 1, 0]) 160 | } 161 | if (fp.isUndefined(range.end)) { 162 | range.end = fp.min([count, range.start + tableConfig.maxRange - 1]) 163 | } else { 164 | range.end = fp.min([range.end, count - 1]) 165 | } 166 | 167 | if (outsideE([0, count], range.start)) { 168 | reason = 169 | range.start < 0 170 | ? 
'Range cannot have a start value of less than zero.\n' 171 | : 'Range cannot have a start value greater than the result count.\n' 172 | 173 | reason += 174 | 'parameterized query: ' + 175 | query + 176 | '\nparams: ' + 177 | params.join(', ') + 178 | '\nrange header: ' + 179 | ctx.headers.range + 180 | '\nresult count: ' + 181 | count + 182 | '\nrange.start: ' + 183 | range.start + 184 | '\nrange.end: ' + 185 | range.end 186 | 187 | return attachRangeNotValid(reason, ctx) 188 | } else if (range.end - range.start + 1 > tableConfig.maxRange) { 189 | ctx.set({ 190 | 'max-range': tableConfig.maxRange, 191 | 'content-range': 'rows */' + count, 192 | }) 193 | reason = 194 | 'The range you requested is too large to send across in one' + 195 | " response. See 'max-range' header for the maximum number of" + 196 | ' rows you may request' 197 | return attachRangeNotValid(reason, ctx) 198 | } 199 | 200 | // range is finally valid. Finish Him! 201 | query = getQuery({ 202 | limit: range.end - range.start + 1, 203 | offset: range.start, 204 | order: ctx.headers.order, 205 | parsed: parsed, 206 | queryStart: selectAll, 207 | }) 208 | 209 | const stringifyStream = (ctx.body = jsonStream.stringify()), 210 | limit = count - 1, 211 | isPartialResource = range.end - range.start !== limit 212 | ctx.set( 213 | 'content-range', 214 | 'rows ' + range.start + '-' + fp.min([range.end, limit]) + '/' + limit 215 | ) 216 | ctx.status = isPartialResource ? 
206 : 200 217 | 218 | eachAsync(connections.readOnly, query, params, stringifyStream) 219 | .catch((err) => { 220 | console.error(err) 221 | }) 222 | .finally(() => { 223 | stringifyStream.end() 224 | }) 225 | }) 226 | .catch((err) => { 227 | ctx.status = 500 228 | console.error(err) 229 | }) 230 | }) 231 | } 232 | 233 | //-------------// 234 | // Helper Fxns // 235 | //-------------// 236 | 237 | // 238 | // eachAsync concept from here: 239 | // https://gist.github.com/erikman/a494925ae6ce95869dd56076fb810831#file-node-sqlite3-performance-test-js-L87 240 | // 241 | // the idea is that 'each' doesn't provide a stream-friendly interface to 242 | // querying the database. By wrapping the query into a statement, we have 243 | // control over when to ask the database for another row which means we can 244 | // wait when the stream reaches its max throughput. This is still not a great 245 | // solution however because 246 | // 247 | // 1. this is slower than database.prototype.each 248 | // 2. it's unlikely we'll be reaching max throughput by waiting for the 249 | // database each row 250 | // 3. per sqlite3 docs, this locks the database until the statement 251 | // is finalized. 252 | // 253 | // If the drawbacks become a problem then maybe we can expose a separate 254 | // interface which loops over rows in batches via LIMIT, allowing the client 255 | // to make the choice between atomicity and performance. 
256 | // 257 | function eachAsync(conn, query, params, stringifyStream) { 258 | return new bPromise((resolve, reject) => { 259 | try { 260 | let rowCount = 0 261 | 262 | const stmt = conn.prepare(query, params, (err) => { 263 | if (err) { 264 | return bPromise.reject(err) 265 | } 266 | 267 | const recursiveGet = (err, row) => { 268 | if (err) { 269 | cleanupAndDone(err) 270 | return 271 | } 272 | 273 | if (!row) { 274 | cleanupAndDone() 275 | return 276 | } 277 | rowCount += 1 278 | const shouldContinue = stringifyStream.write(row) 279 | 280 | if (!shouldContinue) { 281 | stringifyStream.once('drain', () => { 282 | stmt.get(recursiveGet) 283 | }) 284 | } else { 285 | stmt.get(recursiveGet) 286 | } 287 | } 288 | 289 | // Start recursion 290 | stmt.get(recursiveGet) 291 | }) 292 | 293 | const cleanupAndDone = (err) => { 294 | stmt.finalize(() => { 295 | if (err) reject(err) 296 | else resolve(rowCount) 297 | }) 298 | } 299 | } catch (e) { 300 | reject(e) 301 | } 302 | }) 303 | } 304 | 305 | function attachRangeNotValid(internalReason, ctx) { 306 | ctx.status = 416 307 | ctx.body = "The request 'range' header is invalid.\n" + internalReason + '\n' 308 | } 309 | 310 | // Currently there's other data associated with these operators, but not enough 311 | // to warrant a configuration structure. 312 | const operators = [ 313 | '_LIKE', 314 | '_ISNULL', 315 | '_NOTNULL', 316 | '=', 317 | '!=', 318 | '>=', 319 | '<=', 320 | '>', 321 | '<', 322 | ] 323 | 324 | function parseQuery(str, columnNames) { 325 | const res = {}, 326 | originalQueryStr = str, 327 | ampersandErr = 328 | ' must be followed by an ampersand when not' + 329 | ' at the end of the query string', 330 | parseErr = 'Error while parsing query string: ' 331 | 332 | while (str.length) { 333 | let val 334 | 335 | // loop goes through three steps. 
Extract column -> operator -> and value 336 | // if applicable 337 | 338 | const col = fp.find(startsWith(str), columnNames) 339 | if (!col) { 340 | const midParsed = str === originalQueryStr ? '' : '(mid-parsed)' 341 | 342 | return { 343 | msg: 344 | parseErr + 345 | 'Column name required.\n' + 346 | 'query string ' + 347 | midParsed + 348 | ': ' + 349 | str + 350 | '\navailable columns: ' + 351 | columnNames.join(', '), 352 | hasErr: fp.constant(true), 353 | id: qsErrIds.columnRequired, 354 | } 355 | } 356 | 357 | str = removeLeading(str, col) 358 | const op = fp.find(startsWith(str), operators) 359 | 360 | if (!op) { 361 | return { 362 | msg: 363 | parseErr + 364 | 'Operator required.' + 365 | appendMidParsed(str) + 366 | '\navailable operators: ' + 367 | operators.join(', '), 368 | hasErr: fp.constant(true), 369 | id: qsErrIds.operatorRequired, 370 | } 371 | } 372 | 373 | str = removeLeading(str, op) 374 | // no values with ISNULL or NOTNULL, so just make sure an ampersand 375 | // was passed 376 | if (fp.includes(op, ['_ISNULL', '_NOTNULL'])) { 377 | // no values to parse 378 | } else if (fp.includes(op, ['_LIKE'])) { 379 | // need to ensure quotes in _LIKE 380 | if (!startsWith(str, "'")) { 381 | return { 382 | msg: 383 | parseErr + 384 | 'The first character after _LIKE must be a single quote' + 385 | appendMidParsed(str), 386 | hasErr: fp.constant(true), 387 | id: qsErrIds.openingQuoteRequired, 388 | } 389 | } 390 | str = removeLeading(str, "'") 391 | 392 | // perform quote escaping 393 | val = takeWhileWithIndexAndArr(quoteNotAdjacentToAnother, str).join('') 394 | if (fp.endsWith(val, str)) { 395 | return { 396 | msg: 397 | parseErr + '_LIKE must have a closing quote' + appendMidParsed(str), 398 | hasErr: fp.constant(true), 399 | id: qsErrIds.closingQuoteRequired, 400 | } 401 | } 402 | 403 | str = removeLeading(str, val + "'") 404 | } else { 405 | // dealing with a binary operator that's not _LIKE 406 | val = fp.takeWhile(fp.negate(fp.eq('&')), 
str).join('') 407 | str = removeLeading(str, val) 408 | } 409 | 410 | if (str.length) { 411 | if (str[0] !== '&') { 412 | return { 413 | msg: parseErr + op + ampersandErr + appendMidParsed(str), 414 | hasErr: fp.constant(true), 415 | id: qsErrIds.ampersandRequired, 416 | } 417 | } 418 | str = removeLeading(str, '&') 419 | } 420 | 421 | // so far so good - append operator + value to result column 422 | res[col] = (res[col] || []).concat( 423 | fp.pickBy(isDefined, { 424 | op: getOp(op), 425 | val: val, 426 | }) 427 | ) 428 | } 429 | 430 | return res 431 | } 432 | 433 | const queryStringOpToSqlOp = { 434 | _ISNULL: 'IS NULL', 435 | _LIKE: 'LIKE', 436 | _NOTNULL: 'IS NOT NULL', 437 | } 438 | 439 | function getOp(val) { 440 | return fp.getOr(val, val, queryStringOpToSqlOp) 441 | } 442 | 443 | function handleNoRangeRequest( 444 | parsed, 445 | selectAll, 446 | selectCount, 447 | connections, 448 | tableConfig, 449 | ctx, 450 | next 451 | ) { 452 | let query = getQuery({ 453 | parsed: parsed, 454 | queryStart: selectCount, 455 | }) 456 | 457 | const params = getParams(parsed) 458 | 459 | return connections.readOnly 460 | .getAsync(query, params) 461 | .then(({ count }) => { 462 | if (count > (tableConfig.maxRange || Infinity)) { 463 | ctx.set({ 464 | 'accept-ranges': 'rows', 465 | 'max-range': tableConfig.maxRange, 466 | 'content-range': 'rows */' + count, 467 | }) 468 | 469 | attachError(ctx, { 470 | msg: 471 | "'range' header required. This resource's content" + 472 | ' is too large to send across in one response. 
See ' + 473 | " 'max-range' header for the maximum number of rows you" + 474 | ' may request', 475 | id: errIds.get.invalidRange, 476 | }) 477 | return 478 | } else if (count === 0) { 479 | ctx.status = 404 480 | return next() 481 | } 482 | 483 | query = getQuery({ 484 | order: ctx.headers.order, 485 | parsed: parsed, 486 | queryStart: selectAll, 487 | }) 488 | 489 | const stringifyStream = (ctx.body = jsonStream.stringify()) 490 | 491 | stringifyStream.on('error', (err) => { 492 | console.error(err) 493 | stringifyStream.end() 494 | }) 495 | 496 | const limit = count - 1 497 | if (tableConfig.maxRange) { 498 | ctx.set('content-range', 'rows 0-' + limit + '/' + limit) 499 | } 500 | 501 | eachAsync(connections.readOnly, query, params, stringifyStream) 502 | .catch((err) => { 503 | console.error(err) 504 | }) 505 | .finally(() => { 506 | stringifyStream.end() 507 | }) 508 | }) 509 | .catch((err) => { 510 | ctx.status = 500 511 | console.error(err) 512 | }) 513 | } 514 | 515 | //---------// 516 | // Exports // 517 | //---------// 518 | 519 | module.exports = buildGet 520 | -------------------------------------------------------------------------------- /tests/get-sqlite-router.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | //---------// 4 | // Imports // 5 | //---------// 6 | 7 | const bPromise = require('bluebird'), 8 | bFs = bPromise.promisifyAll(require('fs')), 9 | chai = require('chai'), 10 | chaiAsPromised = require('chai-as-promised'), 11 | chaiSubset = require('chai-subset'), 12 | common = require('./helpers/common'), 13 | del = require('del'), 14 | errIds = require('../lib/api/helpers/route-builders/err-ids'), 15 | expected = require('./expected/get-sqlite-router'), 16 | filecompare = require('filecompare'), 17 | fp = require('lodash/fp'), 18 | fs = require('fs'), 19 | makeDir = require('make-dir'), 20 | ncpAsync = bPromise.promisify(require('ncp')), 21 | path = require('path'), 22 | rp = 
require('request-promise'), 23 | request = require('request'), 24 | stream = require('stream'), 25 | utils = require('../lib/utils') 26 | 27 | //------// 28 | // Init // 29 | //------// 30 | 31 | chai.use(chaiSubset) 32 | chai.use(chaiAsPromised) 33 | chai.should() 34 | 35 | const mapValuesWithKey = utils.mapValuesWithKey, 36 | resourcesDir = path.join(__dirname, 'resources'), 37 | beerDb = path.join(resourcesDir, 'beer.sqlite3'), 38 | beerDbBak = path.join(resourcesDir, 'beer.sqlite3.bak'), 39 | queryStringsShouldResultInErrors = getQueryStringsShouldResultInErrors(), 40 | bStreamFinished = (aStream) => { 41 | return new bPromise((resolve, reject) => { 42 | try { 43 | stream.finished(aStream, (err) => { 44 | if (err) reject(err) 45 | else resolve(aStream) 46 | }) 47 | } catch (e) { 48 | reject(e) 49 | } 50 | }) 51 | }, 52 | bAreFilesEqual = (fpath1, fpath2) => { 53 | return new bPromise((resolve, reject) => { 54 | try { 55 | filecompare(fpath1, fpath2, (result) => { 56 | resolve(result) 57 | }) 58 | } catch (e) { 59 | reject(e) 60 | } 61 | }) 62 | } 63 | 64 | //------// 65 | // Main // 66 | //------// 67 | 68 | describe('safe', () => { 69 | describe('router prefix', () => { 70 | let rpt 71 | 72 | before(() => { 73 | const configOverrides = { prefix: '/api' } 74 | 75 | return common.startServer({ configOverrides }).then( 76 | (port) => 77 | (rpt = getRequestPromiseTransformed({ 78 | uri: 'api/beer', 79 | port: port, 80 | })) 81 | ) 82 | }) 83 | after(() => common.stopServer()) 84 | 85 | const exp = expected.get 86 | 87 | it('should return the first five rows', async () => { 88 | await rpt({ 89 | headers: { range: 'rows=0-4' }, 90 | }).should.eventually.containSubset(exp.firstFiveRows) 91 | }) 92 | 93 | it('should return a 400', async () => { 94 | await rpt({ 95 | uri: 'beer', 96 | headers: { range: 'rows=0-4' }, 97 | }).should.eventually.have.property('statusCode', 404) 98 | }) 99 | }) 100 | 101 | describe('head', () => { 102 | let rpt 103 | 104 | before(() => 105 
| common 106 | .startServer() 107 | .then( 108 | (port) => 109 | (rpt = getRequestPromiseTransformed({ method: 'HEAD', port: port })) 110 | ) 111 | ) 112 | after(() => common.stopServer()) 113 | 114 | const exp = expected.head 115 | 116 | it('should return the expected successful responses', async () => { 117 | await bPromise.all([ 118 | rpt({ uri: 'beer' }).should.eventually.containSubset(exp.beer), 119 | rpt({ uri: 'brewery' }).should.eventually.containSubset(exp.brewery), 120 | ]) 121 | }) 122 | }) 123 | 124 | describe('get', () => { 125 | let rpt 126 | 127 | before(() => 128 | common 129 | .startServer() 130 | .then( 131 | (port) => 132 | (rpt = getRequestPromiseTransformed({ uri: 'beer', port: port })) 133 | ) 134 | ) 135 | after(() => common.stopServer()) 136 | 137 | const qsErrIds = errIds.get.queryString, 138 | exp = expected.get 139 | it('should return the expected successful beer range and order responses', async () => { 140 | // should cover all supported rows syntax variations 141 | await bPromise.all([ 142 | rpt({ headers: { range: 'rows=0-4' } }).should.eventually.containSubset( 143 | exp.firstFiveRows 144 | ), 145 | 146 | rpt({ 147 | qss: 'name_NOTNULL', 148 | headers: { 149 | range: 'rows=0-4', 150 | order: 'name', 151 | }, 152 | }).should.eventually.containSubset(exp.firstFiveRowsByNameAsc), 153 | 154 | rpt({ 155 | qss: 'name_NOTNULL', 156 | headers: { 157 | range: 'rows=0-4', 158 | order: 'name desc', 159 | }, 160 | }).should.eventually.containSubset(exp.firstFiveRowsByNameDesc), 161 | 162 | rpt({ headers: { range: 'rows=0-' } }).should.eventually.containSubset( 163 | exp.firstFiveRows 164 | ), 165 | 166 | rpt({ headers: { range: 'rows=-5' } }).should.eventually.containSubset( 167 | exp.lastFiveRows 168 | ), 169 | ]) 170 | }) 171 | 172 | it('should return the expected successful beer query responses', async () => { 173 | // should cover all query operators 174 | await bPromise.all([ 175 | rpt({ qss: 'id=1' 
}).should.eventually.containSubset(exp.firstRow), 176 | 177 | rpt({ qss: 'id!=1&id<=5' }).should.eventually.containSubset( 178 | exp.latterFourRows 179 | ), 180 | 181 | rpt({ qss: 'id>1&id<=5' }).should.eventually.containSubset( 182 | exp.latterFourRows 183 | ), 184 | 185 | rpt({ qss: 'id>=2&id<6' }).should.eventually.containSubset( 186 | exp.latterFourRows 187 | ), 188 | 189 | rpt({ qss: 'name_ISNULL' }).should.eventually.containSubset( 190 | exp.nullName 191 | ), 192 | 193 | rpt({ 194 | qss: "description_LIKE'%Belgian%'&name_NOTNULL", 195 | }).should.eventually.containSubset(exp.namedBelgians), 196 | ]) 197 | }) 198 | 199 | it('should return the correct error responses', async () => { 200 | // should cover all errors found in err-ids -> get 201 | const getAllBeer = rpt 202 | await bPromise.all([ 203 | getAllBeer().should.eventually.have.nested.property( 204 | 'body.id', 205 | errIds.get.invalidRange 206 | ), 207 | 208 | rpt({ 209 | headers: { order: 'notAColumn' }, 210 | }).should.eventually.have.nested.property( 211 | 'body.id', 212 | errIds.get.invalidOrder 213 | ), 214 | 215 | queryStringsShouldResultInErrors(rpt, [ 216 | ['notAColumn', qsErrIds.columnRequired], 217 | ['id', qsErrIds.operatorRequired], 218 | ['name_LIKE', qsErrIds.openingQuoteRequired], 219 | ["name_LIKE'White Rascal", qsErrIds.closingQuoteRequired], 220 | ['name_NOTNULLid', qsErrIds.ampersandRequired], 221 | ]), 222 | ]) 223 | }) 224 | }) 225 | 226 | describe('get - big', function () { 227 | this.timeout(60000) 228 | 229 | const pathToStreamOut = path.join(__dirname, 'tmp/result.json') 230 | let port 231 | 232 | before(() => { 233 | const configOverrides = { 234 | tablesAndViews: { 235 | beer: { 236 | maxRange: Infinity, 237 | }, 238 | }, 239 | } 240 | 241 | const dbPath = path.join(__dirname, 'resources/big.beer.sqlite3') 242 | 243 | return Promise.all([ 244 | common.startServer({ dbPath, configOverrides }), 245 | makeDir(path.dirname(pathToStreamOut)), 246 | ]).then(([serverPort]) => { 247 
| port = serverPort 248 | }) 249 | }) 250 | after(() => Promise.all([common.stopServer(), del(pathToStreamOut)])) 251 | 252 | it('should stream a large response', async () => { 253 | const writeStream = fs.createWriteStream(pathToStreamOut), 254 | expectedLargeResult = path.join( 255 | __dirname, 256 | 'expected/get-sqlite-router/big-result.json' 257 | ), 258 | resultStream = request 259 | .get(`http://localhost:${port}/beer`) 260 | .on('response', (response) => { 261 | response.statusCode.should.equal(200) 262 | response.headers['content-type'].should.equal( 263 | 'application/octet-stream' 264 | ) 265 | response.headers['content-range'].should.equal( 266 | 'rows 0-100015/100015' 267 | ) 268 | response.headers['transfer-encoding'].should.equal('chunked') 269 | }) 270 | .on('error', (err) => { 271 | console.error(err) 272 | }) 273 | .pipe(writeStream) 274 | 275 | await bStreamFinished(resultStream).then(() => 276 | bAreFilesEqual(pathToStreamOut, expectedLargeResult) 277 | ).should.eventually.be.true 278 | }) 279 | }) 280 | 281 | describe('Unsupported Methods', () => { 282 | let rpt 283 | 284 | before(() => 285 | common.startServer().then( 286 | (port) => 287 | (rpt = getRequestPromiseTransformed({ 288 | method: 'PATCH', 289 | port: port, 290 | })) 291 | ) 292 | ) 293 | after(() => common.stopServer()) 294 | 295 | const exp = expected.unsupported 296 | 297 | it('should return the correct 405 responses', async () => { 298 | await bPromise.all([ 299 | rpt({ uri: 'beer_per_brewery' }).should.eventually.containSubset( 300 | exp.beer_per_brewery 301 | ), 302 | rpt({ uri: 'beer' }).should.eventually.containSubset(exp.beer), 303 | ]) 304 | }) 305 | }) 306 | }) 307 | 308 | describe('unsafe', () => { 309 | afterEach(() => 310 | bPromise.all([bFs.renameAsync(beerDbBak, beerDb), common.stopServer()]) 311 | ) 312 | 313 | describe('delete', () => { 314 | let rpt 315 | 316 | beforeEach(() => 317 | bPromise 318 | .props({ 319 | unused: ncpAsync(beerDb, beerDbBak), 320 | port: 
common.startServer(), 321 | }) 322 | .then( 323 | ({ port }) => 324 | (rpt = getRequestPromiseTransformed({ 325 | uri: 'city', 326 | method: 'DELETE', 327 | port: port, 328 | })) 329 | ) 330 | ) 331 | 332 | const qsErrIds = errIds.delete.queryString, 333 | exp = expected.delete 334 | it('should return the expected successful beer responses', async () => { 335 | await rpt({ 336 | qss: 'state=WI&city_name=Milwaukee', 337 | }).should.eventually.containSubset(exp.success) 338 | }) 339 | 340 | it('should return 404 on non-existent resource', async () => { 341 | await rpt({ 342 | qss: 'state=WI&city_name=Eau Claire', 343 | }).should.eventually.have.property('statusCode', 404) 344 | }) 345 | 346 | it('should return the correct error responses', async () => { 347 | // should cover all errors found in err-ids -> delete 348 | 349 | await queryStringsShouldResultInErrors(rpt, [ 350 | ['state=CO&state=CO', qsErrIds.duplicatePkColumnsNotAllowed], 351 | ['state>CO', qsErrIds.equalsRequired], 352 | ['state=CO', qsErrIds.missingPkColumns], 353 | ['notAColumn', qsErrIds.pkColumnRequired], 354 | ]) 355 | }) 356 | }) 357 | 358 | describe('insert', () => { 359 | let rpt 360 | 361 | beforeEach(() => 362 | bPromise 363 | .props({ 364 | unused: ncpAsync(beerDb, beerDbBak), 365 | port: common.startServer(), 366 | }) 367 | .then( 368 | ({ port }) => 369 | (rpt = getRequestPromiseTransformed({ 370 | uri: 'city', 371 | method: 'POST', 372 | port: port, 373 | })) 374 | ) 375 | ) 376 | 377 | const rbErrIds = errIds.post.requestBody, 378 | exp = expected.post 379 | it('should return the expected successful beer responses', async () => { 380 | await rpt({ 381 | body: exp.eauClaireSuccess.body, 382 | }).should.eventually.containSubset(exp.eauClaireSuccess) 383 | }) 384 | 385 | it('should return the correct error responses', async () => { 386 | // should cover all errors found in err-ids -> post 387 | 388 | await bPromise.all([ 389 | rpt({ 390 | body: { notAColumn: 'error' }, 391 | 
}).should.eventually.have.nested.property( 392 | 'body.id', 393 | rbErrIds.invalidColumns 394 | ), 395 | 396 | rpt({ body: { state: 'WI' } }).should.eventually.have.nested.property( 397 | 'body.id', 398 | rbErrIds.missingRequiredColumns 399 | ), 400 | ]) 401 | }) 402 | }) 403 | 404 | describe('update', () => { 405 | let rptb, rptc 406 | 407 | beforeEach(() => 408 | bPromise 409 | .props({ 410 | unused: ncpAsync(beerDb, beerDbBak), 411 | port: common.startServer(), 412 | }) 413 | .then(({ port }) => { 414 | rptb = getRequestPromiseTransformed({ 415 | qs: { id: 5 }, 416 | uri: 'beer', 417 | method: 'POST', 418 | port: port, 419 | }) 420 | rptc = getRequestPromiseTransformed({ 421 | uri: 'city', 422 | method: 'POST', 423 | port: port, 424 | }) 425 | }) 426 | ) 427 | 428 | const rbErrIds = errIds.update.requestBody, 429 | qsErrIds = errIds.update.queryString, 430 | exp = expected.post_update 431 | it('should return the expected successful beer responses', async () => { 432 | await rptb({ 433 | body: fp.pick('description', exp.thaiSuccess.body), 434 | }).should.eventually.containSubset(exp.thaiSuccess) 435 | }) 436 | 437 | it('should return the correct error responses', async () => { 438 | // should cover all errors found in err-ids -> update 439 | 440 | await bPromise.all([ 441 | rptb({ 442 | body: { notAColumn: 'error' }, 443 | }).should.eventually.have.nested.property( 444 | 'body.id', 445 | rbErrIds.invalidColumns 446 | ), 447 | 448 | rptb().should.eventually.have.nested.property( 449 | 'body.id', 450 | rbErrIds.mustBeNonEmpty 451 | ), 452 | 453 | queryStringsShouldResultInErrors(rptc, [ 454 | ['state=CO&state=CO', qsErrIds.duplicatePkColumnsNotAllowed], 455 | ['state>CO', qsErrIds.equalsRequired], 456 | ['state=CO', qsErrIds.missingPkColumns], 457 | ['notAColumn', qsErrIds.pkColumnRequired], 458 | ]), 459 | ]) 460 | }) 461 | }) 462 | }) 463 | 464 | //-------------// 465 | // Helper Fxns // 466 | //-------------// 467 | 468 | const omitDateHeader = 
mapValuesWithKey((val, key) => { 469 | return key === 'headers' ? fp.omit('date', val) : val 470 | }) 471 | 472 | const getResponse = (full) => { 473 | return fp.has('response.toJSON', full) 474 | ? full.response.toJSON() 475 | : fp.invoke('toJSON', full) || full 476 | } 477 | 478 | const cleanFullResponse = fp.flow( 479 | getResponse, 480 | fp.omit('request'), 481 | fp.omitBy(fp.isUndefined), 482 | omitDateHeader 483 | ) 484 | 485 | const allowHttpErrors = (err) => { 486 | if (err.statusCode) return err 487 | throw err 488 | } 489 | 490 | function getRequestPromiseTransformed(defaultOpts) { 491 | return (opts) => 492 | rp(getOptions(fp.assign(defaultOpts, opts))) 493 | .catch(allowHttpErrors) 494 | .then(cleanFullResponse) 495 | } 496 | 497 | function getOptions(argsObj) { 498 | let { qss } = argsObj 499 | 500 | const { port, qs, uri } = argsObj 501 | 502 | if (qss && qs) throw new Error('qs && qss cannot both be defined') 503 | 504 | qss = qss ? '?' + encodeURIComponent(qss) : '' 505 | return fp.assign( 506 | { 507 | uri: `http://localhost:${port}/${uri}${qss}`, 508 | json: true, 509 | resolveWithFullResponse: true, 510 | }, 511 | fp.omit(['uri', 'qss'], argsObj) 512 | ) 513 | } 514 | 515 | function getQueryStringsShouldResultInErrors() { 516 | return fp.curry(async (rpt, pairArr) => { 517 | await bPromise.all(fp.map(fp.spread(testQs), pairArr)) 518 | 519 | // scoped helper fxns 520 | async function testQs(qss, anErrId) { 521 | await rpt({ qss: qss }).should.eventually.have.nested.property( 522 | 'body.id', 523 | anErrId 524 | ) 525 | } 526 | }) 527 | } 528 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Sqlite To Rest 2 | 3 | Koa routing middleware allowing you to expose a sqlite database via RESTful CRUD 4 | 5 | 6 | 7 | 8 | ## Table of Contents 9 | 10 | - [Why build it?](#why-build-it) 11 | - [Features](#features) 12 | - 
[Limitations](#limitations) 13 | - [Tutorial](#tutorial) 14 | - [CLI](#cli) 15 | - [API](#api) 16 | - [RESTful CRUD Operations](#restful-crud-operations) 17 | - [Reference](#reference) 18 | - [Test](#test) 19 | 20 | 21 | 22 | ## Why build it? 23 | 24 | Mostly because I wanted to dig deeper into node web server code, but also 25 | because I haven't jumped onto the NoSQL bandwagon and think that web APIs are 26 | extremely useful. The result is a modest attempt at automating the CRUD 27 | boilerplate that every developer hates, while following the specs to make API 28 | consumption intuitive. I chose sqlite to keep the database side of things 29 | simple, with the intent that the API isn't serving heavy loads. 30 | 31 | ## Features 32 | 33 | - Spec compliant CRUD RESTful API to an existing database's tables and views 34 | - GET utilizes [JSONStream](https://github.com/dominictarr/JSONStream) so the 35 | entire response is not held in memory, allowing for arbitrarily 36 | large responses. 37 | - Range requests with the custom range unit 'rows' can be used to GET specific 38 | rows. While compliant with rfc7233, the syntax and semantics were kept 39 | extremely similar to byte-ranges. 40 | - The server can configure a maximum request range per table since the amount 41 | of data per row will vary per-table. 42 | - The server can also configure whether to send the content-range header 43 | in a HEAD request. This allows the author to save the server from 44 | unnecessarily calculating the count on a table that is known to be 45 | very large. 46 | - Custom request header 'order' and conditional response header 'accept-order' 47 | exposes row sorting by column with optional ascending and 48 | descending specifiers 49 | - The API enforces correct usage, while sending developer-friendly error 50 | messages upon 4xx errors. 51 | - Comes with a friendly CLI to create a bare-bones koa server to get you up and 52 | running quickly. 
53 | 54 | ## Limitations 55 | 56 | - All tables must use primary keys. The next limitation explains why. 57 | - In effort to mitigate damage, unsafe methods only allow modification of 58 | single rows. This is enforced by matching the query parameters with the 59 | 'primary key' columns - friendly errors will tell you if you called an 60 | unsafe method incorrectly. 61 | - No built-in API key management. The library as-is can only serve 62 | trusted consumers. 63 | - It's sqlite. This library is not meant for clustering or large workloads. 64 | See ['Situations Where A Client/Server RDBMS May Work 65 | Better'](https://www.sqlite.org/whentouse.html) for details. 66 | - This is my foray into reading rfc's and working with web server libraries 67 | (koa and middleware in general). I have tests and feel confident in my 68 | comprehension of the concepts, but the code is not the prettiest. 69 | - No friendly data validation currently. Right now contextless 500 statuses 70 | are returned if data doesn't pass constraints, and I don't have tests 71 | ensuring consistent behavior around data validation. 72 | 73 | ## Tutorial 74 | 75 | [This tutorial](https://github.com/olsonpm/sqlite-to-rest/blob/dev/docs/tutorial.md) 76 | will walk you through 77 | 78 | 1. Creating an initial database 79 | 2. Using sqlite-to-rest's CLI to create a bare-bones koa server 80 | 3. Walking through some curl commands to test the server's CRUD RESTful api. 81 | 82 | ## CLI 83 | 84 | By installing this library globally, you receive access to `sqlite-to-rest`. 85 | 86 | The CLI currently contains one command `generate-skeleton` which creates an 87 | initial bare-bones koa server from an existing sqlite database. This should 88 | help you get started. 89 | 90 | See `sqlite-to-rest --help` for more info. 91 | 92 | ## API 93 | 94 | `require('sqlite-to-rest')` returns an object with two properties.
95 | 96 | - **generateSkeleton**: [madonna-function](https://github.com/olsonpm/madonna-function) 97 | -> promise(undefined) 98 | This will usually be called from the CLI but is also made 99 | available via the js API. Its purpose is to generate a barebones koa 100 | server to get you up and running. In the directory it will: 101 | 102 | 1. Run `npm init -f` if a package.json doesn't exist 103 | 2. Output the generated koa server named 'skeleton.js' 104 | 3. Install and save the dependencies required to run the server 105 | 106 | It takes two properties 107 | 108 | - **dir**: [`isLadenString`](https://github.com/olsonpm/madonna-fp#custom-to-this-library) 109 | [`isDirectory`](#isdirectory) 110 | Directory to generate the koa server. 111 | 112 | - **dbPath** _optional_: [`isSqliteFile`](#issqlitefile) 113 | Path to your sqlite3 database. 114 | 115 | ```js 116 | // example 117 | sqliteToRest 118 | .generateSkeleton({ 119 | dir: beerApiDir, 120 | dbPath: 'path/to/your/db.sqlite3', 121 | }) 122 | .then(() => { 123 | /* skeleton.js is ready to be ran */ 124 | }) 125 | ``` 126 | 127 | - **getSqliteRouter**: [madonna-function](https://github.com/olsonpm/madonna-function) 128 | -> promise([koa-router](https://github.com/alexmingoia/koa-router/tree/master)) 129 | This function generates the RESTful CRUD routing and returns the modified 130 | koa-router instance. 131 | 132 | It takes two properties 133 | 134 | - **dbPath**: [`isSqliteFile`](#issqlitefile) 135 | Path to your sqlite3 database. 136 | 137 | - **config** _optional_: A [routing config object](#routing-config-object) 138 | 139 | ```js 140 | // example 141 | const app = new require('koa')(), 142 | dbPath = 'path/to/your/db.sqlite3' 143 | 144 | getSqliteRouter({ dbPath }).then((router) => { 145 | app.use(router.routes()) 146 | // ... 
147 | }) 148 | ``` 149 | 150 | ## RESTful CRUD Operations 151 | 152 | The following is a list of the available crud operations made available by the 153 | RESTful API in the form of pseudo examples. All assume a beer table with two 154 | columns `id INTEGER PRIMARY KEY` and `name` which is nullable. 155 | 156 | As noted in [limitations](#limitations), Be aware that unsafe methods (DELETE 157 | and POST) can only affect one row at a time. 158 | 159 | - GET 160 | This allows for the most variation. [Click here](#get-query-operators) for 161 | all available query operators. Keep in mind the following examples ignore 162 | proper [query encoding](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/encodeURIComponent) 163 | 164 | Headers may be specified right below urls 165 | 166 | - **/beer** 167 | Requests for all rows 168 | 169 | - **/beer?id=1** 170 | Where id = 1 171 | 172 | - **/beer** 173 | `range: rows=0-2` 174 | First three rows 175 | 176 | - **/beer** 177 | `range: rows=-5` 178 | Last five rows 179 | 180 | - **/beer** 181 | `range: rows=0-` 182 | As many rows as the server is able to provide, which in practice will be 183 | the smaller of [`maxRange`](#tabular-config-object) and total row count. 184 | 185 | - **/beer** 186 | `range: rows=1-` 187 | As many rows as the server is able to provide, starting from row 1. 188 | 189 | - **/beer** 190 | `order: name` 191 | Ordered by name ascending 192 | 193 | - **/beer** 194 | `order: name desc` 195 | Ordered by name descending 196 | 197 | - **/beer** 198 | `order: name desc,id` 199 | Contrived, but orders first by name descending, and in the case of a tie 200 | by id ascending. 
201 | 202 | - **/beer?id>1** 203 | Where id > 1 204 | 205 | - **/beer?id>=2&id<5** 206 | Where id >= 2 and id < 5 207 | 208 | - **/beer?name_NOTNULL** 209 | Where name is not null 210 | 211 | - **/beer?name_ISNULL** 212 | Where name is null 213 | 214 | - **/beer?id!=5&name_LIKE'Spotted%'** 215 | Where id != 5 and name is LIKE "Spotted%" (ignore quotes) 216 | 217 | - **/beer?id>=1&id<10&name_LIKE'Avery%'** 218 | `order: name desc,id` `range: rows=2-4` 219 | Contrived for sake of example. 220 | Get beer with ids between 1 and 9 inclusive, with name like "Avery%", 221 | ordered first by name descending then by id ascending, getting the third 222 | through 5th rows of the result. Or in SQL: 223 | 224 | ```sql 225 | SELECT * 226 | FROM beer 227 | WHERE id >= 1 228 | AND id <10 229 | AND name LIKE 'Avery%' 230 | ORDER BY name desc, id 231 | LIMIT 3 OFFSET 2 232 | ``` 233 | 234 | - DELETE 235 | Requires a query string with all primary keys set equal to a value. This 236 | enforces a maximum deletion of a single row. 237 | 238 | - **/beer?id=1** 239 | Deletes beer with id=1 240 | 241 | _if the beer table instead had a composite primary key of both id and name_ 242 | 243 | - **/beer?id=1&name='Avery IPA'** 244 | 245 | * POST create 246 | Must not pass a query string. If a query string is passed, then POST update 247 | is assumed. All POST requests must pass the header 248 | `content-type: application/json`. 249 | 250 | Keep in mind the body must contain all non-nullable and non INTEGER 251 | PRIMARY KEY columns. A 400 response will be sent otherwise indicating what 252 | fields were missed. Nullable columns will default to null and INTEGER 253 | PRIMARY KEY columns will automatically increment per 254 | [sqlite3 specifications](https://www.sqlite.org/autoinc.html). 
255 | 256 | Json data will be specified right below urls 257 | 258 | - **/beer** 259 | `{"id":1,"name":"Serendipity"}` 260 | Creates a beer with id = 1 and name = 'Serendipity' 261 | 262 | - **/beer** 263 | `{"id":1}` 264 | Creates a beer with id = 1 and name = NULL 265 | 266 | - **/beer** 267 | `{"name":"Serendipity"}` 268 | Creates a beer with id set to the next incremented value per 269 | [sqlite3 INTEGER PRIMARY KEY specifications](https://www.sqlite.org/autoinc.html) 270 | 271 | - **/beer** 272 | `{}` 273 | Creates a beer with id incremented, and name set to NULL 274 | 275 | * POST update 276 | Must contain a query string. Without a query string, POST create is assumed. 277 | As with POST create, the header `content-type: application/json` 278 | is mandatory. 279 | 280 | The query string must contain all primary keys to ensure only a single row 281 | gets updated. If incorrect values are passed, a 400 will be returned 282 | listing the offending keys. 283 | 284 | The request body must contain a non-empty object and must contain valid 285 | keys corresponding to column names. 286 | 287 | Json data will be specified right below urls 288 | 289 | - **/beer?id=1** 290 | `{"id":2}` 291 | Updates beer with id of 1 setting it to two. 292 | 293 | - **/beer?id=1** 294 | `{"name":"Two Women"}` 295 | Updates beer with id of 1 setting its name to Two Women. 296 | 297 | _if the beer table instead had a composite primary key of both id and name_ 298 | 299 | - **/beer?id=1&name=Two Women** 300 | `{"name":"Moon Man"}` 301 | Updates beer where id is one and name is Two Women, setting name to Moon Man 302 | 303 | ## Reference 304 | 305 | #### isSqliteFile 306 | 307 | - Just checks the first 16 bytes of the file to see if it equals 308 | 'sqlite format 3' followed by a null byte. 
309 | 310 | #### isDirectory 311 | 312 | - Returns the result of [fs.statsSync](https://nodejs.org/api/fs.html#fs_fs_statsync_path) 313 | followed by [.isDirectory](https://nodejs.org/api/fs.html#fs_class_fs_stats) 314 | 315 | #### isFile 316 | 317 | - Returns the result of [fs.statsSync](https://nodejs.org/api/fs.html#fs_fs_statsync_path) 318 | followed by [.isFile](https://nodejs.org/api/fs.html#fs_class_fs_stats) 319 | 320 | ### GET query operators 321 | 322 | Query conditions must be delimited by ampersands e.g. `id>5&name!=Spotted Cow` 323 | 324 | Binary operators (require a value after) 325 | **=** 326 | **!=** 327 | **\>=** 328 | **<=** 329 | **\>** 330 | **<** 331 | **\_LIKE** 332 | 333 | - **\_LIKE** is special in that it must have opening and closing single 334 | quotes. If not, a 400 error will be thrown showing where the parsing was 335 | unable to complete and what was expected. See [RESTful CRUD Operations](#restful-crud-operations) for examples. 336 | 337 | Unary operators (must follow a column name) 338 | **\_ISNULL** 339 | **\_NOTNULL** 340 | 341 | #### Router config object 342 | 343 | [`isLadenPlainObject`](https://github.com/olsonpm/madonna-fp#custom-to-this-library) 344 | The purpose of this object is to provide generic configuration for the sqlite 345 | router. The following properties are allowed: 346 | 347 | - **prefix**: [`isLadenString`](https://github.com/olsonpm/madonna-fp#custom-to-this-library) 348 | The string passed to [`koa-router's`](https://github.com/alexmingoia/koa-router/tree/master) 349 | [`prefix`](https://github.com/alexmingoia/koa-router/tree/master#new-routeropts) 350 | constructor option. For example, the skeleton server doesn't specify 351 | a prefix, allowing the beer api to be hit directly from the domain root 352 | `http://localhost:8085/beer`. If you set prefix to '/api', then you 353 | would instead send requests to `http://localhost:8085/api/beer`. 
354 | 355 | - **allTablesAndViews**: A [tabular configuration object](#tabular-config-object) 356 | The configurations specified in this object will apply for all tables and 357 | views, optionally overridden by the `tablesAndViews` property. 358 | 359 | - **tablesAndViews**: [`isLadenPlainObject`](https://github.com/olsonpm/madonna-fp#custom-to-this-library) 360 | The object passed **must** have keys matching the database column or view 361 | names. If not, a friendly error message will be thrown. The values for each 362 | table and view must be a [tabular configuration object](#tabular-config-object) 363 | 364 | #### Tabular config object 365 | 366 | [`isLadenPlainObject`](https://github.com/olsonpm/madonna-fp#custom-to-this-library) 367 | This object represents configurations that can be set for either views 368 | or tables. It allows the following properties: 369 | 370 | - **maxRange**: [`isPositiveNumber`](https://github.com/olsonpm/madonna-fp#custom-to-this-library) 371 | _Application default_: 1000 372 | This is the maximum range your server will allow requests for. If a GET 373 | request comes in with no range header, the spec assumes they want the entire 374 | resource. If the number of rows resulting in that GET is greater than 375 | maxRange, then a 416 status is returned with the custom header 376 | [`max-range`](#custom-headers). The application default is purposefully 377 | conservative in hopes that authors will set maxRange according to 378 | their needs. 379 | _Note that 'Infinity' is a valid positive number._ 380 | 381 | - **flags**: [`isLadenArray`](https://github.com/olsonpm/madonna-fp#custom-to-this-library) 382 | Currently the only flag accepted is the string 'sendContentRangeInHEAD'. 383 | When set, HEAD requests will return the available content range in the form 384 | `content-range: */`. 
The reason it's configurable is that 385 | calculating max-range may be more work than it's worth, depending on the load 386 | of the server and the size of your tables. 387 | 388 | ### Custom Headers 389 | 390 | #### Request 391 | 392 | - **order**: This header is only defined for GET, and can be thought of as 393 | the sql ORDER BY equivalent. It must contain a comma-delimited list of column names, 394 | each optionally followed by a space and the strings 'asc' or 'desc'. If 395 | incorrect order values are sent, a 400 response will indicate which ones. 396 | 397 | #### Response 398 | 399 | These aren't all necessarily custom, but all their usage falls outside what's 400 | defined in the spec and thus needs clarification. 401 | 402 | - **GET** 403 | 404 | - **max-range**: This header is returned when the requested number of rows 405 | surpasses the configured [`maxRange`](#tabular-config-object). Note the 406 | request might not specify the range header, but the number of rows 407 | resulting in that resource will still be checked. 408 | 409 | - **content-range**: [rfc7233](https://tools.ietf.org/html/rfc7233#section-4.2) 410 | states 411 | 412 | > only the 206 (Partial Content) and 416 (Range Not Satisfiable) status 413 | > codes describe a meaning for Content-Range. 414 | 415 | When sqlite-to-rest responds with a 200 status code, the content-range 416 | header is sent with the 206 format of `<first>-<last>/<count>`. 417 | 418 | When a request is sent without a range header and the number of resulting 419 | rows surpasses [`maxRange`](#tabular-config-object), a 400 is returned 420 | with content-range set in the 416 format of `*/<count>`. 421 | 422 | Note this header may be returned in a HEAD response. 423 | 424 | - **accept-order**: This will be returned if the request header `order` 425 | had bad syntax or specified incorrect column names. For details, refer 426 | to HEAD -> accept-order below.
427 | 428 | - **HEAD** 429 | 430 | - **accept-order**: `accept-order` is just a comma-delimited list of the 431 | requested table columns, intended to tell the client the valid columns 432 | able to be used in the request header `order`. 433 | 434 | - **max-range**: The configured [`maxRange`](#tabular-config-object) 435 | of the requested table. 436 | 437 | - **content-range**: This header will only be sent if the table has been 438 | configured with the flag [`sendContentRangeInHEAD`](#tabular-config-object). 439 | In that case, content-range is set to the 416 format of `*/<count>`. 440 | 441 | ## Test 442 | 443 | `npm test` 444 | --------------------------------------------------------------------------------