├── .eslintrc ├── test ├── sql │ ├── postgres.sql │ ├── chinook.sql │ ├── queries.sql │ └── create-table.sql └── hugsql.test.js ├── resources └── templates │ └── nameless.handlebars ├── .travis.yml ├── package.json ├── src ├── util.js ├── cli.js └── compiler.js ├── .gitignore ├── README.md └── LICENSE.md /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "airbnb-base", 3 | "env": { 4 | "jest": true 5 | }, 6 | "rules": { 7 | "no-restricted-syntax": "off", 8 | "no-console": "off" 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /test/sql/postgres.sql: -------------------------------------------------------------------------------- 1 | /** 2 | * @function postgressExample 3 | * @param {String} $1 the first name of the user 4 | * @param {String} $2 the last name of the user 5 | */ 6 | SELECT $1::text as first_name from users where last_name = $2::text; 7 | -------------------------------------------------------------------------------- /resources/templates/nameless.handlebars: -------------------------------------------------------------------------------- 1 | 2 | /** 3 | * Automatically generated by HugSql 🤗 4 | */ 5 | 6 | {{#sections}} 7 | {{&docstring}} 8 | module.exports.{{functionName}} = ({{parameters}}) => ({ 9 | query: `{{&query}}`, 10 | parameters: [{{sortedParameters}}] 11 | }); 12 | 13 | {{/sections}} 14 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: node_js 2 | node_js: 3 | - "10" 4 | 5 | jobs: 6 | include: 7 | - stage: Tests 8 | script: 9 | - npm test 10 | #- npx semantic-release --dry-run --generate-notes 11 | 12 | - stage: release 13 | if: branch = master 14 | deploy: 15 | provider: script 16 | skip_cleanup: true 17 | script: npx semantic-release 18 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@carocad/hugsql", 3 | "version": "0.0.0-development", 4 | "description": "Node.js library for embracing SQL", 5 | "author": "Camilo Roca", 6 | "homepage": "https://github.com/carocad/hugsql", 7 | "repository": { 8 | "type": "git", 9 | "url": "https://github.com/carocad/hugsql" 10 | }, 11 | "license": "LGPL-3.0-only", 12 | "bin": { 13 | "hugsql": "src/cli.js" 14 | }, 15 | "scripts": { 16 | "test": "npx eslint --fix . 
&& jest --collectCoverage" 17 | }, 18 | "dependencies": { 19 | "docopt": "^0.6.2", 20 | "handlebars": "^4.7.3" 21 | }, 22 | "devDependencies": { 23 | "eslint": "^6.3.0", 24 | "eslint-config-airbnb-base": "^14.0.0", 25 | "eslint-plugin-import": "^2.18.2", 26 | "jest": "^24.9.0", 27 | "semantic-release": "^17.0.4" 28 | }, 29 | "engines": { 30 | "node": ">=10.10" 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /test/hugsql.test.js: -------------------------------------------------------------------------------- 1 | 2 | const path = require('path'); 3 | const fs = require('fs'); 4 | const { Linter, CLIEngine } = require('eslint'); 5 | const hugsql = require('../src/compiler'); 6 | const { recursiveReaddirSync } = require('../src/util'); 7 | 8 | const eslintrc = fs.readFileSync(`${__dirname}/../.eslintrc`, 'utf-8'); 9 | const config = JSON.parse(eslintrc); 10 | 11 | /** 12 | * compile all sql files found in the test/sql directory and validate them 13 | * with EsLint 14 | */ 15 | for (const filepath of recursiveReaddirSync(`${__dirname}/sql/`)) { 16 | const linter = new Linter(); 17 | const cli = new CLIEngine(config); 18 | 19 | if (path.extname(filepath) === '.sql') { 20 | test(`should return valid JS code from ${filepath}`, () => { 21 | const output = hugsql.compile(filepath, false); 22 | 23 | const fileConfig = cli.getConfigForFile(filepath); 24 | const result = linter.verifyAndFix(output, fileConfig, { 25 | filename: filepath, 26 | }); 27 | 28 | const errors = result.messages.filter((msg) => msg.fatal); 29 | expect(errors.length).toBe(0); 30 | }); 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /src/util.js: -------------------------------------------------------------------------------- 1 | 2 | const path = require('path'); 3 | const fs = require('fs'); 4 | 5 | 6 | /** 7 | * Returns a lazy sequence of regex executions over text 8 | * @param {String} text 9 | * @param {RegExp} regex 10 | * @yield {IterableIterator>} 11 | */ 12 | function* allRegexMatches(text, regex) { 13 | while (true) { 14 | const section = regex.exec(text); 15 | if (section === null) { 16 | return; 17 | } 18 | 19 | yield [...section]; 20 | } 21 | } 22 | 23 | /** 24 | * return the Set s1 without the elements from Set s2 25 | * @param {Set<*>} s1 26 | * @param {Set<*>} s2 27 | * @return {Set<*>} 28 | */ 29 | function difference(s1, s2) { 30 | const result = new Set(); 31 | for (const value of s1) { 32 | if (!s2.has(value)) { 33 | result.add(value); 34 | } 35 | } 36 | return result; 37 | } 38 | 39 | 40 | /** 41 | * Synchronously list all files in a directory recursively 42 | * @param {String} pathlike to the sql target 43 | */ 44 | function* recursiveReaddirSync(pathlike) { 45 | const stat = fs.lstatSync(pathlike); 46 | if (stat.isFile()) { 47 | yield pathlike; 48 | } else { 49 | for (const child of fs.readdirSync(pathlike, { withFileTypes: true })) { 50 | if (child.isDirectory()) { 51 | yield* recursiveReaddirSync(path.resolve(pathlike, child.name)); 52 | } else { // file otherwise 53 | yield path.resolve(pathlike, child.name); 54 | } 55 | } 56 | } 57 | } 58 | 59 | module.exports = { 60 | allRegexMatches, 61 | difference, 62 | recursiveReaddirSync, 63 | }; 64 | -------------------------------------------------------------------------------- /test/sql/chinook.sql: -------------------------------------------------------------------------------- 1 | /** 2 | * This query finds all artists in the chinook sample db 3 | * 
@function findAllArtists 4 | */ 5 | select * from artists; 6 | /** 7 | * @function findAllCustomers 8 | */ 9 | select * from customers; 10 | 11 | 12 | 13 | /** 14 | * Fetch all the albums from a specific artist ... 15 | * you better pick a good one ;) 16 | * @function findAllAlbums 17 | * @param {Number} artistId - an artist id 18 | * @param {Number} albumId - an album id 19 | */ 20 | select * from albums where "AlbumId" = :albumId and "ArtistId" = :artistId; 21 | 22 | /** 23 | * Account transaction 24 | * @function complexTransaction 25 | */ 26 | BEGIN TRANSACTION; 27 | 28 | UPDATE accounts 29 | SET balance = balance - 1000 30 | WHERE account_no = 100; 31 | 32 | UPDATE accounts 33 | SET balance = balance + 1000 34 | WHERE account_no = 200; 35 | 36 | INSERT INTO account_changes(account_no,flag,amount,changed_at) 37 | VALUES(100,'-',1000,datetime('now')); 38 | 39 | INSERT INTO account_changes(account_no,flag,amount,changed_at) 40 | VALUES(200,'+',1000,datetime('now')); 41 | 42 | COMMIT; 43 | 44 | /** 45 | * Check that this one is also properly parsed 46 | * @function complexTransactionDuplicate 47 | */ 48 | BEGIN TRANSACTION; 49 | 50 | UPDATE accounts 51 | SET balance = balance - 1000 52 | WHERE account_no = 100; 53 | 54 | UPDATE accounts 55 | SET balance = balance + 1000 56 | WHERE account_no = 200; 57 | 58 | INSERT INTO account_changes(account_no,flag,amount,changed_at) 59 | VALUES(100,'-',1000,datetime('now')); 60 | 61 | INSERT INTO account_changes(account_no,flag,amount,changed_at) 62 | VALUES(200,'+',1000,datetime('now')); 63 | 64 | COMMIT; 65 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | 9 | # Diagnostic reports (https://nodejs.org/api/report.html) 10 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 11 | 12 | # Runtime data 13 | pids 14 | *.pid 15 | *.seed 16 | *.pid.lock 17 | 18 | # Directory for instrumented libs generated by jscoverage/JSCover 19 | lib-cov 20 | 21 | # Coverage directory used by tools like istanbul 22 | coverage 23 | *.lcov 24 | 25 | # nyc test coverage 26 | .nyc_output 27 | 28 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 29 | .grunt 30 | 31 | # Bower dependency directory (https://bower.io/) 32 | bower_components 33 | 34 | # node-waf configuration 35 | .lock-wscript 36 | 37 | # Compiled binary addons (https://nodejs.org/api/addons.html) 38 | build/Release 39 | 40 | # Dependency directories 41 | node_modules/ 42 | jspm_packages/ 43 | 44 | # TypeScript v1 declaration files 45 | typings/ 46 | 47 | # TypeScript cache 48 | *.tsbuildinfo 49 | 50 | # Optional npm cache directory 51 | .npm 52 | 53 | # Optional eslint cache 54 | .eslintcache 55 | 56 | # Optional REPL history 57 | .node_repl_history 58 | 59 | # Output of 'npm pack' 60 | *.tgz 61 | 62 | # Yarn Integrity file 63 | 64 | # dotenv environment variables file 65 | .env 66 | .env.test 67 | 68 | # parcel-bundler cache (https://parceljs.org/) 69 | .cache 70 | 71 | # next.js build output 72 | .next 73 | 74 | # nuxt.js build output 75 | .nuxt 76 | 77 | # vuepress build output 78 | .vuepress/dist 79 | 80 | # Serverless directories 81 | .serverless/ 82 | 83 | # FuseBox cache 84 | .fusebox/ 85 | 86 | # DynamoDB Local files 87 | .dynamodb/ 88 | *.sql.js 89 | /.idea/ 90 | /resources/chinook.db 91 | 
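For reference, a sketch of what the compiler emits for `test/sql/chinook.sql` when run without `--labeled`. This is illustrative only and not a file in this repository; it is inferred from `resources/templates/nameless.handlebars` and the README example, so the exact whitespace may differ:

```js
/**
 * Automatically generated by HugSql 🤗
 */

/**
 * This query finds all artists in the chinook sample db
 */
module.exports.findAllArtists = () => ({
  query: `select * from artists;`,
  parameters: []
});

/**
 * Fetch all the albums from a specific artist ...
 * you better pick a good one ;)
 * @param {Number} artistId - an artist id
 * @param {Number} albumId - an album id
 */
module.exports.findAllAlbums = (artistId, albumId) => ({
  // named parameters are replaced with '?' placeholders ...
  query: `select * from albums where "AlbumId" = ? and "ArtistId" = ?;`,
  // ... and the values are listed in the order the placeholders appear in the query
  parameters: [albumId, artistId]
});
```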
-------------------------------------------------------------------------------- /src/cli.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const path = require('path'); 4 | const fs = require('fs'); 5 | const { docopt, DocoptLanguageError, DocoptExit } = require('docopt'); 6 | const { recursiveReaddirSync } = require('./util'); 7 | const hugsql = require('./compiler'); 8 | const { version, description } = require('../package.json'); 9 | 10 | const docstring = ` 11 | ${description} 12 | 13 | Usage: 14 | hugsql [--labeled] 15 | hugsql (-h | --help) 16 | hugsql --version 17 | 18 | Options: 19 | --labeled Allows '$name' parameters in Sql. By default hugsql replaces 20 | all Sql parameters with '?'. 21 | 22 | Examples: 23 | hugsql ./resources 24 | hugsql ./resources --labeled 25 | `; 26 | 27 | try { 28 | // parse user arguments 29 | const options = docopt(docstring, { 30 | version, 31 | help: true, 32 | exit: false, 33 | }); 34 | // find all files in the provided dir 35 | const files = recursiveReaddirSync(path.resolve(options[''])); 36 | // compile a js file for each sql file found 37 | for (const filepath of files) { 38 | if (path.extname(filepath) === '.sql') { 39 | const output = hugsql.compile(filepath, options['--labeled']); 40 | const filename = path.basename(filepath, '.sql'); 41 | const filedir = path.dirname(filepath); 42 | fs.writeFileSync(path.join(filedir, `${filename}.sql.js`), output); 43 | } 44 | } 45 | } catch (error) { 46 | // error thrown by docopt 47 | if (error instanceof DocoptLanguageError || error instanceof DocoptExit) { 48 | console.error('Error parsing user arguments. Please check the usage section below and try again'); 49 | console.log(docstring); 50 | } else { 51 | console.error(error.toString()); 52 | } 53 | process.exit(1); 54 | } 55 | -------------------------------------------------------------------------------- /test/sql/queries.sql: -------------------------------------------------------------------------------- 1 | 2 | /** 3 | * @function findAllWayNodes 4 | * used for a graph representation of the road network 5 | */ 6 | select * from way_node join node on way_node.node = node.id; 7 | 8 | 9 | /** 10 | * @function findArcConnection 11 | */ 12 | select * from arc join node as source on arc.src = source.id 13 | join node as destination on arc.dst = destination.id; 14 | 15 | 16 | /** 17 | * @function findDestination 18 | * @param {Number} source the id of the node to start searching from 19 | * @param {Number} destination the id of the node to search for 20 | * src 2708331052, dst 561065 21 | * "sends a radar beacon to know the distance to the destination" 22 | * This query is only useful to know the cost of the shortest path 23 | * until the destination. 
We can use it to display it to the user 24 | * and as a way to stop the graph_traversal recursive query below 25 | */ 26 | with recursive 27 | beacon(src, dst, cost) as ( 28 | values (null, :source, 0) 29 | union all 30 | select arc.src, arc.dst, round(arc.distance + beacon.cost) as cost 31 | from beacon 32 | join arc on arc.src = beacon.dst 33 | order by cost 34 | limit 100000 35 | ) 36 | select * from beacon where beacon.dst = :destination limit 1; 37 | 38 | 39 | /** 40 | * @function findShortestPath 41 | * @param {Number} source the id of the node to start searching from 42 | * @param {Number} destination the id of the node to search for 43 | * @param {Number} radious the maximum distance to stop searching for 'destination' 44 | * compute the shortest path from source to destination using 45 | * a plain dijkstra algorithm; done here in several steps due 46 | * to SQL restrictions 47 | */ 48 | with recursive 49 | -- traverse the graph until the cost to target is reached. At that point we 50 | -- would have a big traversal tree with repeated nodes but different weights 51 | graph_traversal(src, dst, cost) as ( 52 | values (null, :source, 0) 53 | union all 54 | select arc.src, arc.dst, round(arc.distance + graph_traversal.cost) as cost 55 | from graph_traversal 56 | join arc on arc.src = graph_traversal.dst 57 | where round(arc.distance + graph_traversal.cost) <= :radious 58 | order by cost 59 | ), 60 | -- perform a proper dijkstra by keeping only the nodes from the tree with the 61 | -- minimum cost 62 | dijkstra as ( 63 | select graph_traversal.src, graph_traversal.dst, min(graph_traversal.cost) as cost 64 | from graph_traversal 65 | group by graph_traversal.dst 66 | order by cost 67 | ), 68 | -- compute the shortest path by backtracking from the destination to the 69 | -- source 70 | shortest_path as ( 71 | select * from dijkstra where dijkstra.dst = :destination 72 | union all 73 | select dijkstra.* 74 | from shortest_path, dijkstra 75 | where dijkstra.dst = shortest_path.src 76 | ) 77 | -- the src of each row is redundant so we can remove it ;) 78 | select shortest_path.dst, shortest_path.cost 79 | from shortest_path; 80 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # HugSql 2 | 3 | Node.js library for embracing SQL. 4 | 5 | [![Build Status](https://travis-ci.com/carocad/hugsql.svg?branch=master)](https://travis-ci.com/carocad/hugsql) 6 | [![npm version](https://badge.fury.io/js/%40carocad%2Fhugsql.svg)](https://badge.fury.io/js/%40carocad%2Fhugsql) 7 | 8 | HugSql is my own adaptation of the wonderful [HugSql](https://github.com/layerware/hugsql) 9 | library for Clojure. 10 | 11 | - SQL is the right tool for the job when working with a relational database! 12 | - HugSQL uses JsDoc style documentation in your SQL files to 13 | define (at compile time) database functions in your Javascript Modules, 14 | creating a clean separation of Javascript and SQL code 15 | - HugSQL relies on battle tested SQL client implementations for Node.js 16 | to provide proper value replacement and escaping with security 17 | 18 | Stop using complicated frameworks that transform your queries in unexpected ways. Instead 19 | just use plain Sql statements and enjoy the benefits of *What You See Is What You Get*. 
20 | 21 | As an added benefit you can directly use all the tooling available for your Sql dialect 22 | and the one available for Node.js without compromising on quality :) 23 | 24 | ### Usage 25 | 26 | - install it 27 | ```Shell 28 | npm install --save-dev @carocad/hugsql 29 | ``` 30 | 31 | - Document your SQL statements with JsDoc annotations. HugSql will then create 32 | Javascript functions with the appropriate documentation and parameters. For example, in a `src/example.sql` file: 33 | ```SQL 34 | /** 35 | * Fetch all the albums from a specific artist ... 36 | * you better pick a good one ;) 37 | * @function findAllAlbums 38 | * @param {Number} artistId - an artist id 39 | * @param {Number} albumId - an album id 40 | */ 41 | select * from albums where "AlbumId" = :albumId and "ArtistId" = :artistId; 42 | ``` 43 | 44 | - run *hugsql* on the command line 45 | ```Shell 46 | # point it to the directory containing your *.sql files 47 | npx hugsql src/ 48 | ``` 49 | 50 | - you get an autogenerated `src/example.sql.js` file 51 | ```js 52 | const statements = { 53 | /** 54 | * Fetch all the albums from a specific artist ... 55 | * you better pick a good one ;) 56 | * 57 | * @param {Number} artistId - an artist id 58 | * @param {Number} albumId - an album id 59 | */ 60 | findAllAlbums: (artistId,albumId) => ({ 61 | query: 'select * from albums where "AlbumId" = ? and "ArtistId" = ?;', 62 | parameters: [ albumId,artistId ] 63 | }) 64 | } 65 | 66 | module.exports = statements 67 | ``` 68 | 69 | - then you can use it from your code (example using [MySql](https://github.com/mysqljs/mysql)) 70 | ```js 71 | const mysql = require('mysql'); 72 | // import your autogenerated code 73 | const statements = require('./src/example.sql.js'); 74 | 75 | const connection = mysql.createConnection({ ... }); 76 | 77 | const myFavouriteSongs = statements.findAllAlbums('Calle 13', 'Multi Viral'); 78 | connection.query(myFavouriteSongs.query, myFavouriteSongs.parameters, function (error, results, fields) { 79 | console.log('Here we kum: ', results); 80 | }) 81 | ``` 82 | 83 | - Enjoy :) 84 | 85 | For more examples, check the [example queries](./test/sql) used to test 86 | hugsql 87 | -------------------------------------------------------------------------------- /src/compiler.js: -------------------------------------------------------------------------------- 1 | 2 | const fs = require('fs'); 3 | const handlebars = require('handlebars'); 4 | const { difference, allRegexMatches } = require('./util'); 5 | 6 | const namelessTemplate = fs.readFileSync(`${__dirname}/../resources/templates/nameless.handlebars`, 'utf8'); 7 | 8 | const sectionRegex = /(\/\*\*.*?\*\/)\n*(.*?)(?=(\/\*)|$)/sg; 9 | const jsDocFunctionRegex = /@function (\w+)/; 10 | // $ is a valid Javascript identifier 11 | const jsDocParamRegex = /@param ({\w+} )?(\$?\w+)(.*)/g; 12 | const sqlParamRegex = / (:|@|\$)(\w+)/gi; 13 | 14 | 15 | /** 16 | * replaces all named parameters in sqlStatement with ? placeholders 17 | * @param {String} sqlStatement 18 | * @return {{query: string, sortedParameters: Array}} 19 | */ 20 | function anonymize(sqlStatement) { 21 | let counter = 0; 22 | const sortedParameters = []; 23 | const anonymized = sqlStatement.replace(sqlParamRegex, (match, symbol, name) => { 24 | sortedParameters[counter] = symbol === '$' ?
match.trim() : name; 25 | counter += 1; 26 | return ' ?'; 27 | }); 28 | return { 29 | sortedParameters, 30 | query: anonymized, 31 | }; 32 | } 33 | 34 | /** 35 | * Checks that the parameters of the jsDoc section are present in Sql and vice versa 36 | * @param {String} sqlStatement 37 | * @param {String} jsDoc 38 | * @return {Array} the names of the parameters (without :$@ symbols) 39 | */ 40 | function checkParameters(sqlStatement, jsDoc) { 41 | // $ is a valid identifier in Javascript so if we find it accept it as part of the name 42 | const sqlParameters = [...allRegexMatches(sqlStatement, sqlParamRegex)] 43 | .map(([match, symbol, name]) => (symbol === '$' ? match.trim() : name)); 44 | 45 | const jsDocParameters = [...allRegexMatches(jsDoc, jsDocParamRegex)] 46 | .reduce((result, [, , name]) => [...result, name], []); 47 | 48 | const sqlJsDifference = difference(new Set(sqlParameters), new Set(jsDocParameters)); 49 | if (sqlJsDifference.size !== 0) { 50 | throw new SyntaxError(`"${[...sqlJsDifference]}" parameters` 51 | + ` found in Sql statement:\n\n${sqlStatement}\n\nbut not in JsDoc` 52 | + ` section:\n${jsDoc}`); 53 | } 54 | 55 | const jsSqlDifference = difference(new Set(jsDocParameters), new Set(sqlParameters)); 56 | if (jsSqlDifference.size !== 0) { 57 | throw new SyntaxError(`"${[...jsSqlDifference]}" parameters` 58 | + ` found in JsDoc section:\n\n${jsDoc}\n\nbut not in Sql` 59 | + ` statement:\n${sqlStatement}`); 60 | } 61 | 62 | return jsDocParameters; 63 | } 64 | 65 | /** 66 | * Parse and check the content of an Sql file with JsDoc annotations. Yields 67 | * a sequence of objects that can be used for rendering with mustache 68 | * @param {String} fileContent 69 | * @param {Boolean} labeled 70 | * @yield {functionName: string, 71 | * sortedParameters: string, 72 | * docstring: string, 73 | * query: string, 74 | * parameters: Array} 75 | */ 76 | function* parseContent(fileContent, labeled) { 77 | for (const section of allRegexMatches(fileContent, sectionRegex)) { 78 | const [, docstringBlock, rawSqlStatement] = section; 79 | 80 | // ignore private statements 81 | if (!docstringBlock.includes('@private')) { 82 | // extract basic info on the current section 83 | const functionBlock = jsDocFunctionRegex.exec(docstringBlock); 84 | if (functionBlock === null) { 85 | throw new SyntaxError('Missing @function in docstring' 86 | + ` section ${docstringBlock}`); 87 | } 88 | const [jsDocLine, functionName] = functionBlock; 89 | 90 | // did the user forget to annotate anything ? 91 | const parameters = checkParameters(rawSqlStatement, docstringBlock); 92 | 93 | // normalize input data 94 | const { query, sortedParameters } = labeled === false ? anonymize(rawSqlStatement) : { 95 | query: rawSqlStatement, 96 | sortedParameters: parameters, 97 | }; 98 | 99 | if (labeled === true && parameters.some((param) => !param.startsWith('$'))) { 100 | throw new SyntaxError('Only \'$name\' parameters are allowed on --labeled mode.' 
101 | + `\nPlease rename these parameters: ${parameters} on the statement:\n\n${rawSqlStatement}`); 102 | } 103 | 104 | yield { 105 | functionName, 106 | parameters, 107 | sortedParameters, 108 | query: query.trim(), 109 | docstring: docstringBlock.replace(jsDocLine, ''), 110 | }; 111 | } 112 | } 113 | } 114 | 115 | /** 116 | * Parses a filepath and writes a file with the same name 117 | * but .sql.js extension out 118 | * 119 | * @param {String} filepath 120 | * @param {Boolean} labeled Whether to return arrays or objects in 121 | * generated Js functions 122 | * @return {String} a Js file with functions containing the Sql statements 123 | */ 124 | module.exports.compile = function compile(filepath, labeled) { 125 | const template = handlebars.compile(namelessTemplate); 126 | const fileContent = fs.readFileSync(filepath, 'utf8'); 127 | 128 | return template({ 129 | sections: [...parseContent(fileContent, labeled)], 130 | }); 131 | }; 132 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | GNU LESSER GENERAL PUBLIC LICENSE 2 | Version 3, 29 June 2007 3 | 4 | Copyright (C) 2007 Free Software Foundation, Inc. 5 | Everyone is permitted to copy and distribute verbatim copies 6 | of this license document, but changing it is not allowed. 7 | 8 | 9 | This version of the GNU Lesser General Public License incorporates 10 | the terms and conditions of version 3 of the GNU General Public 11 | License, supplemented by the additional permissions listed below. 12 | 13 | 0. Additional Definitions. 14 | 15 | As used herein, "this License" refers to version 3 of the GNU Lesser 16 | General Public License, and the "GNU GPL" refers to version 3 of the GNU 17 | General Public License. 18 | 19 | "The Library" refers to a covered work governed by this License, 20 | other than an Application or a Combined Work as defined below. 21 | 22 | An "Application" is any work that makes use of an interface provided 23 | by the Library, but which is not otherwise based on the Library. 24 | Defining a subclass of a class defined by the Library is deemed a mode 25 | of using an interface provided by the Library. 26 | 27 | A "Combined Work" is a work produced by combining or linking an 28 | Application with the Library. The particular version of the Library 29 | with which the Combined Work was made is also called the "Linked 30 | Version". 31 | 32 | The "Minimal Corresponding Source" for a Combined Work means the 33 | Corresponding Source for the Combined Work, excluding any source code 34 | for portions of the Combined Work that, considered in isolation, are 35 | based on the Application, and not on the Linked Version. 36 | 37 | The "Corresponding Application Code" for a Combined Work means the 38 | object code and/or source code for the Application, including any data 39 | and utility programs needed for reproducing the Combined Work from the 40 | Application, but excluding the System Libraries of the Combined Work. 41 | 42 | 1. Exception to Section 3 of the GNU GPL. 43 | 44 | You may convey a covered work under sections 3 and 4 of this License 45 | without being bound by section 3 of the GNU GPL. 46 | 47 | 2. Conveying Modified Versions. 
48 | 49 | If you modify a copy of the Library, and, in your modifications, a 50 | facility refers to a function or data to be supplied by an Application 51 | that uses the facility (other than as an argument passed when the 52 | facility is invoked), then you may convey a copy of the modified 53 | version: 54 | 55 | a) under this License, provided that you make a good faith effort to 56 | ensure that, in the event an Application does not supply the 57 | function or data, the facility still operates, and performs 58 | whatever part of its purpose remains meaningful, or 59 | 60 | b) under the GNU GPL, with none of the additional permissions of 61 | this License applicable to that copy. 62 | 63 | 3. Object Code Incorporating Material from Library Header Files. 64 | 65 | The object code form of an Application may incorporate material from 66 | a header file that is part of the Library. You may convey such object 67 | code under terms of your choice, provided that, if the incorporated 68 | material is not limited to numerical parameters, data structure 69 | layouts and accessors, or small macros, inline functions and templates 70 | (ten or fewer lines in length), you do both of the following: 71 | 72 | a) Give prominent notice with each copy of the object code that the 73 | Library is used in it and that the Library and its use are 74 | covered by this License. 75 | 76 | b) Accompany the object code with a copy of the GNU GPL and this license 77 | document. 78 | 79 | 4. Combined Works. 80 | 81 | You may convey a Combined Work under terms of your choice that, 82 | taken together, effectively do not restrict modification of the 83 | portions of the Library contained in the Combined Work and reverse 84 | engineering for debugging such modifications, if you also do each of 85 | the following: 86 | 87 | a) Give prominent notice with each copy of the Combined Work that 88 | the Library is used in it and that the Library and its use are 89 | covered by this License. 90 | 91 | b) Accompany the Combined Work with a copy of the GNU GPL and this license 92 | document. 93 | 94 | c) For a Combined Work that displays copyright notices during 95 | execution, include the copyright notice for the Library among 96 | these notices, as well as a reference directing the user to the 97 | copies of the GNU GPL and this license document. 98 | 99 | d) Do one of the following: 100 | 101 | 0) Convey the Minimal Corresponding Source under the terms of this 102 | License, and the Corresponding Application Code in a form 103 | suitable for, and under terms that permit, the user to 104 | recombine or relink the Application with a modified version of 105 | the Linked Version to produce a modified Combined Work, in the 106 | manner specified by section 6 of the GNU GPL for conveying 107 | Corresponding Source. 108 | 109 | 1) Use a suitable shared library mechanism for linking with the 110 | Library. A suitable mechanism is one that (a) uses at run time 111 | a copy of the Library already present on the user's computer 112 | system, and (b) will operate properly with a modified version 113 | of the Library that is interface-compatible with the Linked 114 | Version. 
115 | 116 | e) Provide Installation Information, but only if you would otherwise 117 | be required to provide such information under section 6 of the 118 | GNU GPL, and only to the extent that such information is 119 | necessary to install and execute a modified version of the 120 | Combined Work produced by recombining or relinking the 121 | Application with a modified version of the Linked Version. (If 122 | you use option 4d0, the Installation Information must accompany 123 | the Minimal Corresponding Source and Corresponding Application 124 | Code. If you use option 4d1, you must provide the Installation 125 | Information in the manner specified by section 6 of the GNU GPL 126 | for conveying Corresponding Source.) 127 | 128 | 5. Combined Libraries. 129 | 130 | You may place library facilities that are a work based on the 131 | Library side by side in a single library together with other library 132 | facilities that are not Applications and are not covered by this 133 | License, and convey such a combined library under terms of your 134 | choice, if you do both of the following: 135 | 136 | a) Accompany the combined library with a copy of the same work based 137 | on the Library, uncombined with any other library facilities, 138 | conveyed under the terms of this License. 139 | 140 | b) Give prominent notice with the combined library that part of it 141 | is a work based on the Library, and explaining where to find the 142 | accompanying uncombined form of the same work. 143 | 144 | 6. Revised Versions of the GNU Lesser General Public License. 145 | 146 | The Free Software Foundation may publish revised and/or new versions 147 | of the GNU Lesser General Public License from time to time. Such new 148 | versions will be similar in spirit to the present version, but may 149 | differ in detail to address new problems or concerns. 150 | 151 | Each version is given a distinguishing version number. If the 152 | Library as you received it specifies that a certain numbered version 153 | of the GNU Lesser General Public License "or any later version" 154 | applies to it, you have the option of following the terms and 155 | conditions either of that published version or of any later version 156 | published by the Free Software Foundation. If the Library as you 157 | received it does not specify a version number of the GNU Lesser 158 | General Public License, you may choose any version of the GNU Lesser 159 | General Public License ever published by the Free Software Foundation. 160 | 161 | If the Library as you received it specifies that a proxy can decide 162 | whether future versions of the GNU Lesser General Public License shall 163 | apply, that proxy's public statement of acceptance of any version is 164 | permanent authorization for you to choose that version for the 165 | Library. 
166 | -------------------------------------------------------------------------------- /test/sql/create-table.sql: -------------------------------------------------------------------------------- 1 | /** 2 | * @private 3 | * check gtfs reference for further documentation 4 | * https://developers.google.com/transit/gtfs/reference/#agencytxt 5 | 6 | * check Sqlite type affinity for more info on automatic type coercion 7 | * https://www.sqlite.org/draft/datatype3.html 8 | */ 9 | pragma foreign_keys=on; 10 | 11 | 12 | /** 13 | * @function createLevelsTable 14 | */ 15 | create table if not exists levels ( 16 | level_id integer primary key, 17 | level_index float not null, 18 | level_name text 19 | ); 20 | 21 | /** 22 | * @function createFeedInfoTable 23 | */ 24 | create table feed_info ( 25 | feed_publisher_name text not null, 26 | feed_publisher_url text not null, 27 | feed_lang text not null, 28 | feed_start_date text, 29 | feed_end_date text, 30 | feed_version text, 31 | feed_contact_email text, 32 | feed_contact_url text, 33 | 34 | constraint valid_start_date check (feed_start_date = strftime('%Y%m%d', feed_start_date)), 35 | constraint valid_end_date check (feed_end_date = strftime('%Y%m%d', feed_end_date)) 36 | ); 37 | 38 | /** 39 | * @function createCalendarTable 40 | */ 41 | create table if not exists calendar ( 42 | service_id integer primary key, 43 | monday integer not null, 44 | tuesday integer not null, 45 | wednesday integer not null, 46 | thursday integer not null, 47 | friday integer not null, 48 | saturday integer not null, 49 | sunday integer not null, 50 | start_date text not null, 51 | end_date text not null, 52 | 53 | constraint monday check (monday in (0, 1)), 54 | constraint tueday check (monday in (0, 1)), 55 | constraint wednesday check (monday in (0, 1)), 56 | constraint thursday check (monday in (0, 1)), 57 | constraint friday check (monday in (0, 1)), 58 | constraint saturday check (monday in (0, 1)), 59 | constraint sunday check (monday in (0, 1)), 60 | 61 | constraint valid_start_date check (start_date = strftime('%Y%m%d', start_date)), 62 | constraint valid_end_date check (end_date = strftime('%Y%m%d', end_date)), 63 | constraint consistent_dates check (strftime('%Y%m%d', end_date) > strftime('%Y%m%d', start_date) > 0) 64 | ); 65 | 66 | /** 67 | * @function createCalendarDatesTable 68 | */ 69 | create table if not exists calendar_dates ( 70 | service_id integer not null references calendar, 71 | "date" text not null, 72 | exception_type integer not null, 73 | 74 | primary key (service_id, "date"), 75 | 76 | constraint exception_type_enum check (exception_type in (1, 2)), 77 | constraint date_value check ("date" > 0), 78 | constraint valid_date check ("date" = strftime('%Y%m%d', "date")) 79 | ); 80 | 81 | 82 | /** 83 | * @function createAgencyTable 84 | * https://developers.google.com/transit/gtfs/reference/#agencytxt 85 | */ 86 | create table if not exists agency ( 87 | -- conditionally required -> default only valid for single agency feed 88 | agency_id integer primary key default 'default', 89 | agency_name text not null, 90 | agency_url text not null, 91 | agency_timezone text not null, 92 | agency_lang text, 93 | agency_phone text, 94 | agency_fare_url text, 95 | agency_email text 96 | ); 97 | 98 | /** 99 | * @function createShapesTable 100 | */ 101 | create table if not exists shapes ( 102 | shape_id integer not null, 103 | shape_pt_lat float not null, 104 | shape_pt_lon float not null, 105 | shape_pt_sequence int not null, 106 | shape_dist_traveled float, 
107 | 108 | primary key (shape_id, shape_pt_sequence), 109 | 110 | constraint latitude_WGS84 check (shape_pt_lat >= -90.0 and shape_pt_lat <= 90.0), 111 | constraint longitude_WGS84 check (shape_pt_lon >= -180.0 and shape_pt_lon <= 180.0), 112 | constraint non_negative_shape_sequence check (shape_pt_sequence > 0), 113 | constraint non_negative_shape_dist_traveled check (shape_dist_traveled > 0) 114 | ); 115 | 116 | /** 117 | * @function createFareAttributesTable 118 | */ 119 | create table if not exists fare_attributes ( 120 | fare_id integer primary key, 121 | price float not null, 122 | currency_type text not null, 123 | payment_method integer not null, 124 | transfers integer, 125 | agency_id integer references agency, -- tricky validation 126 | transfer_duration integer, 127 | 128 | constraint non_negative_price check (price > 0), 129 | constraint payment_method_enum check (payment_method in (0, 1)), 130 | constraint transfer_enum check (transfers in (0, 1, 2, null)), 131 | constraint non_negative_transfer_duration check (transfer_duration > 0) 132 | ); 133 | 134 | /** 135 | * @function createRoutesTable 136 | */ 137 | create table if not exists routes ( 138 | route_id integer primary key, 139 | agency_id integer not null references agency, 140 | route_short_name text, 141 | route_long_name text, 142 | route_desc text, 143 | route_type integer not null, 144 | route_url text, 145 | route_color text default 'FFFFFF', 146 | route_text_color text default '000000', 147 | route_sort_order integer, 148 | 149 | constraint name check (route_short_name != null or route_long_name != null), 150 | constraint route_type_enum check (route_type in (0, 1, 2, 3, 4, 5, 6, 7)), 151 | constraint positive_sort_order check (route_sort_order >= 0) 152 | ); 153 | 154 | 155 | /** 156 | * @function createStopsTable 157 | * https://developers.google.com/transit/gtfs/reference/#stopstxt 158 | */ 159 | create table if not exists stops ( 160 | stop_id integer primary key, 161 | stop_code text, 162 | stop_name text, -- conditionally required 163 | stop_desc text not null, 164 | stop_lat float, -- conditionally required 165 | stop_lon float, -- conditionally required 166 | zone_id integer, -- conditionally required ... 167 | stop_url text, 168 | location_type integer default 0, 169 | parent_station integer references stops, 170 | stop_timezone text, -- tricky rules 171 | wheelchair_boarding integer default 0, 172 | level_id integer references levels, 173 | platform_code text, 174 | 175 | constraint latitude_WGS84 check (stop_lat >= -90.0 and stop_lat <= 90.0), 176 | constraint longitude_WGS84 check (stop_lon >= -180.0 and stop_lon <= 180.0), 177 | -- stop/platform, station, entrance/exit, node, boarding area.
178 | constraint location_type_enum check (location_type in (0, 1, 2, 3, 4)), 179 | constraint conditional_stop_name check (location_type in (3, 4) or 180 | (location_type in (0, 1, 2) and 181 | stop_name != null)), 182 | constraint conditional_stop_lat check (location_type in (3, 4) or 183 | (location_type in (0, 1, 2) and 184 | stop_lat != null)), 185 | constraint conditional_stop_lon check (location_type in (3, 4) or 186 | (location_type in (0, 1, 2) and 187 | stop_lon != null)), 188 | constraint parent_relationship check ((location_type = 1 and parent_station = null) or 189 | (location_type = 0 and parent_station != null) or 190 | (location_type = 0 and parent_station = null)), 191 | constraint wheelchair_boarding_enum check (wheelchair_boarding in (0, 1, 2)) 192 | ); 193 | 194 | 195 | /** 196 | * @function createTransfersTable 197 | */ 198 | create table if not exists transfers ( 199 | from_stop_id integer not null references stops, 200 | to_stop_id integer not null references stops, 201 | transfer_type integer not null, 202 | min_transfer_time integer, 203 | 204 | primary key (from_stop_id, to_stop_id), 205 | 206 | constraint transfer_type_enum check (transfer_type in (0, 1, 2, 3)), 207 | constraint non_negative_min_transfer_time check (min_transfer_time > 0) 208 | ); 209 | 210 | /** 211 | * @function createFareRulesTable 212 | */ 213 | create table if not exists fare_rules ( 214 | fare_id integer references fare_attributes, 215 | route_id integer references routes, 216 | origin_id integer, --references stops.zone_id 217 | destination_id integer, --references stops.zone_id 218 | contains_id integer 219 | ); 220 | 221 | /** 222 | * @function createTripsTable 223 | */ 224 | create table if not exists trips ( 225 | route_id integer not null references routes, 226 | service_id integer not null references calendar, 227 | trip_id integer primary key, 228 | trip_headsign text, 229 | trip_short_name text, 230 | direction_id integer, 231 | block_id integer, 232 | shape_id integer references shapes, 233 | wheelchair_accessible integer, 234 | bikes_allowed integer, 235 | 236 | constraint direction_id_enum check (direction_id in (0, 1)), 237 | constraint wheelchair_accessible_enum check (wheelchair_accessible in (0, 1, 2)), 238 | constraint bikes_allowed_enum check (bikes_allowed in (0, 1, 2)) 239 | ); 240 | 241 | 242 | /** 243 | * @function createStopTimesTable 244 | */ 245 | create table if not exists stop_times ( 246 | trip_id integer not null references trips, 247 | arrival_time text,-- can be null/empty .... tricky validation 248 | departure_time text, -- can be null/empty .... 
tricky validation 249 | stop_id integer not null references stops, 250 | stop_sequence integer not null, 251 | stop_headsign text, 252 | pickup_type integer default 0, 253 | drop_off_type integer default 0, 254 | shape_dist_traveled float, 255 | timepoint integer, 256 | 257 | primary key (trip_id, stop_id, arrival_time), 258 | 259 | constraint valid_arrival_time check (arrival_time = strftime('%H:%M:%S', arrival_time)), 260 | constraint valid_departure_time check (departure_time = strftime('%H:%M:%S', departure_time)), 261 | constraint consistent_times check (departure_time > arrival_time), 262 | constraint non_negative_stop_sequence check (stop_sequence > 0), 263 | constraint pickup_type_enum check (pickup_type in (0, 1, 2, 3)), 264 | constraint drop_off_type_enum check (drop_off_type in (0, 1, 2, 3)), 265 | constraint non_negative_shape_dist_traveled check (shape_dist_traveled >= 0), 266 | constraint timepoint_enum check (timepoint in (0, 1)) 267 | ); 268 | 269 | 270 | /** 271 | * @function createFrequenciesTable 272 | */ 273 | create table if not exists frequencies ( 274 | trip_id integer references trips, 275 | start_time text not null, 276 | end_time text not null, 277 | headway_secs integer not null, 278 | exact_times integer, 279 | 280 | primary key (trip_id, start_time), 281 | 282 | constraint valid_start_time check (start_time = strftime('%H:%M:%S', start_time)), 283 | constraint valid_end_time check (end_time = strftime('%H:%M:%S', end_time)), 284 | constraint consistent_times check (end_time > start_time), 285 | constraint non_negative_headway_secs check (headway_secs > 0), 286 | constraint exact_times_enum check (exact_times in (0, 1)) 287 | ); 288 | 289 | 290 | /** 291 | * @function createPathwaysTable 292 | */ 293 | create table if not exists pathways ( 294 | pathway_id integer not null, 295 | from_stop_id integer not null references stops, 296 | to_stop_id integer not null references stops, 297 | pathway_mode integer not null, 298 | is_bidirectional integer not null, 299 | length float, 300 | traversal_time integer, 301 | stair_count integer, 302 | max_slope integer, 303 | min_width float, 304 | signposted_as text, 305 | reversed_signposted text, 306 | 307 | constraint pathway_mode_enum check (pathway_mode in (0, 1, 2, 3, 4, 5, 6, 7)), 308 | constraint is_bidirectional_enum check (is_bidirectional in (0, 1)), 309 | constraint non_negative_length check (length > 0), 310 | constraint positive_traversal_time check (traversal_time > 0), 311 | constraint non_null_stair_count check (stair_count != 0) 312 | ); 313 | --------------------------------------------------------------------------------
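As a usage sketch for the DDL fixtures above (not part of the repository): assuming `npx hugsql test/sql/` has been run to produce `test/sql/create-table.sql.js`, and assuming the third-party `sqlite3` npm package is installed, the generated functions could be executed roughly like this:

```js
const sqlite3 = require('sqlite3');
// hypothetical path: produced by running `npx hugsql test/sql/`
const tables = require('./test/sql/create-table.sql.js');

const db = new sqlite3.Database(':memory:');
db.serialize(() => {
  // each generated function returns { query, parameters }; DDL statements take no parameters
  for (const statement of [tables.createLevelsTable(), tables.createAgencyTable(), tables.createStopsTable()]) {
    db.run(statement.query);
  }
});
db.close();
```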