├── .eslintignore
├── .eslintrc
├── .github
└── workflows
│ └── node.js.yml
├── .gitignore
├── .vscode
└── tasks.json
├── LICENSE
├── README.md
├── clean.js
├── package-lock.json
├── package.json
├── shebang.js
├── src
├── controllers
│ ├── DBs.ts
│ ├── auth.ts
│ ├── query.ts
│ ├── services.ts
│ └── users.ts
├── deps.d.ts
├── entities
│ ├── DB.ts
│ ├── IEntity.ts
│ ├── table.ts
│ ├── user.ts
│ └── variable.ts
├── index.ts
├── objects
│ ├── DB.ts
│ ├── DBFile.ts
│ ├── user.ts
│ └── variable.ts
├── parser
│ ├── queryParse.ts
│ ├── queryRaw.ts
│ └── scriptParse.ts
├── server.ts
├── services
│ ├── DB.ts
│ ├── DBFile.ts
│ ├── query.ts
│ └── user.ts
├── utils
│ ├── CRUD.ts
│ ├── DB.ts
│ ├── DBOp.ts
│ ├── constants.ts
│ ├── entities.ts
│ ├── lazyLoader.ts
│ ├── lazyValidator.ts
│ ├── method.ts
│ ├── names.ts
│ ├── password.ts
│ ├── queries.ts
│ ├── swagger.json
│ └── variable.ts
└── valid
│ ├── DB.ts
│ └── user.ts
├── test
├── basic.test.ts
├── objects
│ ├── DB.test.ts
│ └── DBObjects.test.ts
├── parser
│ ├── queryParse.test.ts
│ ├── rawParse.test.ts
│ └── scriptParse.test.ts
├── utils
│ └── .test.env
└── valid
│ ├── DB.test.ts
│ ├── entities.ts
│ └── user.test.ts
└── tsconfig.json
/.eslintignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | dist
--------------------------------------------------------------------------------
/.eslintrc:
--------------------------------------------------------------------------------
1 | {
2 | "root": true,
3 | "parser": "@typescript-eslint/parser",
4 | "plugins": [
5 | "@typescript-eslint"
6 | ],
7 | "rules": {
8 | "@typescript-eslint/await-thenable": "error",
9 | "@typescript-eslint/no-floating-promises": "error"
10 | },
11 | "ignorePatterns": ["*.test.ts"],
12 | "overrides": [
13 | {
14 | "files": ["*.ts", "*.tsx"], // Your TypeScript files extension
15 |
16 | // As mentioned in the comments, you should extend TypeScript plugins here,
17 | // instead of extending them outside the `overrides`.
18 | // If you don't want to extend any rules, you don't need an `extends` attribute.
19 | "extends": [
20 | // "eslint:recommended",
21 | // "plugin:@typescript-eslint/eslint-recommended",
22 | // "plugin:@typescript-eslint/recommended"
23 | ],
24 |
25 | "parserOptions": {
26 | "project": ["./tsconfig.json"] // Specify it only for TypeScript files
27 | }
28 | }
29 | ]
30 | }
--------------------------------------------------------------------------------
/.github/workflows/node.js.yml:
--------------------------------------------------------------------------------
1 | # This workflow will do a clean installation of node dependencies, cache/restore them, build the source code and run tests across different versions of node
2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-nodejs
3 |
4 | name: Node.js CI
5 |
6 | on:
7 | push:
8 | branches: [ "main" ]
9 | pull_request:
10 | branches: [ "main" ]
11 |
12 | jobs:
13 | build:
14 |
15 | runs-on: ubuntu-latest
16 |
17 | strategy:
18 | matrix:
19 | node-version: [21.x]
20 | # See supported Node.js release schedule at https://nodejs.org/en/about/releases/
21 |
22 | steps:
23 | - uses: actions/checkout@v3
24 | - name: Use Node.js ${{ matrix.node-version }}
25 | uses: actions/setup-node@v3
26 | with:
27 | node-version: ${{ matrix.node-version }}
28 | cache: 'npm'
29 | - run: npm ci
30 | - run: npm run build --if-present
31 | - run: npm test
32 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 | lerna-debug.log*
8 | .pnpm-debug.log*
9 |
10 | # Diagnostic reports (https://nodejs.org/api/report.html)
11 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
12 |
13 | # Runtime data
14 | pids
15 | *.pid
16 | *.seed
17 | *.pid.lock
18 |
19 | # Directory for instrumented libs generated by jscoverage/JSCover
20 | lib-cov
21 |
22 | # Coverage directory used by tools like istanbul
23 | coverage
24 | *.lcov
25 |
26 | # nyc test coverage
27 | .nyc_output
28 |
29 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
30 | .grunt
31 |
32 | # Bower dependency directory (https://bower.io/)
33 | bower_components
34 |
35 | # node-waf configuration
36 | .lock-wscript
37 |
38 | # Compiled binary addons (https://nodejs.org/api/addons.html)
39 | build/Release
40 |
41 | # Dependency directories
42 | node_modules/
43 | jspm_packages/
44 |
45 | # Snowpack dependency directory (https://snowpack.dev/)
46 | web_modules/
47 |
48 | # TypeScript cache
49 | *.tsbuildinfo
50 |
51 | # Optional npm cache directory
52 | .npm
53 |
54 | # Optional eslint cache
55 | .eslintcache
56 |
57 | # Optional stylelint cache
58 | .stylelintcache
59 |
60 | # Microbundle cache
61 | .rpt2_cache/
62 | .rts2_cache_cjs/
63 | .rts2_cache_es/
64 | .rts2_cache_umd/
65 |
66 | # Optional REPL history
67 | .node_repl_history
68 |
69 | # Output of 'npm pack'
70 | *.tgz
71 |
72 | # Yarn Integrity file
73 | .yarn-integrity
74 |
75 | # dotenv environment variable files
76 | .env
77 | .env.development.local
78 | .env.test.local
79 | .env.production.local
80 | .env.local
81 |
82 | # parcel-bundler cache (https://parceljs.org/)
83 | .cache
84 | .parcel-cache
85 |
86 | # Next.js build output
87 | .next
88 | out
89 |
90 | # Nuxt.js build / generate output
91 | .nuxt
92 | dist
93 |
94 | # Gatsby files
95 | .cache/
# Uncomment the public line below if your project uses Gatsby and not Next.js
97 | # https://nextjs.org/blog/next-9-1#public-directory-support
98 | # public
99 |
100 | # vuepress build output
101 | .vuepress/dist
102 |
103 | # vuepress v2.x temp and cache directory
104 | .temp
105 | .cache
106 |
107 | # Docusaurus cache and generated files
108 | .docusaurus
109 |
110 | # Serverless directories
111 | .serverless/
112 |
113 | # FuseBox cache
114 | .fusebox/
115 |
116 | # DynamoDB Local files
117 | .dynamodb/
118 |
119 | # TernJS port file
120 | .tern-port
121 |
122 | # Stores VSCode versions used for testing VSCode extensions
123 | .vscode-test
124 |
125 | # yarn v2
126 | .yarn/cache
127 | .yarn/unplugged
128 | .yarn/build-state.yml
129 | .yarn/install-state.gz
130 | .pnp.*
131 |
132 | certs/
133 | data/
134 | test/data/
135 |
--------------------------------------------------------------------------------
/.vscode/tasks.json:
--------------------------------------------------------------------------------
1 | {
2 | "version": "2.0.0",
3 | "tasks": [
4 | {
5 | "type": "npm",
6 | "script": "build",
7 | "group": "build",
8 | "problemMatcher": [],
9 | "label": "npm: build",
10 | "detail": "tsc"
11 | }
12 | ]
13 | }
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2023 We Watch Wall Inc.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # stark-db
2 |
[![Node.js CI](https://github.com/WeWatchWall/stark-db/actions/workflows/node.js.yml/badge.svg)](https://github.com/WeWatchWall/stark-db/actions?query=workflow%3A%22Node.js+CI%22)
[![npm](https://img.shields.io/npm/v/stark-db.svg)](https://www.npmjs.com/package/stark-db)
5 |
6 | SQLite-backed, change-tracking database available over HTTP.
7 |
8 |
9 | ## Installation
10 |
11 | ```bash
12 | npm i -g stark-db
13 | ```
14 |
15 | ## Basics
16 |
17 | Run with:
18 |
19 | ```bash
20 | stark-db
21 | ```
22 |
By default, the DB engine is configured to run over SSL. Self-signed
SSL certificates are generated automatically, but they will never pass
certificate validation, so the user must supply their own. The user then
needs to set the `-c` flag to enable secure cookies. Only after both steps
is this system ready for use in production.
28 |
29 | There is a Swagger endpoint `https://127.0.0.1:5984/api-docs` where the user can
30 | try out the routes available.
31 |
32 | You may want to use `BEGIN IMMEDIATE TRANSACTION;` if you write to the database
33 | concurrently as SQLite will throw busy errors otherwise.
34 |
35 | This database tracks changes to all entities in the auto-created column(on all
36 | tables) `stark_version`. There is also an extra table generated with any
37 | user-created table called `_stark_del_${name}`. Deletions are tracked
38 | in this auxiliary set of tables. With the help of this change tracking,
39 | synchronization mechanisms can be built later. The user has the option
40 | of using soft deletion -- marking data as deleted -- or relying on an
41 | `id` column to track such deletions. ROWID would not work in a synchronization
42 | scenario. Any modifications made by stark-db to the sqlite database
43 | can be seen by running `select * from sqlite_master;` and the user can edit the
44 | triggers/tables.
45 | Also, the `-f` flag prevents all modifications related to change tracking.
46 |
47 | It is recommended to invoke the API one query at a time although it is possible
48 | to execute multiple statements in a single invocation. Interactive queries are
49 | supported due to the stateful nature of the API. A DB connection is marked
50 | inactive and is refreshed after 1 hour. A session cookie to the server is marked
51 | inactive after 1 day.
52 |
53 | ## CLI
54 |
55 | ```bash
56 | -a, --address
HTTP address to listen on (default: "127.0.0.1")
57 | -i, --doc Address to query by the documentation (default: "https://127.0.0.1")
58 | -p, --port HTTP port to listen on (default: "5983")
59 | -s, --ssl HTTPS port to listen on (default: "5984")
60 | -c, --cookie Secure cookie, served over valid HTTPS only (default: false)
61 | -d, --data Path to the data directory (default: "./data")
62 | -k, --certs Path to the certs directory (default: "./certs")
63 | -f, --simple Do not run change-tracking queries (default: false)
64 | -h, --help display help for command
65 | ```
66 | ## Wiki
67 | See the Wiki for [HTTP API Documentation](https://github.com/WeWatchWall/stark-db/wiki/HTTP-API-Documentation).
68 |
--------------------------------------------------------------------------------
/clean.js:
--------------------------------------------------------------------------------
1 | var fs = require('fs');
2 |
// Recursively delete the directory tree rooted at `path`.
// Files (and symlinks) are unlinked first, then directories are removed
// bottom-up. A missing path or a non-directory is silently ignored.
function deleteFolderRecursive(path) {
  if (!fs.existsSync(path) || !fs.lstatSync(path).isDirectory()) {
    return;
  }

  for (const entry of fs.readdirSync(path)) {
    const entryPath = path + "/" + entry;

    if (fs.lstatSync(entryPath).isDirectory()) {
      // Descend into subdirectories.
      deleteFolderRecursive(entryPath);
    } else {
      // Plain file or symlink: remove it directly.
      fs.unlinkSync(entryPath);
    }
  }

  console.log(`Deleting directory "${path}"...`);
  fs.rmdirSync(path);
}

// Every CLI argument after the script name is a directory to delete.
for (const [index, val] of process.argv.entries()) {
  if (index > 1) {
    deleteFolderRecursive(val);
  }
}
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "stark-db",
3 | "version": "1.3.4",
4 | "description": "Database engine based on SQLite.",
5 | "bin": {
6 | "stark-db": "./dist/src/index.js"
7 | },
8 | "scripts": {
9 | "build": "tsc && node shebang.js",
10 | "build-clean": "npm run clean && npm run build",
11 | "clean": "node clean.js dist build package",
12 | "lint": "eslint . --ext .ts",
13 | "test": "node --env-file=./test/utils/.test.env --test-concurrency 1 --test ./dist/test/**/*.test.js",
14 | "start": "node ./dist/src/index.js"
15 | },
16 | "repository": {
17 | "type": "git",
18 | "url": "git+https://github.com/WeWatchWall/stark-db"
19 | },
20 | "bugs": {
21 | "url": "https://github.com/WeWatchWall/stark-db/issues"
22 | },
23 | "keywords": [
24 | "SQL",
25 | "SQLite"
26 | ],
27 | "author": "Adrian Burlacu ",
28 | "license": "MIT",
29 | "devDependencies": {
30 | "@types/async-retry": "^1.4.8",
31 | "@types/cors": "^2.8.14",
32 | "@types/express": "^4.17.18",
33 | "@types/express-session": "^1.17.8",
34 | "@types/javascript-state-machine": "^2.4.4",
35 | "@types/node": "^20.6.2",
36 | "@types/swagger-ui-express": "^4.1.4",
37 | "@types/workerpool": "^6.4.1",
38 | "@typescript-eslint/eslint-plugin": "^6.7.4",
39 | "@typescript-eslint/parser": "^6.7.4",
40 | "eslint": "^8.50.0"
41 | },
42 | "dependencies": {
43 | "@appland/sql-parser": "^1.5.1",
44 | "@casl/ability": "^6.5.0",
45 | "async-retry": "^1.3.3",
46 | "await-lock": "^2.2.2",
47 | "commander": "^11.0.0",
48 | "cors": "^2.8.5",
49 | "express": "^4.18.2",
50 | "express-async-handler": "^1.2.0",
51 | "express-session": "^1.17.3",
52 | "flat-promise": "^1.0.3",
53 | "http-proxy": "^1.18.1",
54 | "javascript-state-machine": "^3.1.0",
55 | "memorystore": "^1.6.7",
56 | "objectmodel": "^4.4.5",
57 | "recursive-iterator": "^3.3.0",
58 | "selfsigned": "^2.1.1",
59 | "shorthash2": "^1.0.3",
60 | "sqlite3": "^5.1.6",
61 | "sticky-session-custom": "^1.2.1",
62 | "swagger-ui-express": "^5.0.0",
63 | "typeorm": "^0.3.17",
64 | "workerpool": "^6.5.0"
65 | }
66 | }
67 |
--------------------------------------------------------------------------------
/shebang.js:
--------------------------------------------------------------------------------
const fs = require('fs');

// Prepend a node shebang to the compiled CLI entry point so that the
// `stark-db` bin declared in package.json is directly executable.
const target = './dist/src/index.js';
const shebang = `#!/usr/bin/env node
`;

const contents = fs.readFileSync(target, 'utf8');
fs.writeFileSync(target, shebang + contents, 'utf8');
--------------------------------------------------------------------------------
/src/controllers/DBs.ts:
--------------------------------------------------------------------------------
1 | import express from 'express';
2 | import asyncHandler from 'express-async-handler';
3 | import { User } from '../objects/user';
4 | import { ZERO } from '../utils/constants';
5 | import { Services } from './services';
6 |
7 | const router = express.Router({ mergeParams: true });
8 |
9 | router.get('/DBs', asyncHandler(async (req: any, res: any) => {
10 | /* #region Initialize the session user. */
11 | if (!req.session.user) {
12 | res.sendStatus(401);
13 | return;
14 | }
15 |
16 | const sessionUser = new User({
17 | DB: Services.DB.adminDB.DB,
18 | ...req.session.user
19 | });
20 | /* #endregion */
21 |
22 | try {
23 | const DBs = await Services.DB.getAll({
24 | user: sessionUser,
25 | DB: {
26 | ID: parseInt(req.query.ID) || undefined,
27 | name: req.query.name,
28 | }
29 | });
30 |
31 | const isSingle = req.query.ID || req.query.name;
32 | if (isSingle && DBs.length > ZERO) {
33 | res.status(200).send(DBs[0].toObject());
34 | return;
35 | }
36 |
37 | res.status(200).send(DBs.map((DB) => DB.toObject()));
38 | } catch (error) {
39 | res.sendStatus(403);
40 | }
41 | }));
42 |
43 | router.post('/DBs', asyncHandler(async (req: any, res: any) => {
44 | /* #region Initialize the session user. */
45 | if (!req.session.user) {
46 | res.sendStatus(401);
47 | return;
48 | }
49 |
50 | const sessionUser = new User({
51 | DB: Services.DB.adminDB.DB,
52 | ...req.session.user
53 | });
54 | /* #endregion */
55 |
56 | try {
57 | const DB = await Services.DB.add({
58 | user: sessionUser,
59 | DB: req.body
60 | });
61 | res.status(200).send(DB.toObject());
62 | } catch (error) {
63 | res.sendStatus(403);
64 | }
65 | }));
66 |
67 | router.put('/DBs', asyncHandler(async (req: any, res: any) => {
68 | /* #region Initialize the session user. */
69 | if (!req.session.user) {
70 | res.sendStatus(401);
71 | return;
72 | }
73 |
74 | const sessionUser = new User({
75 | DB: Services.DB.adminDB.DB,
76 | ...req.session.user
77 | });
78 | /* #endregion */
79 |
80 | try {
81 | const DB = await Services.DB.set({
82 | user: sessionUser,
83 | DB: {
84 | ID: req.body.ID,
85 | name: req.body.name,
86 | admins: req.body.admins,
87 | readers: req.body.readers,
88 | writers: req.body.writers,
89 | }
90 | });
91 | res.status(200).send(DB.toObject());
92 | } catch (error) {
93 | res.sendStatus(403);
94 | }
95 | }));
96 |
97 | router.delete('/DBs', asyncHandler(async (req: any, res: any) => {
98 | /* #region Initialize the session user. */
99 | if (!req.session.user) {
100 | res.sendStatus(401);
101 | return;
102 | }
103 |
104 | const sessionUser = new User({
105 | DB: Services.DB.adminDB.DB,
106 | ...req.session.user
107 | });
108 | /* #endregion */
109 |
110 | try {
111 | await Services.DB.del({
112 | user: sessionUser,
113 | DB: {
114 | ID: parseInt(req.query.ID) || undefined,
115 | name: req.query.name,
116 | }
117 | });
118 | res.sendStatus(200);
119 | } catch (error) {
120 | res.sendStatus(403);
121 | }
122 | }));
123 |
124 | export default router;
--------------------------------------------------------------------------------
/src/controllers/auth.ts:
--------------------------------------------------------------------------------
1 | import express from 'express';
2 | import asyncHandler from 'express-async-handler';
3 | import { DBBase } from '../objects/DB';
4 | import { User } from '../objects/user';
5 | import { Services } from './services';
6 | import { ONE } from '../utils/constants';
7 |
8 | const router = express.Router({ mergeParams: true });
9 |
10 | router.post('/:DB/login', asyncHandler(async (req: any, res: any) => {
11 | const { username, password } = req.body;
12 |
13 | /* #region Manage the User. */
14 | const user = new User({
15 | DB: Services.DB.adminDB.DB,
16 | name: username
17 | });
18 | await user.load();
19 | user.login(password);
20 | if (!user.isLoggedIn) { res.sendStatus(401); return; }
21 |
22 | if (req.session.user && req.session.user.ID !== user.ID) {
23 | // Logout the current user on user change.
24 | delete req.session.user;
25 | delete req.session.DBs;
26 | }
27 | /* #endregion */
28 |
29 | /* #region Manage the DB. */
30 | req.session.DBs = req.session.DBs || [];
31 | let DB: DBBase;
32 | try {
33 | DB = await Services.DB.get({ user, DB: { name: req.params.DB } });
34 | } catch (error) {
35 | res.sendStatus(403);
36 | }
37 | if (!DB) { return; }
38 | /* #endregion */
39 |
40 | // Call the DBFile Service to load the DBFile connection.
41 | await Services.DBFile.add(req.sessionID, DB);
42 |
43 | /* #region User is logged in and DB is found. */
44 | req.session.user = user.toObject();
45 |
46 | const DBSet = new Set(req.session.DBs);
47 | if (DB.ID !== ONE) { DBSet.add(DB.name); } // Don't query the adminDB.
48 | req.session.DBs = Array.from(DBSet);
49 |
50 | res.sendStatus(200);
51 | /* #endregion */
52 | }));
53 |
54 | router.post('/logout', asyncHandler(async (req: any, res: any) => {
55 | // Call the DBFile Service to close the DBFile connection.
56 | for (const DB of req.session.DBs) {
57 | await Services.DBFile.delete(req.sessionID, DB);
58 | }
59 |
60 | // Delete the session user and DBs.
61 | delete req.session.user;
62 | delete req.session.DBs;
63 |
64 | res.sendStatus(200);
65 | }));
66 |
67 | export default router;
--------------------------------------------------------------------------------
/src/controllers/query.ts:
--------------------------------------------------------------------------------
1 | import express from 'express';
2 | import asyncHandler from 'express-async-handler';
3 | import { inspect } from 'util'
4 |
5 | import { Query } from '../services/query';
6 | import { Services } from './services';
7 |
const router = express.Router({ mergeParams: true });

// POST /:DB/query — execute a SQL script against the session's open DB
// connection. Requires a logged-in session that has already logged into
// the target DB via /:DB/login (which registers it in req.session.DBs).
router.post('/:DB/query', asyncHandler(async (req: any, res: any) => {
  /* #region Initialize the session user. */
  // 401: no authenticated user on this session.
  if (!req.session.user) {
    res.sendStatus(401);
    return;
  }
  /* #endregion */

  /* #region Initialize the session DB connection. */
  // 403: this session was never granted access to the named DB.
  if (!req.session.DBs || !req.session.DBs.includes(req.params.DB)) {
    res.sendStatus(403);
    return;
  }
  const { DB, connection } =
    Services.DBFile.get(req.sessionID, req.params.DB);

  // 403: the DBFile connection expired or was never opened.
  if (!connection) {
    res.sendStatus(403);
    return;
  }
  /* #endregion */

  // Execute the script.
  try {
    const result = await Query.add(
      req.session.user,
      DB,
      connection,
      req.body.query,
      req.body.params
    );
    res.status(200).send({ result });
  } catch (error: any) {
    // Surface query failures to the caller; the stack trace aids debugging.
    res.status(500).send({
      error: `${error.name}: ${error.message}`,
      stack: error.stack,
    });
  }

}));

export default router;
--------------------------------------------------------------------------------
/src/controllers/services.ts:
--------------------------------------------------------------------------------
1 | import { DB } from "../services/DB";
2 | import { DBFile } from "../services/DBFile";
3 | import { User } from "../services/user";
4 |
// Static service locator shared by the HTTP controllers. The fields are
// assigned once at startup (presumably by server.ts — TODO confirm) and
// only read by the route handlers afterwards.
export class Services {
  static DB: DB;
  static User: User;
  static DBFile: DBFile;
}
--------------------------------------------------------------------------------
/src/controllers/users.ts:
--------------------------------------------------------------------------------
1 | import express from 'express';
2 | import asyncHandler from 'express-async-handler';
3 | import { Services } from './services';
4 | import { User } from '../objects/user';
5 |
6 | const router = express.Router({ mergeParams: true });
7 |
8 | router.get('/users', asyncHandler(async (req: any, res: any) => {
9 | /* #region Initialize the session user. */
10 | if (!req.session.user) {
11 | res.sendStatus(401);
12 | return;
13 | }
14 |
15 | const sessionUser = new User({
16 | DB: Services.DB.adminDB.DB,
17 | ...req.session.user
18 | });
19 | /* #endregion */
20 |
21 | try {
22 | const isSingle = req.query.ID || req.query.name;
23 | if (isSingle) {
24 | const user = await Services.User.get({
25 | session: sessionUser,
26 | arg: {
27 | ID: parseInt(req.query.ID) || undefined,
28 | name: req.query.name,
29 | }
30 | });
31 | res.status(200).send(user.toObject());
32 |
33 | return;
34 | }
35 |
36 | const users = await Services.User.getAll(sessionUser);
37 | res.status(200).send(users.map((user) => user.toObject()));
38 | } catch (error) {
39 | res.sendStatus(403);
40 | }
41 | }));
42 |
43 | router.post('/users', asyncHandler(async (req: any, res: any) => {
44 | /* #region Initialize the session user. */
45 | if (!req.session.user) {
46 | res.sendStatus(401);
47 | return;
48 | }
49 |
50 | const sessionUser = new User({
51 | DB: Services.DB.adminDB.DB,
52 | ...req.session.user
53 | });
54 | /* #endregion */
55 |
56 | try {
57 | const user = await Services.User.add({
58 | session: sessionUser,
59 | arg: req.body
60 | });
61 |
62 | res.status(200).send(user.toObject());
63 | } catch (error) {
64 | res.sendStatus(403);
65 | }
66 | }));
67 |
68 | router.put('/users', asyncHandler(async (req: any, res: any) => {
69 | /* #region Initialize the session user. */
70 | if (!req.session.user) {
71 | res.sendStatus(401);
72 | return;
73 | }
74 |
75 | const sessionUser = new User({
76 | DB: Services.DB.adminDB.DB,
77 | ...req.session.user
78 | });
79 | /* #endregion */
80 |
81 | try {
82 | const user = await Services.User.set({
83 | session: sessionUser,
84 | arg: {
85 | ID: parseInt(req.body.ID),
86 | name: req.body.name,
87 | password: req.body.password,
88 | salt: req.body.salt,
89 | }
90 | });
91 |
92 | res.status(200).send(user.toObject());
93 | } catch (error) {
94 | res.sendStatus(403);
95 | }
96 | }));
97 |
98 | router.delete('/users', asyncHandler(async (req: any, res: any) => {
99 | /* #region Initialize the session user. */
100 | if (!req.session.user) {
101 | res.sendStatus(401);
102 | return;
103 | }
104 |
105 | const sessionUser = new User({
106 | DB: Services.DB.adminDB.DB,
107 | ...req.session.user
108 | });
109 | /* #endregion */
110 |
111 | try {
112 | await Services.User.del({
113 | session: sessionUser,
114 | arg: {
115 | ID: parseInt(req.query.ID) || undefined,
116 | name: req.query.name,
117 | }
118 | });
119 |
120 | res.sendStatus(200)
121 | } catch (error) {
122 | res.sendStatus(403);
123 | }
124 | }));
125 |
126 | export default router;
--------------------------------------------------------------------------------
/src/deps.d.ts:
--------------------------------------------------------------------------------
// Ambient module declarations for third-party dependencies that ship
// without TypeScript typings; each resolves to `any`.
declare module 'sticky-session-custom';
declare module 'recursive-iterator';
declare module '@appland/sql-parser';
declare module 'flat-promise';
--------------------------------------------------------------------------------
/src/entities/DB.ts:
--------------------------------------------------------------------------------
1 | import { Column, Entity, PrimaryGeneratedColumn } from 'typeorm';
2 |
3 | import { IEntity, IEntityArg } from './IEntity';
4 |
// Constructor-argument shape for the DB entity.
export class DBArg implements IEntityArg {
  ID?: number;
  name: string;
  admins: number[];  // user IDs with admin rights on this DB
  readers: number[]; // user IDs with read access
  writers: number[]; // user IDs with write access
}

/**
 * TypeORM entity for a registered database. The access-control lists are
 * persisted as JSON arrays of user IDs via the "simple-json" column type.
 */
@Entity()
export class DB implements IEntity {

  constructor(init: DBArg) {
    Object.assign(this, init);
  }

  @PrimaryGeneratedColumn()
  ID: number;

  // DB names are unique across the instance.
  @Column({ unique: true })
  name: string;

  @Column("simple-json")
  admins: number[];

  @Column("simple-json")
  readers: number[];

  @Column("simple-json")
  writers: number[];

}

// Marker subclass distinguishing the administrative DB from user DBs.
export class AdminDB extends DB {}
--------------------------------------------------------------------------------
/src/entities/IEntity.ts:
--------------------------------------------------------------------------------
// Empty marker interfaces implemented by all entity classes and their
// constructor-argument shapes; they declare no members of their own.
export interface IEntityArg {}
export interface IEntity {}
--------------------------------------------------------------------------------
/src/entities/table.ts:
--------------------------------------------------------------------------------
1 | import { Column, Entity, PrimaryColumn } from 'typeorm';
2 |
3 | import { TABLES_TABLE } from '../utils/constants';
4 | import { IEntity, IEntityArg } from './IEntity';
5 |
// Constructor-argument shape for the Table entity.
export class TableArg implements IEntityArg {
  name?: string;
  keys?: string[];
  isMemory?: boolean;
  changeCount?: number;
}

/**
 * TypeORM entity holding per-table metadata, stored in the internal
 * TABLES_TABLE.
 */
@Entity({name: TABLES_TABLE})
export class Table implements IEntity {

  constructor(init: TableArg) {
    Object.assign(this, init);
  }

  @PrimaryColumn()
  name: string;

  // NOTE(review): autoKeys is not part of TableArg, so it cannot be set via
  // the constructor — confirm it is always populated elsewhere before save.
  @Column("simple-json")
  autoKeys: string[];

  @Column("simple-json")
  keys: string[];

  @Column()
  isMemory: boolean;

  @Column()
  changeCount: number;
}
--------------------------------------------------------------------------------
/src/entities/user.ts:
--------------------------------------------------------------------------------
1 | import { Column, Entity, PrimaryGeneratedColumn } from 'typeorm';
2 |
3 | import { IEntity, IEntityArg } from './IEntity';
4 |
// Constructor-argument shape for the User entity.
export class UserArg implements IEntityArg {
  ID?: number;
  name: string;
  // Presumably a salted hash rather than plaintext — verify against
  // src/utils/password.ts.
  password: string;
  salt: string;
}

/**
 * TypeORM entity for an application user account.
 */
@Entity()
export class User implements IEntity {

  constructor(init: UserArg) {
    Object.assign(this, init);
  }

  @PrimaryGeneratedColumn()
  ID: number;

  // User names are unique across the instance.
  @Column({ unique: true })
  name: string;

  @Column()
  password: string;

  @Column()
  salt: string;

}
--------------------------------------------------------------------------------
/src/entities/variable.ts:
--------------------------------------------------------------------------------
1 | import { Column, Entity, PrimaryColumn } from 'typeorm';
2 |
3 | import { VARS_TABLE } from '../utils/constants';
4 | import { IEntity, IEntityArg } from './IEntity';
5 |
// Constructor-argument shape for the Variable entity (module-private).
class VariableArg implements IEntityArg {
  name?: string;
  value?: boolean | number | string;
}

/**
 * TypeORM entity for a named key/value setting, stored in VARS_TABLE.
 */
@Entity({ name: VARS_TABLE })
export class Variable implements IEntity {

  constructor(init: VariableArg) {
    Object.assign(this, init);
  }

  @PrimaryColumn()
  name?: string;

  // Persisted in a TEXT column even for boolean/number values — values may
  // read back as strings; TODO confirm how callers coerce them.
  @Column({ type: 'text' })
  value?: boolean | number | string;

}
25 |
--------------------------------------------------------------------------------
/src/index.ts:
--------------------------------------------------------------------------------
1 | import { Command } from 'commander';
2 |
// CLI entry point: parse command-line flags, publish them as environment
// variables, then start the server. Statement order here is deliberate and
// load-bearing — see the note above the bottom import.
const program = new Command();

program
  .name('stark-db')
  .description('Stark DB CLI');

// Each option falls back to an environment variable, then to a default, so
// the CLI flag always wins over the environment.
program
  .description('Run the Stark DB server')
  .option(
    '-a, --address ',
    'HTTP address to listen on',
    process.env.STARK_DB_HTTP_LISTEN_ADDRESS || '127.0.0.1'
  )
  .option(
    '-i, --doc ',
    'Address to query by the documentation',
    process.env.STARK_DB_DOCUMENTATION_ADDRESS || 'https://127.0.0.1'
  )
  .option(
    '-p, --port ',
    'HTTP port to listen on',
    process.env.STARK_DB_HTTP_PORT || '5983'
  )
  .option(
    '-s, --ssl ',
    'HTTPS port to listen on',
    process.env.STARK_DB_HTTPS_PORT || '5984'
  )
  .option('-c, --cookie',
    'Secure cookie, served over valid HTTPS only',
    process.env.STARK_DB_COOKIE === "true" || false
  )
  .option(
    '-d, --data ',
    'Path to the data directory',
    process.env.STARK_DB_DATA_DIR || './data'
  )
  .option(
    '-k, --certs ',
    'Path to the certs directory',
    process.env.STARK_DB_CERTS_DIR || './certs'
  )
  .option('-f, --simple',
    'Do not run change-tracking queries',
    process.env.STARK_DB_SIMPLE === "true" || false
  );

program.parse(process.argv);
const options = program.opts();

// Normalize flag types before re-exporting them through the environment.
options.port = parseInt(options.port);
options.ssl = parseInt(options.ssl);
options.cookie = !!options.cookie;

// DON'T USE CONSTANTS IN THIS FILE SO THEY CAN BE PRE-SET BY THE CLI
// (assigning non-strings to process.env coerces them to strings).
process.env.STARK_DB_HTTP_LISTEN_ADDRESS = options.address;
process.env.STARK_DB_DOCUMENTATION_ADDRESS = options.doc;
process.env.STARK_DB_HTTP_PORT = options.port;
process.env.STARK_DB_HTTPS_PORT = options.ssl;
process.env.STARK_DB_COOKIE = options.cookie;
process.env.STARK_DB_DATA_DIR = options.data;
process.env.STARK_DB_CERTS_DIR = options.certs;
process.env.STARK_DB_SIMPLE = options.simple;

// The server is imported AFTER the env vars above are set, so that any
// constants it reads at module-load time see the configured values. Do not
// hoist this import to the top of the file.
import { Server } from './server';
Server.start(); // eslint-disable-line
--------------------------------------------------------------------------------
/src/objects/DB.ts:
--------------------------------------------------------------------------------
1 | import { ArrayModel, ObjectModel } from "objectmodel";
2 | import { DataSource } from "typeorm";
3 |
4 | import { DB as DBEntity } from "../entities/DB";
5 | import { ADMIN_NAME, ONE } from "../utils/constants";
6 | import { LazyValidator } from "../utils/lazyValidator";
7 |
// Argument bag for DBBase and its subclasses: the live TypeORM DataSource
// plus any subset of the DB row's fields (used for init, load, and save).
export class DBArg {
  DB?: DataSource;

  ID?: number;
  name?: string;
  admins?: number[];
  readers?: number[];
  writers?: number[];
}
17 |
18 | export abstract class DBBase {
19 | protected validator: LazyValidator;
20 |
21 | DB: DataSource;
22 |
23 | ID: number;
24 | name: string;
25 | admins: number[];
26 | readers: number[];
27 | writers: number[];
28 |
29 | constructor(init: DBArg) {
30 | this.validator = new LazyValidator(
31 | () => this.validateInit.apply(this, []),
32 | () => this.readyInit.apply(this, [])
33 | );
34 |
35 | if (init != undefined) {
36 | Object.assign(this, init);
37 | this.validator.ready();
38 | }
39 | }
40 | private validateInit(): void { new DBInit(this); }
41 | private readyInit(): void { } // NOOP
42 |
43 | async load(): Promise {
44 | this.validator = new LazyValidator(
45 | () => this.validateLoad.apply(this, []),
46 | () => this.readyLoad.apply(this, [])
47 | );
48 |
49 | await this.validator.readyAsync();
50 | }
51 | protected abstract validateLoad(): void;
52 | protected async readyLoad(): Promise {
53 | const entity = await this.DB.manager.findOneByOrFail(DBEntity, {
54 | ID: this.ID,
55 | name: this.name,
56 | });
57 | Object.assign(this, entity);
58 | };
59 |
60 | async save(arg: DBArg): Promise {
61 | this.validator = new LazyValidator(
62 | () => this.validateSave.apply(this, [arg]),
63 | () => this.readySave.apply(this, [arg])
64 | );
65 |
66 | await this.validator.readyAsync();
67 | }
68 | protected abstract validateSave(arg: DBArg): void;
69 | protected async readySave(arg: DBArg): Promise {
70 | // Make each array property unique.
71 | const adminsSet = new Set(arg.admins);
72 | const readersSet = new Set(arg.readers);
73 | const writersSet = new Set(arg.writers);
74 | arg.admins = Array.from(adminsSet);
75 | arg.readers = Array.from(readersSet);
76 | arg.writers = Array.from(writersSet);
77 |
78 | const entity = await this.DB.manager.save(DBEntity, arg);
79 |
80 | Object.assign(this, entity);
81 | };
82 |
83 | abstract delete(): Promise;
84 |
85 | toObject(): DBArg {
86 | const { ID, name, admins, readers, writers } = this;
87 | return { ID, name, admins, readers, writers };
88 | }
89 | }
90 |
// Construction requires only a live DataSource; other fields are optional.
const DBInit = new ObjectModel({
  DB: DataSource,
});
94 |
95 | export class DB extends DBBase {
96 | protected validateLoad(): void {
97 | new DBLoad(this);
98 | }
99 |
100 | protected validateSave(arg: DBArg): void {
101 | new DBSave(arg);
102 | }
103 |
104 | async delete(): Promise {
105 | this.validator = new LazyValidator(
106 | () => this.validateDelete.apply(this, []),
107 | () => this.readyDelete.apply(this, [])
108 | );
109 |
110 | await this.validator.readyAsync();
111 | }
112 | protected validateDelete(): void {
113 | new DBLoad(this);
114 | }
115 | protected async readyDelete(): Promise {
116 | const arg = {
117 | ID: this.ID,
118 | name: this.name,
119 | };
120 |
121 | if (!this.ID) { delete arg.ID; }
122 | if (!this.name) { delete arg.name; }
123 |
124 | await this.DB.manager.delete(DBEntity, arg);
125 | }
126 | }
127 |
// Validation for load/delete: both identifiers optional (brackets = optional).
// TODO: Check if either ID or name is defined.
// TODO: Check that ID is not ONE and name is not ADMIN_DB_NAME.
const DBLoad = new ObjectModel({
  ID: [Number],
  name: [String],
});

// Validation for save: name and all three ACL arrays are required.
// TODO: Check that ID is not ONE and name is not ADMIN_DB_NAME.
const DBSave = new ObjectModel({
  ID: [Number],
  name: String,
  admins: ArrayModel(Number),
  readers: ArrayModel(Number),
  writers: ArrayModel(Number),
});
143 |
144 | export class AdminDB extends DBBase {
145 | protected validateLoad(): void {
146 | new AdminDBLoad(this);
147 | }
148 |
149 | protected validateSave(arg: DBArg): void {
150 | new AdminDBSave(arg);
151 | }
152 |
153 | async delete(): Promise {
154 | throw new Error("Security error: cannot delete the admin database.");
155 | }
156 | }
157 |
// The admin DB may only ever load/save with its fixed ID and name.
const AdminDBLoad = new ObjectModel({
  ID: ONE,
  name: ADMIN_NAME,
});

const AdminDBSave = new ObjectModel({
  ID: ONE,
  name: ADMIN_NAME,
  admins: ArrayModel(Number),
  readers: ArrayModel(Number),
  writers: ArrayModel(Number),
});
--------------------------------------------------------------------------------
/src/objects/DBFile.ts:
--------------------------------------------------------------------------------
1 | import fs from 'fs';
2 | import path from "path";
3 | import { DataSource } from "typeorm";
4 | import { Any, ArrayModel, ObjectModel } from 'objectmodel';
5 |
6 | import {
7 | ADMIN_NAME,
8 | DB_DRIVER,
9 | DB_IDENTIFIER,
10 | DB_IDENTIFIER_ADMIN,
11 | DATA_DIR
12 | } from "../utils/constants";
13 | import { LazyValidator } from "../utils/lazyValidator";
14 |
// SQLite PRAGMA names used for version marking and write-ahead logging.
const PRAGMA_VER = 'user_version';
const PRAGMA_WAL = 'journal_mode';

/** Arguments for opening a database file: its name and entity types. */
export class DBFileArg {
  name: string;
  /** TypeORM entity classes registered on the connection. */
  types: any[];
}
22 |
23 | export abstract class DBFileBase {
24 | protected validator: LazyValidator;
25 |
26 | name: string;
27 | fileName: string;
28 | types: any[];
29 |
30 | DB: DataSource;
31 |
32 | constructor(init: DBFileArg) {
33 | if (init != undefined) {
34 | Object.assign(this, init);
35 | }
36 | }
37 |
38 | async load(): Promise {
39 | this.validator = new LazyValidator(
40 | () => this.validateLoad.apply(this, []),
41 | () => this.readyLoad.apply(this, [])
42 | );
43 |
44 | await this.validator.readyAsync();
45 | }
46 | protected abstract validateLoad(): void;
47 | protected async readyLoad(): Promise {
48 | this.fileName = DBFileBase.getFileName(this.name);
49 |
50 | // Open the database.
51 | this.DB = new DataSource({
52 | type: DB_DRIVER,
53 | database: this.fileName,
54 | cache: true,
55 | synchronize: true, // TODO: remove this in production
56 | logging: false,
57 | entities: this.types,
58 | });
59 |
60 | await this.DB.initialize();
61 | }
62 |
63 | abstract save(arg: DBFileArg): Promise;
64 | abstract delete(): Promise;
65 |
66 | abstract isInit(): Promise;
67 | abstract setInit(): Promise;
68 |
69 | protected static getFileName(name: string): string {
70 | const dir = DATA_DIR;
71 |
72 | if (!fs.existsSync(dir)) {
73 | fs.mkdirSync(dir, { recursive: true });
74 | }
75 |
76 | return path.resolve(dir, `${name}.db`);
77 | }
78 |
79 | protected async isVersion(version: number): Promise {
80 | const result = await this.DB.query(`PRAGMA ${PRAGMA_VER};`);
81 |
82 | return result[0][PRAGMA_VER] === version;
83 | }
84 |
85 | protected async setVersion(version: number): Promise {
86 | await this.DB.query(`PRAGMA busy_timeout = 30000;`);
87 | await this.DB.query(`PRAGMA ${PRAGMA_WAL} = WAL;`);
88 | await this.DB.query(`PRAGMA ${PRAGMA_VER} = ${version};`);
89 | }
90 |
91 | [Symbol.asyncDispose](): Promise {
92 | if (!this.DB) { return Promise.resolve(undefined); }
93 | // ignore the error if the connection is already closed.
94 | return this.DB.destroy().catch(() => undefined);
95 | }
96 | }
97 |
98 | export class DBFile extends DBFileBase {
99 | validateLoad(): void {
100 | new DBInit(this);
101 | }
102 |
103 | async save(arg: DBFileArg): Promise {
104 | this.validator = new LazyValidator(
105 | () => this.validateSave.apply(this, [arg]),
106 | () => this.readySave.apply(this, [arg])
107 | );
108 |
109 | await this.validator.readyAsync();
110 | }
111 | protected validateSave(arg: DBFileArg): void {
112 | new DBInit(arg);
113 | }
114 | protected async readySave(arg: DBFileArg): Promise {
115 | if (arg.name === this.name) { return; }
116 |
117 | // Check if the database is initialized. Connect to it first if necessary.
118 | if (!this.DB || !this.DB.isInitialized) {
119 | await this.load();
120 | }
121 | if (!await this.isInit()) {
122 | throw new Error(`Cannot rename the database ${this.name} because it is not initialized.`);
123 | }
124 |
125 | // Close the connection.
126 | await this.DB.destroy();
127 |
128 | // Rename the file.
129 | const newFileName = DBFileBase.getFileName(arg.name);
130 | fs.renameSync(this.fileName, newFileName);
131 |
132 | // Run the readyLoad function again.
133 | Object.assign(this, arg);
134 | await this.readyLoad();
135 | }
136 |
137 | async delete(): Promise {
138 | this.validator = new LazyValidator(
139 | () => this.validateDelete.apply(this, []),
140 | () => this.readyDelete.apply(this, [])
141 | );
142 |
143 | await this.validator.readyAsync();
144 | }
145 | protected validateDelete(): void { new DBInit(this); }
146 | protected async readyDelete(): Promise {
147 | // Check if the database is initialized. Connect to it first if necessary.
148 | if (!this.DB || !this.DB.isInitialized) {
149 | await this.load();
150 | }
151 |
152 | if (!await this.isInit()) {
153 | throw new Error(`Cannot delete the database ${this.name} because it is not initialized.`);
154 | }
155 |
156 | // Close the connection.
157 | await this.DB.destroy();
158 |
159 | // Delete the file.
160 | fs.rmSync(this.fileName);
161 | fs.rmSync(`${this.fileName}-shm`, { force: true });
162 | fs.rmSync(`${this.fileName}-wal`, { force: true });
163 | }
164 |
165 | async isInit(): Promise {
166 | return await this.isVersion(DB_IDENTIFIER);
167 | }
168 |
169 | async setInit(): Promise {
170 | await this.setVersion(DB_IDENTIFIER);
171 | }
172 | }
173 |
// Any named database with a list of entity types may be opened.
const DBInit = new ObjectModel({
  name: String,
  types: ArrayModel(Any),
});
178 |
179 | export class AdminDBFile extends DBFileBase {
180 | protected validateLoad(): void {
181 | new AdminDBLoadInit(this);
182 | }
183 |
184 | save(_arg: DBFileArg): Promise {
185 | throw new Error("Security error: cannot rename the admin database.");
186 | }
187 |
188 | delete(): Promise {
189 | throw new Error("Security error: cannot delete the admin database.");
190 | }
191 |
192 | async isInit(): Promise {
193 | return await this.isVersion(DB_IDENTIFIER_ADMIN);
194 | }
195 |
196 | async setInit(): Promise {
197 | await this.setVersion(DB_IDENTIFIER_ADMIN);
198 | }
199 | }
200 |
// The admin DB file may only be opened under its fixed name.
const AdminDBLoadInit = new ObjectModel({
  name: ADMIN_NAME,
  // TODO: This type is actually a constructor: () => Any.
  types: ArrayModel(Any),
});
--------------------------------------------------------------------------------
/src/objects/user.ts:
--------------------------------------------------------------------------------
1 | import { ObjectModel } from "objectmodel";
2 | import { DataSource } from "typeorm";
3 | import assert from "assert";
4 |
5 | import { User as UserEntity } from "../entities/user";
6 | import { LazyValidator } from "../utils/lazyValidator";
7 | import { Password } from "../utils/password";
8 |
/**
 * Argument bag for constructing/loading/saving a User.
 * Required fields are enforced at runtime by UserInit/UserLoad/UserSave.
 */
export class UserArg {
  DB?: DataSource;

  ID?: number;
  name?: string;
  // Plaintext on save input; replaced by the salted hash in readySave.
  password?: string;
  salt?: string;

  isLoggedIn?: boolean;
}
19 |
20 | export class User {
21 | private validator: LazyValidator;
22 |
23 | DB: DataSource;
24 |
25 | ID: number;
26 | name: string;
27 | password: string;
28 | salt: string;
29 |
30 | isLoggedIn = false;
31 |
32 | constructor(init: UserArg) {
33 | this.validator = new LazyValidator(
34 | () => this.validateInit.apply(this, []),
35 | () => this.readyInit.apply(this, [])
36 | );
37 |
38 | if (init != undefined) {
39 | Object.assign(this, init);
40 | this.validator.ready();
41 | }
42 | }
43 | private validateInit(): void { new UserInit(this); }
44 | private readyInit(): void { } // NOOP
45 |
46 | login(password: string): void {
47 |
48 | const loginHash = Password.hash(password, this.salt);
49 | if (loginHash !== this.password) { return;}
50 |
51 | this.isLoggedIn = true;
52 | }
53 | logout(): void {
54 | this.isLoggedIn = false;
55 | }
56 |
57 | async load(): Promise {
58 | this.validator = new LazyValidator(
59 | () => this.validateLoad.apply(this, []),
60 | () => this.readyLoad.apply(this, [])
61 | );
62 |
63 | await this.validator.readyAsync();
64 | }
65 | validateLoad(): void { new UserLoad(this); }
66 | async readyLoad(): Promise {
67 | const entity = await this.DB.manager.findOneByOrFail(UserEntity, {
68 | ID: this.ID,
69 | name: this.name,
70 | });
71 | Object.assign(this, entity);
72 | }
73 |
74 | async save(arg: UserArg): Promise {
75 | this.validator = new LazyValidator(
76 | () => this.validateSave.apply(this, [arg]),
77 | () => this.readySave.apply(this, [arg])
78 | );
79 |
80 | await this.validator.readyAsync();
81 | }
82 | validateSave(arg: UserArg): void { new UserSave(arg); }
83 | async readySave(arg: UserArg): Promise {
84 | arg.salt = Password.getSalt();
85 | arg.password = Password.hash(arg.password, arg.salt);
86 |
87 | const entity = await this.DB.manager.save(UserEntity, arg);
88 | Object.assign(this, entity);
89 | }
90 |
91 | async delete(): Promise {
92 | this.validator = new LazyValidator(
93 | () => this.validateDelete.apply(this, []),
94 | () => this.readyDelete.apply(this, [])
95 | );
96 |
97 | await this.validator.readyAsync();
98 | }
99 | validateDelete(): void { new UserLoad(this); }
100 | async readyDelete(): Promise {
101 | const arg = {
102 | ID: this.ID,
103 | name: this.name,
104 | };
105 |
106 | if (!this.ID) { delete arg.ID; }
107 | if (!this.name) { delete arg.name; }
108 |
109 | await this.DB.manager.delete(UserEntity, arg);
110 | }
111 |
112 | toObject(): UserArg {
113 | const { ID, name, password, salt, isLoggedIn } = this;
114 | return { ID, name, password, salt, isLoggedIn };
115 | }
116 | }
117 |
// Construction requires only a live DataSource.
const UserInit = new ObjectModel({
  DB: DataSource,
});

// Validation for load/delete: both identifiers optional.
// TODO: Check if either ID or name is defined.
const UserLoad = new ObjectModel({
  ID: [Number],
  name: [String]
});

// Validation for save: name and credentials are required.
const UserSave = new ObjectModel({
  ID: [Number],
  name: String,
  password: String,
  salt: String,
});
--------------------------------------------------------------------------------
/src/objects/variable.ts:
--------------------------------------------------------------------------------
1 | import { ObjectModel } from "objectmodel";
2 | import { DataSource } from "typeorm";
3 |
4 | import { Variable as VariableEntity } from "../entities/variable";
5 | import { LazyValidator } from "../utils/lazyValidator";
6 |
/** Argument bag for a named scalar variable stored in the database. */
export class VariableArg {
  DB?: DataSource;

  name?: string;
  value?: boolean | number | string;
}
13 |
14 | export class Variable {
15 | private validator: LazyValidator;
16 |
17 | DB: DataSource;
18 |
19 | name: string;
20 | value: boolean | number | string;
21 |
22 | constructor(init: VariableArg) {
23 | this.validator = new LazyValidator(
24 | () => this.validateInit.apply(this, []),
25 | () => this.readyInit.apply(this, [])
26 | );
27 |
28 | if (init != undefined) {
29 | Object.assign(this, init);
30 | this.validator.ready();
31 | }
32 | }
33 | private validateInit(): void { new VariableInit(this); }
34 | private readyInit(): void { } // NOOP
35 |
36 | async load(): Promise {
37 | this.validator = new LazyValidator(
38 | () => this.validateLoad.apply(this, []),
39 | () => this.readyLoad.apply(this, [])
40 | );
41 |
42 | await this.validator.readyAsync();
43 | }
44 | validateLoad(): void { new VariableLoad(this); }
45 | async readyLoad(): Promise {
46 | const entity = await this.DB.manager.findOneByOrFail(VariableEntity, {
47 | name: this.name,
48 | });
49 | Object.assign(this, entity);
50 | }
51 |
52 | async save(arg: VariableArg): Promise {
53 | this.validator = new LazyValidator(
54 | () => this.validateSave.apply(this, [arg]),
55 | () => this.readySave.apply(this, [arg])
56 | );
57 |
58 | await this.validator.readyAsync();
59 | }
60 | validateSave(arg: VariableArg): void { new VariableSave(arg); }
61 | async readySave(arg: VariableArg): Promise {
62 | await this.DB.manager.save(VariableEntity, arg);
63 | }
64 |
65 | async delete(): Promise {
66 | this.validator = new LazyValidator(
67 | () => this.validateDelete.apply(this, []),
68 | () => this.readyDelete.apply(this, [])
69 | );
70 |
71 | await this.validator.readyAsync();
72 | }
73 | validateDelete(): void { new VariableLoad(this); }
74 | async readyDelete(): Promise {
75 | await this.DB.manager.delete(VariableEntity, {
76 | name: this.name,
77 | });
78 | }
79 |
80 | }
81 |
// Construction requires only a live DataSource.
const VariableInit = new ObjectModel({
  DB: DataSource,
});

// Load/delete require the variable name.
const VariableLoad = new ObjectModel({
  name: String
});

// Save requires the name and a scalar value.
const VariableSave = new ObjectModel({
  name: String,
  value: [Boolean, Number, String],
});
--------------------------------------------------------------------------------
/src/parser/queryParse.ts:
--------------------------------------------------------------------------------
1 | import assert from 'assert';
2 | import { Any, ArrayModel, ObjectModel } from 'objectmodel';
3 | import RecursiveIterator from 'recursive-iterator';
4 | import sqliteParser from '@appland/sql-parser';
5 |
6 | import { ONE, ZERO } from '../utils/constants';
7 | import { LazyValidator } from '../utils/lazyValidator';
8 |
/**
 * Classification of a parsed SQL statement, grouped by effect:
 * transaction control, schema changes, and data access.
 */
export enum ParseType {
  begin_transaction = 0,
  rollback_transaction = 1,
  commit_transaction = 2,

  create_table = 3,
  rename_table = 4,
  modify_table_columns = 5,
  drop_table = 6,

  modify_data = 7,
  select_data = 8,
  delete_data = 9,

  other = 10,
}
25 |
26 | export const COMMIT_Qs: Set = new Set([
27 | ParseType.begin_transaction,
28 | ParseType.commit_transaction,
29 | ParseType.rollback_transaction,
30 | ]);
31 |
32 | export const DATA_Qs: Set = new Set([
33 | ParseType.modify_data,
34 | ParseType.select_data,
35 | ParseType.delete_data,
36 | ]);
37 |
38 | export const READ_ONLY_Qs: Set = new Set([
39 | ParseType.select_data,
40 | ParseType.begin_transaction,
41 | ParseType.commit_transaction,
42 | ParseType.rollback_transaction,
43 | ]);
44 |
45 | export const TABLE_MODIFY_Qs = new Set([
46 | ParseType.create_table,
47 | ParseType.rename_table,
48 | ParseType.modify_table_columns,
49 | ParseType.drop_table,
50 | ]);
51 |
/**
 * Raw statement action/variant names as reported by the SQL parser.
 * Used via reverse lookup (QueryType[0] === 'begin', etc.) in parseType().
 */
enum QueryType {
  begin = 0,
  rollback = 1,
  commit = 2,
  end = 3,

  create = 4,
  rename = 5,
  drop = 6,

  add = 7,

  insert = 8,
  update = 9,
  select = 10,
  delete = 11,
}
69 |
/** Constructor arguments and serialized form of a QueryParse. */
export class QueryParseArg {
  query: string;
  params: any[];

  // The fields below are derived by parsing; optional as input.
  type?: ParseType;
  isRead?: boolean;
  tablesWrite?: string[];
  tablesRead?: string[];
  columns?: string[];
  autoKeys?: string[];
  keys?: string[];
}
82 |
83 | // TODO: Add support for the following:
84 | // - WITH PREFIX TO DATA QUERIES
85 |
86 | // - CREATE INDEX
87 | // - CREATE TRIGGER
88 | // - CREATE VIEW
89 |
90 | // - SCHEMA NAME
91 | // - ATTACH DB
92 | export class QueryParse {
93 | validator: LazyValidator;
94 |
95 | query: string;
96 | params: any[];
97 |
98 | type: ParseType;
99 | isRead: boolean;
100 | tablesWrite: string[];
101 | tablesRead: string[];
102 | columns: string[];
103 | autoKeys: string[];
104 | keys: string[];
105 |
106 | private meta: any;
107 |
108 | /**
109 | * Creates an instance of a SQL statement.
110 | * @param [init] @type {QueryParseArg} The initial values.
111 | */
112 | constructor(init?: QueryParseArg) {
113 | this.validator = new LazyValidator(
114 | () => this.validate.apply(this, []),
115 | () => this.ready.apply(this, [])
116 | );
117 |
118 | // Copy the properties.
119 | if (init != undefined) {
120 | Object.assign(this, init);
121 | this.validator.ready();
122 | }
123 | }
124 |
125 | private validate(): void {
126 | this.query = this.query?.trim() || '';
127 | this.params = this.params || [];
128 |
129 | new QueryParseInitArg(this);
130 |
131 | const parseResult = sqliteParser(this.query);
132 | this.meta = parseResult
133 | ?.statement
134 | ?.[ZERO];
135 |
136 | assert(this.meta, `Failed to parse the statement.`);
137 | assert( // TODO: Might be unnecessary.
138 | this.meta.type === `statement`,
139 | `Failed to parse the statement metadata type.`
140 | );
141 | }
142 |
143 | private ready(): void {
144 | // Set up the state.
145 | this.type = ParseType.other;
146 | this.isRead = false;
147 | this.tablesRead = []; this.tablesWrite = [];
148 | this.columns = [];
149 | this.autoKeys = [];
150 | this.keys = [];
151 |
152 | this.parseType();
153 | this.parseIsRead();
154 | this.parseTables();
155 | }
156 |
157 | private parseType() {
158 | const transactionActions = [
159 | QueryType[0],
160 | QueryType[1],
161 | ];
162 |
163 | const transactionEndActions = [
164 | QueryType[2],
165 | QueryType[3],
166 | ];
167 |
168 | const columnModifyActions = [
169 | QueryType[7],
170 | QueryType[6]
171 | ];
172 |
173 | const dataModifyActions = [
174 | QueryType[8],
175 | QueryType[9],
176 | ];
177 |
178 | if (transactionActions.includes(this.meta.action)) {
179 | this.type = (QueryType[this.meta.action]);
180 |
181 | } else if (transactionEndActions.includes(this.meta.action)) {
182 | this.type = ParseType.commit_transaction;
183 |
184 | } else if (
185 | this.meta.format === `table` &&
186 | this.meta.variant === QueryType[4]
187 | ) {
188 | this.type = ParseType.create_table;
189 |
190 | } else if (this.meta.action === QueryType[5]) {
191 | this.type = ParseType.rename_table;
192 |
193 | } else if (
194 | columnModifyActions.includes(this.meta.action)
195 | ) {
196 | this.type = ParseType.modify_table_columns;
197 |
198 | } else if (
199 | this.meta.format === `table` &&
200 | this.meta.variant === QueryType[6]
201 | ) {
202 | this.type = ParseType.drop_table;
203 |
204 | } else if (dataModifyActions.includes(this.meta.variant)) {
205 | this.type = ParseType.modify_data;
206 |
207 | } else if (this.meta.variant === QueryType[11]) {
208 | this.type = ParseType.delete_data;
209 |
210 | } else if (this.meta.variant === QueryType[10]) {
211 | this.type = ParseType.select_data;
212 |
213 | } else {
214 | this.type = ParseType.other;
215 | }
216 |
217 | // Detect if the statement is a write with a read.
218 | this.isRead = (
219 | this.type === ParseType.create_table &&
220 | this.meta.definition?.[ZERO]?.type === `statement` &&
221 | this.meta.definition?.[ZERO]?.variant === QueryType[10]
222 | ) || (
223 | this.type === ParseType.modify_data &&
224 | this.meta.result?.type === `statement` &&
225 | this.meta.result?.variant === QueryType[10]
226 | );
227 | }
228 |
229 | private parseIsRead(): void {
230 | let iterator = new RecursiveIterator(
231 | this.meta,
232 | ONE, // Breath-first.
233 | );
234 |
235 | for (let { node } of iterator) {
236 | if (
237 | node?.type === `statement` &&
238 | node?.variant === QueryType[10]
239 | ) {
240 | this.isRead = true;
241 | break;
242 | }
243 | }
244 | }
245 |
246 | private parseTables(): void {
247 | if (!TABLE_MODIFY_Qs.has(this.type) && !DATA_Qs.has(this.type)) {
248 | return;
249 | }
250 |
251 | const { columns, autoKeys, keys } = this.parseColumns();
252 | this.columns = columns; this.autoKeys = autoKeys; this.keys = keys;
253 |
254 | switch (this.type) {
255 | case ParseType.modify_data:
256 | case ParseType.delete_data:
257 | // Get the all the tables from the result.
258 | this.tablesWrite = this.parseDataTables(this.meta);
259 |
260 | // Get all the tables from the select part.
261 | const selectMeta = this.meta.result || this.meta.where;
262 | if (this.isRead && !!selectMeta) {
263 | this.tablesRead = this.parseDataTables(selectMeta);
264 | }
265 | break;
266 | case ParseType.create_table:
267 |
268 | // Get the all the tables from the result.
269 | this.tablesWrite = this.parseDataTables(this.meta);
270 |
271 | // Get all the tables from the select part.
272 | if (this.isRead && !!this.meta.definition) {
273 | this.tablesRead = this.parseDataTables(this.meta.definition);
274 | }
275 | break;
276 |
277 | case ParseType.rename_table:
278 | this.tablesWrite = this.parseDataTables(this.meta);
279 | this.tablesWrite = this.tablesWrite.reverse();
280 | break;
281 |
282 | case ParseType.modify_table_columns:
283 | case ParseType.drop_table:
284 | this.tablesWrite = this.parseDataTables(this.meta);
285 | break;
286 |
287 | default:
288 | this.tablesRead = this.parseDataTables(this.meta);
289 | break;
290 | }
291 |
292 | // Create a set of the Read tables.
293 | const tablesReadSet = new Set(this.tablesRead);
294 |
295 | // Filter the tables to only include the tables that are not in the
296 | // Read and Delete tables.
297 | this.tablesWrite = this.tablesWrite
298 | .filter(table => !tablesReadSet.has(table));
299 | }
300 |
301 | private parseColumns(): {
302 | columns: string[];
303 | autoKeys: string[];
304 | keys: string[];
305 | } {
306 | // Extract the columns metadata to an array.
307 | let definitions = [];
308 | if (!!this.meta.definition) {
309 | if (!!this.meta.definition.name) {
310 | definitions.push(this.meta.definition);
311 | } else if (!!this.meta.definition.length) {
312 | definitions = this.meta.definition;
313 | }
314 | }
315 |
316 | // Extract the columns and the key columns.
317 | const columns = []; const autoKeys = []; const keys = [];
318 | for (let definition of definitions) {
319 | if (definition.type !== `definition`) { continue; }
320 |
321 | if (definition.variant === `column`) {
322 | columns.push(definition.name);
323 |
324 | for (const constraint of definition.definition) {
325 | if (constraint.type !== `constraint`) { continue; }
326 |
327 | if (constraint.variant === `primary key`) {
328 | keys.push(definition.name);
329 | if(constraint.autoIncrement) { autoKeys.push(definition.name); }
330 | break;
331 | }
332 | }
333 | } else if (
334 | definition.variant === `constraint` &&
335 | (definition.definition).findIndex(constraint =>
336 | constraint.type === `constraint` &&
337 | constraint.variant === `primary key`
338 | ) > -ONE
339 | ) {
340 | for (const column of definition.columns) {
341 | keys.push(column.name);
342 | }
343 | }
344 | }
345 |
346 | return {
347 | columns,
348 | autoKeys,
349 | keys
350 | };
351 | }
352 |
353 | private parseDataTables(rootNode: any): string[] {
354 | /* #region Parse table metadata. */
355 | const tables = new Set();
356 |
357 | let iterator = new RecursiveIterator(
358 | rootNode,
359 | ONE, // Breath-first.
360 | );
361 |
362 | for (let { node } of iterator) {
363 | const conditions = new Set([
364 | node?.type === `identifier`,
365 | node?.variant === `table` || node?.format === `table`,
366 | !!node?.name
367 | ]);
368 |
369 | if (conditions.has(false)) { continue; }
370 |
371 | tables.add(node.name);
372 | }
373 | /* #endregion */
374 |
375 |
376 | return Array.from(tables);
377 | }
378 |
379 | toObject(): QueryParseArg {
380 | return {
381 | query: this.query,
382 | params: this.params,
383 |
384 | type: this.type,
385 | isRead: this.isRead,
386 | tablesWrite: this.tablesWrite,
387 | tablesRead: this.tablesRead,
388 | columns: this.columns,
389 | autoKeys: this.autoKeys,
390 | keys: this.keys
391 | };
392 | }
393 |
394 | /**
395 | * Gets the string representation of the instance.
396 | * @returns string
397 | * @memberof Statement
398 | */
399 | toString(): string {
400 | return this.query;
401 | }
402 | }
403 |
/* #region Use schema to check the properties. */
// Only query and params are validated; derived fields are set by parsing.
const QueryParseInitArg = new ObjectModel({
  query: String,
  params: ArrayModel(Any)
});
/* #endregion */
--------------------------------------------------------------------------------
/src/parser/queryRaw.ts:
--------------------------------------------------------------------------------
1 | import { Any, ArrayModel, ObjectModel } from 'objectmodel';
2 |
3 | import { LazyValidator } from '../utils/lazyValidator';
4 |
/** A raw query string plus its positional parameters. */
class QueryRawArg {
  query: string;
  params: any[];
}
9 |
10 | export class QueryRaw {
11 | validator: LazyValidator;
12 |
13 | query: string;
14 | params: any[];
15 |
16 | /**
17 | * Creates an instance of the class.
18 | * @param [init] @type {QueryRawArg} The initial value.
19 | */
20 | constructor(init?: QueryRawArg) {
21 | this.validator = new LazyValidator(
22 | () => this.validate.apply(this, []),
23 | );
24 |
25 | // Copy the properties.
26 | if (init != undefined) {
27 | Object.assign(this, init);
28 | this.validator.valid();
29 | }
30 | }
31 |
32 | private validate(): void {
33 | new QueryRawInitArg(this);
34 | }
35 |
36 | toObject(): QueryRawArg {
37 | return {
38 | query: this.query,
39 | params: this.params,
40 | };
41 | }
42 | }
43 |
// Both fields are required; params may hold values of any type.
const QueryRawInitArg = new ObjectModel({
  query: String,
  params: ArrayModel(Any),
});
--------------------------------------------------------------------------------
/src/parser/scriptParse.ts:
--------------------------------------------------------------------------------
1 | import sqliteParser from '@appland/sql-parser';
2 | import { Any, ArrayModel, ObjectModel } from 'objectmodel';
3 |
4 | import { NEWLINE, STATEMENT_DELIMITER } from '../utils/constants';
5 | import { LazyValidator } from '../utils/lazyValidator';
6 | import { QueryParse, READ_ONLY_Qs } from './queryParse';
7 |
/**
 * A multi-statement SQL script plus the positional parameters for all of
 * its statements, in order.
 */
class ScriptParseArg {
  script: string;
  params: any[];
}
12 |
13 | export class ScriptParse {
14 | validator: LazyValidator;
15 |
16 | script: string;
17 | params: any[];
18 |
19 | isReadOnly: boolean;
20 | queries: QueryParse[];
21 |
22 | /**
23 | * Creates an instance of the class.
24 | * @param [init] @type {ScriptParseArg} The initial value.
25 | */
26 | constructor(init?: ScriptParseArg) {
27 | this.validator = new LazyValidator(
28 | () => this.validate.apply(this, []),
29 | () => this.ready.apply(this, []),
30 | );
31 |
32 | // Copy the properties.
33 | if (init != undefined) {
34 | Object.assign(this, init);
35 | this.validator.ready();
36 | }
37 | }
38 |
39 | private validate(): void {
40 | this.script = this.script?.trim() || '';
41 | this.params = this.params || [];
42 |
43 | new ScriptParseInitArg(this);
44 | }
45 |
46 | private ready(): void {
47 | // Parse the whole script to validate it.
48 | sqliteParser(this.script);
49 |
50 | // Initialize the state.
51 | this.queries = [];
52 | this.isReadOnly = true;
53 |
54 | // Check if the script is empty.
55 | if (!this.script) { return; }
56 |
57 | // Split up the script into lines.
58 | // This might be a bit tricky because of the semicolon in trigger
59 | // definitions.
60 | const candidateLines = this.script.split(STATEMENT_DELIMITER);
61 |
62 | // Reverse the params array so we can pop them off the end.
63 | const reverseParams = this.params.reverse();
64 |
65 | // Iterate over the lines.
66 | let query = [];
67 | for (let i = 0; i < candidateLines.length; i++) {
68 | query.push(candidateLines[i]);
69 |
70 | try {
71 | // Join the lines into a query.
72 | const queryStr = query.join(STATEMENT_DELIMITER);
73 |
74 | // Try to parse the query.
75 | sqliteParser(queryStr);
76 |
77 | // Count the number of parameters in the query.
78 | const paramCount = (queryStr.match(/\?/g) || []).length;
79 |
80 | // Pop the parameters off the end of the array.
81 | const params = reverseParams.splice(-paramCount, paramCount).reverse();
82 |
83 | // If the line is parsable, add it to the queries array.
84 | const queryParse = new QueryParse({
85 | query: `${queryStr}${STATEMENT_DELIMITER}`,
86 | params: params,
87 | });
88 | this.queries.push(queryParse);
89 |
90 | // Reset the query.
91 | query = [];
92 | } catch (error) {
93 | // If the query is not parsable, continue.
94 | continue;
95 | }
96 | }
97 |
98 | // Check if all the queries are read-only.
99 | this.isReadOnly = this.queries.every(q => READ_ONLY_Qs.has(q.type));
100 | }
101 |
102 | /**
103 | * Returns the object representation of the class.
104 | * @returns {Object} The object representation of the class.
105 | */
106 | toObject(): any {
107 | return {
108 | isReadOnly: this.isReadOnly,
109 | queries: this.queries.map(q => q.toObject()),
110 | };
111 | }
112 |
113 | /**
114 | * Returns the script representation of the class.
115 | * @returns {Object} The script representation of the class.
116 | */
117 | toScript(): any {
118 | return {
119 | script: this.queries.map(q => q.query).join(NEWLINE),
120 | params: this.queries.flatMap(q => q.params),
121 | };
122 | }
123 | }
124 |
// Runtime validation model for the ScriptParse constructor argument:
// a raw SQL script string plus its positional parameters.
const ScriptParseInitArg = new ObjectModel({
  script: String,
  params: ArrayModel(Any),
});
--------------------------------------------------------------------------------
/src/server.ts:
--------------------------------------------------------------------------------
1 | import cluster from 'cluster'; // Only required if you want cluster.worker.id
2 | import express from 'express';
3 | import session from 'express-session';
4 | import http from 'http';
5 | import memoryStore from 'memorystore';
6 | import { randomBytes } from 'node:crypto';
7 | import fs from 'node:fs';
8 | import sticky from 'sticky-session-custom';
9 | import swaggerUi from 'swagger-ui-express';
10 | import path from 'node:path';
11 | import cors from 'cors';
12 |
13 | import DBController from './controllers/DBs';
14 | import authController from './controllers/auth';
15 | import userController from './controllers/users';
16 | import queryController from './controllers/query';
17 | import {
18 | CERT_DIR,
19 | DOCUMENTATION_ADDRESS,
20 | HTTPS_PORT,
21 | HTTP_LISTEN_ADDRESS,
22 | HTTP_PORT,
23 | ONE,
24 | SECURE_COOKIE,
25 | SESSION_EXPIRY,
26 | ZERO
27 | } from './utils/constants';
28 | import { Services } from './controllers/services';
29 | import { DB } from './services/DB';
30 | import { User } from './services/user';
31 | import { DBFile } from './services/DBFile';
32 |
33 | export class Server {
34 | public static async start() {
35 | const app = express();
36 | app.use(cors());
37 | app.set('trust proxy', ONE) // trust first proxy
38 |
39 | /* #region Setup Swagger. */
40 | const JSONPath =
41 | path.join(__dirname, '..', '..', 'src', 'utils', 'swagger.json');
42 | const swaggerDocument =
43 | JSON.parse(fs.readFileSync(JSONPath).toString());
44 | swaggerDocument.servers[ZERO].url = `${DOCUMENTATION_ADDRESS}:${HTTPS_PORT}`;
45 | app.use('/api-docs', swaggerUi.serve, swaggerUi.setup(swaggerDocument));
46 | /* #endregion */
47 |
48 | /* #region Setup the session store. */
49 | const MemoryStore = memoryStore(session);
50 | app.use(session({
51 | secret: randomBytes(16).toString('hex'),
52 | resave: true,
53 | saveUninitialized: true,
54 | cookie: { secure: SECURE_COOKIE }, // TODO: Enable when HTTPS is enabled.
55 | store: new MemoryStore({
56 | checkPeriod: SESSION_EXPIRY
57 | }),
58 | }));
59 | /* #endregion */
60 |
61 | app.use(express.json());
62 |
63 | /* #region Setup the services. */
64 | Services.DB = new DB();
65 | await Services.DB.init();
66 |
67 | Services.User = new User(Services.DB.adminDB);
68 |
69 | Services.DBFile = new DBFile(Services.DB.adminDB);
70 | /* #endregion */
71 |
72 | /* #region Setup the routes. */
73 | app.use('', DBController);
74 | app.use('', userController);
75 | app.use('', authController);
76 | app.use('', queryController);
77 | /* #endregion */
78 |
79 | const server = http.createServer(app);
80 |
81 | // Setup sticky sessions with PID parameter.
82 | const master = sticky.listen(server, HTTP_PORT, HTTP_LISTEN_ADDRESS, {
83 | generatePrehashArray(req: any, _socket: any) {
84 | const parsed = new URL(req.url, 'https://dummyurl.example.com');
85 | // You can use '' instead of Math.random().toString() if you want to use
86 | // a consistent worker for all unauthenticated requests.
87 | const userToken = parsed.searchParams.get('pid') || '';
88 | // turn string into an array of numbers for hashing
89 | return (userToken.split('') || [' ']).map(e => e.charCodeAt(0));
90 | }
91 | });
92 |
93 | if (master) {
94 | // Master code
95 | server.once('listening', function () {
96 | console.log(`HTTP Server listening on address: ${HTTP_LISTEN_ADDRESS}, port: ${HTTP_PORT}`);
97 | console.log(`HTTPS Proxy listening on address 0.0.0.0 (all incoming) port: ${HTTPS_PORT}`);
98 | });
99 |
100 | const { key, cert } = Server.getPems();
101 |
102 | /* #region Create the HTTPS proxy server in front of a HTTP server. */
103 | const httpProxy = require('http-proxy');
104 | httpProxy.createServer({
105 | target: {
106 | host: HTTP_LISTEN_ADDRESS,
107 | port: HTTP_PORT
108 | },
109 | ssl: { key, cert }
110 | }).listen(HTTPS_PORT);
111 | /* #endregion */
112 | } else {
113 | // Worker code
114 | console.log(`Worker: ${cluster.worker.id}, Status: Started`);
115 | }
116 | }
117 |
118 | /* #region Certificate generation. */
119 | private static getPems(): { key: string, cert: string } {
120 | const keyPath = path.join(CERT_DIR, 'key.pem');
121 | const certPath = path.join(CERT_DIR, 'cert.pem');
122 |
123 | const hasCert =
124 | fs.existsSync(keyPath) &&
125 | fs.existsSync(certPath);
126 |
127 | if (!hasCert) { Server.setPems(); }
128 |
129 | return {
130 | key: fs.readFileSync(keyPath, 'utf8'),
131 | cert: fs.readFileSync(certPath, 'utf8')
132 | };
133 | }
134 |
135 | private static setPems() {
136 | const selfsigned = require('selfsigned');
137 | const pems = selfsigned.generate([{
138 | name: 'commonName',
139 | value: '127.0.0.1'
140 | }], {
141 | days: 365
142 | });
143 |
144 | fs.mkdirSync(CERT_DIR, { recursive: true });
145 |
146 | fs.writeFileSync(path.join(CERT_DIR, 'key.pem'), pems.private, 'utf8');
147 | fs.writeFileSync(path.join(CERT_DIR, 'cert.pem'), pems.cert, 'utf8');
148 | }
149 | /* #endregion */
150 | }
--------------------------------------------------------------------------------
/src/services/DB.ts:
--------------------------------------------------------------------------------
1 | import { ForbiddenError } from "@casl/ability";
2 | import assert from "node:assert";
3 | import retry from 'async-retry';
4 |
5 | import { DB as DBEntity } from "../entities/DB";
6 | import { User as UserEntity } from "../entities/user";
7 | import { Variable as VariableEntity } from "../entities/variable";
8 | import { AdminDB, DBArg, DBBase, DB as DBObject } from "../objects/DB";
9 | import { AdminDBFile, DBFile } from "../objects/DBFile";
10 | import { User } from "../objects/user";
11 | import { Variable } from "../objects/variable";
12 | import { DBOp } from "../utils/DBOp";
13 | import { ADMIN_NAME, DB_EXISTS_CHECK, ONE, ZERO } from "../utils/constants";
14 | import { Variable as VariableType } from '../utils/variable';
15 | import defineAbilityForDB from "../valid/DB";
16 |
17 | export class DB implements AsyncDisposable {
18 | adminDB: AdminDB;
19 | private adminDBFile: AdminDBFile;
20 |
21 | constructor() {
22 | this.adminDBFile = new AdminDBFile({
23 | name: ADMIN_NAME,
24 | types: [DBEntity, UserEntity]
25 | });
26 | }
27 |
28 | async init(): Promise {
29 | await this.adminDBFile.load();
30 | this.adminDB = new AdminDB({
31 | DB: this.adminDBFile.DB,
32 | ID: ONE,
33 | name: ADMIN_NAME
34 | });
35 |
36 | if (await this.adminDBFile.isInit()) { await this.adminDB.load(); return; }
37 |
38 | // Initialize the AdminDB.
39 | await this.adminDB.save({
40 | ID: ONE,
41 | name: ADMIN_NAME,
42 | admins: [ONE],
43 | readers: [],
44 | writers: []
45 | });
46 | assert.strictEqual(this.adminDB.ID, ONE);
47 |
48 | // Initialize the Admin user.
49 | const adminUser = new User({ DB: this.adminDBFile.DB });
50 | await adminUser.save({
51 | ID: ONE,
52 | name: ADMIN_NAME,
53 | password: ADMIN_NAME,
54 | salt: ''
55 | });
56 | assert.strictEqual(adminUser.ID, ONE);
57 |
58 | // Initialize the AdminDBFile.
59 | await this.adminDBFile.setInit();
60 | }
61 |
62 | async add(arg: { user: User, DB: DBArg }): Promise {
63 | ForbiddenError
64 | .from(defineAbilityForDB(arg.user))
65 | .throwUnlessCan(DBOp.Admin, this.adminDB);
66 |
67 | assert.strictEqual(arg.DB.ID, undefined);
68 |
69 | await using localDBFile =
70 | new DBFile({ name: arg.DB.name, types: [VariableEntity] });
71 | await localDBFile.load();
72 |
73 | // Save the auxiliary variables.
74 | const version = new Variable({ DB: localDBFile.DB });
75 | await version.save({ name: VariableType.version, value: ZERO });
76 |
77 | // Set the initialized flag.
78 | await localDBFile.setInit();
79 |
80 | // reload the AdminDB in case of changes.
81 | await this.adminDB.load();
82 |
83 | const localDBObject = new DBObject({ DB: this.adminDBFile.DB });
84 | await localDBObject.save({
85 | name: arg.DB.name,
86 | admins: arg.DB.admins.concat(arg.user.ID, this.adminDB.admins),
87 | readers: arg.DB.readers,
88 | writers: arg.DB.writers
89 | });
90 |
91 | return localDBObject;
92 | }
93 |
94 | async get(arg: { user: User, DB: DBArg }): Promise {
95 | // TODO: make redundant.
96 | assert.ok(arg.DB.ID || arg.DB.name);
97 |
98 | let localDB: DBBase = new DBObject({
99 | DB: this.adminDBFile.DB,
100 | ID: arg.DB.ID,
101 | name: arg.DB.name
102 | });
103 |
104 | await localDB.load();
105 | if (localDB.ID === ONE) {
106 | localDB = new AdminDB(localDB);
107 | }
108 |
109 | ForbiddenError
110 | .from(defineAbilityForDB(arg.user))
111 | .throwUnlessCan(DBOp.Read, localDB);
112 |
113 | return localDB;
114 | }
115 |
116 | async getAll(arg: { user: User, DB: DBArg }): Promise {
117 | if (arg.DB.ID || arg.DB.name) { return [await this.get(arg)]; }
118 |
119 | const localDBs: DBBase[] = [];
120 |
121 | // Get all the DBs.
122 | const localDBEntities = await this.adminDB.DB.manager.find(DBEntity);
123 | for (const localDBEntity of localDBEntities) {
124 | let localDB: DBBase;
125 |
126 | try {
127 | localDB = await this.get({ user: arg.user, DB: localDBEntity });
128 | localDBs.push(localDB);
129 | } catch (error) { continue; } // Expected forbidden error for some DBs.
130 | }
131 |
132 | return localDBs;
133 | }
134 |
135 | async set(arg: { user: User, DB: DBArg }): Promise {
136 | const localDB = await this.get({
137 | user: arg.user,
138 | DB: { ID: arg.DB.ID }
139 | });
140 |
141 | ForbiddenError
142 | .from(defineAbilityForDB(arg.user))
143 | .throwUnlessCan(DBOp.Admin, localDB);
144 |
145 | const previousName = localDB.name;
146 |
147 | // Update the DB entry.
148 | await localDB.save(arg.DB);
149 |
150 | // Initialize the DB file object.
151 | const localDBFile = new DBFile({
152 | name: previousName,
153 | types: []
154 | });
155 |
156 | // Retry updating the file until it succeeds.
157 | await retry(
158 | async (_bail) => {
159 | await localDBFile.save({ name: arg.DB.name, types: [] });
160 | },
161 | {
162 | retries: 3,
163 | minTimeout: DB_EXISTS_CHECK
164 | }
165 | );
166 |
167 | return localDB;
168 | }
169 |
170 | async del(arg: { user: User, DB: DBArg }): Promise {
171 | assert.ok(arg.DB.ID !== ONE && arg.DB.name !== ADMIN_NAME);
172 |
173 | const localDB = await this.get(arg);
174 | ForbiddenError
175 | .from(defineAbilityForDB(arg.user))
176 | .throwUnlessCan(DBOp.Admin, localDB);
177 |
178 | // Delete the DB entry.
179 | await localDB.delete();
180 |
181 | // Initialize the DB file object.
182 | const localDBFile = new DBFile({
183 | name: localDB.name,
184 | types: []
185 | });
186 |
187 | // Retry deleting the file until it succeeds.
188 | await retry(
189 | async (_bail) => {
190 | await localDBFile.delete();
191 | },
192 | {
193 | retries: 3,
194 | minTimeout: DB_EXISTS_CHECK
195 | }
196 | );
197 | }
198 |
199 | [Symbol.asyncDispose](): Promise {
200 | return this.adminDBFile[Symbol.asyncDispose]();
201 | }
202 | }
--------------------------------------------------------------------------------
/src/services/DBFile.ts:
--------------------------------------------------------------------------------
1 | import { DataSource } from "typeorm";
2 |
3 | import { AdminDB, DBBase } from "../objects/DB";
4 | import { DBFile as DBFileObject } from "../objects/DBFile";
5 | import { CONNECTION_EXPIRY, DB_EXISTS_CHECK } from "../utils/constants";
6 |
// One cached connection: the open DB file handle, its owning DB object,
// and the timestamp of the most recent query (used for idle expiry).
class DBFileEntry {
  // Epoch milliseconds of the last query routed through this entry.
  lastQuery: number;
  DBFile: DBFileObject;
  DB: DBBase;
}
12 |
13 | export class DBFile implements Disposable {
14 | store: Map;
15 |
16 | private adminDB: AdminDB;
17 |
18 | private logoutInterval: NodeJS.Timeout;
19 | private deleteInterval: NodeJS.Timeout;
20 |
21 | private DBs: Set;
22 |
23 | constructor(adminDB: AdminDB) {
24 | this.adminDB = adminDB;
25 |
26 | this.store = new Map();
27 | this.logoutInterval = setInterval(async () => {
28 | for (const key of this.store.keys()) {
29 | const entry = this.store.get(key);
30 |
31 | if (entry.lastQuery < Date.now() - CONNECTION_EXPIRY) { continue; }
32 |
33 | const { sessionID, name } = JSON.parse(key);
34 | await this.delete(sessionID, name);
35 | }
36 | }, CONNECTION_EXPIRY);
37 |
38 | this.DBs = new Set();
39 | this.deleteInterval = setInterval(async () => {
40 | const queriedDBs = (await this
41 | .adminDB
42 | .DB
43 | .query(`SELECT name FROM DB;`)
44 | ).map((queriedDB: { name: string }) => queriedDB.name);
45 |
46 | // Add unkown DBs to the known set.
47 | for (const DBName of queriedDBs) {
48 | this.DBs.add(DBName);
49 | }
50 |
51 | // Delete known DBs from the known store.
52 | const queriedDBsSet = new Set(queriedDBs);
53 | for (const key of this.store.keys()) {
54 | const { sessionID, name } = JSON.parse(key);
55 | if (queriedDBsSet.has(name)) { continue; }
56 |
57 | await this.delete(sessionID, name);
58 | }
59 | }, DB_EXISTS_CHECK);
60 | }
61 |
62 | async add(sessionID: string, DB: DBBase): Promise {
63 | const key = JSON.stringify({ sessionID, name: DB.name });
64 | const entry = this.store.get(key);
65 | if (entry) {
66 | entry.lastQuery = Date.now();
67 | return entry.DBFile.DB;
68 | }
69 |
70 | const DBFile = new DBFileObject({
71 | name: DB.name,
72 | types: [],
73 | });
74 | await DBFile.load();
75 |
76 | const newEntry: DBFileEntry = {
77 | lastQuery: Date.now(),
78 | DBFile,
79 | DB,
80 | };
81 |
82 | this.store.set(key, newEntry);
83 | return DBFile.DB;
84 | }
85 |
86 | get(sessionID: string, name: string): {
87 | DB: DBBase;
88 | connection: DataSource;
89 | } {
90 | const key = JSON.stringify({ sessionID, name });
91 | const entry = this.store.get(key);
92 | if (!entry) { return { DB: undefined, connection: undefined }; }
93 |
94 | entry.lastQuery = Date.now();
95 | return {
96 | DB: entry.DB,
97 | connection: entry.DBFile.DB,
98 | };
99 | }
100 |
101 | async delete(sessionID: string, name: string): Promise {
102 | const key = JSON.stringify({ sessionID, name });
103 | const entry = this.store.get(key);
104 | if (!entry) { return; }
105 |
106 | await entry.DBFile[Symbol.asyncDispose]();
107 | this.store.delete(key);
108 | }
109 |
110 | [Symbol.dispose](): void {
111 | clearInterval(this.logoutInterval);
112 | clearInterval(this.deleteInterval);
113 | }
114 | }
--------------------------------------------------------------------------------
/src/services/query.ts:
--------------------------------------------------------------------------------
1 | import FlatPromise from 'flat-promise';
2 | import { DataSource } from "typeorm";
3 |
4 | import { UserArg } from '../objects/user';
5 | import {
6 | QueryParse,
7 | READ_ONLY_Qs,
8 | TABLE_MODIFY_Qs
9 | } from "../parser/queryParse";
10 | import { ScriptParse } from "../parser/scriptParse";
11 | import { SIMPLE, VARS_TABLE } from "../utils/constants";
12 | import { QueryUtils } from "../utils/queries";
13 | import { Variable } from "../utils/variable";
14 | import { DBBase } from '../objects/DB';
15 | import { ForbiddenError } from '@casl/ability';
16 | import defineAbilityForDB from '../valid/DB';
17 | import { DBOp } from '../utils/DBOp';
18 |
19 | export class Query {
20 | static async add(
21 | user: UserArg,
22 | DB: DBBase,
23 | connection: DataSource,
24 | query: string,
25 | params: any[]
26 | ): Promise {
27 | // Throw an error right away if the user doesn't have the read persmission.
28 | ForbiddenError
29 | .from(defineAbilityForDB(user))
30 | .throwUnlessCan(DBOp.Read, DB);
31 |
32 | // Parse the script.
33 | const script = new ScriptParse({
34 | script: query,
35 | params: params
36 | });
37 |
38 | // Throw an error if the user is writing and doesn't have write persmission.
39 | if (!script.isReadOnly) {
40 | ForbiddenError
41 | .from(defineAbilityForDB(user))
42 | .throwUnlessCan(DBOp.Write, DB);
43 | }
44 |
45 | // Get the raw connection.
46 | const queryRunner = connection.createQueryRunner();
47 | const connectionRaw = await queryRunner.connect();
48 |
49 | // Add the extra queries.
50 | await Query.addExtraQueries(connection, script);
51 |
52 | const result: any[] = [];
53 |
54 | // Execute the queries.
55 | for (const query of script.queries) {
56 | // Execute the queries.
57 | const resultPromise = new FlatPromise();
58 | await connectionRaw
59 | .all(
60 | query.query,
61 | query.params,
62 | (err: any, rows: any) => {
63 | if (err) {
64 | resultPromise.reject(err);
65 | return;
66 | }
67 |
68 | resultPromise.resolve(rows);
69 | }
70 | );
71 | const resultRows: any[] = await resultPromise.promise;
72 |
73 | // Add the result to the array.
74 | if (!resultRows.length) { continue; }
75 |
76 | result.push(resultRows);
77 | }
78 |
79 | return result;
80 | }
81 |
82 | private static async addExtraQueries(
83 | DB: DataSource,
84 | script: ScriptParse
85 | ): Promise {
86 | const queries: QueryParse[] = [];
87 |
88 | // Add the queries to the array.
89 | for (const query of script.queries) {
90 | queries.push(query);
91 |
92 | // Increment the DB version for every write query.
93 | if (!SIMPLE && !READ_ONLY_Qs.has(query.type)) {
94 | queries.push(new QueryParse({
95 | query: `UPDATE ${VARS_TABLE} SET value = (SELECT value FROM ${VARS_TABLE} WHERE name = "${Variable.version}") + 1 WHERE name = "${Variable.version}";`,
96 | params: [],
97 | }));
98 | }
99 |
100 | if (SIMPLE || !TABLE_MODIFY_Qs.has(query.type)) { continue; }
101 |
102 | // Get the table modification queries.
103 | const tableQueries: QueryParse[] = await QueryUtils.getTableModify(
104 | DB,
105 | query
106 | );
107 |
108 | // Add the table modification queries.
109 | queries.push(...tableQueries);
110 | }
111 |
112 | // Add the queries to the script.
113 | script.queries = queries;
114 | }
115 | }
--------------------------------------------------------------------------------
/src/services/user.ts:
--------------------------------------------------------------------------------
1 | import { ForbiddenError } from "@casl/ability";
2 |
3 | import { User as UserEntity } from "../entities/user";
4 | import { AdminDB } from "../objects/DB";
5 | import { UserArg, User as UserObject } from "../objects/user";
6 | import { CRUD } from "../utils/CRUD";
7 | import { DBOp } from "../utils/DBOp";
8 | import defineAbilityForDB from "../valid/DB";
9 | import defineAbilityForUser from "../valid/user";
10 | import assert from "assert";
11 | import { ADMIN_NAME, ONE } from "../utils/constants";
12 |
13 | export class User {
14 | adminDB: AdminDB;
15 |
16 | constructor(adminDB: AdminDB) {
17 | this.adminDB = adminDB;
18 | }
19 |
20 | async add(arg: {session: UserObject, arg: UserArg}): Promise {
21 | ForbiddenError
22 | .from(defineAbilityForDB(arg.session))
23 | .throwUnlessCan(DBOp.Admin, this.adminDB);
24 |
25 | const user = new UserObject({ DB: this.adminDB.DB });
26 | await user.save(arg.arg);
27 |
28 | return user;
29 | }
30 |
31 | async get(arg: {session: UserObject, arg: UserArg}): Promise {
32 | // TODO: make redundant.
33 | assert.ok(arg.arg.ID || arg.arg.name);
34 |
35 | const user = new UserObject({ DB: this.adminDB.DB, ...arg.arg });
36 | await user.load();
37 |
38 | const isAdmin = await this.isAdmin(arg.session);
39 | ForbiddenError
40 | .from(defineAbilityForUser(arg.session, isAdmin))
41 | .throwUnlessCan(CRUD.Read, user);
42 |
43 | return user;
44 | }
45 |
46 | async getAll(sessionUser: UserObject): Promise {
47 | await this.adminDB.load();
48 |
49 | ForbiddenError
50 | .from(defineAbilityForDB(sessionUser))
51 | .throwUnlessCan(DBOp.Admin, this.adminDB);
52 |
53 | const users: UserObject[] = [];
54 | const entities = await this.adminDB.DB.manager.find(UserEntity);
55 | for (const entity of entities) {
56 | const user = new UserObject({ DB: this.adminDB.DB, ...entity });
57 | users.push(user);
58 | }
59 |
60 | return users;
61 | }
62 |
63 | async set(
64 | arg: { session: UserObject, arg: UserArg }
65 | ): Promise {
66 | const isAdmin = await this.isAdmin(arg.session);
67 |
68 | const user = new UserObject({ DB: this.adminDB.DB, ID: arg.arg.ID });
69 | await user.load();
70 |
71 | ForbiddenError
72 | .from(defineAbilityForUser(arg.session, isAdmin))
73 | .throwUnlessCan(CRUD.Update, user);
74 |
75 | await user.save(arg.arg);
76 |
77 | return user;
78 | }
79 |
80 | async del(
81 | arg: { session: UserObject, arg: UserArg }
82 | ): Promise {
83 | // TODO: make redundant.
84 | assert.ok(arg.arg.ID || arg.arg.name);
85 | assert.ok(arg.arg.ID !== ONE && arg.arg.name !== ADMIN_NAME);
86 |
87 | const user = new UserObject({ DB: this.adminDB.DB, ...arg.arg });
88 | await user.load();
89 |
90 | const isAdmin = await this.isAdmin(arg.session);
91 | ForbiddenError
92 | .from(defineAbilityForUser(arg.session, isAdmin))
93 | .throwUnlessCan(CRUD.Delete, user);
94 |
95 | await user.delete();
96 | }
97 |
98 | private async isAdmin(user: UserObject): Promise {
99 | await this.adminDB.load();
100 | return defineAbilityForDB(user).can(DBOp.Admin, this.adminDB);
101 | }
102 | }
--------------------------------------------------------------------------------
/src/utils/CRUD.ts:
--------------------------------------------------------------------------------
// Generic create/read/update/delete actions used with the CASL ability
// checks on user objects (see usage in services/user.ts).
export enum CRUD {
  Create = 'create',
  Read = 'read',
  Update = 'update',
  Delete = 'delete',
}
--------------------------------------------------------------------------------
/src/utils/DB.ts:
--------------------------------------------------------------------------------
1 | import sqlite3 from 'sqlite3';
2 | import { DataSource } from 'typeorm';
3 |
4 | import { DB_DRIVER, Target } from './constants';
5 |
6 | export function getDBConnection(
7 | name: string,
8 | target: Target,
9 | entities: any[] = []
10 | ): DataSource {
11 | switch (target) {
12 | case Target.DB:
13 | return new DataSource({
14 | type: DB_DRIVER,
15 | database: name,
16 | cache: true,
17 | synchronize: true, // TODO: should this be disabled?
18 | logging: false,
19 | entities: entities,
20 | migrations: [],
21 | subscribers: [],
22 | });
23 | case Target.mem:
24 | return new DataSource({
25 | type: DB_DRIVER,
26 | database: `file:${name}?mode=memory`,
27 | flags:
28 | sqlite3.OPEN_URI |
29 | sqlite3.OPEN_SHAREDCACHE |
30 | sqlite3.OPEN_READWRITE |
31 | sqlite3.OPEN_CREATE,
32 | cache: true,
33 | synchronize: true, // TODO: should this be disabled?
34 | logging: false,
35 | entities: entities,
36 | migrations: [],
37 | subscribers: [],
38 | });
39 | default:
40 | throw new Error(`Invalid target: ${target}`);
41 | }
42 | };
--------------------------------------------------------------------------------
/src/utils/DBOp.ts:
--------------------------------------------------------------------------------
// Database-level actions used with the CASL ability checks on DB objects
// (see usage in services/DB.ts and services/query.ts).
export enum DBOp {
  Admin = 'admin',
  Read = 'read',
  Write = 'write',
}
--------------------------------------------------------------------------------
/src/utils/constants.ts:
--------------------------------------------------------------------------------
1 | export const ZERO = 0;
2 | export const ONE = 1;
3 |
4 | export const RESULT_PREFIX = 'STARK_RESULT_';
5 |
6 | /* #region Parser constants. */
7 | export const NEWLINE = '\n';
8 | export const PARAMETER_TOKEN = '?';
9 | export const STATEMENT_DELIMITER = ';';
10 | export const VALUE_DELIMITER = ',';
11 | /* #endregion */
12 |
13 | /* #region Schema constants. */
14 | export const DB_DRIVER = 'sqlite';
15 | export const ADMIN_NAME = 'admin';
16 | export const DB_IDENTIFIER = 1663328354;
17 | export const DB_IDENTIFIER_ADMIN = 1663328355;
18 |
19 | export const COMMITS_TABLE = '_stark_commits';
20 | export const TABLES_TABLE = '_stark_tables';
21 | export const VARS_TABLE = '_stark_vars';
22 |
23 | export enum Target { DB = 'DB', mem = 'mem' }
24 | /* #endregion */
25 |
26 | /* #region Constants that can be defined in the environment */
27 | export const DATA_DIR = process.env.STARK_DB_DATA_DIR || "./data";
28 | export const CERT_DIR = process.env.STARK_DB_CERTS_DIR || "./certs";
29 |
30 | export const HTTP_PORT = process.env.STARK_DB_HTTP_PORT || 5983;
31 | export const HTTPS_PORT = process.env.STARK_DB_HTTPS_PORT || 5984;
32 | export const SECURE_COOKIE = process.env.STARK_DB_COOKIE === "true";
33 | export const HTTP_LISTEN_ADDRESS =
34 | process.env.STARK_DB_HTTP_LISTEN_ADDRESS || "127.0.0.1";
35 | export const DOCUMENTATION_ADDRESS =
36 | process.env.STARK_DB_DOCUMENTATION_ADDRESS || "https://127.0.0.1";
37 | export const SIMPLE = process.env.STARK_DB_SIMPLE === "true";
38 | /* #endregion */
39 |
40 | // prune expired sessions every 24h
41 | export const SESSION_EXPIRY =
42 | parseInt(process.env.STARK_DB_SESSION_EXPIRY) || 1e3 * 60 * 60 * 24;
43 | export const CONNECTION_EXPIRY =
44 | parseInt(process.env.STARK_DB_CONNECTION_EXPIRY) || 1e3 * 60 * 60;
45 | export const DB_EXISTS_CHECK = 5e3;
46 |
47 |
--------------------------------------------------------------------------------
/src/utils/entities.ts:
--------------------------------------------------------------------------------
// Names of the object/entity kinds used across the app.
export enum Entities {
  AdminDB = 'AdminDB',
  DB = 'DB',

  User = 'User',

  Variable = 'Variable',
}
--------------------------------------------------------------------------------
/src/utils/lazyLoader.ts:
--------------------------------------------------------------------------------
1 | import StateMachine from 'javascript-state-machine';
2 |
// Ordered lifecycle states of LazyLoader; the numeric values allow the
// ordered `<` comparisons in step()/stepAsync().
enum LoadState {
  init = 0,
  load = 1,
  save = 2,
}
8 |
9 | export class LazyLoader {
10 | private machine?: {
11 | toJSON: () => string;
12 | is: (arg0: string) => boolean;
13 | state: string;
14 | step: () => void | Promise;
15 | };
16 |
17 | /**
18 | * Inits stateful with its FSM.
19 | * @param load @type {Function}
20 | */
21 | constructor(
22 | load?: () => any,
23 | save?: () => any
24 | ) {
25 | // @ts-ignore
26 | this.machine = new StateMachine({
27 | init: 'init',
28 | transitions: [
29 | { name: 'step', from: LoadState[0], to: LoadState[1] },
30 | { name: 'step', from: LoadState[1], to: LoadState[2] },
31 | { name: 'step', from: LoadState[2], to: LoadState[2] }
32 | ],
33 | methods: {
34 | onLoad: function () {
35 | if (load != undefined) {
36 | return load();
37 | }
38 | },
39 | onSave: function () {
40 | if (save != undefined) {
41 | return save();
42 | }
43 | }
44 | }
45 | });
46 |
47 | // Hide this large object in the JSON representation.
48 | this.machine.toJSON = () => '[StateMachine]';
49 | }
50 |
51 | load(): void {
52 | this.step(LoadState.load);
53 | }
54 |
55 | async loadAsync(): Promise {
56 | return await this.stepAsync(LoadState.load);
57 | }
58 |
59 | save(): void {
60 | this.step(LoadState.save);
61 | }
62 |
63 | async saveAsync(): Promise {
64 | return await this.stepAsync(LoadState.save);
65 | }
66 |
67 | /**
68 | * Readies the stateful to the required step.
69 | */
70 | private step(target: LoadState): void {
71 | // @ts-ignore
72 | while (LoadState[this.machine.state] < target) {
73 | this.machine.step();
74 | }
75 | }
76 |
77 | /**
78 | * Readies the stateful to the required step if the handlers are async.
79 | */
80 | private async stepAsync(target: LoadState): Promise {
81 | // @ts-ignore
82 | while (LoadState[this.machine.state] < target) {
83 | await this.machine.step();
84 | }
85 | }
86 | }
--------------------------------------------------------------------------------
/src/utils/lazyValidator.ts:
--------------------------------------------------------------------------------
1 | import StateMachine from 'javascript-state-machine';
2 |
// Ordered lifecycle states of LazyValidator; the numeric values allow the
// ordered `<` comparisons in step()/stepAsync().
enum ValidState {
  init = 0,
  valid = 1,
  ready = 2,
}
8 |
9 | export class LazyValidator {
10 | private machine: {
11 | toJSON: () => string;
12 | is: (arg0: string) => boolean;
13 | state: string;
14 | step: () => void | Promise;
15 | };
16 |
17 | /**
18 | * Inits stateful with its FSM.
19 | * @param validate @type {Function}
20 | */
21 | constructor(
22 | validate: () => void,
23 | ready?: () => any
24 | ) {
25 | // @ts-ignore
26 | this.machine = new StateMachine({
27 | init: 'init',
28 | transitions: [
29 | { name: 'step', from: ValidState[0], to: ValidState[1] },
30 | { name: 'step', from: ValidState[1], to: ValidState[2] },
31 | { name: 'step', from: ValidState[2], to: ValidState[2] }
32 | ],
33 | methods: {
34 | onValid: function () {
35 | validate();
36 | },
37 | onReady: function () {
38 | if (ready != undefined) {
39 | return ready();
40 | }
41 | }
42 | }
43 | });
44 |
45 | // Hide this large object in the JSON representation.
46 | this.machine.toJSON = () => '[StateMachine]';
47 | }
48 |
49 | valid(): void {
50 | this.step(ValidState.valid);
51 | }
52 |
53 | ready(): void {
54 | this.step(ValidState.ready);
55 | }
56 |
57 | async readyAsync(): Promise {
58 | return await this.stepAsync(ValidState.ready);
59 | }
60 |
61 | /**
62 | * Readies the stateful to the required step.
63 | */
64 | private step(target: ValidState): void {
65 | // @ts-ignore
66 | while (ValidState[this.machine.state] < target) {
67 | this.machine.step(); // eslint-disable-line
68 | }
69 | }
70 |
71 | /**
72 | * Readies the stateful to the required step if the ready function is async.
73 | */
74 | private async stepAsync(target: ValidState): Promise {
75 | // @ts-ignore
76 | while (ValidState[this.machine.state] < target) {
77 | await this.machine.step();
78 | }
79 | }
80 | }
--------------------------------------------------------------------------------
/src/utils/method.ts:
--------------------------------------------------------------------------------
// CRUD-style method names, used to build trigger names (see utils/names.ts).
export enum Method {
  add = 'add',
  del = 'del',
  get = 'get',
  set = 'set',
}
--------------------------------------------------------------------------------
/src/utils/names.ts:
--------------------------------------------------------------------------------
1 | import { Method } from "./method";
2 |
// Prefixes for the generated shadow tables, triggers, and worker channels.
const DEL_TABLE_PREFIX = '_stark_del';
const TRIGGER_PREFIX = '_stark_trigger';

const WORKER_CHANNEL = 'stark-worker';
7 |
8 | export class Names {
9 | static VERSION_COLUMN = 'stark_version';
10 |
11 | /* #region Channels. */
12 | static getWorkerMemTablesReset(name: string): string {
13 | return `${WORKER_CHANNEL}-${name}-reset`;
14 | }
15 | /* #endregion */
16 |
17 | /* #region SQL schema. */
18 | static getDelTable(name: string): string {
19 | return `${DEL_TABLE_PREFIX}_${name}`;
20 | }
21 |
22 | static getTrigger(name: string, method: Method): string {
23 | return `${TRIGGER_PREFIX}_${method}_${name}`;
24 | }
25 | /* #endregion */
26 | }
--------------------------------------------------------------------------------
/src/utils/password.ts:
--------------------------------------------------------------------------------
1 | import { randomBytes, pbkdf2Sync } from "node:crypto";
2 |
3 | export class Password {
4 | static getSalt(): string {
5 | return randomBytes(16).toString('hex');
6 | }
7 |
8 | static hash(password: string, salt: string): string {
9 | return pbkdf2Sync(password, salt, 1000, 64, `sha512`).toString(`hex`);
10 | }
11 | }
--------------------------------------------------------------------------------
/src/utils/queries.ts:
--------------------------------------------------------------------------------
1 | import { DataSource } from 'typeorm';
2 | import { ParseType, QueryParse } from '../parser/queryParse';
3 | import {
4 | ONE,
5 | VARS_TABLE,
6 | ZERO,
7 | } from './constants';
8 | import { Method } from './method';
9 | import { Names } from './names';
10 | import { Variable } from './variable';
11 |
12 | export const COMMIT_START: string = `BEGIN IMMEDIATE TRANSACTION;`;
13 | export const COMMIT_CANCEL: string = `ROLLBACK TRANSACTION;`;
14 | export const COMMIT_END: string = `COMMIT TRANSACTION;`;
15 |
16 | export class QueryUtils {
17 | static async getTableModify(
18 | DB: DataSource,
19 | statement: QueryParse
20 | ): Promise {
21 | const results: QueryParse[] = [];
22 |
23 | let oldTableName: string;
24 | let newTableName: string;
25 |
26 | let triggerAddName: string;
27 | let triggerSetName: string;
28 | let triggerDelName: string;
29 |
30 | let delTable: string;
31 |
32 | switch (statement.type) {
33 | case ParseType.create_table:
34 | newTableName = statement.tablesWrite[ZERO];
35 |
36 | /* #region Create the diffs column and del table. */
37 | if (!statement.columns.includes(Names.VERSION_COLUMN)) {
38 | results.push(new QueryParse({
39 | query: `ALTER TABLE ${newTableName} ADD COLUMN ${Names.VERSION_COLUMN} INTEGER NOT NULL DEFAULT 0;`,
40 | params: []
41 | }));
42 | }
43 |
44 | // Delete the del table if it exists.
45 | delTable = Names.getDelTable(newTableName);
46 | results.push(this.tableDel(delTable));
47 |
48 | // Create the del table.
49 | results.push(this.delTableAdd(delTable));
50 | /* #endregion */
51 |
52 | /* #region Create the triggers. */
53 | // Remove the triggers if they exist.
54 | triggerAddName = Names.getTrigger(newTableName, Method.add);
55 | triggerSetName = Names.getTrigger(newTableName, Method.set);
56 | triggerDelName = Names.getTrigger(newTableName, Method.del);
57 | results.push(this.triggerDel(triggerAddName));
58 | results.push(this.triggerDel(triggerSetName));
59 | results.push(this.triggerDel(triggerDelName));
60 |
61 | const triggerAddQuery = this.triggerAdd(
62 | Method.add,
63 | triggerAddName,
64 | newTableName,
65 | );
66 | const triggerSetQuery: QueryParse = this.triggerAdd(
67 | Method.set,
68 | triggerSetName,
69 | newTableName,
70 | );
71 | const triggerDelQuery: QueryParse = this.triggerAddDel(
72 | Method.del,
73 | triggerDelName,
74 | newTableName,
75 | delTable
76 | );
77 |
78 | // Add the triggers.
79 | results.push(triggerAddQuery);
80 | results.push(triggerSetQuery);
81 | results.push(triggerDelQuery);
82 | /* #endregion */
83 | break;
84 |
85 | case ParseType.rename_table:
86 | oldTableName = statement.tablesWrite[ZERO];
87 |
88 | // Delete the triggers.
89 | triggerAddName = Names.getTrigger(oldTableName, Method.add);
90 | triggerSetName = Names.getTrigger(oldTableName, Method.set);
91 | triggerDelName = Names.getTrigger(oldTableName, Method.del);
92 | results.push(QueryUtils.triggerDel(triggerAddName));
93 | results.push(QueryUtils.triggerDel(triggerSetName));
94 | results.push(QueryUtils.triggerDel(triggerDelName));
95 |
96 | // Delete the del tables
97 | delTable = Names.getDelTable(oldTableName);
98 | results.push(this.tableDel(delTable));
99 |
100 | // Update the table name.
101 | newTableName = statement.tablesWrite[ONE];
102 |
103 | /* #region Re-create the table triggers. */
104 | const tableCreateStatement =
105 | await QueryUtils.tableAdd(DB, oldTableName, newTableName);
106 | const newTableQueries =
107 | await QueryUtils.getTableModify(DB, tableCreateStatement);
108 | results.push(...newTableQueries);
109 | /* #endregion */
110 | break;
111 |
112 | case ParseType.drop_table:
113 | oldTableName = statement.tablesWrite[ZERO];
114 |
115 | // Triggers are automatically deleted when the table is deleted.
116 | // Delete the diff tables.
117 | delTable = Names.getDelTable(oldTableName);
118 | results.push(this.tableDel(delTable));
119 | break;
120 |
121 | default:
122 | break;
123 | }
124 |
125 | return results;
126 | }
127 |
128 | /* #region Del tables. */
129 | private static delTableAdd(
130 | delTableName: string,
131 | ): QueryParse {
132 | const query = `CREATE TABLE IF NOT EXISTS ${delTableName} (
133 | id INTEGER PRIMARY KEY,
134 | ${Names.VERSION_COLUMN} INTEGER NOT NULL
135 | );`;
136 |
137 | return new QueryParse({
138 | query,
139 | params: []
140 | });
141 | }
142 | /* #endregion */
143 |
144 | /* #region Tables. */
145 | private static async tableAdd(
146 | DB: DataSource,
147 | oldTableName: string,
148 | newTableName: string
149 | ): Promise {
150 | let tableCreateQuery = `${await QueryUtils.tableGet(DB, oldTableName)};`;
151 | const replaceNameRegEx = new RegExp(oldTableName, `gi`);
152 | tableCreateQuery =
153 | tableCreateQuery.replace(replaceNameRegEx, newTableName);
154 | const tableCreateStatement = new QueryParse({
155 | query: tableCreateQuery,
156 | params: []
157 | });
158 | return tableCreateStatement;
159 | }
160 |
161 | private static tableDel(name: string): QueryParse {
162 | return new QueryParse({
163 | query: `DROP TABLE IF EXISTS ${name};`,
164 | params: []
165 | });
166 | }
167 |
168 | private static async tableGet(
169 | DB: DataSource,
170 | name: string
171 | ): Promise {
172 | // Get the create table query from the SQLite master table.
173 | return (await DB.query(
174 | `SELECT sql FROM sqlite_master WHERE name = ?;`,
175 | [name]
176 | ))?.[ZERO]?.sql;
177 | }
178 | /* #endregion */
179 |
180 | /* #region Triggers. */
181 | private static triggerAdd(
182 | method: Method,
183 | name: string,
184 | table: string
185 | ): QueryParse {
186 | let op: string;
187 | switch (method) {
188 | case Method.add: op = `INSERT`; break;
189 | case Method.set: op = `UPDATE`; break;
190 | default: break;
191 | }
192 |
193 | const query = `CREATE TRIGGER
194 | IF NOT EXISTS ${name}
195 | AFTER ${op}
196 | ON ${table}
197 | BEGIN
198 | UPDATE ${table} SET ${Names.VERSION_COLUMN} = (SELECT value FROM ${VARS_TABLE} WHERE name = "${Variable.version}") WHERE ROWID = NEW.ROWID;
199 | END;`;
200 |
201 | return new QueryParse({
202 | query,
203 | params: []
204 | });
205 | }
206 |
207 | private static triggerAddDel(
208 | method: Method,
209 | name: string,
210 | table: string,
211 | delName: string,
212 | ): QueryParse {
213 | const entity = "OLD";
214 |
215 | let op: string;
216 | switch (method) {
217 | case Method.del: op = `DELETE`; break;
218 | default: break;
219 | }
220 |
221 | const query = `CREATE TRIGGER
222 | IF NOT EXISTS ${name}
223 | AFTER ${op}
224 | ON ${table}
225 | BEGIN
226 | INSERT OR REPLACE INTO ${delName}
227 | VALUES (OLD.id, (SELECT value FROM ${VARS_TABLE} where name = "${Variable.version}"));
228 | END;`;
229 |
230 | return new QueryParse({
231 | query,
232 | params: []
233 | });
234 | }
235 |
236 | private static triggerDel(name: string): QueryParse {
237 | return new QueryParse({
238 | query: `DROP TRIGGER IF EXISTS ${name};`,
239 | params: []
240 | });
241 | }
242 | /* #endregion */
243 | }
--------------------------------------------------------------------------------
/src/utils/swagger.json:
--------------------------------------------------------------------------------
1 | {
2 | "openapi": "3.0.0",
3 | "servers": [
4 | {
5 | "url": "https://127.0.0.1:5984",
6 | "description": "Local documentation"
7 | }
8 | ],
9 | "info": {
10 | "description": "The Stark DB Server.",
11 | "version": "1.3.4",
12 | "title": "Stark DB API",
13 | "termsOfService": "",
14 | "contact": {
15 | "email": "adrian.burlacu@live.com",
16 | "name": "Adrian Burlacu",
17 | "url": "https://github.com/WeWatchWall/stark-db"
18 | },
19 | "license": {
20 | "name": "MIT",
21 | "url": ""
22 | }
23 | },
24 | "tags": [
25 | {
26 | "name": "Admin",
27 | "description": "The server administrator."
28 | },
29 | {
30 | "name": "DB Admin",
31 | "description": "The database administrator. The server administrator automatically gets this role."
32 | },
33 | {
34 | "name": "User",
35 | "description": "The database user. The administrators automatically get this role."
36 | }
37 | ],
38 | "paths": {
39 | "/{DB}/login": {
40 | "summary": "Database login",
41 | "description": "Login to the given database.",
42 | "post": {
43 | "summary": "DB Login",
44 | "description": "Logs into a database.",
45 | "parameters": [
46 | {
47 | "name": "DB",
48 | "in": "path",
49 | "required": true,
50 | "schema": {
51 | "type": "string",
52 | "default": "admin",
53 | "enum": [
54 | "admin",
55 | "database1",
56 | "database2",
57 | "database3"
58 | ]
59 | },
60 | "description": "The database."
61 | },
62 | {
63 | "name": "pid",
64 | "in": "query",
65 | "required": true,
66 | "schema": {
67 | "type": "string",
68 | "default": "111",
69 | "enum": [
70 | "111",
71 | "222",
72 | "333",
73 | "aaa",
74 | "bbb",
75 | "ccc"
76 | ]
77 | },
78 | "description": "The process ID. This is generated by the client."
79 | }
80 | ],
81 | "operationId": "Login",
82 | "responses": {
83 | "200": {
            "description": "The login was successful."
85 | },
86 | "401": {
87 | "description": "The login failed for the credentials."
88 | },
89 | "403": {
90 | "description": "The login failed for the DB."
91 | }
92 | },
93 | "tags": [
94 | "User"
95 | ],
96 | "requestBody": {
97 | "$ref": "#/components/requestBodies/LoginBody"
98 | }
99 | }
100 | },
101 | "/logout": {
102 | "summary": "User logout",
103 | "description": "Logout of the current user.",
104 | "post": {
105 | "summary": "User logout",
106 | "description": "Logs out of a user.",
107 | "parameters": [
108 | {
109 | "name": "pid",
110 | "in": "query",
111 | "required": true,
112 | "schema": {
113 | "type": "string",
114 | "default": "111",
115 | "enum": [
116 | "111",
117 | "222",
118 | "333",
119 | "aaa",
120 | "bbb",
121 | "ccc"
122 | ]
123 | },
124 | "description": "The process ID. This is generated by the client."
125 | }
126 | ],
127 | "operationId": "Logout",
128 | "responses": {
129 | "200": {
130 | "description": "The logout was successful."
131 | }
132 | },
133 | "tags": [
134 | "User"
135 | ]
136 | }
137 | },
138 | "/users": {
139 | "summary": "The users",
140 | "description": "Manage the users of the system.",
141 | "get": {
142 | "summary": "Get users",
143 | "description": "Get the users in the system. Regular users can only get their own user. Admins can get all the users.",
144 | "parameters": [
145 | {
146 | "name": "pid",
147 | "in": "query",
148 | "required": true,
149 | "schema": {
150 | "type": "string",
151 | "default": "111",
152 | "enum": [
153 | "111",
154 | "222",
155 | "333",
156 | "aaa",
157 | "bbb",
158 | "ccc"
159 | ]
160 | },
161 | "description": "The process ID. This is generated by the client."
162 | },
163 | {
164 | "name": "ID",
165 | "in": "query",
166 | "required": false,
167 | "schema": {
168 | "type": "string",
169 | "enum": [
170 | "1",
171 | "2",
172 | "3"
173 | ]
174 | },
175 | "description": "The user ID.",
176 | "allowEmptyValue": false
177 | },
178 | {
179 | "name": "name",
180 | "in": "query",
181 | "required": false,
182 | "schema": {
183 | "type": "string",
184 | "enum": [
185 | "admin",
186 | "username1",
187 | "username2"
188 | ]
189 | },
190 | "description": "The username."
191 | }
192 | ],
193 | "operationId": "Get users",
194 | "responses": {
195 | "200": {
196 | "description": "The requested user(s)."
197 | },
198 | "401": {
199 | "description": "The user is not logged in."
200 | },
201 | "403": {
202 | "description": "The logged in user either doesn't have permission to view the user or the user wasn't found."
203 | }
204 | },
205 | "tags": [
206 | "User"
207 | ]
208 | },
209 | "post": {
210 | "summary": "Add user",
211 | "description": "Add a user. Only Admins can do this.",
212 | "parameters": [
213 | {
214 | "name": "pid",
215 | "in": "query",
216 | "required": true,
217 | "schema": {
218 | "type": "string",
219 | "default": "111",
220 | "enum": [
221 | "111",
222 | "222",
223 | "333",
224 | "aaa",
225 | "bbb",
226 | "ccc"
227 | ]
228 | },
229 | "description": "The process ID. This is generated by the client."
230 | }
231 | ],
232 | "requestBody": {
233 | "$ref": "#/components/requestBodies/AddUserBody"
234 | },
235 | "operationId": "Add user",
236 | "responses": {
237 | "200": {
238 | "description": "The user was added."
239 | },
240 | "401": {
241 | "description": "The user is not logged in."
242 | },
243 | "403": {
244 | "description": "The logged in user doesn't have permission to add the user or the user entity had an error."
245 | }
246 | },
247 | "tags": [
248 | "Admin"
249 | ]
250 | },
251 | "put": {
252 | "summary": "Set user",
253 | "description": "Set a user. Regular users can only set their own user. Admins can set any user.",
254 | "parameters": [
255 | {
256 | "name": "pid",
257 | "in": "query",
258 | "required": true,
259 | "schema": {
260 | "type": "string",
261 | "default": "111",
262 | "enum": [
263 | "111",
264 | "222",
265 | "333",
266 | "aaa",
267 | "bbb",
268 | "ccc"
269 | ]
270 | },
271 | "description": "The process ID. This is generated by the client."
272 | }
273 | ],
274 | "requestBody": {
275 | "$ref": "#/components/requestBodies/SetUserBody"
276 | },
277 | "operationId": "Set user",
278 | "responses": {
279 | "200": {
280 | "description": "The user was set."
281 | },
282 | "401": {
283 | "description": "The user is not logged in."
284 | },
285 | "403": {
286 | "description": "The logged in user either doesn't have permission to set the user or the user entity was not found."
287 | }
288 | },
289 | "tags": [
290 | "User"
291 | ]
292 | },
293 | "delete": {
294 | "summary": "Delete user",
295 | "description": "Delete a user. Regular users can only delete their own user. Admins can delete any user.",
296 | "parameters": [
297 | {
298 | "name": "pid",
299 | "in": "query",
300 | "required": true,
301 | "schema": {
302 | "type": "string",
303 | "default": "111",
304 | "enum": [
305 | "111",
306 | "222",
307 | "333",
308 | "aaa",
309 | "bbb",
310 | "ccc"
311 | ]
312 | },
313 | "description": "The process ID. This is generated by the client."
314 | },
315 | {
316 | "name": "ID",
317 | "in": "query",
318 | "required": false,
319 | "schema": {
320 | "type": "string",
321 | "enum": [
322 | "1",
323 | "2",
324 | "3"
325 | ]
326 | },
327 | "description": "The user ID"
328 | },
329 | {
330 | "name": "name",
331 | "in": "query",
332 | "required": false,
333 | "schema": {
334 | "type": "string",
335 | "enum": [
336 | "admin",
337 | "username1",
338 | "username2"
339 | ]
340 | },
341 | "description": "The username"
342 | }
343 | ],
344 | "operationId": "Delete user",
345 | "responses": {
346 | "200": {
347 | "description": "The user was deleted"
348 | },
349 | "401": {
350 | "description": "The user is not logged in."
351 | },
352 | "403": {
353 | "description": "The logged in user either doesn't have permission to delete the user or the user wasn't found."
354 | }
355 | },
356 | "tags": [
357 | "User"
358 | ]
359 | }
360 | },
361 | "/DBs": {
362 | "summary": "The DBs",
363 | "description": "Manage the DBs of the system.",
364 | "get": {
365 | "summary": "Get DBs",
366 | "description": "Get the DBs in the system. Regular users can only get their own DBs. Admins can get all the DBs.",
367 | "parameters": [
368 | {
369 | "name": "pid",
370 | "in": "query",
371 | "required": true,
372 | "schema": {
373 | "type": "string",
374 | "default": "111",
375 | "enum": [
376 | "111",
377 | "222",
378 | "333",
379 | "aaa",
380 | "bbb",
381 | "ccc"
382 | ]
383 | },
384 | "description": "The process ID. This is generated by the client."
385 | },
386 | {
387 | "name": "ID",
388 | "in": "query",
389 | "required": false,
390 | "schema": {
391 | "type": "string",
392 | "enum": [
393 | "1",
394 | "2",
395 | "3",
396 | "4"
397 | ]
398 | },
399 | "description": "The DB ID.",
400 | "allowEmptyValue": false
401 | },
402 | {
403 | "name": "name",
404 | "in": "query",
405 | "required": false,
406 | "schema": {
407 | "type": "string",
408 | "enum": [
409 | "admin",
410 | "database1",
411 | "database2",
412 | "database3"
413 | ]
414 | },
415 | "description": "The DB name."
416 | }
417 | ],
418 | "operationId": "Get DBs",
419 | "responses": {
420 | "200": {
421 | "description": "The requested DB(s)."
422 | },
423 | "401": {
424 | "description": "The user is not logged in."
425 | },
426 | "403": {
427 | "description": "The logged in user either doesn't have permission to get the DB or the DB wasn't found."
428 | }
429 | },
430 | "tags": [
431 | "User"
432 | ]
433 | },
434 | "post": {
435 | "summary": "Add DB",
436 | "description": "Add a DB. Only Admins can do this.",
437 | "parameters": [
438 | {
439 | "name": "pid",
440 | "in": "query",
441 | "required": true,
442 | "schema": {
443 | "type": "string",
444 | "default": "111",
445 | "enum": [
446 | "111",
447 | "222",
448 | "333",
449 | "aaa",
450 | "bbb",
451 | "ccc"
452 | ]
453 | },
454 | "description": "The process ID. This is generated by the client."
455 | }
456 | ],
457 | "requestBody": {
458 | "$ref": "#/components/requestBodies/AddDBBody"
459 | },
460 | "operationId": "Add DB",
461 | "responses": {
462 | "200": {
463 | "description": "The DB was added."
464 | }
465 | },
466 | "tags": [
467 | "Admin"
468 | ]
469 | },
470 | "put": {
471 | "summary": "Set DB",
472 | "description": "Set a DB. DB Admins can update their own DBs while Admins can update all DBs.",
473 | "parameters": [
474 | {
475 | "name": "pid",
476 | "in": "query",
477 | "required": true,
478 | "schema": {
479 | "type": "string",
480 | "default": "111",
481 | "enum": [
482 | "111",
483 | "222",
484 | "333",
485 | "aaa",
486 | "bbb",
487 | "ccc"
488 | ]
489 | },
490 | "description": "The process ID. This is generated by the client."
491 | }
492 | ],
493 | "requestBody": {
494 | "$ref": "#/components/requestBodies/SetDBBody"
495 | },
496 | "operationId": "Set DB",
497 | "responses": {
498 | "200": {
499 | "description": "The DB was set."
500 | }
501 | },
502 | "tags": [
503 | "DB Admin"
504 | ]
505 | },
506 | "delete": {
507 | "summary": "Delete DB",
508 | "description": "Delete a DB. DB Admins can delete their own DBs while Admins can delete any DB.",
509 | "parameters": [
510 | {
511 | "name": "pid",
512 | "in": "query",
513 | "required": true,
514 | "schema": {
515 | "type": "string",
516 | "default": "111",
517 | "enum": [
518 | "111",
519 | "222",
520 | "333",
521 | "aaa",
522 | "bbb",
523 | "ccc"
524 | ]
525 | },
526 | "description": "The process ID. This is generated by the client."
527 | },
528 | {
529 | "name": "ID",
530 | "in": "query",
531 | "required": false,
532 | "schema": {
533 | "type": "string",
534 | "enum": [
535 | "1",
536 | "2",
537 | "3",
538 | "4"
539 | ]
540 | },
541 | "description": "The DB ID"
542 | },
543 | {
544 | "name": "name",
545 | "in": "query",
546 | "required": false,
547 | "schema": {
548 | "type": "string",
549 | "enum": [
550 | "admin",
551 | "database1",
552 | "database2",
553 | "database3"
554 | ]
555 | },
556 | "description": "The DB name"
557 | }
558 | ],
559 | "operationId": "Delete DB",
560 | "responses": {
561 | "200": {
562 | "description": "The DB was deleted"
563 | }
564 | },
565 | "tags": [
566 | "DB Admin"
567 | ]
568 | }
569 | },
570 | "/{DB}/query": {
571 | "summary": "Database login",
572 | "description": "Login to the given database.",
573 | "post": {
574 | "summary": "DB Query",
575 | "description": "Queries a database.",
576 | "parameters": [
577 | {
578 | "name": "DB",
579 | "in": "path",
580 | "required": true,
581 | "schema": {
582 | "type": "string",
583 | "default": "admin",
584 | "enum": [
585 | "admin",
586 | "database1",
587 | "database2",
588 | "database3"
589 | ]
590 | },
591 | "description": "The database."
592 | },
593 | {
594 | "name": "pid",
595 | "in": "query",
596 | "required": true,
597 | "schema": {
598 | "type": "string",
599 | "default": "111",
600 | "enum": [
601 | "111",
602 | "222",
603 | "333",
604 | "aaa",
605 | "bbb",
606 | "ccc"
607 | ]
608 | },
609 | "description": "The process ID. This is generated by the client."
610 | }
611 | ],
612 | "operationId": "Query",
613 | "responses": {
614 | "200": {
615 | "description": "The successful query results."
616 | },
617 | "401": {
618 | "description": "The login failed for the credentials."
619 | },
620 | "403": {
621 | "description": "The login failed for the DB."
622 | },
623 | "500": {
624 | "description": "The server was not able to respond to the query."
625 | }
626 | },
627 | "tags": [
628 | "User"
629 | ],
630 | "requestBody": {
631 | "$ref": "#/components/requestBodies/QueryBody"
632 | }
633 | }
634 | }
635 | },
636 | "components": {
637 | "schemas": {},
638 | "requestBodies": {
639 | "LoginBody": {
640 | "description": "Login",
641 | "required": true,
642 | "content": {
643 | "application/json": {
644 | "schema": {
645 | "type": "object",
646 | "properties": {
647 | "username": {
648 | "type": "string",
649 | "description": "The username."
650 | },
651 | "password": {
652 | "type": "string",
653 | "description": "The password."
654 | }
655 | }
656 | },
657 | "examples": {
658 | "admin": {
659 | "value": {
660 | "username": "admin",
661 | "password": "admin"
662 | }
663 | },
664 | "user1": {
665 | "value": {
666 | "username": "username1",
667 | "password": "password1"
668 | }
669 | },
670 | "user2": {
671 | "value": {
672 | "username": "username2",
673 | "password": "password2"
674 | }
675 | }
676 | }
677 | }
678 | }
679 | },
680 | "AddUserBody": {
681 | "description": "Add a user.",
682 | "required": true,
683 | "content": {
684 | "application/json": {
685 | "schema": {
686 | "type": "object",
687 | "properties": {
688 | "name": {
689 | "type": "string",
690 | "description": "The username."
691 | },
692 | "password": {
693 | "type": "string",
694 | "description": "The password."
695 | },
696 | "salt": {
697 | "type": "string",
698 | "description": "The salt. Ignored."
699 | }
700 | }
701 | },
702 | "examples": {
703 | "admin": {
704 | "value": {
705 | "name": "admin",
706 | "password": "admin",
707 | "salt": ""
708 | }
709 | },
710 | "user1": {
711 | "value": {
712 | "name": "username1",
713 | "password": "password1",
714 | "salt": ""
715 | }
716 | },
717 | "user2": {
718 | "value": {
719 | "name": "username2",
720 | "password": "password2",
721 | "salt": ""
722 | }
723 | }
724 | }
725 | }
726 | }
727 | },
728 | "SetUserBody": {
729 | "description": "Set a user.",
730 | "required": true,
731 | "content": {
732 | "application/json": {
733 | "schema": {
734 | "type": "object",
735 | "properties": {
736 | "ID": {
737 | "type": "integer",
738 | "description": "The user ID."
739 | },
740 | "name": {
741 | "type": "string",
742 | "description": "The username."
743 | },
744 | "password": {
745 | "type": "string",
746 | "description": "The password."
747 | },
748 | "salt": {
749 | "type": "string",
750 | "description": "The salt. Ignored."
751 | }
752 | }
753 | },
754 | "examples": {
755 | "admin": {
756 | "value": {
757 | "ID": 1,
758 | "name": "admin",
759 | "password": "admin",
760 | "salt": ""
761 | }
762 | },
763 | "user1": {
764 | "value": {
765 | "ID": 2,
766 | "name": "username1",
767 | "password": "password1",
768 | "salt": ""
769 | }
770 | },
771 | "user2": {
772 | "value": {
773 | "ID": 3,
774 | "name": "username2",
775 | "password": "password2",
776 | "salt": ""
777 | }
778 | }
779 | }
780 | }
781 | }
782 | },
783 | "AddDBBody": {
784 | "description": "Add a DB.",
785 | "required": true,
786 | "content": {
787 | "application/json": {
788 | "schema": {
789 | "type": "object",
790 | "properties": {
791 | "name": {
792 | "type": "string",
793 | "description": "The username."
794 | },
795 | "admins": {
796 | "type": "array",
797 | "description": "The admins.",
798 | "items": {
799 | "type": "integer"
800 | }
801 | },
802 | "users": {
803 | "type": "array",
804 | "description": "The users.",
805 | "items": {
806 | "type": "integer"
807 | }
808 | }
809 | }
810 | },
811 | "examples": {
812 | "admin": {
813 | "value": {
814 | "name": "admin",
815 | "admins": [
816 | 1
817 | ],
818 | "users": []
819 | }
820 | },
821 | "database1": {
822 | "value": {
823 | "name": "database1",
824 | "admins": [
825 | 1,
826 | 2
827 | ],
828 | "readers": [],
829 | "writers": []
830 | }
831 | },
832 | "database2": {
833 | "value": {
834 | "name": "database2",
835 | "admins": [
836 | 1
837 | ],
838 | "readers": [3],
839 | "writers": [
840 | 2
841 | ]
842 | }
843 | },
844 | "database3": {
845 | "value": {
846 | "name": "database3",
847 | "admins": [
848 | 1
849 | ],
850 | "readers": [
851 | 2
852 | ],
853 | "writers": [3]
854 | }
855 | }
856 | }
857 | }
858 | }
859 | },
860 | "SetDBBody": {
861 | "description": "Set a DB.",
862 | "required": true,
863 | "content": {
864 | "application/json": {
865 | "schema": {
866 | "type": "object",
867 | "properties": {
868 | "ID": {
869 | "type": "integer",
870 | "description": "The DB ID."
871 | },
872 | "name": {
873 | "type": "string",
874 | "description": "The DB name."
875 | },
876 | "admins": {
877 | "type": "array",
878 | "description": "The admins.",
879 | "items": {
880 | "type": "integer"
881 | }
882 | },
883 | "users": {
884 | "type": "array",
885 | "description": "The users.",
886 | "items": {
887 | "type": "integer"
888 | }
889 | }
890 | }
891 | },
892 | "examples": {
893 | "admin": {
894 | "value": {
895 | "ID": 1,
896 | "name": "admin",
897 | "admins": [
898 | 1
899 | ],
900 | "readers": [],
901 | "writers": []
902 | }
903 | },
904 | "database1": {
905 | "value": {
906 | "ID": 2,
907 | "name": "database1",
908 | "admins": [
909 | 1,
910 | 3
911 | ],
912 | "readers": [
913 | 2
914 | ],
915 | "writers": []
916 | }
917 | },
918 | "database2": {
919 | "value": {
920 | "ID": 3,
921 | "name": "database2",
922 | "admins": [
923 | 1
924 | ],
925 | "readers": [
926 | 2,
927 | 3
928 | ],
929 | "writers": []
930 | }
931 | },
932 | "database3": {
933 | "value": {
934 | "ID": 4,
935 | "name": "database3",
936 | "admins": [
937 | 1
938 | ],
939 | "readers": [
940 | 2
941 | ],
942 | "writers": [
943 | 3
944 | ]
945 | }
946 | }
947 | }
948 | }
949 | }
950 | },
951 | "QueryBody": {
952 | "description": "Query",
953 | "required": true,
954 | "content": {
955 | "application/json": {
956 | "schema": {
957 | "type": "object",
958 | "properties": {
959 | "query": {
960 | "type": "string",
961 | "description": "The query."
962 | },
963 | "params": {
964 | "type": "array",
965 | "description": "The query parameters.",
966 | "items":{
967 | "type": "object"
968 | }
969 | }
970 | }
971 | },
972 | "examples": {
973 | "create table": {
974 | "value": {
975 | "query": "CREATE TABLE IF NOT EXISTS \"user\" (\"id\" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, \"firstName\" VARCHAR NOT NULL, \"lastName\" VARCHAR NOT NULL, \"age\" INTEGER NOT NULL);",
976 | "params": []
977 | }
978 | },
979 | "insert data": {
980 | "value": {
981 | "query": "INSERT OR REPLACE INTO user (firstName, lastName, age) VALUES (?, ?, ?);",
982 | "params": ["Timber", "Saw", 25]
983 | }
984 | },
985 | "select data": {
986 | "value": {
987 | "query": "SELECT * FROM user;",
988 | "params": []
989 | }
990 | }
991 | }
992 | }
993 | }
994 | }
995 | },
996 | "links": {},
997 | "callbacks": {}
998 | },
999 | "security": []
1000 | }
--------------------------------------------------------------------------------
/src/utils/variable.ts:
--------------------------------------------------------------------------------
1 | export enum Variable {
2 | version = 'version',
3 | }
--------------------------------------------------------------------------------
/src/valid/DB.ts:
--------------------------------------------------------------------------------
1 | import { defineAbility } from '@casl/ability';
2 | import { DBOp } from '../utils/DBOp';
3 | import { Entities } from '../utils/entities';
4 |
5 | export default function defineAbilityForDB(user: any) {
6 | return defineAbility((can) => {
7 | if (user.isLoggedIn) {
8 | // Admin can do anything to the Admin DB.
9 | can(DBOp.Admin, Entities.AdminDB, { admins: { $all: [user.ID] } });
10 | can(DBOp.Read, Entities.AdminDB, { admins: { $all: [user.ID] } });
11 | can(DBOp.Write, Entities.AdminDB, { admins: { $all: [user.ID] } });
12 |
13 | // DB admin can do anything to the DB.
14 | can(DBOp.Admin, Entities.DB, { admins: { $all: [user.ID] } });
15 | can(DBOp.Read, Entities.DB, { admins: { $all: [user.ID] } });
16 | can(DBOp.Write, Entities.DB, { admins: { $all: [user.ID] } });
17 |
18 | // Writer can write to the DB.
19 | can(DBOp.Read, Entities.DB, { writers: { $all: [user.ID] } });
20 | can(DBOp.Write, Entities.DB, { writers: { $all: [user.ID] } });
21 |
22 | // Reader can read the DB.
23 | can(DBOp.Read, Entities.DB, { readers: { $all: [user.ID] } });
24 | }
25 | });
26 | }
--------------------------------------------------------------------------------
/src/valid/user.ts:
--------------------------------------------------------------------------------
1 | import { defineAbility } from '@casl/ability';
2 | import { CRUD } from '../utils/CRUD';
3 | import { Entities } from '../utils/entities';
4 |
5 | export default function defineAbilityForUser(user: any, isAdmin: boolean) {
6 | return defineAbility((can, cannot) => {
7 | if (isAdmin) {
8 | can(CRUD.Create, Entities.User);
9 | can(CRUD.Read, Entities.User);
10 | can(CRUD.Update, Entities.User);
11 | can(CRUD.Delete, Entities.User);
12 | } else {
13 | cannot(CRUD.Create, Entities.User);
14 | can(CRUD.Read, Entities.User, { ID: user.ID });
15 | can(CRUD.Update, Entities.User, { ID: user.ID });
16 | can(CRUD.Delete, Entities.User, { ID: user.ID });
17 | }
18 | });
19 | }
--------------------------------------------------------------------------------
/test/basic.test.ts:
--------------------------------------------------------------------------------
1 | import assert from 'node:assert';
2 | import { describe, it } from 'node:test';
3 | import workerpool from 'workerpool';
4 |
5 | describe('Tests', () => {
6 | it('should run', () => {
7 | assert.strictEqual(1, 1);
8 | });
9 | });
10 |
11 | describe('Workerpool', () => {
12 | it('should run', async () => {
13 | const pool = workerpool.pool();
14 |
15 | function add(a: number, b: number) {
16 | return a + b;
17 | }
18 |
19 | const result = await pool.exec(add, [1, 2]);
20 | assert.strictEqual(result, 3);
21 |
22 | pool.terminate();
23 | });
24 | });
--------------------------------------------------------------------------------
/test/objects/DB.test.ts:
--------------------------------------------------------------------------------
1 | import { existsSync, rmSync } from 'fs';
2 | import assert from 'node:assert';
3 | import { before, describe, it } from 'node:test';
4 | import { ADMIN_NAME, DATA_DIR } from '../../src/utils/constants';
5 | import { AdminDBFile, DBFile } from '../../src/objects/DBFile';
6 | import path from 'node:path';
7 |
8 | const USER_DB_NAME = 'userDB';
9 | const USER_DB_NAME_2 = 'userDB2';
10 |
11 | const ADMIN_DB_NAME_2 = 'adminDB2';
12 |
13 | describe('DBFile Objects', () => {
14 | before(() => {
15 | rmSync(DATA_DIR, { force: true, recursive: true });
16 | });
17 |
18 | it('should create an Admin file', async () => {
19 | await using file = new AdminDBFile({ name: ADMIN_NAME, types: [] });
20 | await file.load();
21 |
22 | const filePath = path.resolve(DATA_DIR, `${ADMIN_NAME}.db`);
23 | assert.ok(existsSync(filePath));
24 | assert.ok(!await file.isInit());
25 | });
26 |
27 | it('should create a file', async () => {
28 | await using file = new DBFile({ name: USER_DB_NAME, types: [] });
29 | await file.load();
30 |
31 | const filePath = path.resolve(DATA_DIR, `${USER_DB_NAME}.db`);
32 | assert.ok(existsSync(filePath));
33 | assert.ok(!await file.isInit());
34 | });
35 |
36 | it('should fail to rename an admin file', async () => {
37 | await using file = new AdminDBFile({ name: ADMIN_NAME, types: [] });
38 | await file.load();
39 |
40 | let error;
41 | try {
42 | await file.save({ name: ADMIN_DB_NAME_2, types: [] });
43 | } catch (err) {
44 | error = err;
45 | }
46 | assert.ok(error);
47 | });
48 |
49 | it('should fail to rename an unitialized file', async () => {
50 | await using file = new DBFile({ name: USER_DB_NAME, types: [] });
51 | await file.load();
52 |
53 | let error;
54 | try {
55 | await file.save({ name: USER_DB_NAME_2, types: [] });
56 | } catch (err) {
57 | error = err;
58 | }
59 | assert.ok(error);
60 | });
61 |
62 | it('should initialize an admin file', async () => {
63 | await using file = new AdminDBFile({ name: ADMIN_NAME, types: [] });
64 | await file.load();
65 |
66 | await file.setInit();
67 | assert.ok(await file.isInit());
68 | });
69 |
70 | it('should initialize a file', async () => {
71 | await using file = new DBFile({ name: USER_DB_NAME, types: [] });
72 | await file.load();
73 |
74 | await file.setInit();
75 | assert.ok(await file.isInit());
76 | });
77 |
78 | it('should rename an initialized file', async () => {
79 | await using file = new DBFile({ name: USER_DB_NAME, types: [] });
80 | await file.load();
81 |
82 | await file.save({ name: USER_DB_NAME_2, types: [] });
83 |
84 | const filePath = path.resolve(DATA_DIR, `${USER_DB_NAME_2}.db`);
85 | assert.ok(existsSync(filePath));
86 | assert.ok(await file.isInit());
87 | });
88 |
89 | it('should fail to delete an admin file', async () => {
90 | await using file = new AdminDBFile({ name: ADMIN_NAME, types: [] });
91 | await file.load();
92 |
93 | let error;
94 | try {
95 | await file.delete();
96 | } catch (err) {
97 | error = err;
98 | }
99 | assert.ok(error);
100 | });
101 |
102 | it('should fail to delete an unitialized file', async () => {
103 | await using file = new DBFile({ name: USER_DB_NAME, types: [] });
104 |
105 | let error;
106 | try {
107 | await file.delete();
108 | } catch (err) {
109 | error = err;
110 | }
111 | assert.ok(error);
112 | });
113 |
114 | it('should delete an initialized file', async () => {
115 | await using file = new DBFile({ name: USER_DB_NAME, types: [] });
116 | await file.load();
117 |
118 | await file.setInit();
119 | assert.ok(await file.isInit());
120 |
121 | await file.delete();
122 |
123 | const filePath = path.resolve(DATA_DIR, `${USER_DB_NAME}.db`);
124 | assert.ok(!existsSync(filePath));
125 | });
126 | });
--------------------------------------------------------------------------------
/test/objects/DBObjects.test.ts:
--------------------------------------------------------------------------------
1 | import { rmSync } from 'fs';
2 | import assert from 'node:assert';
3 | import { after, before, describe, it } from 'node:test';
4 |
5 | import { DBFile } from '../../src/objects/DBFile';
6 | import { ADMIN_NAME, DATA_DIR, ONE } from '../../src/utils/constants';
7 | import { AdminDB, DB } from '../../src/objects/DB';
8 | import { DB as DBEntity } from '../../src/entities/DB';
9 |
10 | const USER_DB_NAME = 'userDB';
11 | const USER_DB_NAME_2 = 'userDB2';
12 |
13 | const ADMIN_DB_NAME_2 = 'adminDB2';
14 |
15 | let adminFile: DBFile;
16 |
17 | describe('DBEntity Objects', () => {
18 | before(async () => {
19 | rmSync(DATA_DIR, { force: true, recursive: true });
20 |
21 | adminFile = new DBFile({ name: ADMIN_NAME, types: [DBEntity] });
22 | await adminFile.load();
23 | await adminFile.setInit();
24 | });
25 |
26 | after(async () => {
27 | await adminFile[Symbol.asyncDispose]();
28 | });
29 |
30 | it('should create an AdminDB entity', async () => {
31 | const adminDBTest = new AdminDB({ DB: adminFile.DB });
32 | await adminDBTest.save({
33 | ID: ONE,
34 | name: ADMIN_NAME,
35 | admins: [],
36 | readers: [],
37 | writers: []
38 | });
39 |
40 | const entity = await adminFile.DB.manager.findOneByOrFail(DBEntity, {
41 | ID: ONE,
42 | name: ADMIN_NAME,
43 | });
44 | assert.strictEqual(entity.ID, ONE);
45 | assert.strictEqual(entity.name, ADMIN_NAME);
46 | });
47 |
48 | it('should create a DB entity', async () => {
49 | const DBTest = new DB({ DB: adminFile.DB });
50 | await DBTest.save({
51 | name: USER_DB_NAME,
52 | admins: [],
53 | readers: [],
54 | writers: []
55 | });
56 |
57 | const entity = await adminFile.DB.manager.findOneByOrFail(DBEntity, {
58 | name: USER_DB_NAME,
59 | });
60 | assert.strictEqual(entity.ID, 2);
61 | assert.strictEqual(entity.name, USER_DB_NAME);
62 | });
63 |
64 | it('should load an AdminDB entity', async () => {
65 | const adminDBTest = new AdminDB({
66 | DB: adminFile.DB,
67 | ID: ONE,
68 | name: ADMIN_NAME
69 | });
70 | await adminDBTest.load();
71 |
72 | assert.strictEqual(adminDBTest.ID, ONE);
73 | assert.strictEqual(adminDBTest.name, ADMIN_NAME);
74 | assert.deepStrictEqual(adminDBTest.admins, []);
75 | assert.deepStrictEqual(adminDBTest.readers, []);
76 | assert.deepStrictEqual(adminDBTest.writers, []);
77 | });
78 |
79 | it('should load a DB entity', async () => {
80 | const DBTest = new DB({
81 | DB: adminFile.DB,
82 | ID: 2,
83 | });
84 | await DBTest.load();
85 |
86 | assert.strictEqual(DBTest.ID, 2);
87 | assert.strictEqual(DBTest.name, USER_DB_NAME);
88 | assert.deepStrictEqual(DBTest.admins, []);
89 | assert.deepStrictEqual(DBTest.readers, []);
90 | assert.deepStrictEqual(DBTest.writers, []);
91 | });
92 |
93 | it('should fail to load an incorrect AdminDB entity', async () => {
94 | const adminDBTest = new AdminDB({
95 | DB: adminFile.DB,
96 | ID: 2,
97 | name: ADMIN_NAME,
98 | });
99 |
100 | let error;
101 | try {
102 | await adminDBTest.load();
103 | } catch (err) {
104 | error = err;
105 | }
106 | assert.ok(error);
107 | });
108 |
109 | it.skip('should fail to load an incorrect DB entity', async () => {
110 | const DBTest = new DB({
111 | DB: adminFile.DB,
112 | ID: ONE,
113 | });
114 |
115 | let error;
116 | try {
117 | await DBTest.load();
118 | } catch (err) {
119 | error = err;
120 | }
121 | assert.ok(error);
122 | });
123 |
124 | it('should update an AdminDB entity', async () => {
125 | const adminDBTest = new AdminDB({
126 | DB: adminFile.DB,
127 | ID: ONE,
128 | name: ADMIN_NAME,
129 | });
130 | await adminDBTest.load();
131 |
132 | await adminDBTest.save({
133 | ID: ONE,
134 | name: ADMIN_NAME,
135 | admins: [ONE],
136 | readers: [],
137 | writers: []
138 | });
139 | assert.strictEqual(adminDBTest.ID, ONE);
140 | assert.strictEqual(adminDBTest.name, ADMIN_NAME);
141 | assert.deepStrictEqual(adminDBTest.admins, [ONE]);
142 | assert.deepStrictEqual(adminDBTest.readers, []);
143 | assert.deepStrictEqual(adminDBTest.writers, []);
144 |
145 | const entity = await adminFile.DB.manager.findOneByOrFail(DBEntity, {
146 | ID: ONE,
147 | name: ADMIN_NAME,
148 | });
149 | assert.strictEqual(entity.ID, ONE);
150 | assert.strictEqual(entity.name, ADMIN_NAME);
151 | assert.deepStrictEqual(entity.admins, [ONE]);
152 | assert.deepStrictEqual(entity.readers, []);
153 | assert.deepStrictEqual(entity.writers, []);
154 | });
155 |
156 | it('should update a DB entity', async () => {
157 | const DBTest = new DB({
158 | DB: adminFile.DB,
159 | ID: 2
160 | });
161 | await DBTest.load();
162 |
163 | await DBTest.save({
164 | ID: 2,
165 | name: USER_DB_NAME_2,
166 | admins: [ONE],
167 | readers: [],
168 | writers: []
169 | });
170 | assert.strictEqual(DBTest.ID, 2);
171 | assert.strictEqual(DBTest.name, USER_DB_NAME_2);
172 | assert.deepStrictEqual(DBTest.admins, [ONE]);
173 | assert.deepStrictEqual(DBTest.readers, []);
174 | assert.deepStrictEqual(DBTest.writers, []);
175 |
176 | const entity = await adminFile.DB.manager.findOneByOrFail(DBEntity, {
177 | ID: 2,
178 | name: USER_DB_NAME_2,
179 | });
180 | assert.strictEqual(entity.ID, 2);
181 | assert.strictEqual(entity.name, USER_DB_NAME_2);
182 | assert.deepStrictEqual(entity.admins, [ONE]);
183 | assert.deepStrictEqual(entity.readers, []);
184 | assert.deepStrictEqual(entity.writers, []);
185 | });
186 |
187 | it('should fail to update an incorrect AdminDB entity', async () => {
188 | const adminDBTest = new AdminDB({
189 | DB: adminFile.DB,
190 | });
191 |
192 | let error;
193 | try {
194 | await adminDBTest.save({
195 | ID: ONE,
196 | name: ADMIN_DB_NAME_2,
197 | admins: [ONE],
198 | readers: [],
199 | writers: []
200 | });
201 | } catch (err) {
202 | error = err;
203 | }
204 | assert.ok(error);
205 | });
206 |
207 | it.skip('should fail to update an incorrect DB entity', async () => {
208 | const DBTest = new DB({
209 | DB: adminFile.DB,
210 | });
211 |
212 | let error;
213 | try {
214 | await DBTest.save({
215 | ID: ONE,
216 | name: ADMIN_NAME,
217 | admins: [ONE],
218 | readers: [],
219 | writers: []
220 | });
221 | } catch (err) {
222 | error = err;
223 | }
224 | assert.ok(error);
225 | });
226 |
227 | it('should fail to delete an AdminDB entity', async () => {
228 | const adminDBTest = new AdminDB({
229 | DB: adminFile.DB,
230 | ID: ONE,
231 | name: ADMIN_NAME,
232 | });
233 |
234 | let error;
235 | try {
236 | await adminDBTest.delete();
237 | } catch (err) {
238 | error = err;
239 | }
240 | assert.ok(error);
241 | });
242 |
243 | it('should delete a DB entity', async () => {
244 | const DBTest = new DB({
245 | DB: adminFile.DB,
246 | ID: 2,
247 | });
248 |
249 | await DBTest.delete();
250 |
251 | const entity = await adminFile.DB.manager.findOneBy(DBEntity, {
252 | ID: 2,
253 | });
254 | assert.ok(!entity);
255 | });
256 | });
--------------------------------------------------------------------------------
/test/parser/queryParse.test.ts:
--------------------------------------------------------------------------------
1 | import assert from 'node:assert';
2 | import { describe, it } from 'node:test';
3 | import { ParseType, QueryParse } from '../../src/parser/queryParse';
4 |
5 | const tests = [
6 | /* #region Transactions. */
7 | // https://www.sqlite.org/lang_transaction.html
8 | {
9 | id: 0,
10 | name: 'Query Transaction - begin',
11 | query: 'BEGIN;',
12 | result: {
13 | isRead: false,
14 | query: 'BEGIN;',
15 | params: [],
16 | tablesRead: [],
17 | tablesWrite: [],
18 | columns: [],
19 | autoKeys: [],
20 | keys: [],
21 | type: ParseType.begin_transaction
22 | },
23 | isSkip: false
24 | }, {
25 | id: 1,
26 | name: 'Query Transaction - begin trim',
27 | query: '\n BEGIN;',
28 | result: {
29 | isRead: false,
30 | query: 'BEGIN;',
31 | params: [],
32 | tablesRead: [],
33 | tablesWrite: [],
34 | columns: [],
35 | autoKeys: [],
36 | keys: [],
37 | type: ParseType.begin_transaction
38 | }
39 | }, {
40 | id: 2,
41 | name: 'Query Transaction - begin long',
42 | query: 'BEGIN TRANSACTION;',
43 | result: {
44 | isRead: false,
45 | query: 'BEGIN TRANSACTION;',
46 | params: [],
47 | tablesRead: [],
48 | tablesWrite: [],
49 | columns: [],
50 | autoKeys: [],
51 | keys: [],
52 | type: ParseType.begin_transaction
53 | }
54 | }, {
55 | id: 3,
56 | name: 'Query Transaction - rollback',
57 | query: 'ROLLBACK;',
58 | result: {
59 | isRead: false,
60 | query: 'ROLLBACK;',
61 | params: [],
62 | tablesRead: [],
63 | tablesWrite: [],
64 | columns: [],
65 | autoKeys: [],
66 | keys: [],
67 | type: ParseType.rollback_transaction
68 | }
69 | }, {
70 | id: 4,
71 | name: 'Query Transaction - rollback long',
72 | query: 'ROLLBACK TRANSACTION;',
73 | result: {
74 | isRead: false,
75 | query: 'ROLLBACK TRANSACTION;',
76 | params: [],
77 | tablesRead: [],
78 | tablesWrite: [],
79 | columns: [],
80 | autoKeys: [],
81 | keys: [],
82 | type: ParseType.rollback_transaction
83 | }
84 | }, {
85 | id: 5,
86 | name: 'Query Transaction - commit',
87 | query: 'COMMIT;',
88 | result: {
89 | isRead: false,
90 | query: 'COMMIT;',
91 | params: [],
92 | tablesRead: [],
93 | tablesWrite: [],
94 | columns: [],
95 | autoKeys: [],
96 | keys: [],
97 | type: ParseType.commit_transaction
98 | }
99 | }, {
100 | id: 6,
101 | name: 'Query Transaction - commit long',
102 | query: 'COMMIT TRANSACTION;',
103 | result: {
104 | isRead: false,
105 | query: 'COMMIT TRANSACTION;',
106 | params: [],
107 | tablesRead: [],
108 | tablesWrite: [],
109 | columns: [],
110 | autoKeys: [],
111 | keys: [],
112 | type: ParseType.commit_transaction
113 | }
114 | }, {
115 | id: 7,
116 | name: 'Query Transaction - end',
117 | query: 'END;',
118 | result: {
119 | isRead: false,
120 | query: 'END;',
121 | params: [],
122 | tablesRead: [],
123 | tablesWrite: [],
124 | columns: [],
125 | autoKeys: [],
126 | keys: [],
127 | type: ParseType.commit_transaction
128 | }
129 | }, {
130 | id: 8,
131 | name: 'Query Transaction - end long',
132 | query: 'END TRANSACTION;',
133 | result: {
134 | isRead: false,
135 | query: 'END TRANSACTION;',
136 | params: [],
137 | tablesRead: [],
138 | tablesWrite: [],
139 | columns: [],
140 | autoKeys: [],
141 | keys: [],
142 | type: ParseType.commit_transaction
143 | }
144 | },
145 | /* #endregion */
146 |
147 | /* #region Tables. */
148 | // https://www.sqlite.org/lang_createtable.html
149 | {
150 | id: 9,
151 | name: 'Query Table - create',
152 | query: 'CREATE TABLE IF NOT EXISTS "variables" ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "value" text NOT NULL);',
153 | result: {
154 | isRead: false,
155 | query: 'CREATE TABLE IF NOT EXISTS "variables" ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, "value" text NOT NULL);',
156 | params: [],
157 | tablesRead: [],
158 | tablesWrite: ["variables"],
159 | columns: ["id", "value"],
160 | autoKeys: ["id"],
161 | keys: ["id"],
162 | type: ParseType.create_table,
163 | },
164 | }, {
165 | id: 10,
166 | name: 'Query Table - create multiple keys',
167 | query: 'CREATE TABLE IF NOT EXISTS "variables" ("id" INTEGER NOT NULL, "type" varchar NOT NULL, "value" text NOT NULL, PRIMARY KEY("id", "type"));',
168 | result: {
169 | isRead: false,
170 | query: 'CREATE TABLE IF NOT EXISTS "variables" ("id" INTEGER NOT NULL, "type" varchar NOT NULL, "value" text NOT NULL, PRIMARY KEY("id", "type"));',
171 | params: [],
172 | tablesRead: [],
173 | tablesWrite: ["variables"],
174 | columns: ["id", "type", "value"],
175 | autoKeys: [],
176 | keys: ["id", "type"],
177 | type: ParseType.create_table,
178 | }
179 | }, {
180 | id: 11,
181 | name: 'Query Table - create temp',
182 | query: 'CREATE TEMPORARY TABLE IF NOT EXISTS "variables" ("id" varchar PRIMARY KEY NOT NULL, "value" text NOT NULL);',
183 | result: {
184 | isRead: false,
185 | query: 'CREATE TEMPORARY TABLE IF NOT EXISTS "variables" ("id" varchar PRIMARY KEY NOT NULL, "value" text NOT NULL);',
186 | params: [],
187 | tablesRead: [],
188 | tablesWrite: ["variables"],
189 | columns: ["id", "value"],
190 | autoKeys: [],
191 | keys: ["id"],
192 | type: ParseType.create_table,
193 | }
194 | }, {
195 | id: 12,
196 | name: 'Query Data - create table with select.',
197 | query: 'CREATE TABLE variables AS SELECT * FROM variables2;',
198 | result: {
199 | isRead: true,
200 | query: 'CREATE TABLE variables AS SELECT * FROM variables2;',
201 | params: [],
202 | tablesRead: ["variables2"],
203 | tablesWrite: ["variables"],
204 | columns: [],
205 | autoKeys: [],
206 | keys: [],
207 | type: ParseType.create_table,
208 | },
209 | isSkip: false
210 | }, {
211 | id: 13,
212 | name: 'Query Table - drop',
213 | query: 'DROP TABLE "variables";',
214 | result: {
215 | isRead: false,
216 | query: 'DROP TABLE "variables";',
217 | params: [],
218 | tablesRead: [],
219 | tablesWrite: ["variables"],
220 | columns: [],
221 | autoKeys: [],
222 | keys: [],
223 | type: ParseType.drop_table,
224 | }
225 | }, {
226 | id: 14,
227 | name: 'Query Table - drop conditional',
228 | query: 'DROP TABLE IF EXISTS "variables";',
229 | result: {
230 | isRead: false,
231 | query: 'DROP TABLE IF EXISTS "variables";',
232 | params: [],
233 | tablesRead: [],
234 | tablesWrite: ["variables"],
235 | columns: [],
236 | autoKeys: [],
237 | keys: [],
238 | type: ParseType.drop_table,
239 | }
240 | },
241 |
242 | // https://www.sqlite.org/lang_altertable.html
243 | {
244 | id: 15,
245 | name: 'Query Table - rename',
246 | query: 'ALTER TABLE "variables" RENAME TO "variables2";',
247 | result: {
248 | isRead: false,
249 | query: 'ALTER TABLE "variables" RENAME TO "variables2";',
250 | params: [],
251 | tablesRead: [],
252 | tablesWrite: ["variables", "variables2"],
253 | columns: [],
254 | autoKeys: [],
255 | keys: [],
256 | type: ParseType.rename_table
257 | }
258 | }, {
259 | id: 16,
260 | name: 'Query Table - TODO: rename column',
261 | query: 'ALTER TABLE "variables" RENAME COLUMN "value" TO "value2";',
262 | result: {
263 | isRead: false,
264 | query: 'ALTER TABLE "variables" RENAME COLUMN "value" TO "value2";',
265 | params: [] as any[],
266 | tablesRead: [] as string[],
267 | tablesWrite: ["variables2", "variables"],
268 | columns: ["value", "value2"],
269 | autoKeys: [] as string[],
270 | keys: [] as string[],
271 | type: ParseType.rename_table,
272 | },
273 | isSkip: true
274 | }, {
275 | id: 17,
276 | name: 'Query Table - add column',
277 | query: 'ALTER TABLE "variables" ADD "value2";',
278 | result: {
279 | isRead: false,
280 | query: 'ALTER TABLE "variables" ADD "value2";',
281 | params: [],
282 | tablesRead: [],
283 | tablesWrite: ["variables"],
284 | columns: ["value2"],
285 | autoKeys: [],
286 | keys: [],
287 | type: ParseType.modify_table_columns
288 | }
289 | }, {
290 | id: 18,
291 | name: 'Query Table - add column long',
292 | query:
293 | 'ALTER TABLE "variables" ADD COLUMN "value2" VARCHAR PRIMARY KEY;',
294 | result: {
295 | isRead: false,
296 | query:
297 | 'ALTER TABLE "variables" ADD COLUMN "value2" VARCHAR PRIMARY KEY;',
298 | params: [],
299 | tablesRead: [],
300 | tablesWrite: ["variables"],
301 | columns: ["value2"],
302 | autoKeys: [],
303 | keys: ["value2"],
304 | type: ParseType.modify_table_columns,
305 | }
306 | }, {
307 | id: 19,
308 | name: 'Query Table - TODO: drop column',
309 | query: 'ALTER TABLE "variables" DROP "value2";',
310 | result: {
311 | isRead: false,
312 | query: 'ALTER TABLE "variables" DROP "value2";',
313 | params: [],
314 | tablesRead: [],
315 | tablesWrite: ["variables"],
316 | columns: ["value2"],
317 | autoKeys: [],
318 | keys: [],
319 | type: ParseType.modify_table_columns,
320 | },
321 | isSkip: true
322 | }, {
323 | id: 20,
324 | name: 'Query Table - TODO: drop column long',
325 | query: 'ALTER TABLE "variables" DROP COLUMN "value2";',
326 | result: {
327 | isRead: false,
328 | query: 'ALTER TABLE "variables" DROP COLUMN "value2";',
329 | params: [],
330 | tablesRead: [],
331 | tablesWrite: ["variables"],
332 | columns: ["value2"],
333 | autoKeys: [],
334 | keys: [],
335 | type: ParseType.modify_table_columns,
336 | },
337 | isSkip: true
338 | },
339 | /* #endregion */
340 |
341 | /* #region Data. */
342 | {
343 | id: 21,
344 | name: 'Query Data - insert',
345 | query: 'INSERT INTO variables (id, value) VALUES ("isWAL", 1);',
346 | result: {
347 | isRead: false,
348 | query: 'INSERT INTO variables (id, value) VALUES ("isWAL", 1);',
349 | params: [],
350 | tablesRead: [],
351 | tablesWrite: ["variables"],
352 | columns: [],
353 | autoKeys: [],
354 | keys: [],
355 | type: ParseType.modify_data,
356 | }
357 | }, {
358 | id: 22,
359 | name: 'Query Data - insert with select.',
360 | query: 'INSERT INTO variables SELECT * FROM variables2;',
361 | result: {
362 | isRead: true,
363 | query: 'INSERT INTO variables SELECT * FROM variables2;',
364 | params: [] as any[],
365 | tablesRead: ["variables2"],
366 | tablesWrite: ["variables"],
367 | columns: [] as string[],
368 | autoKeys: [] as string[],
369 | keys: [] as string[],
370 | type: ParseType.modify_data
371 | }
372 | }, {
373 | id: 23,
374 | name: 'Query Data - upsert',
375 | query: 'INSERT OR REPLACE INTO variables (id, value) VALUES ("isWAL", 1);',
376 | result: {
377 | isRead: false,
378 | query: 'INSERT OR REPLACE INTO variables (id, value) VALUES ("isWAL", 1);',
379 | params: [],
380 | tablesRead: [],
381 | tablesWrite: ["variables"],
382 | columns: [],
383 | autoKeys: [],
384 | keys: [],
385 | type: ParseType.modify_data
386 | }
387 | }, {
388 | id: 24,
389 | name: 'Query Data - UPSERT with select.',
390 | query: 'INSERT OR REPLACE INTO variables SELECT * FROM variables2;',
391 | result: {
392 | isRead: true,
393 | query: 'INSERT OR REPLACE INTO variables SELECT * FROM variables2;',
394 | params: [],
395 | tablesRead: ["variables2"],
396 | tablesWrite: ["variables"],
397 | columns: [],
398 | autoKeys: [],
399 | keys: [],
400 | type: ParseType.modify_data
401 | },
402 | isSkip: false
403 | }, {
404 | id: 25,
405 | name: 'Query Data - update',
406 | query: 'UPDATE variables SET value = "new";',
407 | result: {
408 | isRead: false,
409 | query: 'UPDATE variables SET value = "new";',
410 | params: [],
411 | tablesRead: [],
412 | tablesWrite: ["variables"],
413 | columns: [],
414 | autoKeys: [],
415 | keys: [],
416 | type: ParseType.modify_data
417 | }
418 | }, {
419 | id: 26,
420 | name: 'Query Data - update with select',
421 | query: 'UPDATE variables SET value = "new" WHERE id IN (SELECT id FROM variables2);',
422 | result: {
423 | isRead: true,
424 | query: 'UPDATE variables SET value = "new" WHERE id IN (SELECT id FROM variables2);',
425 | params: [],
426 | tablesRead: ["variables2"],
427 | tablesWrite: ["variables"],
428 | columns: [],
429 | autoKeys: [],
430 | keys: [],
431 | type: ParseType.modify_data
432 | }
433 | }, {
434 | id: 27,
435 | name: 'Query Data - delete',
436 | query: 'DELETE FROM variables WHERE value = "new";',
437 | result: {
438 | isRead: false,
439 | query: 'DELETE FROM variables WHERE value = "new";',
440 | params: [],
441 | tablesRead: [],
442 | tablesWrite: ["variables"],
443 | columns: [],
444 | autoKeys: [],
445 | keys: [],
446 | type: ParseType.delete_data
447 | }
448 | }, {
449 | id: 28,
450 | name: 'Query Data - select',
451 | query: 'SELECT * FROM variables WHERE value = "new";',
452 | result: {
453 | isRead: false,
454 | query: 'SELECT * FROM variables WHERE value = "new";',
455 | params: [],
456 | tablesRead: ["variables"],
457 | tablesWrite: [],
458 | columns: [],
459 | autoKeys: [],
460 | keys: [],
461 | type: ParseType.select_data
462 | }
463 | }, {
464 | id: 29,
465 | name: 'Query Data - select with join',
466 | query: 'SELECT * FROM variables A, variables2 B WHERE A.id = B.id;',
467 | result: {
468 | isRead: false,
469 | query: 'SELECT * FROM variables A, variables2 B WHERE A.id = B.id;',
470 | params: [],
471 | tablesRead: ["variables", "variables2"],
472 | tablesWrite: [],
473 | columns: [],
474 | autoKeys: [],
475 | keys: [],
476 | type: ParseType.select_data
477 | }
478 | },
479 | /* #endregion */
480 |
481 | /* #region Data - Common Table Expressions. */
482 | {
483 | id: 30,
484 | name: 'CTE - insert with select.',
485 | query: 'WITH twoCol( a, b ) AS ( SELECT 1, 2 ) INSERT INTO variables SELECT * FROM twoCol;',
486 | result: {
487 | isRead: true,
488 | query: 'WITH twoCol( a, b ) AS ( SELECT 1, 2 ) INSERT INTO variables SELECT * FROM twoCol;',
489 | params: [],
490 | tablesRead: ["twocol"],
491 | tablesWrite: ["variables"],
492 | columns: [],
493 | autoKeys: [],
494 | keys: [],
495 | type: ParseType.modify_data
496 | }
497 | }, {
498 | id: 31,
499 | name: 'CTE - update with select',
500 | query: 'WITH twoCol( a, b ) AS ( SELECT 1, 2 ) UPDATE variables SET value = "new" WHERE id IN (SELECT a FROM twoCol);',
501 | result: {
502 | isRead: true,
503 | query: 'WITH twoCol( a, b ) AS ( SELECT 1, 2 ) UPDATE variables SET value = "new" WHERE id IN (SELECT a FROM twoCol);',
504 | params: [],
505 | tablesRead: ["twocol"],
506 | tablesWrite: ["variables"],
507 | columns: [],
508 | autoKeys: [],
509 | keys: [],
510 | type: ParseType.modify_data
511 | }
512 | }, {
513 | id: 31,
514 | name: 'CTE - update with select',
515 | query: 'WITH twoCol( a, b ) AS ( SELECT 1, 2 ) UPDATE variables SET value = "new" WHERE id IN (SELECT a FROM twoCol);',
516 | result: {
517 | isRead: true,
518 | query: 'WITH twoCol( a, b ) AS ( SELECT 1, 2 ) UPDATE variables SET value = "new" WHERE id IN (SELECT a FROM twoCol);',
519 | params: [],
520 | tablesRead: ["twocol"],
521 | tablesWrite: ["variables"],
522 | columns: [],
523 | autoKeys: [],
524 | keys: [],
525 | type: ParseType.modify_data
526 | }
527 | }, {
528 | id: 32,
529 | name: 'CTE - delete',
530 | query: 'WITH twoCol( a, b ) AS ( SELECT 1, 2 ) DELETE FROM variables WHERE id IN (SELECT a FROM twoCol);',
531 | result: {
532 | isRead: true,
533 | query: 'WITH twoCol( a, b ) AS ( SELECT 1, 2 ) DELETE FROM variables WHERE id IN (SELECT a FROM twoCol);',
534 | params: [],
535 | tablesRead: ["twocol"],
536 | tablesWrite: ["variables"],
537 | columns: [],
538 | autoKeys: [],
539 | keys: [],
540 | type: ParseType.delete_data
541 | }
542 | }, {
543 | id: 33,
544 | name: 'CTE - select',
545 | query: 'WITH twoCol( a, b ) AS ( SELECT 1, 2 ) SELECT * FROM twoCol;',
546 | result: {
547 | isRead: true, // Inconsistent.
548 | query: 'WITH twoCol( a, b ) AS ( SELECT 1, 2 ) SELECT * FROM twoCol;',
549 | params: [],
550 | tablesRead: ["twocol"],
551 | tablesWrite: [],
552 | columns: [],
553 | autoKeys: [],
554 | keys: [],
555 | type: ParseType.select_data
556 | }
557 | }, {
558 | id: 34,
559 | name: 'CTE - select',
560 | query: 'WITH twoCol( a, b ) AS ( SELECT * FROM variables ) SELECT * FROM twoCol;',
561 | result: {
562 | isRead: true, // Inconsistent.
563 | query: 'WITH twoCol( a, b ) AS ( SELECT * FROM variables ) SELECT * FROM twoCol;',
564 | params: [],
565 | tablesRead: ["twocol", "variables"],
566 | tablesWrite: [],
567 | columns: [],
568 | autoKeys: [],
569 | keys: [],
570 | type: ParseType.select_data
571 | }
572 | },
573 | /* #endregion */
574 |
575 | /* #region Other. */
576 | {
577 | id: 35,
578 | name: 'Query Other - add pragma',
579 | query: 'PRAGMA pragma_name = value;',
580 | result: {
581 | isRead: false,
582 | query: 'PRAGMA pragma_name = value;',
583 | params: [],
584 | tablesRead: [],
585 | tablesWrite: [],
586 | columns: [],
587 | autoKeys: [],
588 | keys: [],
589 | type: ParseType.other
590 | }
591 | },
592 | /* #endregion */
593 | ];
594 |
595 | describe('Queries.', function () {
596 | for (const test of tests) {
597 | if (test.isSkip) { continue; }
598 |
599 | it(`${test.id}: ${test.name}`, async () => {
600 | const statement = new QueryParse({
601 | query: test.query,
602 | params: []
603 | });
604 | statement.validator.ready();
605 |
606 | // Copy and cleanup the statement.
607 | const result = statement.toObject();
608 |
609 | assert.deepEqual(result, test.result);
610 | });
611 | }
612 | });
--------------------------------------------------------------------------------
/test/parser/rawParse.test.ts:
--------------------------------------------------------------------------------
1 | import assert from 'node:assert';
2 | import { describe, it } from 'node:test';
3 |
4 | import sqliteParser from '@appland/sql-parser';
5 |
6 | const tests: any[] = [
7 | {
8 | id: 0,
9 | name: 'Empty script',
10 | script: '',
11 | result: undefined,
12 | },
13 | {
14 | id: 1,
15 | name: 'Single statement',
16 | script: 'SELECT * FROM foo;',
17 | result: undefined,
18 | },
19 | {
20 | id: 2,
21 | name: 'Single statement, trigger',
22 | script: `
23 | CREATE TRIGGER
24 | IF NOT EXISTS test
25 | AFTER INSERT
26 | ON test
27 | BEGIN
28 | UPDATE test SET version = (SELECT value FROM test WHERE name = "1") WHERE ROWID = NEW.ROWID;
29 | END;
30 | `,
31 | result: undefined,
32 | },
33 | {
34 | id: 3,
35 | name: 'Multiple statements',
36 | script: `
37 | BEGIN TRANSACTION;
38 | SELECT * FROM foo;
39 | `,
40 | result: undefined,
41 | },
42 | {
43 | id: 4,
44 | name: 'Error, single statement, trigger',
45 | script: `
46 | CREATE TRIGGER
47 | IF NOT EXISTS test
48 | AFTER INSERT
49 | ON test
50 | BEGIN
51 | UPDATE test SET version = (SELECT value FROM test WHERE name = "1") WHERE ROWID = NEW.ROWID;
52 | `,
53 | result: 'Syntax error found near Identifier (WITH Clause)',
54 | },
55 | {
56 | id: 5,
57 | name: 'Error, multiple statements, trigger',
58 | script: `
59 | BEGIN TRANSACTION;
60 | CREATE TRIGGER
61 | IF NOT EXISTS test
62 | AFTER INSERT
63 | ON test
64 | BEGIN
65 | UPDATE test SET version = (SELECT value FROM test WHERE name = "1") WHERE ROWID = NEW.ROWID;
66 | `,
67 | result: 'Syntax error found near Identifier (WITH Clause)',
68 | },
69 | ];
70 |
71 | function parseScript(script: string) {
72 | return sqliteParser(script);
73 | }
74 |
75 | describe('rawParse', () => {
76 | for (const test of tests) {
77 | it(`${test.id}: ${test.name}`, () => {
78 | let error;
79 | try {
80 | parseScript(test.script);
81 | } catch (e: any) {
82 | error = e;
83 | }
84 |
85 | assert.deepStrictEqual(test.result, error?.message);
86 | });
87 | }
88 | });
--------------------------------------------------------------------------------
/test/parser/scriptParse.test.ts:
--------------------------------------------------------------------------------
1 | import assert from 'node:assert';
2 | import { describe, it } from 'node:test';
3 |
4 | import { ScriptParse } from '../../src/parser/scriptParse';
5 | import { ParseType } from '../../src/parser/queryParse';
6 |
// Script-splitting fixtures: each entry pairs a multi-statement script (plus
// positional params for the whole script) with the exact object that
// ScriptParse.toObject() must produce — including how params are distributed
// across the split statements. The template-literal whitespace is significant:
// the expected `query` strings are compared byte-for-byte.
const tests = [
  {
    id: 0,
    name: 'Empty',
    script: '',
    params: [] as any[],
    result: {
      isReadOnly: true,
      queries: [] as any[],
    },
  },
  {
    id: 1,
    name: 'One statement',
    script: 'SELECT * FROM users;',
    params: [] as any[],
    result: {
      isReadOnly: true,
      queries: [
        {
          autoKeys: [],
          columns: [],
          isRead: false,
          keys: [],
          params: [],
          query: 'SELECT * FROM users;',
          tablesRead: [
            'users'
          ],
          tablesWrite: [],
          type: ParseType.select_data
        }
      ]
    },
  },
  {
    id: 2,
    name: 'Two statements',
    script: `-- This is a sample script
CREATE TABLE users (
  id INTEGER PRIMARY KEY,
  name TEXT,
  age INTEGER
);

INSERT INTO users (name, age) VALUES (?, ?);`,
    params: ['John Doe', 42],
    result: {
      isReadOnly: false,
      queries: [
        {
          autoKeys: [],
          columns: ['id', 'name', 'age'],
          isRead: false,
          keys: ['id'],
          params: [],
          query: `-- This is a sample script
CREATE TABLE users (
  id INTEGER PRIMARY KEY,
  name TEXT,
  age INTEGER
);`,
          tablesRead: [],
          tablesWrite: ['users'],
          type: ParseType.create_table
        },
        {
          autoKeys: [],
          columns: [],
          isRead: false,
          keys: [],
          params: ['John Doe', 42],
          query: 'INSERT INTO users (name, age) VALUES (?, ?);',
          tablesRead: [],
          tablesWrite: [
            'users'
          ],
          type: ParseType.modify_data
        }
      ]
    },
  },
  {
    id: 3,
    name: 'Three statements',
    script: `-- This is a sample script
CREATE TABLE users (
  id INTEGER PRIMARY KEY,
  name TEXT,
  age INTEGER
);

INSERT INTO users (name, age) VALUES (?, ?);
INSERT INTO variables (name, value, isTest) VALUES (?, ?, ?);
`,
    params: ['John Doe', 42, 'test', 1, true],
    result: {
      isReadOnly: false,
      queries: [
        {
          autoKeys: [],
          columns: ['id', 'name', 'age'],
          isRead: false,
          keys: ['id'],
          params: [],
          query: `-- This is a sample script
CREATE TABLE users (
  id INTEGER PRIMARY KEY,
  name TEXT,
  age INTEGER
);`,
          tablesRead: [],
          tablesWrite: ['users'],
          type: ParseType.create_table
        },
        {
          autoKeys: [],
          columns: [],
          isRead: false,
          keys: [],
          params: ['John Doe', 42],
          query: 'INSERT INTO users (name, age) VALUES (?, ?);',
          tablesRead: [],
          tablesWrite: ['users'],
          type: ParseType.modify_data
        },
        {
          autoKeys: [],
          columns: [],
          isRead: false,
          keys: [],
          params: ['test', 1, true],
          query: 'INSERT INTO variables (name, value, isTest) VALUES (?, ?, ?);',
          tablesRead: [],
          tablesWrite: ['variables'],
          type: ParseType.modify_data
        }
      ]
    },
  },
  // Trigger bodies contain semicolons, so this case checks that the trigger
  // stays one statement and consumes exactly one param ('blue').
  {
    id: 4,
    name: 'Three statements with trigger',
    script: `
    INSERT INTO users (name, age) VALUES (?, ?);

    CREATE TRIGGER
      IF NOT EXISTS test
      AFTER INSERT
      ON test
      BEGIN
        UPDATE test SET version = (SELECT value FROM test WHERE name = ?) WHERE ROWID = NEW.ROWID;
      END;

    INSERT INTO variables (name, value, isTest) VALUES (?, ?, ?);
    `,
    params: ['John Doe', 42, 'blue', 'test', 1, true],
    result: {
      isReadOnly: false,
      queries: [
        {
          autoKeys: [],
          columns: [],
          isRead: false,
          keys: [],
          params: ['John Doe', 42],
          query: 'INSERT INTO users (name, age) VALUES (?, ?);',
          tablesRead: [],
          tablesWrite: ['users'],
          type: ParseType.modify_data
        },
        {
          autoKeys: [],
          columns: [],
          isRead: true,
          keys: [],
          params: ['blue'],
          query: `CREATE TRIGGER
      IF NOT EXISTS test
      AFTER INSERT
      ON test
      BEGIN
        UPDATE test SET version = (SELECT value FROM test WHERE name = ?) WHERE ROWID = NEW.ROWID;
      END;`,
          tablesRead: [],
          tablesWrite: [],
          type: ParseType.other
        },
        {
          autoKeys: [],
          columns: [],
          isRead: false,
          keys: [],
          params: ['test', 1, true],
          query: 'INSERT INTO variables (name, value, isTest) VALUES (?, ?, ?);',
          tablesRead: [],
          tablesWrite: ['variables'],
          type: ParseType.modify_data
        }
      ]
    },
  }
];
210 |
211 | describe('Parses scripts', () => {
212 | for (const test of tests) {
213 | it(`${test.id}: ${test.name}`, () => {
214 | const scriptParse = new ScriptParse({
215 | script: test.script,
216 | params: test.params,
217 | });
218 |
219 | assert.deepStrictEqual(scriptParse.toObject(), test.result);
220 | });
221 | }
222 | });
223 |
--------------------------------------------------------------------------------
/test/utils/.test.env:
--------------------------------------------------------------------------------
1 | STARK_DB_DATA_DIR="./test/data"
2 | STARK_DB_CERTS_DIR="./test/certs"
--------------------------------------------------------------------------------
/test/valid/DB.test.ts:
--------------------------------------------------------------------------------
1 | import assert from 'node:assert';
2 | import { describe, it } from 'node:test';
3 | import defineAbilityForDB from '../../src/valid/DB';
4 | import { AdminDB, DB } from './entities';
5 | import { DBOp } from '../../src/utils/DBOp';
6 |
7 | describe('DB Validation', () => {
8 | it('should pass admin logged in AdminDB', () => {
9 | const user = { ID: 1, isLoggedIn: true };
10 | const adminDB = new AdminDB({ admins: [1] });
11 |
12 | const ability = defineAbilityForDB(user);
13 |
14 | assert.strictEqual(true, ability.can(DBOp.Admin, adminDB));
15 | });
16 |
17 | it('should pass DB admin logged in DB', () => {
18 | const user = { ID: 1, isLoggedIn: true };
19 | const db = new DB({ admins: [1], readers: [2, 3], writers: [4] });
20 |
21 | const ability = defineAbilityForDB(user);
22 |
23 | assert.strictEqual(true, ability.can(DBOp.Admin, db));
24 | });
25 |
26 | it('should pass user is admin in DB and reads', () => {
27 | const user = { ID: 1, isLoggedIn: true };
28 | const db = new DB({ admins: [1], readers: [2, 3], writers: [4] });
29 |
30 | const ability = defineAbilityForDB(user);
31 |
32 | assert.strictEqual(true, ability.can(DBOp.Read, db));
33 | });
34 |
35 | it('should pass user is admin in DB and writes', () => {
36 | const user = { ID: 1, isLoggedIn: true };
37 | const db = new DB({ admins: [1], readers: [2, 3], writers: [4] });
38 |
39 | const ability = defineAbilityForDB(user);
40 |
41 | assert.strictEqual(true, ability.can(DBOp.Write, db));
42 | });
43 |
44 | it('should pass user is admin in admin DB and reads', () => {
45 | const user = { ID: 1, isLoggedIn: true };
46 | const db = new AdminDB({ admins: [1], readers: [2, 3], writers: [4] });
47 |
48 | const ability = defineAbilityForDB(user);
49 |
50 | assert.strictEqual(true, ability.can(DBOp.Read, db));
51 | });
52 |
53 | it('should pass user is admin in admin DB and writes', () => {
54 | const user = { ID: 1, isLoggedIn: true };
55 | const db = new AdminDB({ admins: [1], readers: [2, 3], writers: [4] });
56 |
57 | const ability = defineAbilityForDB(user);
58 |
59 | assert.strictEqual(true, ability.can(DBOp.Write, db));
60 | });
61 |
62 | it('should pass reader logged in DB and reads', () => {
63 | const user = { ID: 1, isLoggedIn: true };
64 | const db = new DB({ admins: [], readers: [1, 2, 3], writers: [4] });
65 |
66 | const ability = defineAbilityForDB(user);
67 |
68 | assert.strictEqual(true, ability.can(DBOp.Read, db));
69 | });
70 |
71 | it('should pass writer logged in DB and reads', () => {
72 | const user = { ID: 1, isLoggedIn: true };
73 | const db = new DB({ admins: [], readers: [2, 3], writers: [1] });
74 |
75 | const ability = defineAbilityForDB(user);
76 |
77 | assert.strictEqual(true, ability.can(DBOp.Read, db));
78 | });
79 |
80 | it('should pass writer logged in DB and writes', () => {
81 | const user = { ID: 1, isLoggedIn: true };
82 | const db = new DB({ admins: [], readers: [2, 3], writers: [1] });
83 |
84 | const ability = defineAbilityForDB(user);
85 |
86 | assert.strictEqual(true, ability.can(DBOp.Write, db));
87 | });
88 |
89 | it('should fail if not logged in', () => {
90 | const user = { ID: 1, isLoggedIn: false };
91 | const adminDB = new AdminDB({ admins: [1] });
92 | const userDB = new DB({ admins: [1], readers: [1, 2, 3], writers: [4] });
93 |
94 | const ability = defineAbilityForDB(user);
95 |
96 | assert.strictEqual(false, ability.can(DBOp.Admin, adminDB));
97 | assert.strictEqual(false, ability.can(DBOp.Admin, userDB));
98 | assert.strictEqual(false, ability.can(DBOp.Read, userDB));
99 | });
100 |
101 | it('should fail if user is not admin of AdminDB and admins', () => {
102 | const user = { ID: 1, isLoggedIn: true };
103 | const adminDB = new AdminDB({ admins: [2, 3] });
104 |
105 | const ability = defineAbilityForDB(user);
106 |
107 | assert.strictEqual(false, ability.can(DBOp.Admin, adminDB));
108 | });
109 |
110 | it('should fail if user is not admin of DB and admins', () => {
111 | const user = { ID: 1, isLoggedIn: true };
112 | const adminDB = new DB({ admins: [2, 3], readers: [1], writers: [4] });
113 |
114 | const ability = defineAbilityForDB(user);
115 |
116 | assert.strictEqual(false, ability.can(DBOp.Admin, adminDB));
117 | });
118 |
119 | it('should fail reader is not in DB and reads', () => {
120 | const user = { ID: 1, isLoggedIn: true };
121 | const db = new DB({ admins: [], readers: [2, 3], writers: [4] });
122 |
123 | const ability = defineAbilityForDB(user);
124 |
125 | assert.strictEqual(false, ability.can(DBOp.Read, db));
126 | });
127 |
128 | it('should fail writer is not in DB and writes', () => {
129 | const user = { ID: 1, isLoggedIn: true };
130 | const db = new DB({ admins: [], readers: [2, 3], writers: [4] });
131 |
132 | const ability = defineAbilityForDB(user);
133 |
134 | assert.strictEqual(false, ability.can(DBOp.Write, db));
135 | });
136 |
137 | it('should fail user is not in writers and writes', () => {
138 | const user = { ID: 1, isLoggedIn: true };
139 | const db = new DB({ admins: [], readers: [1, 2], writers: [3] });
140 |
141 | const ability = defineAbilityForDB(user);
142 |
143 | assert.strictEqual(false, ability.can(DBOp.Write, db));
144 | });
145 | });
--------------------------------------------------------------------------------
/test/valid/entities.ts:
--------------------------------------------------------------------------------
1 | /* #region Validation is class-name dependent! */
2 | class Entity {
3 | constructor(attrs: any) {
4 | Object.assign(this, attrs);
5 | }
6 | }
7 |
8 | export class AdminDB extends Entity { }
9 | export class DB extends Entity { }
10 | export class User extends Entity { }
11 | /* #endregion */
--------------------------------------------------------------------------------
/test/valid/user.test.ts:
--------------------------------------------------------------------------------
1 | import assert from 'node:assert';
2 | import { describe, it } from 'node:test';
3 | import defineAbilityForUser from '../../src/valid/user';
4 | import { User } from './entities';
5 | import { CRUD } from '../../src/utils/CRUD';
6 |
7 | describe('User Validation', () => {
8 | it('should pass if admin', () => {
9 | const user = { ID: 1 };
10 | const ability = defineAbilityForUser(user, true);
11 |
12 | const user2 = new User({ ID: 2 });
13 |
14 | assert.strictEqual(true, ability.can(CRUD.Create, user2));
15 | assert.strictEqual(true, ability.can(CRUD.Read, user2));
16 | assert.strictEqual(true, ability.can(CRUD.Update, user2));
17 | assert.strictEqual(true, ability.can(CRUD.Delete, user2));
18 | });
19 |
20 | it('should pass if user reads own user', () => {
21 | const user = { ID: 1 };
22 | const ability = defineAbilityForUser(user, false);
23 |
24 | const user2 = new User({ ID: 1 });
25 |
26 | assert.strictEqual(true, ability.can(CRUD.Read, user2));
27 | });
28 |
29 | it('should pass if user changes own user', () => {
30 | const user = { ID: 1 };
31 | const ability = defineAbilityForUser(user, false);
32 |
33 | const user2 = new User({ ID: 1 });
34 |
35 | assert.strictEqual(true, ability.can(CRUD.Update, user2));
36 | assert.strictEqual(true, ability.can(CRUD.Delete, user2));
37 | });
38 |
39 | it('should fail if user creates user', () => {
40 | const user = { ID: 1 };
41 | const ability = defineAbilityForUser(user, false);
42 |
43 | const user2 = new User({ ID: 2 });
44 |
45 | assert.strictEqual(false, ability.can(CRUD.Create, user2));
46 | });
47 |
48 | it('should fail if user reads other user', () => {
49 | const user = { ID: 1 };
50 | const ability = defineAbilityForUser(user, false);
51 |
52 | const user2 = new User({ ID: 2 });
53 |
54 | assert.strictEqual(false, ability.can(CRUD.Read, user2));
55 | });
56 |
57 | it('should fail if user changes other user', () => {
58 | const user = { ID: 1 };
59 | const ability = defineAbilityForUser(user, false);
60 |
61 | const user2 = new User({ ID: 2 });
62 |
63 | assert.strictEqual(false, ability.can(CRUD.Update, user2));
64 | assert.strictEqual(false, ability.can(CRUD.Delete, user2));
65 | });
66 | });
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | /* Visit https://aka.ms/tsconfig to read more about this file */
4 |
5 | /* Projects */
6 | // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */
7 | // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */
8 | // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */
9 | // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */
10 | // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */
11 | // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */
12 |
13 | /* Language and Environment */
14 | "target": "es2016", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */
15 | // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */
16 | // "jsx": "preserve", /* Specify what JSX code is generated. */
17 | "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */
18 | "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */
19 | // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */
20 | // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */
21 | // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */
22 | // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */
23 | // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */
24 | // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */
25 | // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */
26 |
27 | /* Modules */
28 | "module": "commonjs", /* Specify what module code is generated. */
29 | // "rootDir": "./", /* Specify the root folder within your source files. */
30 | // "moduleResolution": "node", /* Specify how TypeScript looks up a file from a given module specifier. */
31 | // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */
32 | // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */
33 | // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */
34 | // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */
35 | // "types": [], /* Specify type package names to be included without being referenced in a source file. */
36 | // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
37 | // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */
38 | // "resolveJsonModule": true, /* Enable importing .json files. */
39 | // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */
40 |
41 | /* JavaScript Support */
42 | // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */
43 | // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */
44 | // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */
45 |
46 | /* Emit */
47 | // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */
48 | // "declarationMap": true, /* Create sourcemaps for d.ts files. */
49 | // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */
50 | // "sourceMap": true, /* Create source map files for emitted JavaScript files. */
51 | // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */
52 | "outDir": "./dist", /* Specify an output folder for all emitted files. */
53 | // "removeComments": true, /* Disable emitting comments. */
54 | // "noEmit": true, /* Disable emitting files from a compilation. */
55 | // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */
56 | // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */
57 | // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */
58 | // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */
59 | // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
60 | // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */
61 | // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */
62 | // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */
63 | // "newLine": "crlf", /* Set the newline character for emitting files. */
64 | // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */
65 | // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */
66 | // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */
67 | // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */
68 | // "declarationDir": "./", /* Specify the output directory for generated declaration files. */
69 | // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */
70 |
71 | /* Interop Constraints */
72 | // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */
73 | // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */
74 | "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */
75 | // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */
76 | "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */
77 |
78 | /* Type Checking */
79 | "strict": true, /* Enable all strict type-checking options. */
80 | // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */
81 | "strictNullChecks": false, /* When type checking, take into account 'null' and 'undefined'. */
82 | // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */
83 | // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */
84 | // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */
85 | // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */
86 | // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */
87 | // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */
88 | // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */
89 | // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */
90 | // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */
91 | // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */
92 | // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */
93 | // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */
94 | // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */
95 | // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */
96 | // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */
97 | // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */
98 |
99 | /* Completeness */
100 | // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */
101 | "skipLibCheck": true /* Skip type checking all .d.ts files. */
102 | }
103 | }
104 |
--------------------------------------------------------------------------------