├── .editorconfig ├── .envrc ├── .eslintrc.cjs ├── .gitattributes ├── .github ├── pull_request_template.md └── workflows │ ├── codeql-analysis.yml │ └── nodejs.yml ├── .gitignore ├── .npsrc.json ├── .prettierignore ├── .prettierrc ├── Changelog.md ├── Readme.md ├── Server Side Redux.md ├── TODO.md ├── build-git.sh ├── flake.lock ├── flake.nix ├── license ├── package-scripts.cjs ├── package.json ├── pnpm-lock.yaml ├── src ├── DB │ ├── DB.js │ ├── DB.test.js │ ├── SQLite.js │ ├── SQLite.test.ts │ ├── Statement.js │ ├── Statement.test.js │ └── index.js ├── EventQueue.js ├── EventQueue.test.js ├── EventSourcingDB │ ├── ESDB-concurrency.test.js │ ├── ESDB-create.test.js │ ├── ESDB-errors.test.js │ ├── ESDB-events.test.js │ ├── ESDB-queue.test.js │ ├── ESDB-readOnly.test.js │ ├── ESDB-subevents.test.js │ ├── ESModel.js │ ├── ESModel.test.js │ ├── EventSourcingDB.js │ ├── __snapshots__ │ │ └── ESModel.test.js.snap │ ├── applyResult.js │ └── index.js ├── JsonModel │ ├── JM-ItemClass.test.js │ ├── JM-columns.test.js │ ├── JM-create.test.js │ ├── JM-makeSelect.test.js │ ├── JM-migration.test.js │ ├── JM-search.test.js │ ├── JM-set.test.js │ ├── JM-toObj.test.js │ ├── JsonModel.js │ ├── __snapshots__ │ │ └── JM-columns.test.js.snap │ ├── assignJsonParents.js │ ├── index.js │ ├── makeDefaultIdValue.js │ ├── makeMigrations.js │ ├── normalizeColumn.js │ ├── prepareSqlCol.js │ └── verifyOptions.js ├── index.js └── lib │ ├── _test-helpers.js │ ├── settleAll.js │ ├── slugify.js │ ├── slugify.test.js │ └── warning.js ├── tsconfig.json ├── types.d.ts └── vite.config.ts /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | indent_style = tab 5 | end_of_line = lf 6 | charset = utf-8 7 | trim_trailing_whitespace = true 8 | insert_final_newline = true 9 | 10 | [*.yml] 11 | indent_style = space 12 | indent_size = 2 13 | -------------------------------------------------------------------------------- /.envrc: 
-------------------------------------------------------------------------------- 1 | use flake 2 | layout node 3 | -------------------------------------------------------------------------------- /.eslintrc.cjs: -------------------------------------------------------------------------------- 1 | /* eslint sort-keys: "error" */ 2 | 3 | // The nicest rules 4 | const nicest = { 5 | '@typescript-eslint/no-unused-vars': [ 6 | 'error', 7 | // allow unused vars starting with _ 8 | { 9 | argsIgnorePattern: '^_', 10 | ignoreRestSiblings: true, 11 | varsIgnorePattern: '^_', 12 | }, 13 | ], 14 | 'default-param-last': 1, 15 | eqeqeq: [2, 'allow-null'], // == and != are nice for null+undefined 16 | 'no-console': 2, // we want a clean console - eslint-disable every wanted one 17 | 'no-implicit-coercion': [2, {allow: ['!!']}], // !! is fun 18 | 'no-shadow': 2, // sometimes causes logic bugs. 19 | 'object-shorthand': 2, 20 | 'prefer-destructuring': [ 21 | 2, 22 | {AssignmentExpression: {array: false, object: false}}, 23 | ], 24 | 'prettier/prettier': 1, // don't distract while programming 25 | 'unicorn/consistent-function-scoping': 1, 26 | 'unicorn/expiring-todo-comments': [2, {allowWarningComments: true}], 27 | 'unicorn/no-fn-reference-in-iterator': 1, 28 | 'valid-typeof': [2, {requireStringLiterals: true}], 29 | } 30 | 31 | // Would be nice to make these error 32 | const maybe = { 33 | '@typescript-eslint/ban-ts-comment': 1, 34 | 'no-warning-comments': 1, // set to 0 and remove allowWarning from unicorn rule above 35 | 'require-atomic-updates': 1, // too many false positives 36 | } 37 | 38 | // these rules suck 39 | const suck = { 40 | '@typescript-eslint/no-explicit-any': 0, 41 | 'capitalized-comments': 0, 42 | 'no-eq-null': 0, 43 | 'no-mixed-operators': 0, 44 | 'one-var': 0, 45 | 'padding-line-between-statements': 0, 46 | 'prefer-template': 0, 47 | 'promise/always-return': 0, 48 | 'promise/no-callback-in-promise': 0, 49 | 'promise/param-names': 0, 50 | 
'unicorn/catch-error-name': 0, 51 | 'unicorn/consistent-destructuring': 0, 52 | 'unicorn/explicit-length-check': 0, 53 | 'unicorn/filename-case': 0, 54 | 'unicorn/import-style': 0, 55 | 'unicorn/no-await-expression-member': 0, 56 | 'unicorn/no-nested-ternary': 0, 57 | 'unicorn/no-null': 0, 58 | 'unicorn/no-process-exit': 0, 59 | 'unicorn/no-typeof-undefined': 0, 60 | 'unicorn/no-useless-undefined': 0, 61 | 'unicorn/number-literal-case': 0, 62 | 'unicorn/prefer-module': 0, 63 | 'unicorn/prefer-node-protocol': 0, 64 | 'unicorn/prevent-abbreviations': 0, 65 | } 66 | 67 | const rules = {...nicest, ...maybe, ...suck} 68 | 69 | module.exports = { 70 | env: { 71 | commonjs: true, 72 | es6: true, 73 | node: true, 74 | 'vitest-globals/env': true, 75 | }, 76 | extends: [ 77 | 'eslint:recommended', 78 | 'plugin:@typescript-eslint/recommended', 79 | 'plugin:vitest-globals/recommended', 80 | 'plugin:promise/recommended', 81 | 'plugin:unicorn/recommended', 82 | // Keep this last, it overrides all style rules 83 | 'plugin:prettier/recommended', 84 | ], 85 | ignorePatterns: ['/build/**/*', '/coverage/**/*', '/dist/**/*'], 86 | parser: '@typescript-eslint/parser', 87 | plugins: ['promise', 'unicorn', 'jsdoc', '@typescript-eslint'], 88 | reportUnusedDisableDirectives: true, 89 | rules, 90 | } 91 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | * text=auto 2 | *.js text eol=lf 3 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | Put your description here. 
2 | 3 | Checklist, delete what doesn't apply and make sure you did what applies: 4 | 5 | - [ ] I made sure documentation exists and is up to date 6 | - [ ] I added tests for new functionality 7 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | name: 'CodeQL' 2 | 3 | on: 4 | push: 5 | branches: [master] 6 | pull_request: 7 | # The branches below must be a subset of the branches above 8 | branches: [master] 9 | schedule: 10 | - cron: '0 6 * * 3' 11 | 12 | jobs: 13 | analyze: 14 | name: Analyze 15 | runs-on: ubuntu-latest 16 | 17 | steps: 18 | - name: Checkout repository 19 | uses: actions/checkout@v2 20 | with: 21 | # We must fetch at least the immediate parents so that if this is 22 | # a pull request then we can checkout the head. 23 | fetch-depth: 2 24 | 25 | # If this run was triggered by a pull request event, then checkout 26 | # the head of the pull request instead of the merge commit. 27 | - run: git checkout HEAD^2 28 | if: ${{ github.event_name == 'pull_request' }} 29 | 30 | # Initializes the CodeQL tools for scanning. 31 | - name: Initialize CodeQL 32 | uses: github/codeql-action/init@v1 33 | # Override language selection by uncommenting this and choosing your languages 34 | # with: 35 | # languages: go, javascript, csharp, python, cpp, java 36 | 37 | # # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 38 | # # If this step fails, then you should remove it and run the build manually (see below) 39 | # - name: Autobuild 40 | # uses: github/codeql-action/autobuild@v1 41 | 42 | # ℹ️ Command-line programs to run using the OS shell. 
43 | # 📚 https://git.io/JvXDl 44 | 45 | # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines 46 | # and modify them (or add more) to build your code if your project 47 | # uses a compiled language 48 | 49 | #- run: | 50 | # make bootstrap 51 | # make release 52 | 53 | - name: Perform CodeQL Analysis 54 | uses: github/codeql-action/analyze@v1 55 | -------------------------------------------------------------------------------- /.github/workflows/nodejs.yml: -------------------------------------------------------------------------------- 1 | name: Node CI 2 | 3 | on: 4 | pull_request: 5 | push: 6 | branches: 7 | - master 8 | 9 | jobs: 10 | # This job prevents duplicate jobs and doc-only changes 11 | verify: 12 | continue-on-error: true 13 | runs-on: ubuntu-latest 14 | 15 | # Map a step output to a job output 16 | outputs: 17 | should_skip: ${{ steps.skip_check.outputs.should_skip }} 18 | steps: 19 | - id: skip_check 20 | uses: fkirc/skip-duplicate-actions@master 21 | with: 22 | # https://github.com/marketplace/actions/skip-duplicate-actions#skip-concurrent-workflow-runs 23 | concurrent_skipping: same_content_newer 24 | paths_ignore: '["**/*.md"]' 25 | 26 | lint: 27 | if: ${{ needs.verify.outputs.should_skip != 'true' }} 28 | runs-on: ubuntu-latest 29 | strategy: 30 | matrix: 31 | node-version: [16.x] 32 | 33 | env: 34 | BASE_REF: ${{ github.base_ref }} 35 | 36 | steps: 37 | - name: Checkout 38 | uses: actions/checkout@v3 39 | with: 40 | clean: false 41 | # Make git diff HEAD^ work 42 | fetch-depth: 2 43 | # For diffing 44 | - run: git fetch --depth=1 origin ${{ github.base_ref }} 45 | 46 | - name: Use Node.js ${{ matrix.node-version }} 47 | uses: actions/setup-node@v3 48 | with: 49 | node-version: ${{ matrix.node-version }} 50 | cache: npm 51 | 52 | - run: npm ci 53 | 54 | - name: eslint 55 | run: npx nps lint.ci 56 | shell: bash 57 | env: 58 | GH_EVENT: ${{ github.event_name }} 59 | 60 | test: 61 | if: ${{ 
needs.verify.outputs.should_skip != 'true' }} 62 | runs-on: ubuntu-latest 63 | strategy: 64 | matrix: 65 | node-version: [16.x] 66 | 67 | env: 68 | BASE_REF: ${{ github.base_ref }} 69 | 70 | steps: 71 | - name: Checkout 72 | uses: actions/checkout@v3 73 | with: 74 | clean: false 75 | # Make git diff HEAD^ work 76 | fetch-depth: 2 77 | # For diffing 78 | - run: git fetch --depth=1 origin ${{ github.base_ref }} 79 | 80 | - name: Use Node.js ${{ matrix.node-version }} 81 | uses: actions/setup-node@v3 82 | with: 83 | node-version: ${{ matrix.node-version }} 84 | cache: npm 85 | 86 | - run: npm ci 87 | 88 | # Note, these only check changed files 89 | - name: tests 90 | run: npx nps test.ci 91 | # For pipefail 92 | shell: bash 93 | env: 94 | GH_EVENT: ${{ github.event_name }} 95 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | coverage 3 | dist 4 | build 5 | .direnv 6 | -------------------------------------------------------------------------------- /.npsrc.json: -------------------------------------------------------------------------------- 1 | {"config": "./package-scripts.cjs"} 2 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | dist/ 2 | coverage/ 3 | package-lock.json 4 | package.json 5 | API.md 6 | flake.lock 7 | -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "arrowParens": "avoid", 3 | "bracketSpacing": false, 4 | "plugins": ["prettier-plugin-jsdoc"], 5 | "semi": false, 6 | "singleQuote": true, 7 | "trailingComma": "es5", 8 | "useTabs": true 9 | } 10 | -------------------------------------------------------------------------------- /Changelog.md: 
-------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## 3.6.1 4 | 5 | ### Changes 6 | 7 | - Tooling: switch to Vite + Vitest 8 | - Tooling: define direnv devshell with Nix 9 | 10 | ### Fixes 11 | 12 | - SQLite: fix time measurement. It was just measuring the time of the sqlite initial call :-/ 13 | 14 | ## 3.6.0 15 | 16 | ### Changes 17 | 18 | - SQLite: accurate query time measurement. Before, query timing included any queries that were underway. This change serializes all calls to the SQLite connection when either debug has `strato-db/sqlite:query` enabled or there are listeners on the `call` event. This might have a performance impact, but likely very small. Note that SQLite itself can only do one DB operation at a time (per connection). 19 | 20 | ## 3.5.2 21 | 22 | ### Fixes 23 | 24 | - SQLite: prevent interpolation in query debug output 25 | - Remove old build dependencies 26 | - Slightly improve types, mostly replacing null returns with undefined 27 | 28 | ## 3.5.1 29 | 30 | ### Fixes 31 | 32 | - ESModel preprocessor was not resetting \_maxId properly 33 | 34 | ## 3.5.0 35 | 36 | ### Changes 37 | 38 | - JsonModel: Backwards cursors 39 | - ESDB: deprecate `undefined` in reducer results 40 | 41 | ## 3.4.5 42 | 43 | ### Fixes 44 | 45 | - ESDB: fix multiple dispatches in transact returning old event 46 | 47 | ## 3.4.4 48 | 49 | ### Fixes 50 | 51 | - JsonModel: throw parsing errors also with debugging enabled; remove dataloader creation debug statements 52 | - Statement: Fix sticky errored state of prepared statements 53 | 54 | ## 3.4.3 55 | 56 | ### Fixes 57 | 58 | - ESDB: fix throwing when dispatching outside but during processing 59 | 60 | ## 3.4.2 61 | 62 | ### Fixes 63 | 64 | - JsonModel: from `.each()`, call `.search()` with `noTotal` 65 | - deprecation warnings fail in prod 66 | 67 | ## 3.4.1 68 | 69 | ### Fixes 70 | 71 | - JsonModel: `.each()` was returning too early 72 | 73 | ## 3.4.0 74 | 75 | ### 
Changes 76 | 77 | Fun new features: 78 | 79 | - JsonModel: `.each(attrs, options, fn)` now takes `concurrent` to limit the concurrently running functions. This adds a dependency on the tiny `async-sema` package. 80 | - ESDB: Added `transact({event, model, store, dispatch})` phase to the event processing flow. In this callback, you can call `dispatch` to generate and await sub-events, and calling ESModel will work too (any model can use the `dispatch` given via the constructor). 81 | - ESDB: `dispatch({type, data, ts})` (passing everything in a single argument) is now also possible, as well as for `addEvent`. 82 | - ESModel: provide event creators for set, update and remove. 83 | 84 | We also have some tiny API changes that don't warrant a major version: 85 | 86 | - `sqlite.userVersion(v?: number)` now always returns the user version, also when setting it. This makes more sense from an API perspective and has as good as no performance impact. 87 | - EventQueue: `.setKnownV()` is now synchronous and no longer returns a Promise. This is only a breaking change if you were using `.then()` instead of `await`. 88 | This requires the use of `AsyncLocalStorage`, and thus the minimum NodeJS version is now v12.17 (which is already ancient). 89 | 90 | ### Fixes 91 | 92 | - ESDB: fix deadlock on queue add in migration (due to sync version setting) 93 | - JM: while paging, if the total == the limit, the cursor is now also null, because there is no next page. 94 | 95 | ## 3.3.2 96 | 97 | ### Changes 98 | 99 | - SQLite: add `emit` event for query result monitoring 100 | - Small typing improvements 101 | 102 | ### Fixes 103 | 104 | - ESDB: fix userVersion poll loop on error 105 | 106 | ## 3.3.1 107 | 108 | ### Changes 109 | 110 | - ESModel: allow changeId while writable 111 | - deprecations are printed as console.warn 112 | 113 | ## 3.3.0 114 | 115 | ### Deprecations 116 | 117 | - The `dispatch()` function that is passed to redux methods was renamed to `addEvent()`. 
118 | 119 | ### Changes 120 | 121 | - SQLite: added `.runOnceOnOpen()` to register functions that should be run on the open database but shouldn't open the database 122 | - Renamed `dispatch()` to `addEvent()` inside the event processing flow. `dispatch()` still works but gives a deprecation warning. The `.dispatch()` method is not affected. 123 | 124 | ### Fixes 125 | 126 | - Statement: fixed an issue with error propagation on opening the database 127 | 128 | ## 3.2.1 129 | 130 | - ESDB: fix rare race condition where read-only connection doesn't see just-committed transaction 131 | - JM: Better error message on wrong colName in `.get()` 132 | - JM: Fix small issue with table name in error message on incorrect columns 133 | 134 | ## 3.2.0 135 | 136 | - Improve typings in JSDoc 137 | - Add TypeScript typing file 138 | 139 | This unfortunately means that the automatic API.md creation no longer works, because jsdoc can't parse the TS types. 140 | 141 | ## 3.1.1 142 | 143 | - ESModel - `getNextId()` fix (was returning incorrect values when run inside a subevent) 144 | 145 | ## 3.1.0 146 | 147 | ### Breaking 148 | 149 | - The `cache` argument to `JsonModel.clearCache(cache, [id], [colName])` is no longer optional, and the method will now always return the `DataLoader` instance 150 | 151 | ### Changes 152 | 153 | - Added TypeScript types generated from the JSDoc, and improved some definitions 154 | - There was a deadlock in some circumstances where the initialization of ESModel could wait on the EventQueue and vice versa 155 | - `JsonModel.getAll(ids, [colName])` now optimizes getting 0 and 1 objects 156 | 157 | ## 3.0.0 158 | 159 | ### Breaking 160 | 161 | - The package builds for NodeJS v10 now. 162 | - EventSourcingDB events that result in errors now halt processing and have to be fixed before processing continues 163 | - `waitForP` was removed from DB, use `onWillOpen` instead, if it returns a Promise that will be waited for. 
164 | - The EventSourcingDB version is now stored in the SQLite `user_version` pragma, and the `metadata` model is no longer available by default. If you need it, add `metadata: {}` to the `models` passed to ESDB 165 | - `DB.models` was renamed to `DB.store` for consistency with ESDB and also to be different from the `models` option. `DB.models` still works but will output an error on first use in non-production. 166 | - The `result` argument passed to derivers is now the result of the deriver's model. All results are still available at `event.result` 167 | - DB connections now set `PRAGMA recursive_triggers` 168 | - In NODE_ENV=development, the order of unordered query results will sometimes be reversed to show where ordering is not consistent. In test this is not done since the ordering is always the same and used in snapshots etc. 169 | - The `meta` argument in ESModel `.set` and `.update` moved to 4th position to make room for `noResult` 170 | - EventSourcingDB no longer checks for pending events when instantiated. You have to do this yourself with `.checkForEvents()` or simply `.startPolling()` 171 | - DB no longer returns itself on `.exec()`. There's no reason for having it and it saves some GC work. 172 | - `.applyChanges(result)` was renamed to `.applyResult(result)` 173 | - the debug namespace was changed to `strato-db` 174 | - `applyChanges` was moved from JsonModel to a separate helper function `applyResult(model, result)` 175 | - EventSourcingDB now passes `emitter` as an option to models, so they can subscribe to events. You have to take it out before passing the options to `JsonModel`. 176 | - Migration metadata is now stored in the table `{sdb} migrations` instead of `_migrations`. There is a migration procedure, but don't open your DBs with previous versions of strato-db, since the old versions will try to run the migrations again (and fail, so the data is safe). 
177 | 178 | ### Deprecated 179 | 180 | - reducers are now called with a single `{model, event, store, dispatch, isMainEvent}` object like preprocessor and deriver. Old reducers with multiple arguments are automatically wrapped and result in a deprecation message 181 | 182 | ### Changes 183 | 184 | - EventSourcingDB refactor: 185 | - sub-events! You can dispatch events during events; they are handled depth-first in the same transaction. If any result in error, they error the parent event 186 | - make error handling more robust 187 | - simplify redux loop 188 | - retry failed events with increasing timeouts and exit program after an hour 189 | - ESModel will now emit a `${model.INIT}` event to allow setting up the table, if you pass `init: true` 190 | - DB, JsonModel, EventSourcingDB: Better debugging information for queries and errors 191 | - DB: split into SQlite and the migrations-adding DB 192 | - SQlite: add `autoVacuum` option, sets up incremental vacuuming. If there are > 20x `vacuumPageCount` free pages, it will free `vacuumPageCount` pages every `vacuumInterval` seconds. Defaults to 1MB (of 4KB pages) and 30s. 193 | - SQlite: limit WAL file size after transaction to 4MB 194 | - SQlite: run `PRAGMA optimize` every 2 hours 195 | - SQlite: emit `'begin'`, `'rollback'`, `'end'`, `'finally'` on transactions as EventEmitter 196 | - JsonModel: `.set` and `.update` take the `noReturn` boolean as their 3rd argument to indicate they don't have to return the value, as an optimization 197 | - SQLite: add `.inTransaction` boolean that indicates if `withTransaction` is active 198 | - JsonModel: `.update` reuses a running `withTransaction`, so there is probably never a reason to use `.updateNoTrans` 199 | - EventQueue: `.latestVersion()` is deprecated in favor of `.getMaxV()` 200 | - JsonModel: if the id column is not an integer type (which means that sqlite uses it as the `rowId`), `rowId` will be added as a column. 
This ensures that the VACUUM command doesn't change the `rowid`s so that references to them won't become invalid. To disable this you can pass `keepRowId: false` to JsonModel. 201 | - EventSourcingDB: provide a `cache` object to the preprocessor and reducer, which can be used to optimize data fetching. The cache is shared and only valid during the read-only phase of the event handling 202 | 203 | ## 2.3.3 204 | 205 | - JsonModel: fix using columns with paths in `getAll` 206 | 207 | ## 2.3.2 208 | 209 | - JsonModel: don't error on existing indexes 210 | 211 | ## 2.3.1 212 | 213 | - JsonModel 214 | - refactor: split in multiple files 215 | - change array queries so they can be prepared 216 | - fix expression index creation before column 217 | - EventQueue: test before calling timer.unref() 218 | - build: Upgrade to Babel 7 and latest ESLint 219 | 220 | ## 2.3.0 221 | 222 | Minor version change due to index changes on queue for ESDB 223 | 224 | - DB: add filename and stack to SQLite errors 225 | - JsonModel: allow thunking definitions, that way you can create helpers to define columns 226 | - JsonModel: add `.each([attrs, [options]], fn)` to iterate over search results 227 | `fn(row, i)` will be called with row data and row number 228 | - EventSourcingDB: `withViews = true` option to add the helper views to the queue 229 | - EventQueue: drop the `type` index and use `type, size`; always add `size` column 230 | 231 | ## 2.2.3 232 | 233 | - EventQueue: Ensure that queue insertions maintain their order 234 | - DB/EventSourcingDB: Fix `readOnly` mode and add test 235 | 236 | ## 2.2.2 237 | 238 | - First public release! 
239 | - EventSourcingDB: the metadata table can be used as well 240 | - EventQueue: don't keep Node alive while waiting unless `forever: true` 241 | 242 | ## 2.2.1 243 | 244 | - JsonModel: value() on non-real columns is now stored 245 | 246 | ## 2.2.0 247 | 248 | JsonModel: 249 | 250 | - `column.alwaysObject`: for JSON columns: always have an object at that path, and store empty objects as NULL 251 | - `column.falsyBool`: store booleanish value as true/undefined. `real:true` makes the column be integer type 252 | querying also works with truthy and falsy values 253 | 254 | ## 2.1.0 255 | 256 | JsonModel: 257 | 258 | - `column.where(val, origVal)`: Now the original value is also available to the `where` function 259 | - fix json column detection for non-JSON columns 260 | 261 | ## 2.0.0 262 | 263 | JsonModel: breaking API change 264 | 265 | - `jsonPath` is now `path` and defaults to column name 266 | - Columns can be real or virtual 267 | - Real columns are put where `path` wants it 268 | - `value` can always be used to calculate field values, for real and virtual columns 269 | - `parse` and `stringify` convert values from/to database values 270 | - you can nest column parsing etc, it will run them in the right order 271 | 272 | To upgrade: 273 | 274 | - delete `jsonPath` where it's the same as the column name 275 | - rename `jsonPath` to `path` 276 | - delete `value` where it's just extracting the same field as the column name, replace with `type` (and indicate the column type) or `real: true` 277 | - `get` is now `true` by default, so remove it when true and set it to `false` when missing 278 | 279 | ## 1.4.0 280 | 281 | - JsonModel: stricter options with better type checking 282 | - EventQueue: add \_recentHistory and \_historyTypes views for debugging 283 | 284 | ## 1.3.0 285 | 286 | - ESModel: add metadata to event data on index 3 287 | 288 | ## 1.2.0 289 | 290 | - JsonModel: `required` flag on column makes sure the result of value() is non-null and sets 
allowNull to false for proper indexing 291 | 292 | ## 1.1.0 293 | 294 | - Directly depend on mapbox/sqlite3 by copying the relevant code from kriasoft/sqlite 295 | - Remove Bluebird dependency 296 | -------------------------------------------------------------------------------- /Readme.md: -------------------------------------------------------------------------------- 1 | # Strato-DB 2 | 3 | > MaybeSQL with Event Sourcing based on SQLite 4 | 5 | The overall concept is to be a minimal wrapper that keeps SQL close by, but allows schemaless storage for where you want it. 6 | 7 | ## Install 8 | 9 | ```shell 10 | npm install strato-db 11 | ``` 12 | 13 | ## Usage 14 | 15 | Simple CRUD DB: 16 | 17 | ```js 18 | import {DB, JsonModel} from 'strato-db' 19 | 20 | const db = new DB({file: 'data/mydb.sqlite3', verbose: true}) 21 | 22 | class Things extends JsonModel { 23 | constructor(options) { 24 | super({ 25 | ...options, 26 | name: 'things', 27 | columns: { 28 | id: {type: 'INTEGER'}, 29 | count: {type: 'INTEGER', index: 'SPARSE'}, 30 | }, 31 | }) 32 | } 33 | } 34 | 35 | db.addModel(Things) 36 | 37 | // db only opens the file once this runs 38 | await db.store.things.set({id: 5, name: 'hi', count: 3}) 39 | // Get all items that have count 3 40 | console.log(await db.store.things.search({count: 3})) 41 | ``` 42 | 43 | DB with Event Sourcing: 44 | 45 | ```js 46 | import {DB, EventQueue, EventSourcingDB, ESModel} from 'strato-db' 47 | 48 | const qDb = qFile && qFile !== file ? 
new DB({file: qFile, verbose}) : db 49 | qDb.addModel(EventQueue, {name: 'queue'}) 50 | const queue = qDb.store.queue 51 | 52 | class ESThings extends ESModel { 53 | constructor(options) { 54 | super({ 55 | ...options, 56 | name: 'things', 57 | columns: { 58 | id: {type: 'INTEGER'}, 59 | count: {type: 'INTEGER', index: 'SPARSE'}, 60 | }, 61 | }) 62 | } 63 | } 64 | 65 | const eSDB = new EventSourcingDB({ 66 | db, 67 | queue, 68 | models: {things: {Model: ESThings}}, 69 | }) 70 | 71 | await eSDB.store.things.set({id: 5, name: 'hi', count: 3}) 72 | console.log(await eSDB.store.things.search({count: 3})) 73 | // See the created events 74 | console.log(await eSDB.queue.all()) 75 | ``` 76 | 77 | ## API 78 | 79 | The API is class-based. There are types in JSDoc and in types.d.ts, which are the only documentation for now. 80 | 81 | The design of EventSourcingDB is discussed in [Server Side Redux](./Server Side Redux.md) 82 | 83 | Classes: 84 | 85 | - `SQLite`: Wraps a Sqlite3 database with a lazy-init promise interface 86 | - `DB`: Adds models and migrations to SQLite3 87 | - `JsonModel`: Stores given objects in a `DB` instance as JSON fields with an `id` column, other columns can be calculated or be virtual. You can perform searches via the wrapper on defined columns. 88 | - `EventQueue`: Stores events. Minimal message queue. 89 | - `EventSourcingDB`: Implements the Event Sourcing concept using EventQueue. See [Server Side Redux](./Server Side Redux.md). 90 | - `ESModel`: A drop-in replacement for JsonModel to use EventSourcingDB. Modifications are dispatched as events and awaited 91 | 92 | With the TypeScript definitions you can provide a Type for the stored objects and the config each model uses. This allows typechecking CRUD inputs and results, even in plain JS (with JSDoc comments). 93 | 94 | ## Status 95 | 96 | This project is used in production environments. 97 | 98 | Since it wraps SQLite, the actual storage of data is rock-solid. 
99 | 100 | It works fine with multi-GB databases, and if you choose your queries and indexes well, you can have <1ms query times. 101 | 102 | The important things are tested, our goal is 100% coverage. 103 | 104 | Multi-process behavior is not very worked out for the `EventSourcingDB`: 105 | 106 | - Since it's layering a single-locking queue on top of SQLite, it works without problems, but no effort is made yet to avoid double work. It would require workers "locking" events and watching each other's timestamps. 107 | - To have DB slaves, the idea would be to either use distributed SQLite as implemented by BedrockDB, or to distribute the event queue to slaves and have them derive their own copy of the data. 108 | 109 | Take a look at [the planned improvements](./TODO.md). 110 | 111 | ## License 112 | 113 | MIT © [Wout Mertens](https://stratokit.io) 114 | -------------------------------------------------------------------------------- /Server Side Redux.md: -------------------------------------------------------------------------------- 1 | # Server Side Redux 2 | 3 | ## Concept 4 | 5 | Run a Redux-like cycle on the server side to transform incoming events into database writes. 6 | 7 | Basic ideas from [Turning The Database Inside Out](https://www.confluent.io/blog/turning-the-database-inside-out-with-apache-samza/) talk. 8 | 9 | ## Parts 10 | 11 | - **Version**: All changes have a version `v`, a non-zero monotonically increasing positive integer. 12 | - **Event**: An object with `v` (assigned by the queue), `type` and arbitrary `data` to describe a past event or desired change to server state. 13 | - Examples: "this `user` logged in", "this `user` requested changing this `document`", "this `weatherData` arrived", "this `amount` needs to be refunded", … 14 | - **Event queue**: A FIFO store, holding events until they are processed. 15 | - This should be a very reliable store. 16 | - When an `event` is put on the queue, it gets a unique incremented `v`. 
17 | - Events can lead to sub-events, which are processed as part of the transaction of the event 18 | - **State**: The state consists of sets of objects with at-least-per-set-unique-ids. 19 | - Database tables can store a row per set entry, so that's what we'll do. Each set is a table. We also store the current version in a separate one-row table (so that concurrent transactions conflict). The database is presumed to always be consistent (all tables and version in sync). 20 | - **Middleware => Preprocessors**: Redux middleware is mostly implemented as preprocessor functions that run within the event handling transaction, and can alter the event based on data from all tables. Preprocessors should only be used to convert events into a canonical representation. Side-effects like I/O should be performed by restartable workers that store their state in the DB ("this needs doing", "this was done", "this failed, retry x more times", …). 21 | - **Reducers**: Reducers are pure functions that get current state and the event, and return a description of what should happen to the state to get to the next version. 22 | - Current state (the DB model) is accessed by calling (asynchronous) DB functions 23 | - Each reducer is responsible for a single set (table) 24 | - Contrary to Redux, in ESDB reducers also get access to the state of other reducers, this turns out to be very useful 25 | - Given the same state and event, the reducer always returns the same result 26 | - The result is an object `{error: {message: "..."}, set: [{id, ...}], rm: [id, ...], events: [], audit, ...}` 27 | - `error`: Present if the event can not be processed for some reason, contains information to debug the problem. This halts all event processing until the problem is fixed. To represent e.g. denial of requests, use a different key (ESModel uses `esFail`) and inspect the event result to see if the request was granted. 28 | - `events`: any sub-events that should be processed. 
They are handled in-order after applying the changes of the parent event. 29 | - Several keys are used by the JsonModel `applyResult`: 30 | - `set`: objects to replace in the table 31 | - `ins`: objects to insert in the table (errors if exist) 32 | - `upd`: objects to shallow-update 33 | - `sav`: objects to shallow-update or insert if missing 34 | - `rm`: ids of objects to remove from the table 35 | - by subclassing, the behavior can be tweaked 36 | - any other keys: opaque data describing the change, can be informational or used by a custom `applyResult` 37 | - **Derivers**: Functions that calculate "secondary state" from the changes. They can serve to make the event result smaller 38 | - **History**: An ordered list of reduced events (so `{v, type, data, result}`). This is not required, and the event can be abridged. It could serve as an audit log. If all the original event data is retained, it can be used to reprocess the database from scratch. 39 | - **Sub-events**: To make the event processing code simpler, ESDB allows adding derived events from anywhere in the redux cycle. These events are processed depth-first after normal event processing, inside the same transaction. For example, a USER_LOGIN event can result in a USER_REGISTERED event if they logged in via OAuth for the first time. 40 | 41 | ## Flow 42 | 43 | - Inbox cycle: 44 | 45 | - An event comes in. Think of events as newspaper headlines, they describe something that happened. They are not commands, they are facts and requests. 46 | - Any event that needs external information to process should be split up into multiple asynchronous events that have all the necessary data. 47 | - **Dispatch**: The event is stored on the queue and auto-assigned a version `v` 48 | 49 | - Redux cycle: 50 | - **Wait for event**: 51 | - Based on the DB processed version, get the next event from the queue. 52 | - **Start Transaction**: 53 | - All the below now runs in a transaction in a separate read-write DB connection. 
54 | - The separate connection makes sure that other code only sees the previous version via the default read-only connection 55 | - If any step fails, the transaction is rolled back and retried later. 56 | - All later events are held until the error is resolved, possibly through manual resolution (by fixing the code, the event data or disabling/removing the event). 57 | - As such, avoid errors at all times, instead recording failure states. 58 | - **Preprocess**: 59 | - `preprocess`ors can change the event object before it's passed to the reducers. 60 | - Mutation is allowed, but this is stored in the DB, so make sure it's repeatable. 61 | - Version cannot change, but e.g. id could be assigned. 62 | - For example, ESModel uses this to make sure the data always includes the object id even for new objects. 63 | - Failing preprocessors abort the transaction 64 | - **Reduce**: 65 | - The event is passed to all `reducer`s 66 | - All reducers see the same state, the DB after processing the previous event 67 | - Reducers produce a change description and sub-events 68 | - Event object becomes history object with `result` attribute 69 | - Failing reducers abort the transaction 70 | - **Apply** 71 | - All results are written, including history object 72 | - The way they are written is arbitrary, implemented by an `applyResult` method 73 | - Failing applyResult functions abort the transaction 74 | - **Derive** 75 | - `deriver`s are called with the history object 76 | - They change their models as they see fit 77 | - Failing derivers abort the transaction 78 | - **SubEvents** 79 | - Each added subevent undergoes these same steps in the same transaction, with the same version number. 
80 | - **Transact** 81 | - All `transact` callbacks are called sequentially in undefined order 82 | - They receive a `dispatch` function that behaves like the `ESDB.dispatch` method but adds and waits for subevents 83 | - This is an alternative to chaining reducer calls by adding subevents; a transact function keeps the logic together. 84 | - **End transaction** 85 | - The DB is now at the event version. 86 | - **Listeners**: 87 | - They get called with the history object after the Redux cycle completes. 88 | - Note that side-effect workers should wait until the queue is processed (`eSDB.waitForQueue()`), making sure they are not working from stale data 89 | 90 | The Inbox flow can happen at any time; the Reduce/Write cycle happens sequentially. 91 | 92 | If an incoming request is for some side-effect change, this should be stored as a sequence of events, recording the intent, the intermediate states and the end result. The database is then used by worker functions to know the current state of side-effects. These workers should be restartable and correctly manage real-world state. 
93 | 94 | ## Advantages 95 | 96 | All the advantages of Redux, but applied to the server: 97 | 98 | - time travel (requires snapshots or reversible events (storing the full calculated change with the event)) 99 | - clean code, easy to unit test 100 | - reprocessing: just start from 0 101 | - reducers can run in parallel on the same event, interleaving I/O requests 102 | - easy to generate audit log 103 | 104 | ## Limitations 105 | 106 | ### Risks 107 | 108 | - Data loss can occur in all the normal ways, but the event log in essence duplicates the data 109 | - The hard-line approach of failing events halting event processing can result in servers needing immediate care, but it is really the only sane way to handle data, and the event handling code should be robust 110 | 111 | ### Model 112 | 113 | It is sometimes harder to model all changes as events, for example spawning side effects and keeping track of their state. 114 | 115 | ### Resource use 116 | 117 | - Requires a data store with transactions, so that the new state and history is stored in one go. 118 | - Not well suited for high-volume writes due to per-event "synchronous" updating. Caching can help. 119 | - Not well suited for change sets exceeding available working memory. (e.g. delete all records). Those have to be special-cased within the `applyEvent` code. 120 | - Not well suited for huge events since events are loaded into memory during processing and written several times (e.g. >2MB of data in the event). Move big data out of the event and use side-effects. 121 | - Single master for event queue. Sharding might help if that's a problem. 122 | 123 | ## Implementation 124 | 125 | Up until v3 this was implemented by redux with changes for asynchronous behavior, but in v3 the concepts were implemented directly to allow for sub-events. 126 | 127 | ### GraphQL mutations 128 | 129 | Mutations assume that they can return either with an Error or the result value of the mutation. 
This means keeping track of the event execution as it passes through all the layers. To that end, `dispatch` tracks event completion even if it happened in another process. 130 | 131 | ## DevOps Scenarios 132 | 133 | ### Start from existing database 134 | 135 | - Add history and version table, mark all tables as a first version 136 | - Add new reducers, see below 137 | 138 | ### Add new table/reducer 139 | 140 | - Add reducer to reducers 141 | - Either: 142 | - Calculate/provide table at current db version in a migration 143 | - Start empty, run all stored events 144 | 145 | ### Change reducer 146 | 147 | - Change reducer in reducers 148 | - Either: 149 | - Convert table in a migration 150 | - Start empty, run all stored events 151 | 152 | ### Running out of disk 153 | 154 | - Prune history 155 | - Remove old entries, perhaps keeping the last X, as well as important ones 156 | - Remove metadata of old entries, perhaps only keeping the audit information 157 | - Upgrade the server 158 | 159 | ### Running out of CPU 160 | 161 | - Shard DB: create more writers 162 | - Copy the DB to multiple masters 163 | - Replace the reducers with sharding versions 164 | - Update tables in a migration, pruning data not belonging to the shard 165 | - Duplicate servers: create more readers 166 | - Copy the DB etc to multiple readers 167 | - Synchronize events so there is absolute ordering and no previous version insertions after a version was stored in the DB 168 | - Upgrade the server 169 | -------------------------------------------------------------------------------- /TODO.md: -------------------------------------------------------------------------------- 1 | # TODO list 2 | 3 | ## General 4 | 5 | - use JSDoc to move types back to implementations as much as possible, so subclassing etc works 6 | - probably everything we need is covered by JSDoc now 7 | - allow using https://github.com/rqlite/rqlite-js as a backend for a distributed DB 8 | - Change the multi-access tests to use 
`"file:memdb1?mode=memory&cache=shared"` for shared access to the same in-memory db (probably when using better-sqlite, it requires file uri support) 9 | - Give DB and ESDB the same API for registering models (.addModel) 10 | - Optimize: 11 | - [ ] create benchmark 12 | - [ ] API to get prepared statements from JM .search 13 | - Some mechanism to quit running processes when the schema changes. Maybe store a user-defined schema version in `PRAGMA application_id`? Isn't nice to have to check it every time data_version changes though :( 14 | 15 | ## node-sqlite3 16 | 17 | ### Someday 18 | 19 | - [ ] sync interface for e.g. pragma data_version and BEGIN IMMEDIATE. Already did some work on it but it segfaults. Alternatively, use better-sqlite in a worker 20 | 21 | ## SQLite 22 | 23 | ### Important 24 | 25 | - [ ] when opening, handle the error `{code: 'SQLITE_CANTOPEN'}` by retrying later 26 | 27 | ### Nice to have 28 | 29 | - [ ] event emitter proxying the sqlite3 events 30 | - [ ] `ensureTable(columns)`: accept column defs and create/add if needed, using pragma table_info 31 | 32 | ```text 33 | > pragma table_info("_migrations"); 34 | cid|name|type|notnull|dflt_value|pk 35 | 0|runKey|STRING|0||0 36 | 1|ts|DATETIME|0||0 37 | 2|up|BOOLEAN|0||0 38 | ``` 39 | 40 | - [ ] `ensureIndexes(indexes, dropUnused)` manage indexes, using PRAGMA index*list. Drop unused indexes with `\_sdb*` prefix 41 | - [ ] create a worker thread version that uses better-sqlite. Benchmark. 42 | - [ ] support better-sqlite if it's ok for the main thread to hang 43 | 44 | ### Someday 45 | 46 | - [ ] use the btree info extension to provide index sizes at startup if debug enabled 47 | - [ ] async iterators for `db.each()`. Although node-sqlite3 actually slurps the entire table into an array so it would only be useful for better-sqlite 48 | 49 | ## DB 50 | 51 | ### Nice to have 52 | 53 | - [ ] if migration is `{undo:fn}` run the `undo` only if the migration ran before. 
We never needed `down` migrations so far. 54 | - if you want to run something only on existing databases, first deploy a no-op `()=>{}` migration and then change it to an `undo` 55 | 56 | ## JsonModel 57 | 58 | ### Important 59 | 60 | - [ ] .each() limit should apply to total fetched, breaking change 61 | - FTS5 support for text searching on columns 62 | - It uses the table as a backing table 63 | - Real columns marked `fts: true|object` generate a FTS5 index 64 | - `true` is shorthand for `{group: 'all', allColumns: false}` 65 | - `group` is a name for the index so you can group columns in separate indexes, for example by language 66 | - `allColumns`, if true, means to search all columns for this group. This can be used on non-real columns 67 | - Content columns have to be real columns, otherwise FTS5 can't refer to them. So, throw an error if the column is not real except if `allColumns: true`. 68 | - `textSearch: true` should be deprecated and means `fts: {group: 'all'}` if `real: true` 69 | - other options could be added to configure tokenizing 70 | - one index per `fts.group` value (defaults to `'all'`) 71 | - Searching passes the search argument to the FTS5 index of the group 72 | - The search is limited to the column unless `allColumns: true` 73 | - ¿Updates to the FTS index are applied by JsonModel, not triggers? why/why not 74 | - [ ] columns using the same path should get the same JSON path. There are some edge cases. 75 | - [ ] falsyBool paging doesn't work because it tries to >= and that fails for null. It should add a "sortable: false" flag 76 | 77 | ### Nice to have 78 | 79 | - [ ] removeFalsy column decorator: remove falsy elements (except '' and 0) from JSON before storing 80 | - [ ] validate(value): must return truthy given the current value (from path or value()) or storing throws 81 | - [ ] column.version: defaults to 1. 
When version increases, all rows are rewritten 82 | - do not change extra columns, that is what migrations are for 83 | - [ ] recreate index if expression changes 84 | - [ ] indexes: `[{expression, where}]` extra indexes 85 | - [ ] auto-delete other indexes, API change 86 | - column helpers: 87 | - [ ] objectColumn() helper -> type=JSON, NULL === {}, stringify checks if object (char 0 is `{`) 88 | - [ ] boolColumn() -> `type="INTEGER"; parse = Boolean; stringify=Boolean` 89 | - [ ] falsyColumn() -> implement falsyBool 90 | - note that `col.where = (${col.sql} IS NULL)=?` doesn't need a where function but won't use a sparse index. 91 | So maybe, for sparse index falsybool, only do 'is not null' and throw if false 92 | - [ ] uuidColumn() -> use buffer stringify/parse to implement efficient UUID by default. See https://stackoverflow.com/questions/20342058/which-uuid-version-to-use 93 | - [ ] foreign key support 94 | - [ ] prepared statements for .search 95 | - `q = m.prepareSearch(args, options); q.search(args, options) // not allowed to change arg items, where or sort` 96 | - However, `whereVal` values should be allowed to change 97 | - But `where` should stay the same and should not be recalculated, so best if it is not a function. Most of the time this can be done. 98 | - Probably `.makeSelect()` would need to return an intermediate query object 99 | - Note: When using prepared statements, replace `IN (?,?,?)` clauses with `IN (SELECT value FROM json_each(?))` and pass the array as a JSON string. That way the prepared statement can handle any array length 100 | - Benchmark test that warns if runtime increases on current system 101 | - getting/setting can be optimized by creating Functions instead of lodash get/set, but first create benchmark 102 | - it's probably better to always create same object from columns and then assign json if not null 103 | - Test for `uniqueSlugId` 104 | - Booleans should be stored as 0/1 if real, except when sparse indexing, then NULL/1. 
If not real, the index and where clause should be `IFNULL(json..., false)` 105 | 106 | ## Queue 107 | 108 | ### Important 109 | 110 | - [ ] allow marking an event as being processed, by setting worker id `where workerId is null` or something similar 111 | - [ ] workers should register in a table and write timestamps for a watchdog 112 | - [ ] while an event is being worked, next event can't be worked on. 113 | - [ ] it may be better to `INSERT IF NOT EXISTS` a knownV event instead of using the sequence table? 114 | 115 | ### Nice to have 116 | 117 | - [ ] split history into multiple files, per 1GB, automatically attach for queries. (make sure it's multi-process safe - lock the db, make sure new writes are not possible in old files) 118 | - [ ] test multi-process changes 119 | 120 | ## ESModel 121 | 122 | ### Nice to have 123 | 124 | - [ ] .get for the RO ESModel uses .getCached, with a caching-map limiting the amount, cleared when the version changes 125 | - [ ] .changeId (`mv:[[oldId, newId],…]` apply action?) 126 | 127 | ## ESDB 128 | 129 | ### Important 130 | 131 | - [ ] split queue in history (append-only) and results. The results are only for debugging and include one row per subevent and a diff vs the original data after preprocessing. 132 | - Ideally, the results go in a different db that can be split at will. 133 | - for multi-process, lock the result db exclusively to worker 134 | - re-processing events clears all subevent rows 135 | - [ ] Add `beforeApply` phase which runs after all reducers ran so it has access to the state of the DB before the changes are applied. 136 | 137 | ### Nice to have 138 | 139 | - [ ] add eventSpy, e.g. 
`eSDB.debug(boolean|{filter()})` 140 | - [ ] in non-prod, randomly run preprocessor twice (keep event in memory and restart handling) to verify repeatability 141 | - [ ] don't store empty result sub-events 142 | - [ ] `reducerByType` object keyed by type that gets the same arguments as preprocessor 143 | - same for preprocessor/deriver/transact 144 | - [ ] explore read-only clones that get the event queue changes only, dispatches go to primary db. Will need Raft implementation. 145 | -------------------------------------------------------------------------------- /build-git.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | BUILDDIR=lib 3 | 4 | die() { 5 | echo "!!! $* !!!" >&2 6 | exit 1 7 | } 8 | if ! git diff --quiet; then 9 | die Repo is not clean 10 | fi 11 | 12 | CURRENT=$(git branch --no-color 2>/dev/null | sed -e '/^[^*]/d' -e 's/^\* //') 13 | ORIGIN=$(git config branch."$CURRENT".remote) 14 | if [ -z "$ORIGIN" ]; then 15 | die "Cannot determine origin, are you on a branch?" 16 | fi 17 | if [ -n "$1" ]; then 18 | CURRENT=$1 19 | fi 20 | B=${CURRENT}-build 21 | echo "=== Building and pushing to $ORIGIN/$B ===" 22 | 23 | if ! nps test; then 24 | die Tests failed 25 | fi 26 | if ! nps build; then 27 | die Could not build 28 | fi 29 | if ! git add -f $BUILDDIR; then 30 | die Could not add to commit 31 | fi 32 | if ! git commit -m build; then 33 | die Could not commit 34 | fi 35 | 36 | clean() { 37 | # This undoes the last commit but leaves the build in place 38 | git reset HEAD^ 39 | } 40 | 41 | if ! git push -f "$ORIGIN" "HEAD:$B"; then 42 | clean 43 | die Could not push to "$ORIGIN/$B" 44 | fi 45 | 46 | if ! 
clean; then 47 | die Could not clean temporary commit 48 | fi 49 | -------------------------------------------------------------------------------- /flake.lock: -------------------------------------------------------------------------------- 1 | { 2 | "nodes": { 3 | "nixpkgs": { 4 | "locked": { 5 | "lastModified": 1715653339, 6 | "narHash": "sha256-7lR9tpVXviSccl07GXI0+ve/natd24HAkuy1sQp0OlI=", 7 | "owner": "NixOS", 8 | "repo": "nixpkgs", 9 | "rev": "abd6d48f8c77bea7dc51beb2adfa6ed3950d2585", 10 | "type": "github" 11 | }, 12 | "original": { 13 | "owner": "NixOS", 14 | "ref": "nixpkgs-unstable", 15 | "repo": "nixpkgs", 16 | "type": "github" 17 | } 18 | }, 19 | "root": { 20 | "inputs": { 21 | "nixpkgs": "nixpkgs" 22 | } 23 | } 24 | }, 25 | "root": "root", 26 | "version": 7 27 | } 28 | -------------------------------------------------------------------------------- /flake.nix: -------------------------------------------------------------------------------- 1 | { 2 | description = "Dev environment for strato-db"; 3 | 4 | inputs = { 5 | # Make sure to use the same locked commits as the nix-infra deploys 6 | # That way the packages are shared 7 | nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable"; 8 | }; 9 | 10 | outputs = { self, nixpkgs }: 11 | let 12 | b = builtins; 13 | # Make sure that this include runtime libs linked by npm builds 14 | deps = pkgs: with pkgs; [ 15 | bashInteractive 16 | sqlite-interactive 17 | # NodeJS 18 | nodejs_22 19 | corepack_22 20 | ]; 21 | makeDevShell = system: pkgs: { 22 | default = pkgs.mkShell { 23 | nativeBuildInputs = (deps pkgs) ++ (with pkgs; [ 24 | gitMinimal 25 | 26 | # sqlite3 module 27 | sqlite-interactive.dev 28 | ]); 29 | shellHook = '' 30 | export PATH=$PWD/node_modules/.bin:$PATH 31 | ''; 32 | }; 33 | }; 34 | in 35 | { 36 | devShells = b.mapAttrs (makeDevShell) nixpkgs.legacyPackages; 37 | }; 38 | } 39 | -------------------------------------------------------------------------------- /license: 
-------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) Wout Mertens 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 6 | 7 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 8 | 9 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 10 | -------------------------------------------------------------------------------- /package-scripts.cjs: -------------------------------------------------------------------------------- 1 | /* eslint-disable @typescript-eslint/no-var-requires */ 2 | const {series} = require('nps-utils') 3 | const {version} = require('./package.json') 4 | 5 | const isPR = process.env.GH_EVENT === 'pull_request' 6 | const comparisonRef = isPR ? `origin/${process.env.BASE_REF}` : 'HEAD^' 7 | 8 | const scripts = { 9 | build: { 10 | default: `nps build.clean build.lib`, 11 | git: `sh build-git.sh v${version.split('.')[0]}`, 12 | clean: '[ ! 
-e dist-types ] || rm -r dist-types/', 13 | lib: 'vite build --mode lib', 14 | types: `tsc --emitDeclarationOnly`, 15 | }, 16 | lint: { 17 | default: 'eslint .', 18 | // The setup-node action parses eslint errors, no formatter needed 19 | ci: isPR 20 | ? `git diff --name-only --diff-filter=ACMRTUXB ${comparisonRef} | grep -E "\\.[jt]sx?$" | xargs -d \\\\n eslint` 21 | : `eslint .`, 22 | errors: 'eslint --format visualstudio --quiet .', 23 | fix: `eslint --fix .; prettier --write .`, 24 | }, 25 | test: { 26 | default: series.nps('lint', 'test.full'), 27 | // Note, this changes the repo during the run 28 | ci: isPR 29 | ? `git reset ${comparisonRef} && vitest --coverage --color --changed; out=$?; git reset HEAD@{1}; exit $out` 30 | : `vitest run --coverage --color`, 31 | full: 'vitest run --coverage --color', 32 | watch: 'vitest --color --watch', 33 | }, 34 | publish: `npx np`, 35 | } 36 | 37 | module.exports = {scripts} 38 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "strato-db", 3 | "version": "3.11.0", 4 | "description": "NoSQL-hybrid with Event Sourcing based on sqlite", 5 | "license": "MIT", 6 | "repository": "https://github.com/StratoKit/strato-db", 7 | "author": { 8 | "name": "Wout Mertens", 9 | "email": "Wout.Mertens@gmail.com" 10 | }, 11 | "main": "./dist/index.js", 12 | "module": "./dist/index.mjs", 13 | "types": "./types.d.ts", 14 | "files": [ 15 | "dist", 16 | "types.d.ts", 17 | "Readme.md" 18 | ], 19 | "engines": { 20 | "node": ">=16.4" 21 | }, 22 | "scripts": { 23 | "start": "nps", 24 | "test": "npx nps test", 25 | "prepublishOnly": "nps test.full build" 26 | }, 27 | "keywords": [ 28 | "sqlite", 29 | "nosql", 30 | "event sourcing", 31 | "esdb", 32 | "jsonmodel", 33 | "json", 34 | "database", 35 | "db" 36 | ], 37 | "dependencies": { 38 | "async-sema": "^3", 39 | "dataloader": "^2", 40 | "debug": "^4", 41 
| "jsurl2": "^2.2.0", 42 | "lodash": "^4", 43 | "prop-types": "^15", 44 | "sqlite3": "^5", 45 | "uuid": ">=8 <10" 46 | }, 47 | "devDependencies": { 48 | "@types/node": "^22.10.7", 49 | "@typescript-eslint/eslint-plugin": "^7.16.0", 50 | "@typescript-eslint/parser": "^7.16.0", 51 | "@vitest/coverage-v8": "3.0.5", 52 | "eslint": "^8.44.0", 53 | "eslint-config-prettier": "^8.8.0", 54 | "eslint-config-xo": "^0.43.1", 55 | "eslint-plugin-jsdoc": "^48.7.0", 56 | "eslint-plugin-prettier": "^5.1.3", 57 | "eslint-plugin-promise": "^6.4.0", 58 | "eslint-plugin-unicorn": "^47.0.0", 59 | "eslint-plugin-vitest-globals": "^1.5.0", 60 | "fs-extra": "^11.3.0", 61 | "nps": "^5.10.0", 62 | "nps-utils": "^1.7.0", 63 | "prettier": "^3.4.2", 64 | "prettier-plugin-jsdoc": "^1.3.2", 65 | "tmp-promise": "3.0.3", 66 | "typescript": "^5.7.3", 67 | "vite": "^6.1.0", 68 | "vitest": "3.0.5" 69 | }, 70 | "packageManager": "pnpm@10.2.1" 71 | } 72 | -------------------------------------------------------------------------------- /src/DB/DB.js: -------------------------------------------------------------------------------- 1 | import {sortBy} from 'lodash' 2 | import debug from 'debug' 3 | import SQLite, {sql} from './SQLite' 4 | import {DEV, deprecated} from '../lib/warning' 5 | 6 | const dbg = debug('strato-db/DB') 7 | 8 | export const _getRanMigrations = async db => { 9 | if ( 10 | !(await db.get(`SELECT 1 FROM sqlite_master WHERE name="{sdb} migrations"`)) 11 | ) { 12 | await ((await db.get( 13 | `SELECT 1 FROM sqlite_master WHERE name="_migrations"` 14 | )) 15 | ? 
db.exec(`ALTER TABLE _migrations RENAME TO "{sdb} migrations"`) 16 | : db.exec(`CREATE TABLE "{sdb} migrations"( 17 | runKey TEXT, 18 | ts DATETIME, 19 | up BOOLEAN 20 | );`)) 21 | } 22 | const didRun = {} 23 | await db.each( 24 | ` 25 | SELECT runKey, max(ts) AS ts, up FROM "{sdb} migrations" 26 | GROUP BY runKey 27 | HAVING up = 1 28 | `, 29 | ({runKey}) => { 30 | didRun[runKey] = true 31 | } 32 | ) 33 | return didRun 34 | } 35 | 36 | const _markMigration = async (db, runKey, up) => { 37 | const ts = Math.round(Date.now() / 1000) 38 | up = up ? 1 : 0 39 | await db.run`INSERT INTO "{sdb} migrations" VALUES (${runKey}, ${ts}, ${up})` 40 | } 41 | 42 | /** 43 | * DB adds model management and migrations to Wrapper. The migration state is 44 | * kept in the table ""{sdb} migrations"". 45 | * 46 | * @implements {DB} 47 | */ 48 | class DBImpl extends SQLite { 49 | /** @param {DBOptions} options */ 50 | constructor({migrations = [], onBeforeMigrations, ...options} = {}) { 51 | const onDidOpen = options.readOnly 52 | ? options.onDidOpen 53 | : async db => { 54 | if (onBeforeMigrations) await onBeforeMigrations(db) 55 | await this.runMigrations(db) 56 | if (options.onDidOpen) await options.onDidOpen(db) 57 | } 58 | super({...options, onDidOpen}) 59 | this.options.migrations = migrations 60 | } 61 | 62 | static sql = sql 63 | 64 | get models() { 65 | if (DEV) deprecated(`use db.store instead of db.models`) 66 | return this.store 67 | } 68 | 69 | /** 70 | * Add a model to the DB, which will manage one or more tables in the SQLite 71 | * database. The model should use the given `db` instance at creation time. 72 | * 73 | * @param {Object} Model - A class. 74 | * @param {Object} options - Options passed during Model creation. 75 | * @returns {Object} - The created Model instance. 
76 | */ 77 | addModel(Model, options) { 78 | const model = new Model({ 79 | ...options, 80 | db: this, 81 | }) 82 | if (this.store[model.name]) 83 | throw new TypeError(`Model name ${model.name} was already added`) 84 | this.store[model.name] = model 85 | return model 86 | } 87 | 88 | /** 89 | * Register an object with migrations. 90 | * 91 | * @param {string} name 92 | * 93 | * - The name under which to register these migrations. 94 | * 95 | * @param {Record>} migrations 96 | * 97 | * - The migrations object. 98 | * 99 | * @returns {void} 100 | */ 101 | registerMigrations(name, migrations) { 102 | if (this.migrationsRan) { 103 | throw new Error('migrations already done') 104 | } 105 | for (const key of Object.keys(migrations)) { 106 | let obj = migrations[key] 107 | if (typeof obj === 'function') { 108 | obj = {up: obj} 109 | } else if (!obj.up) { 110 | throw new Error( 111 | `Migration ${key} for "${name}" must be a function or have an "up({db, model, ...rest})" attribute` 112 | ) 113 | } 114 | // Separate with space, it sorts before other things 115 | const runKey = `${key} ${name}` 116 | this.options.migrations.push({ 117 | ...obj, 118 | runKey, 119 | }) 120 | } 121 | } 122 | 123 | /** 124 | * Runs the migrations in a transaction and waits for completion. 125 | * 126 | * @param {SQLite} db - An opened SQLite instance. 127 | * @returns {Promise} - Promise for completed migrations. 
128 | */ 129 | async runMigrations(db) { 130 | const {store, options} = this 131 | const migrations = sortBy(options.migrations, ({runKey}) => runKey) 132 | await db.withTransaction(async () => { 133 | const didRun = await _getRanMigrations(db) 134 | for (const model of Object.values(store)) 135 | if (model.setWritable) model.setWritable(true) 136 | for (const {runKey, up} of migrations) { 137 | if (!didRun[runKey]) { 138 | dbg(this.name, 'start migration', runKey) 139 | await up(db) 140 | dbg(this.name, 'done migration', runKey) 141 | await _markMigration(db, runKey, 1) 142 | } 143 | } 144 | for (const model of Object.values(store)) 145 | if (model.setWritable) model.setWritable(false) 146 | }) 147 | this.migrationsRan = true 148 | 149 | // Protect against store updates during migrations 150 | this.store = store 151 | } 152 | } 153 | 154 | export default DBImpl 155 | -------------------------------------------------------------------------------- /src/DB/DB.test.js: -------------------------------------------------------------------------------- 1 | import sysPath from 'path' 2 | import tmp from 'tmp-promise' 3 | import DB, {_getRanMigrations} from './DB' 4 | 5 | test('can register model', () => { 6 | const db = new DB() 7 | class Hi { 8 | name = 'hi' 9 | } 10 | const m = db.addModel(Hi) 11 | expect(m.name).toBe('hi') 12 | expect(db.store.hi).toBe(m) 13 | expect(() => db.addModel(Hi)).toThrow() 14 | return db.close() 15 | }) 16 | 17 | test('has migration', async () => { 18 | const db = new DB() 19 | let canary = 0 20 | // eslint-disable-next-line promise/catch-or-return 21 | db.dbP.then(() => { 22 | // This should run after the migrations 23 | if (canary === 2) canary = 3 24 | return true 25 | }) 26 | db.registerMigrations('whee', { 27 | 0: { 28 | up: mDb => { 29 | if (canary === 0) canary = 1 30 | expect(mDb.store).toEqual({}) 31 | return mDb.exec(` 32 | CREATE TABLE foo(hi NUMBER); 33 | INSERT INTO foo VALUES (42); 34 | `) 35 | }, 36 | }, 37 | 1: mDb => { 38 | 
// DB migration behavior: registration rules, run order, bookkeeping, hooks.

test('refuses late migrations', async () => {
	const instance = new DB()
	instance.registerMigrations('whee', {0: {up: () => {}}})
	await instance.open()
	// Once the db is open, registering more migrations must fail
	expect(() =>
		instance.registerMigrations('whee', {1: {up: () => {}}})
	).toThrow()
	await instance.close()
})

test('runs migrations in writable mode', async () => {
	const instance = new DB()
	let phase = 0
	instance.registerMigrations('whee', {
		0() {
			if (phase === 1) phase = 2
		},
	})
	instance.addModel(
		class T {
			setWritable(on) {
				// writable turns on before migrations run and off afterwards
				if (on && phase === 0) phase = 1
				if (!on && phase === 2) phase = 3
			}
		}
	)
	await instance.open()
	expect(phase).toBe(3)
	await instance.close()
})

test('sorts migrations', async () => {
	const instance = new DB()
	const order = []
	const recorder = tag => ({
		up: () => {
			order.push(tag)
		},
	})
	instance.registerMigrations('whee', {c: recorder('c')})
	instance.registerMigrations('aah', {b: recorder('b')})
	instance.registerMigrations('whee', {a: recorder('a')})
	await instance.open()
	// Runs in key order across models, not in registration order
	expect(order).toEqual(['a', 'b', 'c'])
	await instance.close()
})

test('marks migrations as ran', async () => {
	const instance = new DB()
	const count = {a: 0, b: 0}
	instance.registerMigrations('whee', {
		a: {
			up: () => {
				count.a++
			},
		},
	})
	instance.registerMigrations('whee', {
		b: {
			up: () => {
				count.b++
			},
		},
	})
	await instance.open()
	const ran = await _getRanMigrations(instance)
	expect(ran).toEqual({'a whee': true, 'b whee': true})
	await instance.close()
})

test('fails open on failed migration', async () => {
	const instance = new DB()
	instance.registerMigrations('whee', {
		a: () => {
			throw new Error('nope')
		},
	})
	await expect(instance.open()).rejects.toThrow('nope')
	// Preparing still works, but running surfaces the open failure
	let stmt
	expect(() => (stmt = instance.prepare('SELECT 1'))).not.toThrow()
	await expect(stmt.run()).rejects.toThrow('nope')
})

test('close()', async () => {
	const instance = new DB()
	await instance.exec(`
		CREATE TABLE foo(hi NUMBER);
		INSERT INTO foo VALUES (42);
	`)
	const {hi} = await instance.get(`SELECT * FROM foo`)
	expect(hi).toBe(42)
	// This clears db because it's in memory only
	await instance.close()
	await instance.exec(`
		CREATE TABLE foo(hi NUMBER);
		INSERT INTO foo VALUES (43);
	`)
	const {hi: hi2} = await instance.get(`SELECT * FROM foo`)
	expect(hi2).toBe(43)
	await instance.close()
})

test('onBeforeMigrations', async () => {
	let stage = 0
	const instance = new DB({
		onBeforeMigrations() {
			if (stage === 0) stage = 1
		},
	})
	instance.registerMigrations('meep', {
		c: {
			up: () => {
				if (stage === 1) stage = 2
			},
		},
	})
	await instance.open()
	// Hook ran strictly before the migration did
	expect(stage).toBe(2)
	await instance.close()
})

test('onWillOpen', async () => {
	let stage = 0
	const instance = new DB({
		onWillOpen() {
			if (stage === 0) stage = 1
		},
	})
	instance.registerMigrations('meep', {
		c: {
			up: () => {
				if (stage === 1) stage = 2
			},
		},
	})
	await instance.open()
	// Hook ran strictly before the migration did
	expect(stage).toBe(2)
	await instance.close()
})
// Auto-(re)creating prepared statements, a single instance per SQL string.
// Every public call is serialized per statement.
// No parameter binding at creation for now.
// Somewhat based on node-sqlite3 by Kriasoft, LLC

import debug from 'debug'
const dbg = debug('strato-db/DB:stmt')

let nextStmtId = 0
/** @implements {Statement} */
class StatementImpl {
	constructor(db, sql, name) {
		// The db keeps one Statement per unique SQL string
		db.statements[sql] = this
		this.db = db
		this._sql = sql
		this._name = `{${nextStmtId++}${name ? ` ${name}` : ''}}`
		this.name = `${db.name}${this._name}`
	}

	get isStatement() {
		return true
	}

	get sql() {
		return this._sql
	}

	// Serialization chain; every call is appended behind the previous one
	_P = Promise.resolve()

	/**
	 * Queue `fn` behind all earlier calls, (re)preparing the statement first.
	 *
	 * @param {() => Promise | any} fn - The function to wrap.
	 * @returns {Promise} The result of the function.
	 */
	_wrap(fn) {
		// Always verify _stmt and fail if preparation fails
		const task = () => this._refresh().then(fn)
		// Keep strict in-order execution; an earlier failure doesn't block us
		this._P = this._P.then(task, task)
		return this._P
	}

	// (Re)prepare the underlying sqlite statement when we don't have one
	_refresh = async () => {
		if (this._stmt) return
		this._stmt = await this.db._call(
			'prepare',
			[this._sql],
			this.db._sqlite,
			this.name,
			false,
			true
		)
		// Re-register; a prior finalize() removed us from the cache
		this.db.statements[this._sql] = this
	}

	finalize() {
		delete this.db.statements[this._sql]
		const {_stmt} = this
		// Nothing prepared means nothing to finalize
		if (!_stmt) return Promise.resolve()
		return this._wrap(
			() =>
				new Promise((resolve, reject) => {
					delete this._stmt
					_stmt.finalize(err => {
						if (err) {
							// Finalize failed: keep the statement usable
							if (!this._stmt) this._stmt = _stmt
							return reject(err)
						}
						dbg(`${this.name} finalized`)
						resolve()
					})
				})
		)
	}

	/**
	 * Run the statement and return the metadata.
	 *
	 * @param {any[]} [vars] - The variables to be bound to the statement.
	 * @returns {Promise} - An object with `lastID` and `changes`
	 */
	async run(vars) {
		return this._wrap(() => this.db._call('run', vars, this, this.name, true))
	}

	/**
	 * Return the first row for the statement result.
	 *
	 * @param {any[]} [vars] - The variables to be bound to the statement.
	 * @returns {Promise} - The result or falsy if missing.
	 */
	async get(vars) {
		return this._wrap(() =>
			this.db._call('get', vars, this, this.name).finally(
				() =>
					// Reset so a later get() steps from the first row again
					this._stmt &&
					new Promise(resolve => {
						this._stmt.reset(() => {
							resolve(this)
						})
					})
			)
		)
	}

	/**
	 * Return all result rows for the statement.
	 *
	 * @param {any[]} [vars] - The variables to be bound to the statement.
	 * @returns {Promise} - The results.
	 */
	async all(vars) {
		return this._wrap(() => this.db._call('all', vars, this, this.name))
	}

	async each(args, onRow) {
		if (typeof onRow !== 'function')
			throw new Error(`signature is .each(args Array, cb Function)`)
		// The err argument is always null, so only forward the row
		return this._wrap(() =>
			this.db._call('each', [args, (_, row) => onRow(row)], this, this.name)
		)
	}
}

export default StatementImpl
| const db = new DB({file}) 50 | await db.exec('CREATE TABLE foo(id INTEGER PRIMARY KEY)') 51 | const s = db.prepare('SELECT id FROM foo LIMIT 1') 52 | await db.exec('INSERT INTO foo VALUES (1)') 53 | expect(await s.get()).toEqual({id: 1}) 54 | await expect(db.exec('INSERT INTO foo VALUES (1)')).rejects.toThrow( 55 | 'SQLITE_CONSTRAINT' 56 | ) 57 | await db.close() 58 | expect(await s.get()).toEqual({id: 1}) 59 | })) 60 | 61 | test('ignores previous failure', async () => { 62 | const db = new DB() 63 | await db.exec('CREATE TABLE foo(id INTEGER PRIMARY KEY)') 64 | const s = db.prepare('INSERT INTO foo VALUES (?)') 65 | await s.run([1]) 66 | await expect(s.run([1])).rejects.toThrow() 67 | await expect(s.run([2])).resolves.toBeDefined() 68 | }) 69 | 70 | // TODO test get, all, run, each with parallel reads (only one should run at a time) 71 | -------------------------------------------------------------------------------- /src/DB/index.js: -------------------------------------------------------------------------------- 1 | export {default} from './DB' 2 | export {default as SQLite, valToSql, sql} from './SQLite' 3 | -------------------------------------------------------------------------------- /src/EventQueue.js: -------------------------------------------------------------------------------- 1 | // Note that this queue doesn't use any transactions by itself, to prevent deadlocks 2 | // Pass `forever: true` to keep Node running while waiting for events 3 | import debug from 'debug' 4 | import JsonModel from './JsonModel' 5 | 6 | const dbg = debug('strato-db/queue') 7 | 8 | let warnedLatest 9 | 10 | /** @typedef {defaultColumns} Columns */ 11 | const defaultColumns = { 12 | v: { 13 | type: 'INTEGER', 14 | autoIncrement: true, 15 | }, 16 | type: {type: 'TEXT'}, 17 | ts: { 18 | type: 'INTEGER', 19 | value: o => Number(o.ts) || Date.now(), 20 | index: 'ALL', 21 | }, 22 | data: {type: 'JSON'}, 23 | result: {type: 'JSON'}, 24 | size: {type: 'INTEGER', default: 0, get: 
/**
 * An event queue, including history.
 *
 * @template {T}
 * @template {U}
 * @implements {EventQueue}
 */
class EventQueueImpl extends JsonModel {
	/** @param {EQOptions} */
	constructor({name = 'history', forever, withViews, ...rest}) {
		const columns = {...defaultColumns}
		if (rest.columns)
			for (const [key, value] of Object.entries(rest.columns)) {
				if (!value) continue
				// User columns may only add to, never replace, the queue schema
				if (columns[key]) throw new TypeError(`Cannot override column ${key}`)
				columns[key] = value
			}
		super({
			...rest,
			name,
			idCol: 'v',
			columns,
			migrations: {
				...rest.migrations,
				addTypeSizeIndex: ({db}) =>
					db.exec(
						`CREATE INDEX IF NOT EXISTS "history type,size" on history(type, size)`
					),
				'20190521_addViews': withViews
					? async ({db}) => {
							const historySchema = await db.all('PRAGMA table_info("history")')
							// This adds a field with data size, kept up-to-date with triggers
							if (!historySchema.some(f => f.name === 'size'))
								await db.exec(
									`ALTER TABLE history ADD COLUMN size INTEGER DEFAULT 0`
								)
							// The size WHERE clause is to prevent recursive triggers
							await db.exec(`
								DROP TRIGGER IF EXISTS "history size insert";
								DROP TRIGGER IF EXISTS "history size update";
								CREATE TRIGGER "history size insert" AFTER INSERT ON history BEGIN
									UPDATE history SET
										size=ifNull(length(new.json),0)+ifNull(length(new.data),0)+ifNull(length(new.result),0)
									WHERE v=new.v;
								END;
								CREATE TRIGGER "history size update" AFTER UPDATE ON history BEGIN
									UPDATE history SET
										size=ifNull(length(new.json),0)+ifNull(length(new.data),0)+ifNull(length(new.result),0)
									WHERE v=new.v AND size!=ifNull(length(new.json),0)+ifNull(length(new.data),0)+ifNull(length(new.result),0);
								END;

								DROP VIEW IF EXISTS _recentHistory;
								DROP VIEW IF EXISTS _historyTypes;
								CREATE VIEW _recentHistory AS
									SELECT datetime(ts/1000, "unixepoch", "localtime") AS t, *
									FROM history ORDER BY v DESC LIMIT 1000;
								CREATE VIEW _historyTypes AS
									SELECT
										type,
										COUNT(*) AS count,
										SUM(size)/1024/1024 AS MB
									FROM history GROUP BY type ORDER BY count DESC;
							`)
							// Recalculate size: the update trigger above fills it back in
							await db.exec(`UPDATE history SET size=0`)
						}
					: null,
			},
		})
		// Cached highest version; -1 means "unknown, query the table"
		this.currentV = -1
		this.knownV = 0
		this.forever = !!forever
	}

	/**
	 * Replace existing event data.
	 *
	 * @param {Event} event - The new event.
	 * @returns {Promise} - Promise for set completion.
	 */
	set(event) {
		if (!event.v) {
			throw new Error('cannot use set without v')
		}
		// Invalidate the cached max version; the write may change it
		this.currentV = -1
		return super.set(event)
	}

	/** @deprecated Use {@link getMaxV} instead. */
	latestVersion() {
		if (process.env.NODE_ENV !== 'production' && !warnedLatest) {
			const {stack} = new Error(
				'EventQueue: latestVersion() is deprecated, use getMaxV instead'
			)
			// eslint-disable-next-line no-console
			console.error(stack)
			warnedLatest = true
		}
		return this.getMaxV()
	}

	/**
	 * Get the highest version stored in the queue.
	 *
	 * @returns {Promise} - The version.
	 */
	async getMaxV() {
		// Wait for any in-flight add() so we see its version
		if (this._addP) await this._addP

		const dataV = await this.db.dataVersion()
		if (this.currentV >= 0 && this._dataV === dataV) {
			// If there was no change on other connections, currentV is correct
			return this.currentV
		}
		this._dataV = dataV
		// Re-prepare when the underlying db connection changed
		if (this._maxSql?.db !== this.db)
			this._maxSql = this.db.prepare(
				`SELECT MAX(v) AS v from ${this.quoted}`,
				'maxV'
			)
		const lastRow = await this._maxSql.get()
		this.currentV = Math.max(this.knownV, lastRow.v || 0)
		return this.currentV
	}

	// Chain of pending add() calls, also awaited by getMaxV()
	_addP = null

	/**
	 * Atomically add an event to the queue.
	 *
	 * @param {string} type - Event type.
	 * @param {any} [data] - Event data.
	 * @param {number} [ts=Date.now()] - Event timestamp, ms since epoch. Default
	 *   is `Date.now()`
	 * @returns {Promise} - Promise for the added event.
	 */
	add(type, data, ts) {
		if (!type || typeof type !== 'string')
			return Promise.reject(new Error('type should be a non-empty string'))
		ts = Number(ts) || Date.now()

		// We need to guarantee same-process in-order insertion, the sqlite3 lib doesn't do it :(
		this._addP = (this._addP || Promise.resolve()).then(async () => {
			// Store promise so getMaxV can get the most recent v
			// Note that it replaces the promise for the previous add
			// sqlite-specific: INTEGER PRIMARY KEY is also the ROWID and therefore the lastID and v
			if (this._addSql?.db !== this.db)
				this._addSql = this.db.prepare(
					`INSERT INTO ${this.quoted}(type,ts,data) VALUES (?,?,?)`,
					'add'
				)
			const {lastID: v} = await this._addSql.run([
				type,
				ts,
				JSON.stringify(data),
			])

			this.currentV = v

			const event = {v, type, ts, data}
			dbg(`queued`, v, type)
			// Wake up any getNext() waiting in this process
			if (this._nextAddedResolve) {
				this._nextAddedResolve(event)
			}
			return event
		})
		return this._addP
	}

	// Promise that resolves with the next same-process event (or timeout/CANCEL)
	_nextAddedP = null

	// Resolve the wait promise and stop the poll timer
	_nextAddedResolve = event => {
		if (!this._resolveNAP) return
		clearTimeout(this._addTimer)
		this._NAPresolved = true
		this._resolveNAP(event)
	}

	// promise to wait for next event with timeout
	_makeNAP() {
		// Reuse the current promise while it is still pending
		if (this._nextAddedP && !this._NAPresolved) return
		this._nextAddedP = new Promise(resolve => {
			this._resolveNAP = resolve
			this._NAPresolved = false
			// Timeout after 10s so we can also get events from other processes
			this._addTimer = setTimeout(this._nextAddedResolve, 10_000)
			// if possible, mark the timer as non-blocking for process exit
			// some mocking libraries might forget to add unref()
			if (!this.forever && this._addTimer && this._addTimer.unref)
				this._addTimer.unref()
		})
	}

	/**
	 * Get the next event after v (gaps are ok). The wait can be cancelled by
	 * `.cancelNext()`.
	 *
	 * @param {number} [v=0] The version. Default is `0`
	 * @param {boolean} [noWait] Do not wait for the next event.
	 * @returns {Promise} The event if found.
	 */
	async getNext(v = 0, noWait = false) {
		let event
		if (!noWait) dbg(`${this.name} waiting unlimited until >${v}`)
		do {
			this._makeNAP()
			const currentV = await this.getMaxV()
			event =
				v < currentV
					? await this.searchOne(null, {
							where: {'v > ?': [Number(v)]},
							sort: {v: 1},
						})
					: null
			if (event || noWait) break
			// Wait for next one from this process
			event = await this._nextAddedP
			if (event === 'CANCEL') return
			// Ignore previous events
			if (v && event && event.v < v) event = null
		} while (!event)
		return event
	}

	/** Cancel any pending `.getNext()` calls */
	cancelNext() {
		if (!this._resolveNAP) return
		// BUGFIX: go through _nextAddedResolve so the 10s poll timer is
		// cleared and the promise is marked resolved. Resolving _resolveNAP
		// directly left _NAPresolved false, so the next _makeNAP() kept the
		// already-cancelled promise and every later getNext() returned
		// undefined immediately instead of waiting.
		this._nextAddedResolve('CANCEL')
	}

	/**
	 * Set the latest known version. New events will have higher versions.
	 *
	 * @param {number} v - The last known version.
	 */
	setKnownV(v) {
		// set the sqlite autoincrement value
		// Try changing current value, and insert if there was no change
		// This doesn't need a transaction, either one or the other runs and
		// both are sent in the same command so nothing will run in between
		this.db.runOnceOnOpen(db =>
			db
				.exec(
					`
						UPDATE sqlite_sequence SET seq = ${v} WHERE name = ${this.quoted};
						INSERT INTO sqlite_sequence (name, seq)
							SELECT ${this.quoted}, ${v} WHERE NOT EXISTS
								(SELECT changes() AS change FROM sqlite_sequence WHERE change <> 0);
					`
				)
				.catch(error => {
					// eslint-disable-next-line no-console
					console.error(`setKnownV: could not update sequence`, error)
					db.close()
				})
		)
		this.currentV = Math.max(this.currentV, v)
		this.knownV = v
	}
}

export default EventQueueImpl
test('setKnownV', async () => {
	const queue = getModel()
	expect(await queue.getMaxV()).toBe(0)
	// internal API
	await queue.setKnownV(20)
	expect(await queue.getMaxV()).toBe(20)
	await queue.set({v: 500, type: 'fooo'})
	expect(await queue.getMaxV()).toBe(500)
})

test('add event', async () => {
	const queue = getModel()
	const added = await queue.add('test', {foo: 'hi'})
	expect(added.v).toBeTruthy()
	expect(added.ts).toBeTruthy()
	expect(added.data.foo).toBe('hi')

	// Bulk insert keeps working and every event is stored
	await expect(populate(queue, 200)).resolves.not.toThrow()
	const events = await queue.search({type: 't'})
	expect(events.items).toHaveLength(200)
})

test('getNext(undef/0)', async () => {
	const queue = getModel()
	await queue.setKnownV(50)
	await populate(queue, 5)
	// Without a version, the first event after knownV is returned
	const next = await queue.getNext()
	expect(next.v).toBe(51)
})

test('getNext() waits', async () => {
	const queue = getModel()
	await queue.setKnownV(10)
	expect(await queue.get(11)).toBeFalsy()
	// Start waiting before the event exists
	const pending = queue.getNext()
	await queue.add('t')
	const first = await pending
	expect(first && first.v).toBe(11)
	const pending2 = queue.getNext(first.v)
	await queue.add('u')
	const second = await pending2
	expect(second.v).toBe(12)
	expect(second.type).toBe('u')
})

test('getNext(v, true) polls once', async () => {
	const queue = getModel()
	await queue.setKnownV(10)
	expect(await queue.get(11)).toBeFalsy()
	// noWait: resolves undefined when there is nothing yet
	expect(await queue.getNext(null, true)).toBe(undefined)
	await queue.add('t')
	const found = await queue.getNext(10, true)
	expect(found.v).toBe(11)
	expect(found.type).toBe('t')
})

test('allow JsonModel migrations', async () => {
	const queue = getModel({
		migrations: {
			test({model}) {
				return model.add('TEST', {hi: true})
			},
		},
	})
	const event = await queue.getNext()
	expect(event.data.hi).toBe(true)
})

test('type query uses index', async () => {
	const queue = getModel()
	expect(
		await queue.db.get(
			`EXPLAIN QUERY PLAN SELECT type FROM history where type='foo'`
		)
	).toHaveProperty('detail', expect.stringContaining('USING COVERING INDEX'))
})

test('cancelNext', async () => {
	const queue = getModel()
	const pending = queue.getNext(100, false)
	queue.cancelNext()
	// A cancelled wait resolves with undefined
	await expect(pending).resolves.toBe()
})
// Run `fn` against two ESDB instances sharing the same db and queue files
const withDbs = fn => async () => {
	const dir = await tmp.dir({unsafeCleanup: true, prefix: 'esdb-concurrent-'})
	const {path} = dir
	const file = sysPath.join(path, 'db')
	const queueFile = sysPath.join(path, 'q')
	const db1 = new ESDB({
		file,
		queueFile,
		name: 'E',
		models: testModels,
	})
	const db2 = new ESDB({
		file,
		queueFile,
		name: 'E',
		models: testModels,
	})
	await db1.waitForQueue()
	await db2.waitForQueue()

	try {
		await fn({db1, db2})
	} finally {
		// Always tear down both connections and the temp dir
		await db1.close()
		await db2.close()
		await dir.cleanup()
	}
}

test(
	'multiple ESDB',
	withDbs(async ({db1, db2}) => {
		let id = 1
		let version = 0
		do {
			// The second instance only sees a version once the event settled
			let eventP = db1.dispatch('ins', {id})
			expect(await db2.getVersion()).toBe(version)
			version = (await eventP).v
			expect(await db2.getVersion()).toBe(version)
			eventP = db1.dispatch('upd', {id: id++, hi: id})
			version = (await eventP).v
			expect(await db2.getVersion()).toBe(version)
		} while (id <= 100)
		expect(version).toBe(200)
	})
)

// Sadly this test doesn't reproduce an issue seen in the wild:
// db not seeing the changes from rwDb right after they
// were committed. We have a fix but no repro.
// Leaving this test in anyway, just in case
test(
	'ro/rw db events',
	withDbs(async ({db1}) => {
		let id = 1
		let version = 0
		do {
			let eventP = db1.dispatch('ins', {id})
			expect(await db1.db.userVersion()).toBe(version)
			version = (await eventP).v
			expect(await db1.db.userVersion()).toBe(version)
			eventP = db1.dispatch('upd', {id: id++, hi: id})
			version = (await eventP).v
			expect(await db1.db.userVersion()).toBe(version)
		} while (id <= 200)
		expect(version).toBe(400)
	})
)

test(
	'subevent handlers see intermediate state',
	withDbs(async ({db1}) => {
		expect(await db1.dispatch('main')).toBeTruthy()
	})
)

test(
	`RO and other DB don't see transaction`,
	withDbs(async ({db1, db2}) => {
		const firstP = new Promise(resolve => {
			resolveMe = resolve
		})
		let resolveSecond
		waitP = new Promise(resolve => {
			resolveSecond = resolve
		})
		const eventP = db1.dispatch('waiter', {id: 'w'})
		// The deriver is now paused mid-transaction
		await firstP
		const version = await db1.getVersion()
		expect(await db1.rwStore.waiter.get('w')).toBeTruthy()
		expect(await db1.store.waiter.get('w')).toBeFalsy()
		expect(await db1.getVersion()).toBe(version)
		expect(await db2.rwStore.waiter.get('w')).toBeFalsy()
		expect(await db2.getVersion()).toBe(version)
		resolveSecond()
		const {v: v2} = await eventP
		expect(await db2.store.waiter.get('w')).toBeTruthy()
		await expect(v2).toBeGreaterThan(version)
	})
)
import sysPath from 'path'
import tmp from 'tmp-promise'
import {JsonModel} from '..'
import ESDB from '.'
import {withESDB, testModels, DB} from '../lib/_test-helpers'

const events = [
	{v: 1, type: 'foo'},
	{v: 2, type: 'bar', data: {gotBar: true}},
]

describe('ESDB create', () => {
	test('works', () =>
		tmp.withDir(
			async ({path: dir}) => {
				const file = sysPath.join(dir, 'db')
				const queueFile = sysPath.join(dir, 'q')
				const esdb = new ESDB({
					file,
					queueFile,
					name: 'E',
					models: testModels,
				})
				// esdb.listen(changes => esdb.reducers.count.get('count'))
				expect(esdb.db).toBeTruthy()
				expect(esdb.rwDb).toBeTruthy()
				expect(esdb.queue).toBeTruthy()
				expect(esdb.models).toBeUndefined()
				expect(esdb.store.count).toBeTruthy()
				expect(esdb.rwStore.count).toBeTruthy()
				// Make sure the read-only database can start (no timeout)
				// and that migrations work
				expect(await esdb.store.count.all()).toEqual([
					{id: 'count', total: 0, byType: {}},
				])
			},
			{unsafeCleanup: true, prefix: 'esdb-create'}
		))

	test('with existing version', () =>
		tmp.withDir(
			async ({path: dir}) => {
				const file = sysPath.join(dir, 'db')
				const db = new DB({file})
				await db.userVersion(100)
				const queueFile = sysPath.join(dir, 'q')
				const esdb = new ESDB({
					file,
					queueFile,
					name: 'E',
					models: testModels,
				})
				// Note that this only works if you open the db first
				await esdb.waitForQueue()
				const event = await esdb.dispatch('hi')
				// New events continue from the pre-existing user version
				expect(event.v).toBe(101)
			},
			{unsafeCleanup: true}
		))

	test('in single file', async () => {
		const esdb = new ESDB({
			name: 'E',
			models: testModels,
		})
		// esdb.listen(changes => esdb.reducers.count.get('count'))
		expect(esdb.db).toBeTruthy()
		expect(esdb.rwDb).toBeTruthy()
		expect(esdb.queue).toBeTruthy()
		expect(esdb.models).toBeUndefined()
		expect(esdb.store.count).toBeTruthy()
		expect(esdb.rwStore.count).toBeTruthy()
		// Make sure the read-only database can start (no timeout)
		// and that migrations work
		expect(await esdb.store.count.all()).toEqual([
			{id: 'count', total: 0, byType: {}},
		])
	})

	test('with Model', () => {
		return withESDB(
			{
				count: {
					Model: class Count extends JsonModel {
						constructor(options) {
							// The custom Model must receive the ESDB plumbing
							if (typeof options.dispatch !== 'function') {
								throw new TypeError('Dispatch expected')
							}
							if (typeof options.emitter !== 'object') {
								throw new TypeError('emitter expected')
							}
							delete options.emitter
							super(options)
						}

						foo() {
							return true
						}
					},
					reducer: testModels.count.reducer,
				},
			},
			esdb => {
				expect(esdb.store.count.foo()).toBe(true)
			}
		)
	})

	test('without given queue', async () => {
		let esdb
		expect(() => {
			esdb = new ESDB({models: {}})
		}).not.toThrow()
		await expect(esdb.dispatch('hi')).resolves.toHaveProperty('v', 1)
	})
})

describe('redux cycle', () => {
	test('reducer works', () => {
		return withESDB(async esdb => {
			const result = await esdb._reducer({}, events[0])
			expect(result).toEqual({
				v: 1,
				type: 'foo',
				result: {
					count: {set: [{id: 'count', total: 1, byType: {foo: 1}}]},
				},
			})
			const result2 = await esdb._reducer({}, events[1])
			expect(result2).toEqual({
				v: 2,
				type: 'bar',
				data: {gotBar: true},
				result: {
					count: {set: [{id: 'count', total: 1, byType: {bar: 1}}]},
				},
			})
		})
	})

	test('preprocess changes pass to reduce', () => {
		const models = {
			foo: {
				preprocessor: ({event}) => {
					if (event.type !== 'meep') return
					return {...event, step: 1}
				},
				reducer: ({event}) => {
					if (event.type !== 'meep') return
					// The reducer must see the preprocessor's change
					expect(event).toHaveProperty('step', 1)
				},
			},
		}
		return withESDB(models, async esdb => {
			await esdb.dispatch('meep')
		})
	})
})

describe('ESDB migrations', () => {
	test('model migrations get queue', async () => {
		let step = 0
		return withESDB(
			{
				count: {
					...testModels.count,
					migrations: {
						async foo({db, model, queue}) {
							expect(step).toBe(0)
							step = 1
							expect(db).toBeTruthy()
							expect(model).toBeTruthy()
							expect(queue).toBeTruthy()
							await queue.add('foo', 0)
						},
					},
				},
			},
			async esdb => {
				await esdb.open()
				expect(step).toBe(1)
				const event = await esdb.queue.searchOne()
				expect(event.type).toBe('foo')
			}
		)
	})

	test('metadata migration', async () => {
		class M extends JsonModel {
			constructor({emitter: _1, ...props}) {
				super({
					...props,
					migrations: {
						...props.migrations,
						1: async ({model}) => model.set({id: 'version', v: 5}),
					},
				})
			}

			static reducer(_args) {}
		}
		const esdb = new ESDB({
			models: {metadata: {Model: M}},
		})
		// Version should be moved to user_version
		expect(await esdb.db.get('PRAGMA user_version')).toHaveProperty(
			'user_version',
			5
		)
		// metadata table should be gone
		expect(
			await esdb.db.get('SELECT * FROM sqlite_master WHERE name="metadata"')
		).toBeFalsy()
	})

	test('metadata migration with existing data', async () => {
		class M extends JsonModel {
			constructor({emitter: _1, ...props}) {
				super({
					...props,
					migrations: {
						...props.migrations,
						1: async ({model}) => {
							await model.set({id: 'version', v: 5})
							await model.set({id: 'hi'})
						},
					},
				})
			}

			static reducer(_args) {}
		}
		const esdb = new ESDB({
			models: {metadata: {Model: M}},
		})
		// Version should be moved to user_version
		expect(await esdb.db.get('PRAGMA user_version')).toHaveProperty(
			'user_version',
			5
		)
		// metadata table should still be there
		expect(
			await esdb.db.get('SELECT * FROM sqlite_master WHERE name="metadata"')
		).toBeTruthy()
		// but version should be gone
		expect(
			await esdb.db.get('SELECT * FROM metadata WHERE id="version"')
		).toBeFalsy()
	})
})
3 | import {withESDB} from '../lib/_test-helpers' 4 | 5 | test('event error in preprocessor', () => 6 | withESDB(async eSDB => { 7 | await expect( 8 | eSDB._handleEvent({type: 'error_pre'}) 9 | ).resolves.toHaveProperty( 10 | 'error._preprocess_count', 11 | expect.stringContaining('pre error for you') 12 | ) 13 | // All the below: don't call next phases 14 | // Error in apply => error: _apply 15 | })) 16 | 17 | test('event error in reducer', () => 18 | withESDB(async eSDB => { 19 | await expect( 20 | eSDB._handleEvent({type: 'error_reduce'}) 21 | ).resolves.toHaveProperty( 22 | 'error._reduce_count', 23 | expect.stringContaining('error for you') 24 | ) 25 | })) 26 | 27 | test('event error in apply', () => { 28 | return withESDB(async eSDB => { 29 | await expect( 30 | eSDB._applyEvent({ 31 | v: 1, 32 | type: 'foo', 33 | result: { 34 | // it will try to call map as a function 35 | count: {set: {map: 5}}, 36 | }, 37 | }) 38 | ).resolves.toHaveProperty( 39 | 'error._apply_apply', 40 | expect.stringContaining('.map is not a function') 41 | ) 42 | }) 43 | }) 44 | 45 | test('event error in deriver', () => 46 | withESDB(async eSDB => { 47 | await expect( 48 | eSDB._handleEvent({v: 1, type: 'error_derive'}) 49 | ).resolves.toHaveProperty( 50 | 'error._apply_derive', 51 | expect.stringContaining('error for you') 52 | ) 53 | })) 54 | 55 | test('event emitter', async () => { 56 | return withESDB(async eSDB => { 57 | let errored = 0, 58 | resulted = 0 59 | eSDB.on('result', event => { 60 | resulted++ 61 | expect(event.error).toBeFalsy() 62 | expect(event.result).toBeTruthy() 63 | }) 64 | eSDB.on('error', event => { 65 | errored++ 66 | expect(event.error).toBeTruthy() 67 | expect(event.result).toBeUndefined() 68 | }) 69 | await eSDB.dispatch('foo') 70 | await eSDB.dispatch('bar') 71 | eSDB.__BE_QUIET = true 72 | await expect(eSDB.dispatch('error_reduce')).rejects.toHaveProperty('error') 73 | expect(errored).toBe(1) 74 | expect(resulted).toBe(2) 75 | }) 76 | }) 77 | 78 | 
test('event replay', async () => 79 | withESDB(async (eSDB, queue) => { 80 | queue.set({ 81 | v: 1, 82 | type: 'TEST', 83 | data: {hi: true}, 84 | result: {}, 85 | error: {test: true}, 86 | }) 87 | 88 | await expect(eSDB.handledVersion(1)).resolves.not.toHaveProperty('error') 89 | })) 90 | 91 | test('model fail shows name', () => { 92 | expect(() => new ESDB({models: {foutje: false}})).toThrow('foutje') 93 | }) 94 | 95 | test('old reducer signature', async () => { 96 | // eslint-disable-next-line no-console 97 | const prev = console.warn 98 | // eslint-disable-next-line no-console 99 | console.warn = vi.fn() 100 | const eSDB = new ESDB({ 101 | models: { 102 | old: { 103 | reducer: (model, event) => 104 | event.type === 'TEST' ? {ins: [{id: 5}]} : false, 105 | }, 106 | }, 107 | }) 108 | // eslint-disable-next-line no-console 109 | expect(console.warn).toHaveBeenCalled() 110 | await eSDB.dispatch('TEST') 111 | expect(await eSDB.store.old.get(5)).toBeTruthy() 112 | // eslint-disable-next-line no-console 113 | console.warn = prev 114 | }) 115 | -------------------------------------------------------------------------------- /src/EventSourcingDB/ESDB-events.test.js: -------------------------------------------------------------------------------- 1 | import {withESDB} from '../lib/_test-helpers' 2 | 3 | describe('ESDB events', () => { 4 | test('applyEvent', () => { 5 | return withESDB(async eSDB => { 6 | await eSDB.db.withTransaction(() => 7 | eSDB._applyEvent( 8 | { 9 | v: 50, 10 | type: 'foo', 11 | result: { 12 | count: {set: [{id: 'count', total: 1, byType: {foo: 1}}]}, 13 | }, 14 | }, 15 | true 16 | ) 17 | ) 18 | expect(await eSDB.store.count.get('count')).toEqual({ 19 | id: 'count', 20 | total: 1, 21 | byType: {foo: 1}, 22 | }) 23 | expect(await eSDB.getVersion()).toBe(50) 24 | }) 25 | }) 26 | 27 | test('dispatch', async () => { 28 | return withESDB(async eSDB => { 29 | const event1P = eSDB.dispatch('whattup', 'indeed', 42) 30 | const event2P = eSDB.dispatch('dude', 
{woah: true}, 55) 31 | expect(await event2P).toEqual({ 32 | v: 2, 33 | type: 'dude', 34 | ts: 55, 35 | data: {woah: true}, 36 | result: { 37 | count: { 38 | set: [{id: 'count', total: 2, byType: {whattup: 1, dude: 1}}], 39 | }, 40 | }, 41 | }) 42 | expect(await event1P).toEqual({ 43 | v: 1, 44 | type: 'whattup', 45 | ts: 42, 46 | data: 'indeed', 47 | result: { 48 | count: {set: [{id: 'count', total: 1, byType: {whattup: 1}}]}, 49 | }, 50 | }) 51 | }) 52 | }) 53 | 54 | test('dispatch object', async () => 55 | withESDB(async eSDB => { 56 | const event1P = eSDB.dispatch({type: 'whattup', data: 'indeed', ts: 42}) 57 | const event2P = eSDB.dispatch({type: 'dude', data: {woah: true}, ts: 55}) 58 | expect(await event2P).toEqual( 59 | expect.objectContaining({ 60 | v: 2, 61 | type: 'dude', 62 | ts: 55, 63 | data: {woah: true}, 64 | }) 65 | ) 66 | expect(await event1P).toEqual( 67 | expect.objectContaining({ 68 | v: 1, 69 | type: 'whattup', 70 | ts: 42, 71 | data: 'indeed', 72 | }) 73 | ) 74 | })) 75 | 76 | test('dispatch invalid object', async () => 77 | withESDB(async eSDB => { 78 | expect(() => eSDB.dispatch({data: 'indeed', ts: 42})).toThrow('type') 79 | expect(() => eSDB.dispatch({type: 5, data: 'indeed', ts: 42})).toThrow( 80 | 'type' 81 | ) 82 | expect(() => eSDB.dispatch({type: 'hi', extra: 'indeed'})).toThrow( 83 | 'extra' 84 | ) 85 | })) 86 | 87 | test('subdispatch object', async () => 88 | withESDB( 89 | { 90 | foo: { 91 | transact: async ({event, dispatch}) => { 92 | if (event.type !== 'hi') return 93 | await expect( 94 | dispatch({type: 'dude', data: {woah: true}, ts: 55}) 95 | ).resolves.toEqual( 96 | expect.objectContaining({type: 'dude', data: {woah: true}}) 97 | ) 98 | }, 99 | }, 100 | }, 101 | async eSDB => { 102 | await eSDB.dispatch('hi') 103 | } 104 | )) 105 | 106 | test('subdispatch invalid object', async () => 107 | withESDB( 108 | { 109 | foo: { 110 | transact: async ({event, dispatch}) => { 111 | if (event.type !== 'hi') return 112 | expect(() => 
dispatch({data: 'indeed', ts: 42})).toThrow('type') 113 | expect(() => dispatch({type: 6, data: 'indeed', ts: 42})).toThrow( 114 | 'type' 115 | ) 116 | expect(() => dispatch({type: 'indeed', extraMeep: 'foo'})).toThrow( 117 | 'extraMeep' 118 | ) 119 | }, 120 | }, 121 | }, 122 | async eSDB => { 123 | await eSDB.dispatch('hi') 124 | } 125 | )) 126 | 127 | test('derivers', async () => { 128 | return withESDB(async eSDB => { 129 | await eSDB.dispatch('bar') 130 | expect(await eSDB.store.deriver.searchOne()).toEqual({ 131 | desc: 'Total: 1, seen types: bar', 132 | id: 'descCount', 133 | }) 134 | }) 135 | }) 136 | 137 | test('preprocessors', async () => { 138 | const models = { 139 | meep: { 140 | preprocessor: async ({event, model, store, dispatch, cache}) => { 141 | if (typeof cache !== 'object') 142 | throw new Error('preprocessor: expecting a cache object') 143 | if (!model) throw new Error('expecting my model') 144 | if (!store) throw new Error('expecting the store') 145 | if (!dispatch) throw new Error('expecting dispatch for subevents') 146 | if (event.type === 'create_thing') { 147 | event.type = 'set_thing' 148 | event.data.id = 5 149 | return event 150 | } 151 | if (event.type === 'pre type') { 152 | delete event.type 153 | return event 154 | } 155 | if (event.type === 'pre version') { 156 | event.v = 123 157 | return event 158 | } 159 | if (event.type === 'bad event') { 160 | return {error: 'Yeah, no.'} 161 | } 162 | }, 163 | reducer: ({event, cache}) => { 164 | if (typeof cache !== 'object') 165 | throw new Error('reducer: expecting a cache object') 166 | if (event.type === 'set_thing') { 167 | return {set: [event.data]} 168 | } 169 | return false 170 | }, 171 | }, 172 | } 173 | return withESDB(models, async eSDB => { 174 | await expect( 175 | eSDB._preprocessor({}, {type: 'pre type'}) 176 | ).resolves.toHaveProperty( 177 | 'error._preprocess_meep', 178 | expect.stringContaining('type') 179 | ) 180 | await expect( 181 | eSDB._preprocessor({}, {type: 'pre 
version'}) 182 | ).resolves.toHaveProperty( 183 | 'error._preprocess_meep', 184 | expect.stringContaining('version') 185 | ) 186 | await expect( 187 | eSDB._preprocessor({}, {type: 'bad event'}) 188 | ).resolves.toHaveProperty( 189 | 'error._preprocess_meep', 190 | expect.stringContaining('Yeah, no.') 191 | ) 192 | await eSDB.dispatch('create_thing', {foo: 2}) 193 | expect(await eSDB.store.meep.searchOne()).toEqual({ 194 | id: '5', 195 | foo: 2, 196 | }) 197 | }) 198 | }) 199 | 200 | test('reducer, deriver data immutable', async () => { 201 | return withESDB( 202 | { 203 | meep: { 204 | reducer: ({event}) => { 205 | if (event.type === 'reduce') event.data.foo = 'bar' 206 | }, 207 | deriver: ({event}) => { 208 | if (event.type === 'derive') event.data.foo = 'bar' 209 | }, 210 | }, 211 | }, 212 | async eSDB => { 213 | eSDB.__BE_QUIET = true 214 | await expect(eSDB.dispatch('reduce', {})).rejects.toHaveProperty( 215 | 'error._reduce_meep' 216 | ) 217 | await eSDB.rwDb.userVersion((await eSDB.rwDb.userVersion()) + 1) 218 | await expect(eSDB.dispatch('derive', {})).rejects.toHaveProperty( 219 | 'error._apply_derive' 220 | ) 221 | } 222 | ) 223 | }) 224 | 225 | test('preprocessor/reducer for ESModel', async () => 226 | withESDB( 227 | { 228 | meep: { 229 | columns: {id: {type: 'INTEGER'}}, 230 | preprocessor: async ({event}) => { 231 | if (event.data && event.data.foo) event.data.ok = true 232 | }, 233 | reducer: ({event}) => { 234 | if (event.type === 'set_thing') { 235 | return {set: [event.data]} 236 | } 237 | return false 238 | }, 239 | }, 240 | }, 241 | async eSDB => { 242 | await eSDB.dispatch('set_thing', {foo: 2}) 243 | expect(await eSDB.store.meep.searchOne()).toEqual({ 244 | id: 1, 245 | foo: 2, 246 | ok: true, 247 | }) 248 | await eSDB.rwStore.meep.set({id: 2}) 249 | const event = await eSDB.queue.get(2) 250 | expect(event.data).toEqual([1, 2, {id: 2}]) 251 | expect(event.result).toEqual({meep: {ins: [{id: 2}]}}) 252 | } 253 | )) 254 | }) 255 | 
-------------------------------------------------------------------------------- /src/EventSourcingDB/ESDB-queue.test.js: -------------------------------------------------------------------------------- 1 | import sysPath from 'path' 2 | import tmp from 'tmp-promise' 3 | import ESDB from '.' 4 | import {withESDB, testModels} from '../lib/_test-helpers' 5 | 6 | test('queue in same db', async () => 7 | tmp.withDir( 8 | async ({path: dir}) => { 9 | const file = sysPath.join(dir, 'db') 10 | const eSDB = new ESDB({ 11 | file, 12 | name: 'E', 13 | models: testModels, 14 | }) 15 | const {queue} = eSDB 16 | queue.add('boop') 17 | const {v} = await queue.add('moop') 18 | eSDB.checkForEvents() 19 | await eSDB.handledVersion(v) 20 | const history = await queue.all() 21 | expect(history).toHaveLength(2) 22 | expect(history[0].type).toBe('boop') 23 | expect(history[0].result).toBeTruthy() 24 | expect(history[1].type).toBe('moop') 25 | expect(history[1].result).toBeTruthy() 26 | await eSDB.dispatch('YO') 27 | }, 28 | {unsafeCleanup: true, prefix: 'esdb-queue'} 29 | )) 30 | 31 | test('waitForQueue', async () => 32 | withESDB(async (eSDB, queue) => { 33 | await expect(eSDB.waitForQueue()).resolves.toBeFalsy() 34 | await queue.add('1') 35 | await queue.add('2') 36 | expect(await eSDB.getVersion()).toBe(0) 37 | const p = eSDB.waitForQueue() 38 | let lastP 39 | for (let i = 3; i <= 10; i++) lastP = queue.add(String(i)) 40 | const num = Number((await p).type) 41 | // should be at least last awaited 42 | expect(num).toBeGreaterThanOrEqual(2) 43 | await lastP 44 | await expect(eSDB.waitForQueue()).resolves.toHaveProperty('type', '10') 45 | // This should return immediately, if not the test will time out 46 | await expect(eSDB.waitForQueue()).resolves.toHaveProperty('type', '10') 47 | })) 48 | 49 | test('waitForQueue race', async () => 50 | withESDB(async (eSDB, queue) => { 51 | queue.add('1') 52 | queue.add('2') 53 | eSDB.waitForQueue() 54 | queue.add('3') 55 | await 
eSDB.handledVersion(3) 56 | await eSDB.handledVersion(3) 57 | queue.add('4') 58 | queue.add('5') 59 | queue.add('6') 60 | eSDB.waitForQueue() 61 | await eSDB.handledVersion(3) 62 | await eSDB.handledVersion(3) 63 | queue.add('7') 64 | eSDB.waitForQueue() 65 | await eSDB.waitForQueue() 66 | queue.add('8') 67 | queue.add('9') 68 | await eSDB.handledVersion(9) 69 | await eSDB.handledVersion(9) 70 | queue.add('10') 71 | queue.add('11') 72 | queue.add('12') 73 | const p = eSDB.handledVersion(12) 74 | eSDB.startPolling(12) 75 | expect(await p).toBeTruthy() 76 | })) 77 | 78 | test('incoming event', async () => { 79 | return withESDB(async eSDB => { 80 | const event = await eSDB.queue.add('foobar') 81 | await eSDB.handledVersion(event.v) 82 | expect(await eSDB.store.count.get('count')).toEqual({ 83 | id: 'count', 84 | total: 1, 85 | byType: {foobar: 1}, 86 | }) 87 | }) 88 | }) 89 | -------------------------------------------------------------------------------- /src/EventSourcingDB/ESDB-readOnly.test.js: -------------------------------------------------------------------------------- 1 | import sysPath from 'path' 2 | import tmp from 'tmp-promise' 3 | import {chmod} from 'fs-extra' 4 | import ESDB from '.' 
5 | import {testModels} from '../lib/_test-helpers' 6 | 7 | test('open eSDB read-only separate queue', () => 8 | tmp.withDir( 9 | async ({path: dir}) => { 10 | const file = sysPath.join(dir, 'db') 11 | const queueFile = sysPath.join(dir, 'q') 12 | const eSDB = new ESDB({ 13 | file, 14 | queueFile, 15 | name: 'E', 16 | models: testModels, 17 | }) 18 | await eSDB.dispatch('foo') 19 | await eSDB.queue.db.exec('PRAGMA journal_mode=DELETE;') 20 | await eSDB.db.close() 21 | await eSDB.rwDb.exec('PRAGMA journal_mode=DELETE;') 22 | await eSDB.rwDb.close() 23 | 24 | await chmod(file, 0o400) 25 | await chmod(queueFile, 0o400) 26 | await chmod(sysPath.dirname(queueFile), 0o500) 27 | 28 | const roDB = new ESDB({ 29 | file, 30 | queueFile, 31 | readOnly: true, 32 | name: 'E', 33 | models: testModels, 34 | }) 35 | expect(await roDB.store.count.all()).toEqual([ 36 | {id: 'count', total: 1, byType: {foo: 1}}, 37 | ]) 38 | await expect(roDB.dispatch('foo')).rejects.toThrow('read') 39 | await chmod(sysPath.dirname(file), 0o700) 40 | }, 41 | {unsafeCleanup: true, prefix: 'esdb-ro-sep'} 42 | )) 43 | 44 | test('open eSDB read-only same queue', () => 45 | tmp.withDir( 46 | async ({path: dir}) => { 47 | const file = sysPath.join(dir, 'db') 48 | const eSDB = new ESDB({ 49 | file, 50 | name: 'E', 51 | models: testModels, 52 | }) 53 | await eSDB.dispatch('foo') 54 | await eSDB.queue.db.close() 55 | await eSDB.db.close() 56 | await eSDB.rwDb.exec('PRAGMA journal_mode=DELETE;') 57 | await eSDB.rwDb.close() 58 | 59 | await chmod(file, 0o400) 60 | await chmod(sysPath.dirname(file), 0o500) 61 | 62 | const roDB = new ESDB({ 63 | file, 64 | readOnly: true, 65 | name: 'E', 66 | models: testModels, 67 | }) 68 | expect(await roDB.store.count.all()).toEqual([ 69 | {id: 'count', total: 1, byType: {foo: 1}}, 70 | ]) 71 | await expect(roDB.dispatch('foo')).rejects.toThrow('read') 72 | await chmod(sysPath.dirname(file), 0o700) 73 | }, 74 | {unsafeCleanup: true, prefix: 'esdb-ro-same'} 75 | )) 76 | 77 | 
test('RO db sees transaction as soon as completed', async () => 78 | tmp.withDir( 79 | async ({path: dir}) => { 80 | const eSDB = new ESDB({ 81 | file: sysPath.join(dir, 'db'), 82 | queueFile: sysPath.join(dir, 'q'), 83 | name: 'E', 84 | models: testModels, 85 | }) 86 | for (let i = 1; i <= 100; i++) { 87 | await eSDB.dispatch('foo') 88 | 89 | expect(await eSDB.store.count.get('count')).toHaveProperty('total', i) 90 | } 91 | }, 92 | {unsafeCleanup: true, prefix: 'esdb-ro-see'} 93 | )) 94 | -------------------------------------------------------------------------------- /src/EventSourcingDB/ESDB-subevents.test.js: -------------------------------------------------------------------------------- 1 | // @ts-check 2 | import {withESDB} from '../lib/_test-helpers' 3 | 4 | describe('subevents', () => { 5 | test('work', async () => { 6 | const models = { 7 | foo: { 8 | preprocessor: ({event, addEvent, isMainEvent}) => { 9 | expect(isMainEvent).not.toBeUndefined() 10 | if (event.type === 'hi' || event.type === 'pre') 11 | addEvent('pre-' + event.type) 12 | }, 13 | reducer: ({event, addEvent, isMainEvent}) => { 14 | expect(isMainEvent).not.toBeUndefined() 15 | let events 16 | if (event.type === 'hi' || event.type === 'red') { 17 | addEvent('red-' + event.type) 18 | events = [{type: 'red-out-' + event.type}] 19 | } 20 | return {set: [{id: event.type}], events} 21 | }, 22 | deriver: ({event, addEvent, isMainEvent}) => { 23 | expect(isMainEvent).not.toBeUndefined() 24 | if (event.type === 'hi' || event.type === 'der') 25 | addEvent('der-' + event.type) 26 | }, 27 | }, 28 | } 29 | return withESDB(models, async eSDB => { 30 | const checker = async id => 31 | // this way we see the desired value in the output 32 | expect((await eSDB.store.foo.get(id)) || false).toHaveProperty('id', id) 33 | const event = await eSDB.dispatch('hi') 34 | expect(event.events).toHaveLength(4) 35 | await checker('pre-hi') 36 | await checker('red-hi') 37 | await checker('red-out-hi') 38 | await 
checker('der-hi') 39 | expect((await eSDB.dispatch('pre')).events).toHaveLength(1) 40 | await checker('pre-pre') 41 | await eSDB.dispatch('red') 42 | await checker('red-red') 43 | await checker('red-out-red') 44 | await eSDB.dispatch('der') 45 | await checker('der-der') 46 | }) 47 | }) 48 | 49 | test('depth first order', () => { 50 | const models = { 51 | foo: { 52 | reducer: ({event, addEvent}) => { 53 | if (event.type === 'hi') return {set: [{id: 'hi', all: ''}]} 54 | if (event.type === '3') addEvent('4') 55 | }, 56 | deriver: async ({model, event, addEvent}) => { 57 | if (event.type === 'hi') { 58 | addEvent('1') 59 | addEvent('3') 60 | } 61 | if (event.type === '1') addEvent('2') 62 | if (event.type === '3') addEvent('5') 63 | const t = await model.get('hi') 64 | return model.set({id: 'hi', all: t.all + event.type}) 65 | }, 66 | }, 67 | } 68 | return withESDB(models, async eSDB => { 69 | const spy = vi.fn(event => event.type) 70 | eSDB.on('result', spy) 71 | const event = await eSDB.dispatch('hi') 72 | expect(spy).toHaveBeenCalledTimes(1) 73 | expect(event.events).toHaveLength(2) 74 | expect(await eSDB.store.foo.get('hi')).toHaveProperty('all', 'hi12345') 75 | }) 76 | }) 77 | 78 | test('no infinite recursion', () => { 79 | const models = { 80 | foo: { 81 | deriver: async ({event, addEvent}) => { 82 | if (event.type === 'hi') addEvent('hi') 83 | }, 84 | }, 85 | } 86 | return withESDB(models, async eSDB => { 87 | eSDB.__BE_QUIET = true 88 | const doNotCall = vi.fn() 89 | const event = await eSDB.dispatch('hi').then(doNotCall, e => e) 90 | expect(doNotCall).toHaveBeenCalledTimes(0) 91 | expect(event).toHaveProperty( 92 | 'error._handle', 93 | expect.stringMatching(/(\.hi)+:.*deep/) 94 | ) 95 | }) 96 | }) 97 | 98 | test('replay clears subevents', () => { 99 | const models = { 100 | foo: { 101 | deriver: async ({event, addEvent}) => { 102 | if (event.type === 'hi') addEvent('ho') 103 | }, 104 | }, 105 | } 106 | return withESDB(models, async eSDB => { 107 | await 
eSDB.queue.set({v: 5, type: 'hi', events: [{type: 'deleteme'}]}) 108 | const event = await eSDB.handledVersion(5) 109 | expect(event).toHaveProperty('events', [ 110 | expect.objectContaining({type: 'ho'}), 111 | ]) 112 | }) 113 | }) 114 | }) 115 | 116 | describe('transact', () => { 117 | test('gets called', async () => { 118 | const models = { 119 | foo: {transact: vi.fn()}, 120 | } 121 | return withESDB(models, async eSDB => { 122 | await eSDB.dispatch('hi') 123 | expect(models.foo.transact).toHaveBeenCalledTimes(1) 124 | expect(models.foo.transact).toHaveBeenCalledWith( 125 | expect.objectContaining({ 126 | event: expect.any(Object), 127 | model: expect.any(Object), 128 | dispatch: expect.any(Function), 129 | store: expect.any(Object), 130 | isMainEvent: true, 131 | }) 132 | ) 133 | }) 134 | }) 135 | 136 | test('sync error stops transaction', async () => { 137 | const models = { 138 | foo: { 139 | transact: ({event: {type}}) => { 140 | if (type === 'sync') throw 'oops sync' 141 | }, 142 | }, 143 | } 144 | return withESDB(models, async eSDB => { 145 | eSDB.__BE_QUIET = true 146 | await expect(eSDB.dispatch('sync')).rejects.toHaveProperty('error', { 147 | _transact_foo: 'oops sync', 148 | }) 149 | }) 150 | }) 151 | 152 | test('rejection stops transaction', async () => { 153 | const models = { 154 | foo: { 155 | transact: ({event: {type}}) => { 156 | if (type === 'reject') return Promise.reject('oops reject') 157 | }, 158 | }, 159 | } 160 | return withESDB(models, async eSDB => { 161 | eSDB.__BE_QUIET = true 162 | await expect(eSDB.dispatch('reject')).rejects.toHaveProperty('error', { 163 | _transact_foo: 'oops reject', 164 | }) 165 | }) 166 | }) 167 | 168 | test('throws when dispatching outside transact', async () => { 169 | const models = { 170 | foo: { 171 | reducer: ({model}) => model.set({id: 'hi'}), 172 | }, 173 | } 174 | return withESDB(models, async eSDB => { 175 | eSDB.__BE_QUIET = true 176 | await expect(eSDB.dispatch('hi')).rejects.toEqual( 177 | 
expect.objectContaining({ 178 | error: expect.objectContaining({ 179 | _reduce_foo: expect.stringContaining('only allowed in transact'), 180 | }), 181 | }) 182 | ) 183 | }) 184 | }) 185 | 186 | test('does not throw when dispatching outside processing', async () => { 187 | let resolve1, resolve2 188 | let ranReducer = new Promise(r => (resolve1 = r)) 189 | const models = { 190 | foo: { 191 | reducer: async ({event: {type}}) => { 192 | if (type === 'hi') 193 | await new Promise(r => { 194 | resolve2 = r 195 | resolve1() 196 | }) 197 | }, 198 | }, 199 | } 200 | return withESDB(models, async eSDB => { 201 | eSDB.__BE_QUIET = true 202 | eSDB.dispatch('hi') 203 | await ranReducer 204 | setTimeout(resolve2) 205 | await expect(eSDB.store.foo.set('ho')).resolves.toBeDefined() 206 | }) 207 | }) 208 | 209 | test('can dispatch', async () => { 210 | const models = { 211 | foo: { 212 | transact: async ({event, dispatch}) => { 213 | if (event.type !== 'hi') return 214 | await expect(dispatch('sub-hi')).resolves.toEqual( 215 | expect.objectContaining({ 216 | type: 'sub-hi', 217 | result: expect.any(Object), 218 | }) 219 | ) 220 | }, 221 | }, 222 | } 223 | return withESDB(models, async eSDB => { 224 | expect(await eSDB.dispatch('hi')).toHaveProperty( 225 | 'events.0.type', 226 | 'sub-hi' 227 | ) 228 | }) 229 | }) 230 | 231 | test('can use dispatch via model', async () => { 232 | const models = { 233 | foo: { 234 | transact: async ({event, model}) => { 235 | if (event.type !== 'hi') return 236 | expect(await model.set({id: 'hi'})).toEqual({id: 'hi'}) 237 | }, 238 | }, 239 | } 240 | return withESDB(models, async eSDB => { 241 | expect(await eSDB.dispatch('hi')).toHaveProperty( 242 | 'events.0.type', 243 | 'es/foo' 244 | ) 245 | }) 246 | }) 247 | 248 | test('can transact in sub-event', async () => { 249 | const models = { 250 | foo: { 251 | transact: async ({event, dispatch}) => { 252 | if (event.type === 'hi') 253 | await expect(dispatch('sub-hi')).resolves.toEqual( 254 | 
expect.objectContaining({ 255 | type: 'sub-hi', 256 | result: expect.any(Object), 257 | }) 258 | ) 259 | if (event.type === 'sub-hi') 260 | await expect(dispatch('sub-sub-hi')).resolves.toEqual( 261 | expect.objectContaining({ 262 | type: 'sub-sub-hi', 263 | result: expect.any(Object), 264 | }) 265 | ) 266 | }, 267 | }, 268 | } 269 | return withESDB(models, async eSDB => { 270 | expect(await eSDB.dispatch('hi')).toHaveProperty( 271 | 'events.0.events.0.type', 272 | 'sub-sub-hi' 273 | ) 274 | }) 275 | }) 276 | 277 | test('handles sub-events in order', async () => { 278 | let lastSeen = 0 279 | const models = { 280 | foo: { 281 | reducer: ({event: {type, data}}) => { 282 | if (type === 'sub') { 283 | expect(lastSeen).toBeLessThan(data) 284 | lastSeen = data 285 | } 286 | }, 287 | transact: async ({event, dispatch}) => { 288 | if (event.type === 'hi') { 289 | for (let i = 1; i < 9; i++) dispatch('sub', i) 290 | await dispatch('sub', 9) 291 | } 292 | }, 293 | }, 294 | } 295 | return withESDB(models, async eSDB => { 296 | await eSDB.dispatch('hi') 297 | }) 298 | }) 299 | 300 | test('gets subevent from dispatch', async () => { 301 | const models = { 302 | foo: { 303 | transact: async ({event, dispatch}) => { 304 | if (event.type !== 'hi') return 305 | const sub = await dispatch('sub', 9) 306 | expect(sub).toHaveProperty('type', 'sub') 307 | expect(await dispatch('sub2', 10)).toHaveProperty('data', 10) 308 | expect(await dispatch('sub3', 11)).toHaveProperty('type', 'sub3') 309 | }, 310 | }, 311 | } 312 | return withESDB(models, async eSDB => { 313 | await eSDB.dispatch('hi') 314 | }) 315 | }) 316 | }) 317 | -------------------------------------------------------------------------------- /src/EventSourcingDB/ESModel.js: -------------------------------------------------------------------------------- 1 | // Drop-in replacement for JsonModel 2 | // Caveats: 3 | // * `.update()` returns the current object at the time of returning, not the one that was updated 4 | // 5 | // 
Events all type `es/name` and data `[actionEnum, id, obj, meta]` 6 | // The id is assigned by the preprocessor except for RM 7 | 8 | import JsonModel from '../JsonModel' 9 | import {DEV} from '../lib/warning' 10 | import {isEqual} from 'lodash' 11 | import applyResult from './applyResult' 12 | 13 | export const undefToNull = data => { 14 | if (data == null) return null 15 | if (typeof data !== 'object') return data 16 | if (Array.isArray(data)) return data.map(element => undefToNull(element)) 17 | if (Object.getPrototypeOf(data) !== Object.prototype) return data 18 | const out = {} 19 | for (const [key, value] of Object.entries(data)) { 20 | out[key] = undefToNull(value) 21 | } 22 | return out 23 | } 24 | 25 | export const getId = async (model, data) => { 26 | let id = data[model.idCol] 27 | if (id == null) { 28 | // Be sure to call with model as this, like in JsonModel 29 | id = await model.columns[model.idCol].value.call(model, data) 30 | } 31 | // This can only happen for integer ids 32 | if (id == null) id = await model.getNextId() 33 | return id 34 | } 35 | 36 | // Calculate the update given two objects that went 37 | // through JSON stringify+parse 38 | const calcUpd = (idCol, prev, obj, complete) => { 39 | const out = {} 40 | let changed = false 41 | for (const [key, value] of Object.entries(obj)) { 42 | const pVal = prev[key] 43 | if (value == null && pVal != null) { 44 | out[key] = null 45 | changed = true 46 | } else if (!isEqual(value, pVal)) { 47 | out[key] = value 48 | changed = true 49 | } 50 | } 51 | if (complete) 52 | for (const key of Object.keys(prev)) 53 | if (!(key in obj)) { 54 | out[key] = null 55 | changed = true 56 | } 57 | if (changed) { 58 | out[idCol] = prev[idCol] 59 | return out 60 | } 61 | return undefined 62 | } 63 | 64 | /** 65 | * ESModel is a drop-in wrapper around JsonModel to turn changes into events. 
66 | * 67 | * Use it to convert your database to be event sourcing 68 | * 69 | * Event data is encoded as an array: `[subtype, id, data, meta]` Subtype is one 70 | * of `ESModel.(REMOVE|SET|INSERT|UPDATE|SAVE)`. `id` is filled in by the 71 | * preprocessor at the time of the event. `meta` is free-form data about the 72 | * event. It is just stored in the history table. 73 | * 74 | * For example: `model.set({foo: true})` would result in the event `[1, 1, {foo: 75 | * true}]` 76 | * 77 | * @augments JsonModel 78 | */ 79 | class ESModel extends JsonModel { 80 | static REMOVE = 0 81 | static SET = 1 82 | static INSERT = 2 83 | static UPDATE = 3 84 | static SAVE = 4 85 | 86 | /** 87 | * Creates a new ESModel model, called by DB. 88 | * 89 | * @class 90 | * @param {function} dispatch - The {@link ESDB} dispatch function. 91 | * @param {boolean} [init] - Emit an event with type `es/INIT:${modelname}` at 92 | * table creation time, to be used by custom reducers. 93 | * @param {Object} [...options] - Other params are passed to JsonModel. 94 | */ 95 | constructor({dispatch, init, emitter, ...options}) { 96 | super({ 97 | ...options, 98 | migrations: { 99 | ...options.migrations, 100 | '0_init': 101 | init && 102 | (({queue}) => { 103 | // Don't wait for add Promise to prevent deadlock 104 | queue.add(this.INIT) 105 | }), 106 | }, 107 | }) 108 | this.dispatch = dispatch 109 | this.writable = false 110 | const clearMax = () => { 111 | this._maxId = 0 112 | } 113 | // Prevent max listeners warning 114 | options.db.setMaxListeners(options.db.getMaxListeners() + 1) 115 | options.db.on('begin', clearMax) 116 | emitter.setMaxListeners(emitter.getMaxListeners() + 1) 117 | emitter.on('result', clearMax) 118 | emitter.on('error', clearMax) 119 | } 120 | 121 | TYPE = `es/${this.name}` 122 | 123 | INIT = `es/INIT:${this.name}` 124 | 125 | /** 126 | * Slight hack: use the writable state to fall back to JsonModel behavior. 127 | * This makes deriver and migrations work without changes. 
Note: while 128 | * writable, no events are created. Be careful. 129 | * 130 | * @param {boolean} state - Writeable or not. 131 | */ 132 | setWritable(state) { 133 | this.writable = state 134 | } 135 | 136 | event = { 137 | /** 138 | * Create an event that will insert or replace the given object into the 139 | * database. 140 | * 141 | * @param {Object} obj 142 | * 143 | * - The object to store. If there is no `id` value (or whatever the `id` 144 | * column is named), one is assigned automatically. 145 | * 146 | * @param {boolean} [insertOnly] 147 | * 148 | * - Don't allow replacing existing objects. 149 | * 150 | * @param {any} [meta] 151 | * 152 | * - Extra metadata to store in the event but not in the object. 153 | * 154 | * @returns {Pick} 155 | * 156 | * - Args to pass to addEvent/dispatch. 157 | */ 158 | set: (obj, insertOnly, meta) => { 159 | const data = [insertOnly ? ESModel.INSERT : ESModel.SET, null, obj] 160 | if (meta) data[3] = meta 161 | return {type: this.TYPE, data} 162 | }, 163 | /** 164 | * Create an event that will update an existing object. 165 | * 166 | * @param {Object} obj 167 | * 168 | * - The data to store. 169 | * 170 | * @param {boolean} [upsert] 171 | * 172 | * - If `true`, allow inserting if the object doesn't exist. 173 | * 174 | * @param {any} [meta] 175 | * 176 | * - Extra metadata to store in the event at `data[3]` but not in the object. 177 | * 178 | * @returns {Pick} 179 | * 180 | * - Args to pass to addEvent/dispatch. 181 | */ 182 | update: (obj, upsert, meta) => { 183 | const id = obj[this.idCol] 184 | if (id == null && !upsert) throw new TypeError('No ID specified') 185 | 186 | const data = [ 187 | upsert ? ESModel.SAVE : ESModel.UPDATE, 188 | null, 189 | undefToNull(obj), 190 | ] 191 | if (meta) data.push(meta) 192 | 193 | return {type: this.TYPE, data} 194 | }, 195 | /** 196 | * Create an event that will emove an object. 197 | * 198 | * @param {Object | string | integer} idOrObj 199 | * 200 | * - The id or the object itself. 
201 | * 202 | * @param {any} meta 203 | * 204 | * - Metadata, attached to the event only, at `data[3]` 205 | * 206 | * @returns {Pick} 207 | * 208 | * - Args to pass to addEvent/dispatch. 209 | */ 210 | remove: (idOrObj, meta) => { 211 | const id = typeof idOrObj === 'object' ? idOrObj[this.idCol] : idOrObj 212 | if (id == null) throw new TypeError('No ID specified') 213 | 214 | const data = [ESModel.REMOVE, id] 215 | if (meta) data[3] = meta 216 | 217 | return {type: this.TYPE, data} 218 | }, 219 | } 220 | 221 | /** 222 | * Insert or replace the given object into the database. 223 | * 224 | * @param {Object} obj - The object to store. If there is no `id` value (or 225 | * whatever the `id` column is named), one is assigned automatically. 226 | * @param {boolean} [insertOnly] - Don't allow replacing existing objects. 227 | * @param {boolean} [noReturn] - Do not return the stored object; an 228 | * optimization. 229 | * @param {any} [meta] - Extra metadata to store in the event but not in the 230 | * object. 231 | * @returns {Promise} - If `noReturn` is false, the stored object is 232 | * fetched from the DB. 233 | */ 234 | async set(obj, insertOnly, noReturn, meta) { 235 | if (DEV && noReturn != null && typeof noReturn !== 'boolean') 236 | throw new Error(`${this.name}: meta argument is now in fourth position`) 237 | if (this.writable) { 238 | const id = obj[this.idCol] 239 | if (this._maxId && id > this._maxId) this._maxId = id 240 | return super.set(obj, insertOnly, noReturn) 241 | } 242 | 243 | const {data, result} = await this.dispatch( 244 | this.event.set(obj, insertOnly, meta) 245 | ) 246 | const id = data[1] 247 | 248 | const r = result[this.name] 249 | if (r && r.esFail) throw new Error(`${this.name}.set ${id}: ${r.esFail}`) 250 | 251 | // We have to get because we don't know what calculated values did 252 | // Unfortunately, this might be the object after a later event 253 | return noReturn ? 
undefined : this.get(id) 254 | } 255 | 256 | /** 257 | * Update an existing object. 258 | * 259 | * @param {Object} o - The data to store. 260 | * @param {boolean} [upsert] - If `true`, allow inserting if the object 261 | * doesn't exist. 262 | * @param {boolean} [noReturn] - Do not return the stored object; an 263 | * optimization. 264 | * @param {any} [meta] - Extra metadata to store in the event at `data[3]` but 265 | * not in the object. 266 | * @returns {Promise} - If `noReturn` is false, the stored object is 267 | * fetched from the DB. 268 | */ 269 | async update(obj, upsert, noReturn, meta) { 270 | if (DEV && noReturn != null && typeof noReturn !== 'boolean') 271 | throw new Error(`${this.name}: meta argument is now in fourth position`) 272 | 273 | if (this.writable) return super.update(obj, upsert, noReturn) 274 | 275 | if (DEV && noReturn != null && typeof noReturn !== 'boolean') 276 | throw new Error(`${this.name}: meta argument is now in fourth position`) 277 | 278 | const {data, result} = await this.dispatch( 279 | this.event.update(obj, upsert, meta) 280 | ) 281 | const id = data[1] 282 | 283 | const r = result[this.name] 284 | if (r && r.esFail) throw new Error(`${this.name}.update ${id}: ${r.esFail}`) 285 | 286 | // We have to get because we don't know what calculated values did 287 | // Unfortunately, this might be the object after a later event 288 | return this.get(id) 289 | } 290 | 291 | updateNoTrans(obj, upsert) { 292 | if (this.writable) return super.updateNoTrans(obj, upsert) 293 | throw new Error('Non-transactional changes are not possible with ESModel') 294 | } 295 | 296 | /** 297 | * Remove an object. 298 | * 299 | * @param {Object | string | integer} idOrObj 300 | * 301 | * - The id or the object itself. 302 | * 303 | * @param {any} meta 304 | * 305 | * - Metadata, attached to the event only, at `data[3]` 306 | * 307 | * @returns {Promise} 308 | * 309 | * - Always returns true. 
310 | */ 311 | async remove(idOrObj, meta) { 312 | if (this.writable) return super.remove(idOrObj) 313 | 314 | await this.dispatch(this.event.remove(idOrObj, meta)) 315 | return true 316 | } 317 | 318 | /** ChangeId: not implemented yet, had no need so far */ 319 | changeId(oldId, newId) { 320 | if (this.writable) return super.changeId(oldId, newId) 321 | throw new Error(`ESModel doesn't support changeId yet`) 322 | } 323 | 324 | _maxId = 0 325 | _maxIdP = null 326 | _lastUV = 0 327 | 328 | /** 329 | * Returns the next available integer ID for the model. Calling this multiple 330 | * times during a redux cycle will give increasing numbers even though the 331 | * database table doesn't change. Use this from the redux functions to assign 332 | * unique ids to new objects. 333 | * 334 | * @returns {Promise} - The next usable ID. 335 | */ 336 | async getNextId() { 337 | if (!this._maxId) { 338 | if (!this._maxIdP) 339 | this._maxIdP = this.max(this.idCol).then(m => { 340 | this._maxId = m 341 | return m 342 | }) 343 | await this._maxIdP 344 | this._maxIdP = null 345 | } 346 | return ++this._maxId 347 | } 348 | 349 | /** 350 | * Applies the result from the reducer. 351 | * 352 | * @param {Object} result - Free-form change descriptor. 353 | * @returns {Promise} - Promise for completion. 354 | */ 355 | async applyResult(result) { 356 | if (result.esFail) return 357 | return applyResult(this, {...result, esFail: undefined}) 358 | } 359 | 360 | /** 361 | * Assigns the object id to the event at the start of the cycle. 
When 362 | * subclassing ESModel, be sure to call this too 363 | * (`ESModel.preprocessor(arg)`) 364 | */ 365 | static async preprocessor({model, event, isMainEvent}) { 366 | if (isMainEvent) model._maxId = 0 367 | if (event.type !== model.TYPE) return 368 | if (event.data[0] > ESModel.REMOVE) { 369 | // Always overwrite, so repeat events get correct ids 370 | // eslint-disable-next-line require-atomic-updates 371 | event.data[1] = await getId(model, event.data[2]) 372 | return event 373 | } 374 | } 375 | 376 | /** 377 | * Calculates the desired change ESModel will only emit `rm`, `ins`, `upd` and 378 | * `esFail` 379 | * 380 | * @param {Object} params 381 | * @param {ESModel} params.model - The model. 382 | * @param {Event} params.event - The event. 383 | * @returns {Promise} - The result object in the format JsonModel 384 | * likes. 385 | */ 386 | static async reducer({model, event: {type, data}}) { 387 | if (!model || type !== model.TYPE) return false 388 | 389 | let [action, id, obj] = data 390 | if (action === ESModel.REMOVE) { 391 | if (await model.exists({[model.idCol]: id})) return {rm: [id]} 392 | return false 393 | } 394 | 395 | if (obj[model.idCol] == null) obj = {...obj, [model.idCol]: id} 396 | 397 | const prev = await model.get(id) 398 | let update 399 | if (prev) { 400 | if (action === ESModel.INSERT) return {esFail: 'EEXIST'} 401 | update = calcUpd(model.idCol, prev, obj, action === ESModel.SET) 402 | return update ? 
{upd: [update]} : false 403 | } 404 | if (action === ESModel.UPDATE) return {esFail: 'ENOENT'} 405 | return {ins: [obj]} 406 | } 407 | } 408 | 409 | export default ESModel 410 | -------------------------------------------------------------------------------- /src/EventSourcingDB/__snapshots__/ESModel.test.js.snap: -------------------------------------------------------------------------------- 1 | // Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html 2 | 3 | exports[`ESModel > events > are updated 1`] = ` 4 | [ 5 | { 6 | "data": [ 7 | 1, 8 | 1, 9 | { 10 | "meep": "moop", 11 | }, 12 | ], 13 | "result": { 14 | "m": { 15 | "ins": [ 16 | { 17 | "meep": "moop", 18 | "v": 1, 19 | }, 20 | ], 21 | }, 22 | }, 23 | "ts": 0, 24 | "type": "es/m", 25 | "v": 1, 26 | }, 27 | { 28 | "data": [ 29 | 1, 30 | 1, 31 | { 32 | "meep": "moop", 33 | "v": 1, 34 | }, 35 | ], 36 | "result": {}, 37 | "ts": 0, 38 | "type": "es/m", 39 | "v": 2, 40 | }, 41 | { 42 | "data": [ 43 | 1, 44 | 1, 45 | { 46 | "a": [ 47 | null, 48 | 3, 49 | ], 50 | "beep": "boop", 51 | "v": 1, 52 | }, 53 | ], 54 | "result": { 55 | "m": { 56 | "upd": [ 57 | { 58 | "a": [ 59 | null, 60 | 3, 61 | ], 62 | "beep": "boop", 63 | "meep": null, 64 | "v": 1, 65 | }, 66 | ], 67 | }, 68 | }, 69 | "ts": 0, 70 | "type": "es/m", 71 | "v": 3, 72 | }, 73 | { 74 | "data": [ 75 | 3, 76 | 1, 77 | { 78 | "beep": "boop", 79 | "v": 1, 80 | }, 81 | ], 82 | "result": {}, 83 | "ts": 0, 84 | "type": "es/m", 85 | "v": 4, 86 | }, 87 | { 88 | "data": [ 89 | 3, 90 | 1, 91 | { 92 | "a": [ 93 | null, 94 | 3, 95 | ], 96 | "beep": "foop", 97 | "v": 1, 98 | }, 99 | ], 100 | "result": { 101 | "m": { 102 | "upd": [ 103 | { 104 | "beep": "foop", 105 | "v": 1, 106 | }, 107 | ], 108 | }, 109 | }, 110 | "ts": 0, 111 | "type": "es/m", 112 | "v": 5, 113 | }, 114 | ] 115 | `; 116 | 117 | exports[`ESModel > events > work 1`] = ` 118 | [ 119 | { 120 | "data": [ 121 | 1, 122 | 1, 123 | { 124 | "meep": "moop", 125 | }, 126 | ], 127 | "result": { 128 | "m": { 
import {unknown, DEV} from '../lib/warning'
import {settleAll} from '../lib/settleAll'

// The result keys applyResult understands; anything else is flagged in DEV
const knownKeys = new Set(['rm', 'set', 'ins', 'upd', 'sav'])

/**
 * Applies a reducer result to a model, in a fixed order: removals first,
 * then inserts, sets, updates and saves. Each phase is awaited before the
 * next starts; failures within a phase are collected by settleAll.
 *
 * @param {Object} model - The JsonModel-like model to change.
 * @param {Object} result - The change descriptor emitted by a reducer.
 * @returns {Promise} - Resolves when all changes were applied.
 */
const applyResult = async (model, result) => {
	const {rm, set, ins, upd, sav} = result
	if (DEV) {
		// Warn about any unexpected, defined keys in the result
		for (const [key, value] of Object.entries(result)) {
			if (knownKeys.has(key)) continue
			if (typeof value !== 'undefined') unknown(key, `key ${key} in result`)
		}
	}
	if (rm) await settleAll(rm, item => model.remove(item))
	if (ins) await settleAll(ins, obj => model.set(obj, true, true))
	if (set) await settleAll(set, obj => model.set(obj, false, true))
	if (upd) await settleAll(upd, obj => model.updateNoTrans(obj, true, true))
	if (sav) await settleAll(sav, obj => model.updateNoTrans(obj, false, true))
}

export default applyResult
expect(saved.getF()).toBe(true) 36 | }) 37 | -------------------------------------------------------------------------------- /src/JsonModel/JM-columns.test.js: -------------------------------------------------------------------------------- 1 | import {getModel, sharedSetup} from '../lib/_test-helpers' 2 | 3 | const indexesSql = ` 4 | SELECT m.tbl_name || '.' || ifNull(ii.name, m.name) AS col, m.sql 5 | FROM sqlite_master AS m, 6 | pragma_index_info(m.name) AS ii 7 | WHERE m.type='index' 8 | ORDER BY 1; 9 | ` 10 | 11 | const withCols = sharedSetup(async () => { 12 | const m = getModel({ 13 | columns: { 14 | foo1: {index: 'SPARSE'}, 15 | foo2: { 16 | type: 'INTEGER', 17 | value: o => o.foo1 + 1, 18 | get: false, 19 | alias: 'foo2', 20 | }, 21 | foo3: { 22 | value: o => o.notExists, 23 | index: 'ALL', 24 | unique: true, 25 | }, 26 | fooGet: {real: true, value: () => 3}, 27 | }, 28 | }) 29 | await m.set({id: 'meep', foo1: 5}) 30 | return m 31 | }) 32 | test( 33 | 'columns create', 34 | withCols(async m => { 35 | expect(m.columnArr).toMatchSnapshot() 36 | const row = await m.db.get(`SELECT json, foo2 FROM ${m.name}`) 37 | expect(row).toEqual({json: `{"foo1":5}`, foo2: 6}) 38 | }) 39 | ) 40 | test( 41 | 'columns order', 42 | withCols(async m => { 43 | // id and json are calculated last 44 | const l = m.columnArr.length 45 | expect(m.columnArr[l - 2].name).toBe('id') 46 | expect(m.columnArr[l - 1].name).toBe('json') 47 | }) 48 | ) 49 | test( 50 | 'columns get', 51 | withCols(async m => { 52 | // columns don't automatically change the original object 53 | const saved = await m.get('meep') 54 | expect(saved.foo2).toBeFalsy() 55 | expect(saved.fooGet).toBe(3) 56 | saved.id = 'meep2' 57 | await m.set(saved) 58 | const row = await m.db.get`SELECT * FROM ${m.name}ID WHERE id = ${saved.id}` 59 | const json = JSON.parse(row.json) 60 | // JSON does not include get columns 61 | expect(json.fooGet).toBeFalsy() 62 | expect(json.id).toBeFalsy() 63 | expect(json.foo1).toBe(5) 64 | }) 
65 | ) 66 | test( 67 | 'columns indexes', 68 | withCols(async m => { 69 | // Indexes are created 70 | const indexes = await m.db.all(indexesSql) 71 | expect(indexes.some(i => i.col.includes('foo3'))).toBe(true) 72 | expect(indexes.every(i => !i.col.includes('foo2'))).toBe(true) 73 | }) 74 | ) 75 | 76 | test('default w/ value()', async () => { 77 | const m = getModel({columns: {v: {real: true, default: 5}}}) 78 | await m.set({id: 1}) 79 | expect(await m.db.all(`select * from ${m.name}`)).toEqual([ 80 | {id: '1', json: null, v: 5}, 81 | ]) 82 | expect(m.columns.v.ignoreNull).toBe(false) 83 | }) 84 | 85 | test('default w/ sql', async () => { 86 | const m = getModel({columns: {v: {sql: 'hex(id)', default: 0}}}) 87 | expect(m.makeSelect({attrs: {v: 5}, sort: {v: -1}, cols: ['v']})).toEqual([ 88 | 'SELECT ifNull(hex(id),0) AS _0 FROM "testing" tbl WHERE(ifNull(hex(id),0)=?) ORDER BY _0 DESC', 89 | [5], 90 | undefined, 91 | 'SELECT COUNT(*) as t from ( SELECT ifNull(hex(id),0) AS _0 FROM "testing" tbl WHERE(ifNull(hex(id),0)=?) 
)', 92 | [5], 93 | false, 94 | ]) 95 | expect(m.columns.v.ignoreNull).toBe(false) 96 | }) 97 | 98 | test('value w type JSON', async () => { 99 | const m = getModel({ 100 | columns: { 101 | id: {type: 'INTEGER'}, 102 | v: {type: 'JSON'}, 103 | }, 104 | }) 105 | await m.set({v: {whee: true}}) 106 | await m.set({v: 5}) 107 | await m.set({other: true}) 108 | expect(await m.db.all('SELECT * FROM testing')).toEqual([ 109 | {id: 1, json: null, v: '{"whee":true}'}, 110 | {id: 2, json: null, v: 5}, 111 | {id: 3, json: '{"other":true}', v: null}, 112 | ]) 113 | expect(await m.all()).toEqual([ 114 | {id: 1, v: {whee: true}}, 115 | {id: 2, v: 5}, 116 | {id: 3, other: true}, 117 | ]) 118 | }) 119 | 120 | test('value not real', async () => { 121 | const m = getModel({ 122 | columns: { 123 | id: {type: 'INTEGER'}, 124 | v: {value: o => o.v * 2}, 125 | }, 126 | }) 127 | const result = {id: 1, v: 10} 128 | expect(await m.set({v: 5})).toEqual(result) 129 | expect(await m.get(1)).toEqual(result) 130 | }) 131 | 132 | test('required', async () => { 133 | const m = getModel({ 134 | columns: { 135 | foo: {value: o => o.foo, get: true, required: true}, 136 | bar: { 137 | async value() { 138 | return this.count() 139 | }, 140 | get: true, 141 | required: true, 142 | }, 143 | }, 144 | }) 145 | expect(m.columns.foo).toHaveProperty('ignoreNull', false) 146 | await expect(m.set({})).rejects.toThrow('foo') 147 | await expect(m.set({foo: null})).rejects.toThrow('foo') 148 | await expect(m.set({foo: 0})).resolves.toHaveProperty('foo', 0) 149 | await expect(m.set({foo: ''})).resolves.toHaveProperty('foo', '') 150 | const obj = await m.set({foo: 'hi'}) 151 | expect(obj).toHaveProperty('foo', 'hi') 152 | expect(obj).toHaveProperty('bar', 2) 153 | }) 154 | 155 | test('nested JSON', async () => { 156 | const m = getModel({ 157 | columns: { 158 | id: {type: 'INTEGER'}, 159 | c: {type: 'JSON', path: 'a.b.c'}, 160 | a: {type: 'JSON'}, 161 | b: {type: 'JSON', path: 'a.b'}, 162 | }, 163 | }) 164 | await 
expect(m.set({a: {b: {c: 3}}})).resolves.toEqual({ 165 | id: 1, 166 | a: {b: {c: 3}}, 167 | }) 168 | await expect(m.get(1)).resolves.toEqual({id: 1, a: {b: {c: 3}}}) 169 | await expect(m.db.get('select * from testing')).resolves.toEqual({ 170 | a: null, 171 | b: null, 172 | c: 3, 173 | id: 1, 174 | json: null, 175 | }) 176 | }) 177 | 178 | test('nested JSON null object', async () => { 179 | const m = getModel({ 180 | columns: { 181 | id: {type: 'INTEGER'}, 182 | c: {type: 'JSON', path: 'a.b.c'}, 183 | a: {type: 'JSON'}, 184 | b: {type: 'JSON', path: 'a.b'}, 185 | }, 186 | }) 187 | await expect(m.set({a: {t: 1}})).resolves.toEqual({ 188 | id: 1, 189 | a: {t: 1, b: {}}, 190 | }) 191 | await expect(m.get(1)).resolves.toEqual({id: 1, a: {t: 1, b: {}}}) 192 | await expect(m.db.get('select * from testing')).resolves.toEqual({ 193 | a: '{"t":1}', 194 | b: null, 195 | c: null, 196 | id: 1, 197 | json: null, 198 | }) 199 | }) 200 | 201 | test('JSON alwaysObject', async () => { 202 | const m = getModel({ 203 | columns: { 204 | id: {type: 'INTEGER'}, 205 | a: {type: 'JSON'}, 206 | b: {type: 'JSON', path: 'a.b', alwaysObject: true}, 207 | c: {type: 'JSON', alwaysObject: false}, 208 | d: {path: 'c.d'}, 209 | }, 210 | }) 211 | expect(m.columns.a).toHaveProperty('alwaysObject', true) 212 | expect(m.columns.c).toHaveProperty('alwaysObject', false) 213 | await expect(m.set({a: {t: 1}})).resolves.toEqual({ 214 | id: 1, 215 | a: {t: 1, b: {}}, 216 | c: undefined, 217 | }) 218 | await expect(m.get(1)).resolves.toEqual({id: 1, a: {t: 1, b: {}}}) 219 | await expect(m.set({c: 1})).resolves.toEqual({ 220 | id: 2, 221 | a: {b: {}}, 222 | c: 1, 223 | }) 224 | await expect(m.get(2)).resolves.toEqual({id: 2, a: {b: {}}, c: 1}) 225 | }) 226 | 227 | test('path in json column', async () => { 228 | const m = getModel({ 229 | columns: { 230 | id: {type: 'INTEGER'}, 231 | b: {path: 'a.b'}, 232 | a: {type: 'JSON'}, 233 | c: {real: true}, 234 | d: {path: 'c.d'}, 235 | e: {type: 'JSON', get: false, 
parse: null}, 236 | f: {path: 'e.f'}, 237 | }, 238 | }) 239 | expect(m.columns.b.jsonCol).toBe('a') 240 | expect(m.columns.d.jsonCol).toBe('json') 241 | expect(m.columns.f.jsonCol).toBe('json') 242 | await m.set({a: {b: 4}}) 243 | await expect(m.searchOne({b: 4})).resolves.toHaveProperty('id', 1) 244 | await expect(m.get(1)).resolves.toEqual({id: 1, a: {b: 4}}) 245 | await expect(m.db.get('select * from testing')).resolves.toEqual({ 246 | id: 1, 247 | a: '{"b":4}', 248 | c: null, 249 | e: null, 250 | json: null, 251 | }) 252 | }) 253 | 254 | test('where/whereVal', () => { 255 | const m = getModel({ 256 | columns: { 257 | s: {where: 's `${v}=?`}, 259 | v: {whereVal: v => [v * 2]}, 260 | w: {where: 'hi', whereVal: () => []}, 261 | o: {whereVal: () => false}, 262 | }, 263 | }) 264 | expect( 265 | m.makeSelect({attrs: {s: 1, f: 2, v: 3, w: 4, o: 5}}).slice(0, 2) 266 | ).toEqual([ 267 | 'SELECT tbl."id" AS _i,tbl."json" AS _j FROM "testing" tbl WHERE(s { 273 | const m = getModel({ 274 | columns: { 275 | a: {where: (v, o) => `${v}|${o}`, whereVal: v => [v * 2]}, 276 | }, 277 | }) 278 | expect(m.makeSelect({attrs: {a: 1}}).slice(0, 2)).toEqual([ 279 | 'SELECT tbl."id" AS _i,tbl."json" AS _j FROM "testing" tbl WHERE(2|1)', 280 | [2], 281 | ]) 282 | }) 283 | 284 | test('where string', () => { 285 | expect(() => 286 | getModel({ 287 | columns: { 288 | s: {where: 'noplaceholder'}, 289 | }, 290 | }) 291 | ).toThrow('where') 292 | }) 293 | 294 | test('whereVal truthy not array', () => { 295 | const m = getModel({ 296 | columns: { 297 | s: {whereVal: () => true}, 298 | }, 299 | }) 300 | expect(() => m.makeSelect({attrs: {s: 1}})).toThrow('whereVal') 301 | }) 302 | 303 | test('falsyBool', async () => { 304 | const m = getModel({ 305 | columns: { 306 | id: {type: 'INTEGER'}, 307 | b: {real: true, falsyBool: true}, 308 | c: {real: true, falsyBool: true}, 309 | d: {falsyBool: true}, 310 | e: {falsyBool: true}, 311 | f: {type: 'JSON', falsyBool: true}, 312 | g: {type: 'JSON', 
falsyBool: true}, 313 | }, 314 | }) 315 | expect(await m.set({b: true, c: false, e: true, f: false, g: true})).toEqual({ 316 | id: 1, 317 | b: true, 318 | c: undefined, 319 | d: undefined, 320 | e: true, 321 | f: undefined, 322 | g: true, 323 | }) 324 | await expect(m.get(1)).resolves.toEqual({id: 1, b: true, e: true, g: true}) 325 | await expect(m.db.get('select * from testing')).resolves.toEqual({ 326 | id: 1, 327 | b: 1, 328 | c: null, 329 | json: '{"e":true}', 330 | f: null, 331 | g: 'true', 332 | }) 333 | }) 334 | 335 | test('falsyBool query', async () => { 336 | const m = getModel({ 337 | columns: { 338 | id: {type: 'INTEGER'}, 339 | b: {real: true, falsyBool: true}, 340 | c: {real: true, falsyBool: true}, 341 | d: {falsyBool: true}, 342 | e: {falsyBool: true}, 343 | f: {type: 'JSON', falsyBool: true}, 344 | g: {type: 'JSON', falsyBool: true}, 345 | }, 346 | }) 347 | await m.set({c: true, d: true, f: true}) 348 | await m.set({b: true, c: false, e: true, f: false, g: true}) 349 | expect(await m.searchOne({c: false})).toHaveProperty('id', 2) 350 | expect(await m.searchOne({d: ''})).toHaveProperty('id', 2) 351 | expect(await m.searchOne({f: 0})).toHaveProperty('id', 2) 352 | expect(await m.searchOne({b: 1})).toHaveProperty('id', 2) 353 | expect(await m.searchOne({e: 'yes'})).toHaveProperty('id', 2) 354 | expect(await m.searchOne({g: {}})).toHaveProperty('id', 2) 355 | }) 356 | 357 | test('columnName: ({columnName}) => columnDefinition', async () => { 358 | let m 359 | expect(() => { 360 | m = getModel({ 361 | columns: { 362 | foo1: ({columnName}) => ({ 363 | type: 'TEXT', 364 | value: o => o[columnName] + '!', 365 | }), 366 | }, 367 | }) 368 | }).not.toThrow() 369 | expect(await m.set({foo1: 'hi'})).toHaveProperty('foo1', 'hi!') 370 | expect(() => { 371 | getModel({ 372 | columns: { 373 | foo1: () => ({ 374 | type: 'TEXT', 375 | badProp: 5, 376 | }), 377 | }, 378 | }) 379 | }).toThrow() 380 | }) 381 | 
-------------------------------------------------------------------------------- /src/JsonModel/JM-create.test.js: -------------------------------------------------------------------------------- 1 | import {DB, JsonModel, getModel, sharedSetup} from '../lib/_test-helpers' 2 | 3 | test('create', () => { 4 | const m = getModel() 5 | expect(() => m.all()).not.toThrow() 6 | expect(m.db.store[m.name]).toBe(m) 7 | }) 8 | 9 | test('create invalid', () => { 10 | const db = new DB() 11 | expect(() => new JsonModel()).toThrow() 12 | expect(() => new JsonModel({db})).toThrow() 13 | expect(() => new JsonModel({name: 'foo'})).toThrow() 14 | }) 15 | 16 | test('derived set', async () => { 17 | let ran = false 18 | class Foo extends JsonModel { 19 | set(obj) { 20 | ran = true 21 | return super.set(obj) 22 | } 23 | } 24 | const db = new DB() 25 | const foo = new Foo({db, name: 'foo'}) 26 | await foo.set({test: true}) 27 | expect(ran).toBe(true) 28 | }) 29 | 30 | test('id generation', async () => { 31 | const m = getModel({columns: {id: {value: o => o.foo}}}) 32 | const idFn = m.columns.id.value.bind(m) 33 | expect(await idFn({})).toBeTruthy() 34 | expect(await idFn({id: 'hi'})).toBe('hi') 35 | expect(await idFn({foo: 'ho'})).toBe('ho') 36 | expect(await idFn({foo: 0})).toBe(0) 37 | expect(await idFn({foo: ''})).toBe('') 38 | expect(await idFn({foo: null})).toBeTruthy() 39 | }) 40 | 41 | test('async id generation', async () => { 42 | const m = getModel({columns: {id: {value: async o => o.foo}}}) 43 | const idFn = m.columns.id.value.bind(m) 44 | expect(await idFn({})).toBeTruthy() 45 | expect(await idFn({id: 'hi'})).toBe('hi') 46 | expect(await idFn({foo: 'ho'})).toBe('ho') 47 | expect(await idFn({foo: 0})).toBe(0) 48 | expect(await idFn({foo: ''})).toBe('') 49 | expect(await idFn({foo: null})).toBeTruthy() 50 | }) 51 | 52 | test('id/col slugValue', async () => { 53 | const m = getModel({ 54 | columns: { 55 | id: {slugValue: o => o.hi.slice(0, 3)}, 56 | hi: {}, 57 | other: 
{slugValue: o => o.hi.toUpperCase(), index: true, get: true}, 58 | }, 59 | }) 60 | await m.set({hi: 'hello'}) 61 | const o = await m.searchOne() 62 | expect(o).toEqual({id: 'hel', hi: 'hello', other: 'hello'}) 63 | await m.set({hi: 'Hello'}) 64 | const p = await m.searchOne({hi: 'Hello'}) 65 | expect(p).toEqual({id: 'hel-2', hi: 'Hello', other: 'hello-2'}) 66 | const q = await m.set({id: 'hel-2', hi: 'Hello', other: undefined}) 67 | expect(q).toEqual({id: 'hel-2', hi: 'Hello', other: 'hello-2'}) 68 | }) 69 | 70 | const withObjs = sharedSetup(() => { 71 | const m = getModel() 72 | return Promise.all([ 73 | m.set({id: 0, moop: 8}), 74 | m.set({id: '', moop: 9}), 75 | m.set({id: 'foobar', fluffy: true}), 76 | m.set({noId: true}), 77 | ]).then(() => m) 78 | }) 79 | test( 80 | 'get falsy ids', 81 | withObjs(async m => { 82 | expect(await m.get(0)).toEqual({id: '0', moop: 8}) 83 | expect(await m.get('')).toEqual({id: '', moop: 9}) 84 | }) 85 | ) 86 | 87 | test( 88 | 'get by id', 89 | withObjs(async m => { 90 | expect(await m.get('foobar')).toEqual({id: 'foobar', fluffy: true}) 91 | }) 92 | ) 93 | 94 | test( 95 | 'get w/ auto id', 96 | withObjs(async m => { 97 | const obj = {fluffier: true} 98 | const withId = await m.set(obj) 99 | const saved = await m.get(withId.id) 100 | expect(saved).toEqual(withId) 101 | expect(saved.fluffier).toBe(true) 102 | }) 103 | ) 104 | 105 | test( 106 | 'get w/ null id', 107 | withObjs(async m => { 108 | await expect(m.get(null)).rejects.toThrow('No id') 109 | await expect(m.get(undefined)).rejects.toThrow('No id') 110 | }) 111 | ) 112 | 113 | test( 114 | 'get w/ unknown column', 115 | withObjs(async m => { 116 | await expect(m.get(1, 'foo')).rejects.toThrow('column "foo"') 117 | }) 118 | ) 119 | 120 | test('get w/ other colName', async () => { 121 | const m = getModel({ 122 | columns: {id: {type: 'INTEGER'}, slug: {}}, 123 | }) 124 | await m.set({id: 0, slug: 10}) 125 | expect(await m.get(10, 'slug')).toEqual({id: 0, slug: 10}) 126 | }) 127 
| 128 | test('getAll', async () => { 129 | const m = getModel({ 130 | columns: { 131 | id: {type: 'INTEGER'}, 132 | slug: {}, 133 | objectId: {path: 'object.id'}, 134 | no: {real: true, get: false, value: () => 5}, 135 | }, 136 | }) 137 | await expect(m.getAll([], 'no')).rejects.toThrow('get:false') 138 | await Promise.all( 139 | [0, 1, 2, 3, 4].map(id => m.set({id, slug: id + 10, object: {id}})) 140 | ) 141 | expect(await m.getAll([])).toEqual([]) 142 | expect(await m.getAll([4])).toEqual([{id: 4, slug: 14, object: {id: 4}}]) 143 | expect(await m.getAll([4, 'nope', 0])).toEqual([ 144 | {id: 4, slug: 14, object: {id: 4}}, 145 | undefined, 146 | {id: 0, slug: 10, object: {id: 0}}, 147 | ]) 148 | expect(await m.getAll([10, 'nope', 12], 'slug')).toEqual([ 149 | {id: 0, slug: 10, object: {id: 0}}, 150 | undefined, 151 | {id: 2, slug: 12, object: {id: 2}}, 152 | ]) 153 | expect(await m.getAll([2, 'nope', 4], 'objectId')).toEqual([ 154 | {id: 2, slug: 12, object: {id: 2}}, 155 | undefined, 156 | {id: 4, slug: 14, object: {id: 4}}, 157 | ]) 158 | }) 159 | 160 | test('all', async () => { 161 | const m = getModel() 162 | await Promise.all([0, 1, 2, 3, 4].map(id => m.set({id}))) 163 | const saved = await m.all() 164 | expect(saved).toHaveLength(5) 165 | expect(saved.some(r => r.id === '4')).toBe(true) 166 | expect(saved.some(r => r.id === '1')).toBe(true) 167 | }) 168 | 169 | test('delete undefined', async () => { 170 | const m = getModel() 171 | const p = m.remove() 172 | await expect(p).resolves.not.toThrow() 173 | }) 174 | 175 | test('delete', async () => { 176 | const m = getModel({columns: {id: {type: 'INTEGER'}}}) 177 | await m.set({id: 123}) 178 | await expect(m.remove(123)).resolves.not.toThrow() 179 | expect(await m.get(123)).toBeFalsy() 180 | await m.set({id: 234}) 181 | await expect(m.remove({id: 234})).resolves.not.toThrow() 182 | expect(await m.get(234)).toBeFalsy() 183 | }) 184 | 185 | test('count', async () => { 186 | const m = getModel() 187 | await 
Promise.all([0, 1, 2, 3, 4].map(id => m.set({id}))) 188 | const count = await m.count(null, {where: {'id > 2': []}}) 189 | expect(count).toBe(2) 190 | }) 191 | 192 | test('idCol', async () => { 193 | const m = getModel({idCol: 'v', columns: {foo: {}}}) 194 | await Promise.all([0, 1, 2, 3, 4].map(v => m.set({v}))) 195 | expect(await m.get(1)).toEqual({v: '1'}) 196 | expect(await m.get(1)).toEqual({v: '1'}) 197 | const n = await m.set({foo: 342}) 198 | expect(n.v).toBeTruthy() 199 | m.set({v: n.v, foo: 342}) 200 | const n2 = await m.search({foo: 342}) 201 | expect(n2.items).toHaveLength(1) 202 | expect(await m.get(n.v)).toBeTruthy() 203 | await m.remove(n.v) 204 | expect(await m.get(n.v)).toBeFalsy() 205 | expect(m.makeSelect({limit: 2})).toEqual([ 206 | 'SELECT tbl."v" AS _i,tbl."json" AS _j FROM "testing" tbl ORDER BY _i LIMIT 2', 207 | [], 208 | ['_i'], 209 | 'SELECT COUNT(*) as t from ( SELECT tbl."v" AS _i,tbl."json" AS _j FROM "testing" tbl )', 210 | [], 211 | false, 212 | ]) 213 | }) 214 | 215 | describe('each', () => { 216 | const m = getModel({columns: {id: {type: 'INTEGER'}}}) 217 | beforeAll(async () => { 218 | await Promise.all([0, 1, 2, 3, 4].map(id => m.set({id}))) 219 | }) 220 | const callEach = async (...args) => { 221 | const stats = {count: 0, total: 0, maxI: 0, maxConcurrent: 0} 222 | let concurrent = 0, 223 | running = true 224 | await m.each(...args, async (row, i) => { 225 | if (!running) throw new Error('got called after each returned') 226 | concurrent++ 227 | if (stats.maxConcurrent < concurrent) stats.maxConcurrent = concurrent 228 | stats.count++ 229 | stats.total += row.id 230 | if (stats.maxI < i) stats.maxI = i 231 | await new Promise(r => setTimeout(r, Math.random() * 10)) 232 | concurrent-- 233 | }) 234 | running = false 235 | return stats 236 | } 237 | test('call', async () => { 238 | await expect(m.each()).rejects.toThrow('requires function') 239 | }) 240 | test('uses noTotal', async () => { 241 | const n = getModel() 242 | n.search 
= vi.fn(() => ({items: []})) 243 | await n.each(() => {}) 244 | expect(n.search).toHaveBeenCalledTimes(1) 245 | expect(n.search).toHaveBeenCalledWith( 246 | undefined, 247 | expect.objectContaining({noTotal: true, limit: expect.any(Number)}) 248 | ) 249 | }) 250 | test('no query', async () => { 251 | const stats = await callEach() 252 | expect(stats).toMatchInlineSnapshot(` 253 | { 254 | "count": 5, 255 | "maxConcurrent": 5, 256 | "maxI": 4, 257 | "total": 10, 258 | } 259 | `) 260 | }) 261 | test('attr', async () => { 262 | const stats = await callEach({id: 3}) 263 | expect(stats).toMatchInlineSnapshot(` 264 | { 265 | "count": 1, 266 | "maxConcurrent": 1, 267 | "maxI": 0, 268 | "total": 3, 269 | } 270 | `) 271 | }) 272 | test('where', async () => { 273 | const stats = await callEach({}, {where: {'id<3': []}}) 274 | expect(stats).toMatchInlineSnapshot(` 275 | { 276 | "count": 3, 277 | "maxConcurrent": 3, 278 | "maxI": 2, 279 | "total": 3, 280 | } 281 | `) 282 | }) 283 | test('concurrent', async () => { 284 | const stats = await callEach({}, {concurrent: 2}) 285 | expect(stats).toMatchInlineSnapshot(` 286 | { 287 | "count": 5, 288 | "maxConcurrent": 2, 289 | "maxI": 4, 290 | "total": 10, 291 | } 292 | `) 293 | }) 294 | test('batchSize', async () => { 295 | const stats = await callEach({}, {batchSize: 3}) 296 | expect(stats).toMatchInlineSnapshot(` 297 | { 298 | "count": 5, 299 | "maxConcurrent": 3, 300 | "maxI": 4, 301 | "total": 10, 302 | } 303 | `) 304 | }) 305 | test('limit', async () => { 306 | const stats = await callEach({}, {limit: 1}) 307 | // ! 
in the next major release, count will be 2 308 | expect(stats).toMatchInlineSnapshot(` 309 | { 310 | "count": 5, 311 | "maxConcurrent": 1, 312 | "maxI": 4, 313 | "total": 10, 314 | } 315 | `) 316 | }) 317 | }) 318 | 319 | describe('id column types', () => { 320 | test('integer', async () => { 321 | const m = getModel({columns: {id: {type: 'INTEGER'}}}) 322 | const layout = await m.db.all(`pragma table_info(${m.quoted})`) 323 | expect(layout.find(col => col.name === 'id')).toEqual( 324 | expect.objectContaining({pk: 1, type: 'INTEGER', notnull: 1}) 325 | ) 326 | }) 327 | 328 | test('other without keepRowId', async () => { 329 | const m = getModel({columns: {id: {type: 'TEXT'}}, keepRowId: false}) 330 | const layout = await m.db.all(`pragma table_info(${m.quoted})`) 331 | expect(layout.find(col => col.name === 'id')).toEqual( 332 | expect.objectContaining({pk: 1, type: 'TEXT', notnull: 1}) 333 | ) 334 | }) 335 | 336 | test('other with keepRowId', async () => { 337 | const m = getModel({columns: {id: {type: 'TEXT'}}, keepRowId: true}) 338 | const layout = await m.db.all(`pragma table_info(${m.quoted})`) 339 | expect(layout.find(col => col.name === 'id')).toEqual( 340 | expect.objectContaining({pk: 0, type: 'TEXT', notnull: 1}) 341 | ) 342 | expect(await m.db.get(`pragma index_info("${m.name}_id")`)).toBeTruthy() 343 | }) 344 | }) 345 | -------------------------------------------------------------------------------- /src/JsonModel/JM-makeSelect.test.js: -------------------------------------------------------------------------------- 1 | import {getModel} from '../lib/_test-helpers' 2 | 3 | test('makeSelect basic', () => { 4 | const m = getModel({ 5 | columns: { 6 | foo: {real: true}, 7 | bar: {}, 8 | meep: {}, 9 | }, 10 | }) 11 | const [q, v, s] = m.makeSelect({ 12 | attrs: {foo: 0, bar: 3, meep: undefined, moop: null}, 13 | limit: 20, 14 | offset: 5, 15 | cols: ['meep'], 16 | sort: {name: 1, date: -2}, 17 | }) 18 | expect(q).toEqual( 19 | 'SELECT 
json_extract(tbl."json",\'$.meep\') AS _2,name,date,tbl."id" AS _i FROM "testing" tbl WHERE(tbl."foo"=?)AND(json_extract(tbl."json",\'$.bar\')=?) ORDER BY name,date DESC,_i LIMIT 20 OFFSET 5' 20 | ) 21 | expect(v).toEqual([0, 3]) 22 | expect(s).toEqual(['name', 'date', '_i']) 23 | }) 24 | 25 | test('makeSelect where', () => { 26 | const m = getModel() 27 | const [q, v] = m.makeSelect({ 28 | where: { 29 | 'foo < ?': [5], 30 | "json_extract(json, '$.bar') = ?": [8], 31 | 'json is not null': [], 32 | 'ignore me': null, 33 | }, 34 | }) 35 | expect(q).toEqual( 36 | 'SELECT tbl."id" AS _i,tbl."json" AS _j FROM "testing" tbl WHERE(foo < ?)AND(json_extract(json, \'$.bar\') = ?)AND(json is not null)' 37 | ) 38 | expect(v).toEqual([5, 8]) 39 | }) 40 | 41 | test('makeSelect limit 1 w/ sort', () => { 42 | const m = getModel() 43 | const [q] = m.makeSelect({limit: 1, sort: {bar: 1}, noCursor: true}) 44 | expect(q).toEqual( 45 | 'SELECT tbl."id" AS _i,tbl."json" AS _j FROM "testing" tbl ORDER BY bar LIMIT 1' 46 | ) 47 | }) 48 | 49 | test('makeSelect sort w/ path', () => { 50 | const m = getModel({columns: {foo: {}}}) 51 | const [q] = m.makeSelect({limit: 1, sort: {foo: -1}}) 52 | expect(q).toEqual( 53 | 'SELECT tbl."id" AS _i,tbl."json" AS _j,json_extract(tbl."json",\'$.foo\') AS _0 FROM "testing" tbl ORDER BY _0 DESC,_i LIMIT 1' 54 | ) 55 | }) 56 | 57 | test('makeSelect isArray', () => { 58 | const m = getModel({columns: {foo: {isArray: true}}}) 59 | const [q] = m.makeSelect({attrs: {foo: 'meep'}}) 60 | expect(q).toEqual( 61 | `SELECT tbl."id" AS _i,tbl."json" AS _j FROM "testing" tbl WHERE(? 
IN (SELECT value FROM json_each(tbl."json",'$.foo')))` 62 | ) 63 | }) 64 | 65 | test('makeSelect textSearch', () => { 66 | const m = getModel({columns: {foo: {textSearch: true}}}) 67 | expect(m.makeSelect({attrs: {foo: 'meep'}})).toEqual([ 68 | 'SELECT tbl."id" AS _i,tbl."json" AS _j FROM "testing" tbl WHERE(json_extract(tbl."json",\'$.foo\') LIKE ?)', 69 | ['%meep%'], 70 | undefined, 71 | 'SELECT COUNT(*) as t from ( SELECT tbl."id" AS _i,tbl."json" AS _j FROM "testing" tbl WHERE(json_extract(tbl."json",\'$.foo\') LIKE ?) )', 72 | ['%meep%'], 73 | false, 74 | ]) 75 | }) 76 | 77 | test('makeSelect textSearch falsy', () => { 78 | const m = getModel({columns: {foo: {textSearch: true}}}) 79 | expect(m.makeSelect({attrs: {foo: ''}})).toEqual([ 80 | 'SELECT tbl."id" AS _i,tbl."json" AS _j FROM "testing" tbl', 81 | [], 82 | undefined, 83 | 'SELECT COUNT(*) as t from ( SELECT tbl."id" AS _i,tbl."json" AS _j FROM "testing" tbl )', 84 | [], 85 | false, 86 | ]) 87 | expect(m.makeSelect({attrs: {foo: null}})).toEqual([ 88 | 'SELECT tbl."id" AS _i,tbl."json" AS _j FROM "testing" tbl', 89 | [], 90 | undefined, 91 | 'SELECT COUNT(*) as t from ( SELECT tbl."id" AS _i,tbl."json" AS _j FROM "testing" tbl )', 92 | [], 93 | false, 94 | ]) 95 | expect(m.makeSelect({attrs: {foo: 0}})).toEqual([ 96 | 'SELECT tbl."id" AS _i,tbl."json" AS _j FROM "testing" tbl WHERE(json_extract(tbl."json",\'$.foo\') LIKE ?)', 97 | ['%0%'], 98 | undefined, 99 | 'SELECT COUNT(*) as t from ( SELECT tbl."id" AS _i,tbl."json" AS _j FROM "testing" tbl WHERE(json_extract(tbl."json",\'$.foo\') LIKE ?) 
)', 100 | ['%0%'], 101 | false, 102 | ]) 103 | }) 104 | 105 | test('makeSelect isAnyOfArray', () => { 106 | const m = getModel({columns: {foo: {isAnyOfArray: true}}}) 107 | const [q] = m.makeSelect({attrs: {foo: ['meep', 'moop']}}) 108 | expect(q).toEqual( 109 | `SELECT tbl."id" AS _i,tbl."json" AS _j FROM "testing" tbl WHERE(EXISTS(SELECT 1 FROM json_each(tbl."json",'$.foo') j WHERE j.value IN (SELECT value FROM json_each(?))))` 110 | ) 111 | }) 112 | 113 | test('makeSelect in', () => { 114 | const m = getModel({ 115 | columns: {foo: {in: true}, bar: {real: true, in: true}}, 116 | }) 117 | const [q] = m.makeSelect({ 118 | attrs: {foo: ['meep', 'moop'], bar: ['meep', 'moop']}, 119 | }) 120 | expect(q).toEqual( 121 | `SELECT tbl."bar" AS _1,tbl."id" AS _i,tbl."json" AS _j FROM "testing" tbl WHERE(json_extract(tbl."json",'$.foo') IN (SELECT value FROM json_each(?)))AND(tbl."bar" IN (SELECT value FROM json_each(?)))` 122 | ) 123 | const [q2] = m.makeSelect({attrs: {foo: []}}) 124 | expect(q2).toEqual( 125 | 'SELECT tbl."bar" AS _1,tbl."id" AS _i,tbl."json" AS _j FROM "testing" tbl' 126 | ) 127 | }) 128 | 129 | test('makeSelect in w/ path', () => { 130 | const m = getModel({columns: {foo: {in: true}}}) 131 | const [q] = m.makeSelect({attrs: {foo: ['meep', 'moop']}}) 132 | expect(q).toEqual( 133 | `SELECT tbl."id" AS _i,tbl."json" AS _j FROM "testing" tbl WHERE(json_extract(tbl."json",'$.foo') IN (SELECT value FROM json_each(?)))` 134 | ) 135 | }) 136 | 137 | test('makeSelect in + isArray = isAnyOfArray', () => { 138 | const m = getModel({ 139 | columns: {foo: {in: true, isArray: true}}, 140 | }) 141 | const [q] = m.makeSelect({attrs: {foo: ['meep', 'moop']}}) 142 | expect(q).toEqual( 143 | `SELECT tbl."id" AS _i,tbl."json" AS _j FROM "testing" tbl WHERE(EXISTS(SELECT 1 FROM json_each(tbl."json",'$.foo') j WHERE j.value IN (SELECT value FROM json_each(?))))` 144 | ) 145 | const [q2] = m.makeSelect({attrs: {foo: []}}) 146 | expect(q2).toEqual( 147 | 'SELECT tbl."id" AS 
_i,tbl."json" AS _j FROM "testing" tbl' 148 | ) 149 | }) 150 | 151 | test('makeSelect inAll', () => { 152 | const m = getModel({ 153 | columns: {foo: {inAll: true, isArray: true}}, 154 | }) 155 | const [q, v] = m.makeSelect({attrs: {foo: ['meep', 'moop']}}) 156 | expect(q).toEqual( 157 | `SELECT tbl."id" AS _i,tbl."json" AS _j FROM "testing" tbl WHERE(NOT EXISTS(SELECT 1 FROM json_each(?) j WHERE j.value NOT IN (SELECT value FROM json_each(tbl."json",'$.foo'))))` 158 | ) 159 | expect(v).toEqual([`["meep","moop"]`]) 160 | const [q2] = m.makeSelect({attrs: {foo: []}}) 161 | expect(q2).toEqual( 162 | 'SELECT tbl."id" AS _i,tbl."json" AS _j FROM "testing" tbl' 163 | ) 164 | }) 165 | 166 | test('in works', async () => { 167 | const m = getModel({ 168 | columns: { 169 | id: {type: 'INTEGER'}, 170 | foo: {in: true, isArray: true}, 171 | }, 172 | }) 173 | await m.set({foo: [1, 2, 5]}) 174 | await m.set({foo: [1, 2, 3, 4]}) 175 | await m.set({foo: [1, 2, 4, 5]}) 176 | expect(await m.searchAll({foo: []})).toHaveLength(3) 177 | expect(await m.searchAll({foo: [1, 2]})).toHaveLength(3) 178 | expect(await m.searchAll({foo: [5]})).toHaveLength(2) 179 | expect(await m.searchAll({foo: [4, 2]})).toHaveLength(3) 180 | expect(await m.searchAll({foo: [4, 5]})).toHaveLength(3) 181 | }) 182 | 183 | test('inAll works', async () => { 184 | const m = getModel({ 185 | columns: { 186 | id: {type: 'INTEGER'}, 187 | foo: {inAll: true, isArray: true}, 188 | }, 189 | }) 190 | await m.set({foo: [1, 2, 5]}) 191 | await m.set({foo: [1, 2, 3, 4]}) 192 | await m.set({foo: [1, 2, 4, 5]}) 193 | expect(await m.searchAll({foo: []})).toHaveLength(3) 194 | expect(await m.searchAll({foo: [1, 2]})).toHaveLength(3) 195 | expect(await m.searchAll({foo: [5]})).toHaveLength(2) 196 | expect(await m.searchAll({foo: [4, 2]})).toHaveLength(2) 197 | expect(await m.searchAll({foo: [4, 5]})).toEqual([{foo: [1, 2, 4, 5], id: 3}]) 198 | }) 199 | 200 | test('col.where', () => { 201 | const m = getModel({ 202 | columns: 
{foo: {sql: 'foo', where: 'foo = ?'}}, 203 | }) 204 | expect(m.makeSelect({attrs: {foo: 'moop'}})).toEqual([ 205 | 'SELECT tbl."id" AS _i,tbl."json" AS _j FROM "testing" tbl WHERE(foo = ?)', 206 | ['moop'], 207 | undefined, 208 | 'SELECT COUNT(*) as t from ( SELECT tbl."id" AS _i,tbl."json" AS _j FROM "testing" tbl WHERE(foo = ?) )', 209 | ['moop'], 210 | false, 211 | ]) 212 | }) 213 | 214 | test('col.where fn', () => { 215 | const m = getModel({ 216 | columns: {foo: {sql: 'foo', where: v => v.length}}, 217 | }) 218 | expect(m.makeSelect({attrs: {id: 4, foo: '123'}})).toEqual([ 219 | 'SELECT tbl."id" AS _i,tbl."json" AS _j FROM "testing" tbl WHERE(tbl."id"=?)AND(3)', 220 | [4, '123'], 221 | undefined, 222 | 'SELECT COUNT(*) as t from ( SELECT tbl."id" AS _i,tbl."json" AS _j FROM "testing" tbl WHERE(tbl."id"=?)AND(3) )', 223 | [4, '123'], 224 | false, 225 | ]) 226 | }) 227 | 228 | test('col.whereVal fn', () => { 229 | const m = getModel({ 230 | columns: {foo: {sql: 'foo', where: 'ohai', whereVal: v => [v.join(',')]}}, 231 | }) 232 | expect(m.makeSelect({attrs: {id: 5, foo: ['meep', 'moop']}})).toEqual([ 233 | 'SELECT tbl."id" AS _i,tbl."json" AS _j FROM "testing" tbl WHERE(tbl."id"=?)AND(ohai)', 234 | [5, 'meep,moop'], 235 | undefined, 236 | 'SELECT COUNT(*) as t from ( SELECT tbl."id" AS _i,tbl."json" AS _j FROM "testing" tbl WHERE(tbl."id"=?)AND(ohai) )', 237 | [5, 'meep,moop'], 238 | false, 239 | ]) 240 | }) 241 | 242 | test('col.whereVal fn falsy', () => { 243 | const m = getModel({ 244 | columns: {foo: {sql: 'foo', where: 'ohai', whereVal: () => 0}}, 245 | }) 246 | expect(m.makeSelect({attrs: {id: 5, foo: ['meep', 'moop']}})).toEqual([ 247 | 'SELECT tbl."id" AS _i,tbl."json" AS _j FROM "testing" tbl WHERE(tbl."id"=?)', 248 | [5], 249 | undefined, 250 | 'SELECT COUNT(*) as t from ( SELECT tbl."id" AS _i,tbl."json" AS _j FROM "testing" tbl WHERE(tbl."id"=?) 
)', 251 | [5], 252 | false, 253 | ]) 254 | }) 255 | 256 | test('min', async () => { 257 | const m = getModel({columns: {v: {}}}) 258 | expect(await m.min('v')).toBe(null) 259 | await m.set({v: 5}) 260 | expect(await m.min('v')).toBe(5) 261 | await m.set({v: '3'}) 262 | expect(await m.min('v')).toBe(3) 263 | await m.set({v: 'blah'}) 264 | expect(await m.min('v')).toBe(0) 265 | }) 266 | 267 | test('max', async () => { 268 | const m = getModel({columns: {v: {}}}) 269 | expect(await m.max('v')).toBe(null) 270 | await m.set({v: 'blah'}) 271 | expect(await m.max('v')).toBe(0) 272 | await m.set({v: 5}) 273 | expect(await m.max('v')).toBe(5) 274 | await m.set({v: '7'}) 275 | expect(await m.max('v')).toBe(7) 276 | }) 277 | 278 | test('avg', async () => { 279 | const m = getModel({columns: {v: {}}}) 280 | expect(await m.avg('v')).toBe(null) 281 | await m.set({v: 'blah'}) 282 | expect(await m.avg('v')).toBe(0) 283 | await m.set({v: 5}) 284 | await m.set({v: '10'}) 285 | expect(await m.avg('v')).toBe(5) 286 | expect( 287 | await m.avg('v', null, { 288 | where: {'CAST(json_extract(json,"$.v") as NUMERIC)>0': []}, 289 | }) 290 | ).toBe(7.5) 291 | }) 292 | 293 | test('sum', async () => { 294 | const m = getModel({columns: {v: {}}}) 295 | expect(await m.sum('v')).toBe(null) 296 | await m.set({v: 'blah'}) 297 | expect(await m.sum('v')).toBe(0) 298 | await m.set({v: 5}) 299 | await m.set({v: '8'}) 300 | expect(await m.sum('v')).toBe(13) 301 | }) 302 | 303 | test('distinct', async () => { 304 | const m = getModel({}) 305 | expect(m.makeSelect({distinct: true})).toEqual([ 306 | 'SELECT DISTINCT tbl."id" AS _i,tbl."json" AS _j FROM "testing" tbl', 307 | [], 308 | undefined, 309 | 'SELECT COUNT(*) as t from ( SELECT DISTINCT tbl."id" AS _i,tbl."json" AS _j FROM "testing" tbl )', 310 | [], 311 | false, 312 | ]) 313 | }) 314 | -------------------------------------------------------------------------------- /src/JsonModel/JM-migration.test.js: 
-------------------------------------------------------------------------------- 1 | import sysPath from 'path' 2 | import tmp from 'tmp-promise' 3 | import {DB, JsonModel, getModel} from '../lib/_test-helpers' 4 | 5 | test('falsy migration', async () => { 6 | const m = getModel({ 7 | migrations: { 8 | foo: false, 9 | }, 10 | }) 11 | await expect(() => m.searchOne()).not.toThrow() 12 | }) 13 | 14 | test('migrations', async () => { 15 | const m = getModel({ 16 | columns: { 17 | foo: { 18 | type: 'NUMERIC', 19 | value: () => 5, 20 | get: true, 21 | }, 22 | }, 23 | migrations: { 24 | meep: async ({db, model, hi}) => { 25 | expect(db).toBeTruthy() 26 | expect(model).toBeTruthy() 27 | expect(hi).toBe(3) 28 | const d = await model.set({foo: 1}) 29 | expect(d.foo).toBe(5) 30 | // This creates a prepared statement which must not leak 31 | expect(await model.get(d.id)).toHaveProperty('foo', 5) 32 | }, 33 | }, 34 | migrationOptions: { 35 | hi: 3, 36 | db: false, 37 | }, 38 | }) 39 | const d = await m.searchOne() 40 | expect(d.foo).toBe(5) 41 | // This should create a new prepared statement 42 | expect(await m.get(d.id)).toHaveProperty('foo', 5) 43 | }) 44 | 45 | test('concurrent migrations', async () => { 46 | const db = new DB() 47 | const a = db.addModel(JsonModel, { 48 | name: 'a', 49 | migrations: { 50 | 2: { 51 | async up({db: mDb}) { 52 | expect(await mDb.store.b.searchOne()).toEqual({id: '1'}) 53 | }, 54 | }, 55 | }, 56 | }) 57 | db.addModel(JsonModel, { 58 | name: 'b', 59 | migrations: { 60 | 1: { 61 | up({model}) { 62 | return model.set({id: 1}) 63 | }, 64 | }, 65 | }, 66 | }) 67 | await a.searchOne() 68 | }) 69 | 70 | test('migration clones writeable', async () => { 71 | const m = getModel({ 72 | name: 'test', 73 | migrations: { 74 | foo1: ({model}) => { 75 | model.__temp = 123 76 | }, 77 | foo2: ({db}) => { 78 | expect(db.store.test).toHaveProperty('__temp', 123) 79 | }, 80 | }, 81 | }) 82 | await expect(m.db.store.test).not.toHaveProperty('__temp') 83 | }) 84 | 
85 | test('column add', () => 86 | tmp.withDir( 87 | async ({path: dir}) => { 88 | const file = sysPath.join(dir, 'db') 89 | const m1 = new DB({file}).addModel(JsonModel, {name: 'testing'}) 90 | await m1.set({id: 'a', foo: {hi: true}}) 91 | expect(await m1.db.get(`select * from testing`)).not.toHaveProperty('foo') 92 | await m1.db.close() 93 | const m2 = new DB({file}).addModel(JsonModel, { 94 | name: 'testing', 95 | columns: {foo: {type: 'JSON'}}, 96 | }) 97 | expect(await m2.get('a')).toHaveProperty('foo.hi') 98 | await m2.set({id: 'a', foo: {hello: true}}) 99 | const a = await m2.get('a') 100 | expect(a).not.toHaveProperty('foo.hi') 101 | expect(a).toHaveProperty('foo.hello') 102 | expect(await m2.db.get(`select * from testing`)).toHaveProperty('foo') 103 | }, 104 | {unsafeCleanup: true, prefix: 'jm-coladd'} 105 | )) 106 | 107 | test('failed migration', async () => { 108 | const m = getModel({ 109 | name: 'test', 110 | migrations: { 111 | fail: async ({model}) => { 112 | await model.set({id: 'hi'}) 113 | throw new Error('oh no') 114 | }, 115 | }, 116 | }) 117 | await expect(m.db.store.test.get('id')).rejects.toThrow('oh no') 118 | expect(m.db.isOpen).toBe(false) 119 | }) 120 | -------------------------------------------------------------------------------- /src/JsonModel/JM-search.test.js: -------------------------------------------------------------------------------- 1 | import {getModel} from '../lib/_test-helpers' 2 | 3 | test('searchOne', async () => { 4 | const m = getModel({columns: {upperId: {sql: 'upper(id)'}}}) 5 | const obj = {id: 'foobar', fluffy: true} 6 | await m.set(obj) 7 | const saved = await m.searchOne({id: obj.id}) 8 | expect(saved).toEqual(obj) 9 | // check if `options` argument is properly passed 10 | const idOnly = await m.searchOne({id: obj.id}, {cols: ['upperId']}) 11 | expect(idOnly).toEqual({upperId: obj.id.toUpperCase()}) 12 | }) 13 | 14 | test('search[One] attrs=null', async () => { 15 | const m = getModel() 16 | await 
expect(m.searchOne(null)).resolves.not.toThrow() 17 | await expect(m.search(null)).resolves.not.toThrow() 18 | await expect(m.searchOne(undefined)).resolves.not.toThrow() 19 | await expect(m.search(undefined)).resolves.not.toThrow() 20 | await m.set({t: 5}) 21 | const all = await m.search(null) 22 | expect(all.items[0].t).toBe(5) 23 | }) 24 | 25 | test('makeSelect with cursor', () => { 26 | const m = getModel({ 27 | columns: { 28 | id: {type: 'INTEGER'}, 29 | c: {type: 'TEXT'}, 30 | d: {}, 31 | }, 32 | }) 33 | expect(m.makeSelect({limit: 5})).toEqual([ 34 | 'SELECT tbl."id" AS _i,tbl."c" AS _0,tbl."json" AS _j FROM "testing" tbl ORDER BY _i LIMIT 5', 35 | [], 36 | ['_i'], 37 | 'SELECT COUNT(*) as t from ( SELECT tbl."id" AS _i,tbl."c" AS _0,tbl."json" AS _j FROM "testing" tbl )', 38 | [], 39 | false, 40 | ]) 41 | expect(m.makeSelect({limit: 5, cursor: '!3'})).toEqual([ 42 | 'SELECT tbl."id" AS _i,tbl."c" AS _0,tbl."json" AS _j FROM "testing" tbl WHERE(_i>?) ORDER BY _i LIMIT 5', 43 | [3], 44 | ['_i'], 45 | 'SELECT COUNT(*) as t from ( SELECT tbl."id" AS _i,tbl."c" AS _0,tbl."json" AS _j FROM "testing" tbl )', 46 | [], 47 | false, 48 | ]) 49 | }) 50 | 51 | describe('search cursor', () => { 52 | let m, q, sampleSortedTotal, sampleSortedItems 53 | beforeAll(async () => { 54 | m = getModel({ 55 | columns: { 56 | id: {type: 'INTEGER'}, 57 | s1: {}, 58 | s2: {}, 59 | x1: {}, 60 | x2: {falsyBool: true}, 61 | }, 62 | }) 63 | const sampleStr = 'aaabbccddffeee' 64 | await Promise.all( 65 | [...'DCACAEDFCAEBEF'].map((s1, i) => 66 | m.set({ 67 | id: i + 1, 68 | s1, 69 | s2: sampleStr[i], 70 | x1: i % 2 === 0 ? 0 : 1, 71 | x2: i % 2 === 0 ? 
0 : 1, 72 | }) 73 | ) 74 | ) 75 | 76 | q = { 77 | where: {"json_extract(json, '$.s1')>?": ['B']}, 78 | sort: { 79 | s1: -1, 80 | s2: 1, 81 | // id: 100_000, 82 | }, 83 | limit: undefined, 84 | } 85 | 86 | const resNoLimit = await m.search(null, q) 87 | sampleSortedTotal = resNoLimit.total 88 | sampleSortedItems = resNoLimit.items 89 | }) 90 | 91 | test('search with where, sort, no limit + reverse', async () => { 92 | const resNoLimit = await m.search(null, q) 93 | expect(resNoLimit).toEqual({ 94 | cursor: undefined, 95 | items: [ 96 | {id: 8, s1: 'F', s2: 'd', x1: 1, x2: true}, 97 | {id: 14, s1: 'F', s2: 'e', x1: 1, x2: true}, 98 | {id: 6, s1: 'E', s2: 'c', x1: 1, x2: true}, 99 | {id: 13, s1: 'E', s2: 'e', x1: 0}, 100 | {id: 11, s1: 'E', s2: 'f', x1: 0}, 101 | {id: 1, s1: 'D', s2: 'a', x1: 0}, 102 | {id: 7, s1: 'D', s2: 'c', x1: 0}, 103 | {id: 2, s1: 'C', s2: 'a', x1: 1, x2: true}, 104 | {id: 4, s1: 'C', s2: 'b', x1: 1, x2: true}, 105 | {id: 9, s1: 'C', s2: 'd', x1: 0}, 106 | ], 107 | prevCursor: undefined, 108 | total: 10, 109 | }) 110 | 111 | const resReverseSortNoLimit = await m.search(null, { 112 | ...q, 113 | sort: { 114 | s1: 1, 115 | s2: -1, 116 | // id: -100_000, 117 | }, 118 | }) 119 | expect(resReverseSortNoLimit).toEqual({ 120 | cursor: undefined, 121 | items: resNoLimit.items.reverse(), 122 | prevCursor: undefined, 123 | total: 10, 124 | }) 125 | }) 126 | 127 | test('search with where, sort, cursor, limit = full path', async () => { 128 | // no cursor (1/4) 129 | const res1 = await m.search(null, {...q, limit: 3}) 130 | expect(res1).toEqual({ 131 | cursor: '!E~c~6', 132 | items: sampleSortedItems.slice(0, 3), 133 | prevCursor: '!!F~d~8', 134 | total: 10, 135 | }) 136 | 137 | // => move to next cursor (2/4) 138 | const res2 = await m.search(null, {...q, limit: 3, cursor: res1.cursor}) 139 | expect(res2).toEqual({ 140 | cursor: '!D~a~1', 141 | items: sampleSortedItems.slice(3, 6), 142 | prevCursor: '!!E~e~13', 143 | total: 10, 144 | }) 145 | 146 | // => 
move to next cursor (3/4) 147 | const res3 = await m.search(null, {...q, limit: 3, cursor: res2.cursor}) 148 | expect(res3).toEqual({ 149 | cursor: '!C~b~4', 150 | items: sampleSortedItems.slice(6, 9), 151 | prevCursor: '!!D~c~7', 152 | total: 10, 153 | }) 154 | 155 | // => move to next cursor (4/4) 156 | const res4 = await m.search(null, { 157 | ...q, 158 | limit: 3, 159 | cursor: res3.cursor, 160 | }) 161 | expect(res4).toEqual({ 162 | items: sampleSortedItems.slice(9), 163 | cursor: undefined, 164 | prevCursor: '!!C~d~9', 165 | total: 10, 166 | }) 167 | 168 | // => move to prev cursor (1/3) 169 | const res3prev = await m.search(null, { 170 | ...q, 171 | limit: 3, 172 | cursor: res4.prevCursor, 173 | }) 174 | 175 | expect(res3prev).toEqual(res3) 176 | 177 | // => move to prev cursor (2/3) 178 | const res2prev = await m.search(null, { 179 | ...q, 180 | limit: 3, 181 | cursor: res3.prevCursor, 182 | }) 183 | expect(res2prev).toEqual(res2) 184 | 185 | // => move to prev cursor (3/3) 186 | const res1prev = await m.search(null, { 187 | ...q, 188 | limit: 3, 189 | cursor: res2.prevCursor, 190 | }) 191 | expect(res1prev).toEqual(res1) 192 | 193 | expect(await m.count(null, {...q, cursor: res3.cursor})).toBe(1) 194 | expect(await m.count(null, {...q, cursor: res3.prevCursor})).toBe(6) 195 | }) 196 | 197 | test('search with where, sort, cursor, limit - limit equals to total', async () => { 198 | // expect: cursor should be null, when equals total 199 | const res1 = await m.search(null, { 200 | ...q, 201 | limit: sampleSortedTotal, 202 | }) 203 | expect(res1.cursor).toBeFalsy() 204 | 205 | // expect: cursor should be null, when equals total (2 steps) 206 | const res21 = await m.search(null, {...q, limit: sampleSortedTotal / 2}) 207 | const res22 = await m.search(null, {...q, cursor: res21.cursor}) 208 | expect(res21.cursor).toBeTruthy() 209 | expect(res22.cursor).toBeFalsy() 210 | }) 211 | 212 | test('search with where, limit, bool sort', async () => { 213 | const 
sortWithBool = {s1: -1, s2: 1, x2: -1} 214 | 215 | const res0 = await m.search(null, {...q, sort: sortWithBool}) 216 | expect(res0).toEqual({ 217 | cursor: undefined, 218 | items: [ 219 | {id: 8, s1: 'F', s2: 'd', x1: 1, x2: true}, 220 | {id: 14, s1: 'F', s2: 'e', x1: 1, x2: true}, 221 | {id: 6, s1: 'E', s2: 'c', x1: 1, x2: true}, 222 | {id: 13, s1: 'E', s2: 'e', x1: 0}, 223 | {id: 11, s1: 'E', s2: 'f', x1: 0}, 224 | {id: 1, s1: 'D', s2: 'a', x1: 0}, 225 | {id: 7, s1: 'D', s2: 'c', x1: 0}, 226 | {id: 2, s1: 'C', s2: 'a', x1: 1, x2: true}, 227 | {id: 4, s1: 'C', s2: 'b', x1: 1, x2: true}, 228 | {id: 9, s1: 'C', s2: 'd', x1: 0}, 229 | ], 230 | prevCursor: undefined, 231 | total: 10, 232 | }) 233 | 234 | const res1 = await m.search(null, {...q, limit: 4, sort: sortWithBool}) 235 | expect(res1).toEqual({ 236 | cursor: '!E~e~_N~13', 237 | items: res0.items.slice(0, 4), 238 | prevCursor: '!!F~d~1~8', 239 | total: 10, 240 | }) 241 | 242 | const res2 = await m.search(null, { 243 | ...q, 244 | limit: 4, 245 | sort: sortWithBool, 246 | cursor: res1.cursor, 247 | }) 248 | expect(res2).toEqual({ 249 | cursor: '!C~a~1~2', 250 | items: res0.items.slice(4, 8), 251 | prevCursor: '!!E~f~_N~11', 252 | total: 10, 253 | }) 254 | }) 255 | 256 | test('search with sort, cursor, limit - limit < total, some sorted values = falsyBool', async () => { 257 | const totalCount = await m.count() 258 | const falsyBoolFalseCount = await m.count({x2: false}) 259 | const falsyBoolTrueCount = await m.count({x2: true}) 260 | 261 | const pageCount = falsyBoolTrueCount + 1 262 | // to make sure there are results on page2 263 | expect(falsyBoolFalseCount - pageCount).toBeTruthy() 264 | 265 | const searchOptions = { 266 | // true first 267 | sort: {x2: -1}, 268 | limit: pageCount, 269 | cols: ['id', 'x2'], 270 | } 271 | const page1 = await m.search({}, searchOptions) 272 | 273 | expect(page1).toEqual({ 274 | cursor: '!_N~1', 275 | items: [ 276 | {id: 2, x2: 1}, 277 | {id: 4, x2: 1}, 278 | {id: 6, x2: 1}, 
279 | {id: 8, x2: 1}, 280 | {id: 10, x2: 1}, 281 | {id: 12, x2: 1}, 282 | {id: 14, x2: 1}, 283 | {id: 1}, 284 | ], 285 | prevCursor: '!!1~2', 286 | total: 14, 287 | }) 288 | 289 | const page2Count = await m.count( 290 | {}, 291 | {...searchOptions, cursor: page1.cursor} 292 | ) 293 | const remainingAfterPage1 = totalCount - pageCount 294 | expect(page2Count).toEqual( 295 | remainingAfterPage1 < pageCount ? remainingAfterPage1 : pageCount 296 | ) 297 | const page2 = await m.search({}, {...searchOptions, cursor: page1.cursor}) 298 | expect(page2).toEqual({ 299 | cursor: undefined, 300 | items: [{id: 3}, {id: 5}, {id: 7}, {id: 9}, {id: 11}, {id: 13}], 301 | prevCursor: '!!_N~3', 302 | total: 14, 303 | }) 304 | }) 305 | }) 306 | 307 | test('search itemsOnly', async () => { 308 | const m = getModel() 309 | const obj = await m.set({fluffy: true}) 310 | expect(await m.search(null, {itemsOnly: true})).toEqual([obj]) 311 | }) 312 | 313 | test('searchAll', async () => { 314 | const m = getModel() 315 | const obj = await m.set({fluffy: true}) 316 | expect(await m.searchAll()).toEqual([obj]) 317 | }) 318 | 319 | test('exists', async () => { 320 | const m = getModel({columns: {hi: {}}}) 321 | expect(await m.exists()).toBe(false) 322 | await m.set({hi: true}) 323 | expect(await m.exists()).toBe(true) 324 | expect(await m.exists({hi: true})).toBe(true) 325 | expect(await m.exists({hi: false})).toBe(false) 326 | }) 327 | 328 | test('exists id', async () => { 329 | const m = getModel({columns: {hi: {}}}) 330 | await m.set({id: 1}) // make sure we have rows so empty exists returns true 331 | expect(await m.exists(55)).toBe(false) 332 | await m.set({id: 55}) 333 | expect(await m.exists(55)).toBe(true) 334 | expect(await m.exists('foo')).toBe(false) 335 | await m.set({id: 'foo'}) 336 | expect(await m.exists('foo')).toBe(true) 337 | }) 338 | -------------------------------------------------------------------------------- /src/JsonModel/JM-set.test.js: 
-------------------------------------------------------------------------------- 1 | import {getModel} from '../lib/_test-helpers' 2 | 3 | test('set with id', async () => { 4 | const m = getModel() 5 | const obj = {id: 'foobar', fluffy: true} 6 | const set = await m.set(obj) 7 | const saved = await m.get(set.id) 8 | expect(saved).toEqual(obj) 9 | expect(saved).toEqual(set) 10 | }) 11 | 12 | test('set with falsy id, BLOB type', async () => { 13 | const m = getModel({columns: {id: {type: 'BLOB'}}}) 14 | await m.set({id: 0}) 15 | await m.set({id: ''}) 16 | const all = await m.all() 17 | expect(all).toHaveLength(2) 18 | expect(all.every(r => !r.id)).toBe(true) 19 | }) 20 | 21 | test('set without id', async () => { 22 | const m = getModel() 23 | const obj = {fluffy: true} 24 | const saved = await m.set(obj) 25 | expect(saved.id).toBeTruthy() 26 | expect(saved.fluffy).toBe(obj.fluffy) 27 | }) 28 | 29 | test('set without id, INTEGER type', async () => { 30 | const m = getModel({columns: {id: {type: 'INTEGER'}}}) 31 | const o = await m.set({}) 32 | const all = await m.all() 33 | expect([o]).toEqual(all) 34 | }) 35 | 36 | test('INTEGER autoIncrement id', async () => { 37 | const m = getModel({ 38 | columns: {id: {type: 'INTEGER', autoIncrement: true}}, 39 | }) 40 | await m.set({id: 50}) 41 | await m.remove({id: 50}) 42 | await m.set({}) 43 | const all = await m.all() 44 | expect([{id: 51}]).toEqual(all) 45 | }) 46 | 47 | test('set with existing id', async () => { 48 | let val = 5 49 | const m = getModel({columns: {id: {value: () => val}}}) 50 | await m.set({hi: true}) 51 | const o = await m.searchOne() 52 | expect(o.id).toBe('5') 53 | val = 6 54 | await m.set(o) 55 | const p = await m.all() 56 | expect(p).toHaveLength(1) 57 | expect(p[0].id).toBe('5') 58 | }) 59 | 60 | test('insert conflicting unique non-id column', async () => { 61 | const m = getModel({columns: {u: {unique: true, index: true}}}) 62 | await m.set({id: 1, u: 1}) 63 | await expect(m.set({id: 2, u: 
1})).rejects.toThrow('SQLITE_CONSTRAINT') 64 | }) 65 | 66 | test('set(obj, insertOnly)', async () => { 67 | const m = getModel() 68 | await m.set({id: '234'}) 69 | await expect(m.set({id: '234'}, true)).rejects.toThrow('SQLITE_CONSTRAINT') 70 | }) 71 | 72 | test('set(obj, insertOnly) for integer id', async () => { 73 | const m = getModel({columns: {id: {type: 'INTEGER'}}}) 74 | await m.set({id: 234}) 75 | await expect(m.set({id: 234}, true)).rejects.toThrow('SQLITE_CONSTRAINT') 76 | }) 77 | 78 | test('set almost empty object', async () => { 79 | const m = getModel() 80 | await m.set({id: 'ta'}) 81 | expect(await m.db.all`SELECT * from ${m.name}ID`).toEqual([ 82 | {id: 'ta', json: null}, 83 | ]) 84 | expect(await m.all()).toEqual([{id: 'ta'}]) 85 | }) 86 | 87 | test('update(obj)', async () => { 88 | const m = getModel() 89 | const obj = await m.update({hi: 5, ho: 8}, true) 90 | const {id} = obj 91 | expect(await m.get(id)).toEqual(obj) 92 | await m.update({id, hi: 7}) 93 | expect(await m.get(id)).toEqual({...obj, hi: 7}) 94 | }) 95 | 96 | test('update(obj, upsert)', async () => { 97 | const m = getModel() 98 | await m.set({id: 5, ho: 8, meep: true}) 99 | await expect(m.update({id: 5, ho: 1})).resolves.toEqual({ 100 | id: 5, 101 | ho: 1, 102 | meep: true, 103 | }) 104 | await expect(m.get(5)).resolves.toEqual({ 105 | id: '5', 106 | ho: 1, 107 | meep: true, 108 | }) 109 | await expect(m.update({id: 7, ho: 2})).rejects.toThrow('No object') 110 | await expect(m.update({id: 7, ho: 3}, true)).resolves.toEqual({ 111 | id: 7, 112 | ho: 3, 113 | }) 114 | await expect(m.update({ho: 4}, true)).resolves.toMatchObject({ho: 4}) 115 | expect(await m.count()).toBe(3) 116 | }) 117 | 118 | test('update transactional', async () => { 119 | const m = getModel() 120 | await m.db.run(`BEGIN IMMEDIATE`) 121 | await expect(m.update({id: 5, ho: 9}, true)).rejects.toThrow( 122 | 'cannot start a transaction within a transaction' 123 | ) 124 | }) 125 | 126 | test('update reuses existing 
transaction', async () => { 127 | const m = getModel() 128 | await m.db.withTransaction(async () => { 129 | await expect(m.update({id: 5, ho: 9}, true)).resolves.not.toThrow() 130 | }) 131 | }) 132 | 133 | test('updateNoTrans not transactional', async () => { 134 | const m = getModel() 135 | await m.db.run(`BEGIN IMMEDIATE`) 136 | await expect(m.updateNoTrans({id: 5, ho: 9}, true)).resolves.not.toThrow() 137 | await m.db.run(`END`) 138 | }) 139 | 140 | test('.changeId(oldId, newId)', async () => { 141 | const m = getModel() 142 | await m.set({id: 'a', t: 1}) 143 | await m.changeId('a', 'b') 144 | expect(await m.all()).toEqual([{id: 'b', t: 1}]) 145 | }) 146 | test('.changeId(oldId, existing)', async () => { 147 | const m = getModel() 148 | await m.set({id: 'a', t: 1}) 149 | await m.set({id: 'b', t: 2}) 150 | await expect(m.changeId('a', 'b')).rejects.toThrow('SQLITE_CONSTRAINT') 151 | }) 152 | test('.changeId(missing, newId)', async () => { 153 | const m = getModel() 154 | const p = m.changeId('a', 'b') 155 | await expect(p).rejects.toThrow('not found') 156 | }) 157 | test('.changeId(missing, newId) race', async () => { 158 | const m = getModel() 159 | await Promise.all([ 160 | expect(m.changeId('a', 'b')).rejects.toThrow('id a not found'), 161 | m.set({id: 'a'}), 162 | ]) 163 | expect(await m.all()).toEqual([{id: 'a'}]) 164 | }) 165 | test('.changeId(oldId, invalid)', async () => { 166 | const m = getModel() 167 | expect(() => m.changeId('a', null)).toThrow(TypeError) 168 | expect(() => m.changeId('a', undefined)).toThrow(TypeError) 169 | }) 170 | -------------------------------------------------------------------------------- /src/JsonModel/JM-toObj.test.js: -------------------------------------------------------------------------------- 1 | import {getModel} from '../lib/_test-helpers' 2 | 3 | test('toObj nothing', () => { 4 | const m = getModel() 5 | expect(m.toObj()).toBeFalsy() 6 | }) 7 | 8 | test('toObj single', () => { 9 | const m = getModel() 10 | 
expect(m.toObj({_j: '{"hi":5}', _i: 2})).toEqual({hi: 5, id: 2}) 11 | }) 12 | 13 | test('toObj derived', () => { 14 | const m = getModel({ 15 | columns: {ohai: {real: true, get: true}, beep: {}}, 16 | }) 17 | expect(m.toObj({_j: '{"hi":5}', _0: 8, hi: 3, nohai: 4, _i: 0})).toEqual({ 18 | hi: 5, 19 | ohai: 8, 20 | id: 0, 21 | }) 22 | }) 23 | 24 | test('toObj array', () => { 25 | const m = getModel() 26 | expect( 27 | m.toObj([ 28 | {_j: '{"hi":5}', _i: 0}, 29 | {_j: '{"ho":6}', _i: 1}, 30 | ]) 31 | ).toEqual([ 32 | {hi: 5, id: 0}, 33 | {ho: 6, id: 1}, 34 | ]) 35 | }) 36 | 37 | test('enforce string type', async () => { 38 | const m = getModel({ 39 | columns: { 40 | f: {type: 'TEXT'}, 41 | g: {real: true}, 42 | }, 43 | }) 44 | await m.set({id: 2, f: 5, g: 6}) 45 | const o = await m.searchOne() 46 | expect(o.id).toBe('2') 47 | expect(o.f).toBe('5') 48 | expect(o.g).toBe(6) 49 | }) 50 | 51 | test('long number string id', async () => { 52 | const m = getModel() 53 | const id = '234234239874972349872342' 54 | await m.set({id}) 55 | const o = await m.searchOne() 56 | expect(o.id).toBe(id) 57 | }) 58 | 59 | test('parse validity', async () => { 60 | expect(() => 61 | getModel({columns: {f: {value: o => o.f, parse: v => `_${v}`}}}) 62 | ).toThrow() 63 | }) 64 | 65 | test('parse function', async () => { 66 | const m = getModel({ 67 | columns: { 68 | f: {real: true, parse: v => `_${v}`}, 69 | g: {get: true, parse: v => `_${v}`}, 70 | }, 71 | }) 72 | const o = await m.set({f: 'hi', g: 'there'}) 73 | expect(o.f).toBe('_hi') 74 | // On the set return, only real columns are gotten 75 | expect(o.g).toBe('there') 76 | const p = await m.get(o.id) 77 | expect(p.f).toBe('_hi') 78 | expect(p.g).toBe('_there') 79 | }) 80 | -------------------------------------------------------------------------------- /src/JsonModel/__snapshots__/JM-columns.test.js.snap: -------------------------------------------------------------------------------- 1 | // Vitest Snapshot v1, 
https://vitest.dev/guide/snapshot.html 2 | 3 | exports[`columns create 1`] = ` 4 | [ 5 | { 6 | "alias": "_0", 7 | "get": false, 8 | "ignoreNull": true, 9 | "index": "SPARSE", 10 | "jsonCol": "json", 11 | "jsonPath": "foo1", 12 | "name": "foo1", 13 | "parts": [ 14 | "foo1", 15 | ], 16 | "path": "foo1", 17 | "quoted": ""foo1"", 18 | "select": "json_extract(tbl."json",'$.foo1') AS _0", 19 | "sql": "json_extract(tbl."json",'$.foo1')", 20 | "where": "json_extract(tbl."json",'$.foo1')=?", 21 | }, 22 | { 23 | "alias": "foo2", 24 | "get": false, 25 | "ignoreNull": true, 26 | "name": "foo2", 27 | "parts": [ 28 | "foo2", 29 | ], 30 | "path": "foo2", 31 | "quoted": ""foo2"", 32 | "real": true, 33 | "select": "tbl."foo2" AS foo2", 34 | "sql": "tbl."foo2"", 35 | "type": "INTEGER", 36 | "value": [Function], 37 | "where": "tbl."foo2"=?", 38 | }, 39 | { 40 | "alias": "_1", 41 | "get": false, 42 | "ignoreNull": false, 43 | "index": "ALL", 44 | "jsonCol": "json", 45 | "jsonPath": "foo3", 46 | "name": "foo3", 47 | "parts": [ 48 | "foo3", 49 | ], 50 | "path": "foo3", 51 | "quoted": ""foo3"", 52 | "select": "json_extract(tbl."json",'$.foo3') AS _1", 53 | "sql": "json_extract(tbl."json",'$.foo3')", 54 | "unique": true, 55 | "value": [Function], 56 | "where": "json_extract(tbl."json",'$.foo3')=?", 57 | }, 58 | { 59 | "alias": "_2", 60 | "get": true, 61 | "ignoreNull": true, 62 | "name": "fooGet", 63 | "parts": [ 64 | "fooGet", 65 | ], 66 | "path": "fooGet", 67 | "quoted": ""fooGet"", 68 | "real": true, 69 | "select": "tbl."fooGet" AS _2", 70 | "sql": "tbl."fooGet"", 71 | "type": "BLOB", 72 | "value": [Function], 73 | "where": "tbl."fooGet"=?", 74 | }, 75 | { 76 | "alias": "_i", 77 | "autoIncrement": undefined, 78 | "get": true, 79 | "ignoreNull": false, 80 | "index": "ALL", 81 | "name": "id", 82 | "parts": [ 83 | "id", 84 | ], 85 | "path": "id", 86 | "quoted": ""id"", 87 | "real": true, 88 | "select": "tbl."id" AS _i", 89 | "sql": "tbl."id"", 90 | "type": "TEXT", 91 | "unique": true, 92 
| "value": [Function], 93 | "where": "tbl."id"=?", 94 | }, 95 | { 96 | "alias": "_j", 97 | "alwaysObject": true, 98 | "get": true, 99 | "ignoreNull": true, 100 | "name": "json", 101 | "parse": [Function], 102 | "parts": [], 103 | "path": "", 104 | "quoted": ""json"", 105 | "real": true, 106 | "select": "tbl."json" AS _j", 107 | "sql": "tbl."json"", 108 | "stringify": [Function], 109 | "type": "JSON", 110 | "where": "tbl."json"=?", 111 | }, 112 | ] 113 | `; 114 | -------------------------------------------------------------------------------- /src/JsonModel/assignJsonParents.js: -------------------------------------------------------------------------------- 1 | import {byPathLengthDesc} from './prepareSqlCol' 2 | 3 | export const assignJsonParents = columnArr => { 4 | const parents = columnArr 5 | .filter(c => c.type === 'JSON' && c.get) 6 | .sort(byPathLengthDesc) 7 | for (const col of columnArr) { 8 | // Will always match, json column has path:'' 9 | const parent = parents.find( 10 | p => !p.path || col.path.startsWith(p.path + '.') 11 | ) 12 | if (parent.alwaysObject == null) parent.alwaysObject = true 13 | if (!col.real) { 14 | col.jsonCol = parent.name 15 | col.jsonPath = parent.path 16 | ? 
col.path.slice(parent.path.length + 1) 17 | : col.path 18 | } 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /src/JsonModel/index.js: -------------------------------------------------------------------------------- 1 | // export {verifyOptions} from './columnPropType' 2 | // export {verifyColumn} from './verifyColumn' 3 | // export {makeMigrations} from './makeMigrations' 4 | export {makeIdValue} from './makeDefaultIdValue' 5 | export {byPathLength, byPathLengthDesc, parseJson} from './prepareSqlCol' 6 | export {default} from './JsonModel' 7 | -------------------------------------------------------------------------------- /src/JsonModel/makeDefaultIdValue.js: -------------------------------------------------------------------------------- 1 | import {v1} from 'uuid' 2 | import {uniqueSlugId} from '../lib/slugify' 3 | 4 | const makeDefaultIdValue = idCol => obj => { 5 | if (obj[idCol] != null) return obj[idCol] 6 | return v1() 7 | } 8 | 9 | export const makeIdValue = (idCol, {value, slugValue, type} = {}) => { 10 | if (type === 'INTEGER') { 11 | // eslint-disable-next-line unicorn/prefer-logical-operator-over-ternary 12 | return value 13 | ? value 14 | : o => { 15 | const id = o[idCol] 16 | return id || id === 0 ? id : null 17 | } 18 | } 19 | // do not bind the value functions, they must be able to use other db during migrations 20 | if (slugValue) { 21 | return async function (o) { 22 | if (o[idCol] != null) return o[idCol] 23 | return uniqueSlugId(this, await slugValue(o), idCol) 24 | } 25 | } 26 | const defaultIdValue = makeDefaultIdValue(idCol) 27 | if (value) { 28 | return async function (o) { 29 | if (o[idCol] != null) return o[idCol] 30 | const id = await value.call(this, o) 31 | return id == null ? 
defaultIdValue(o) : id 32 | } 33 | } 34 | return defaultIdValue 35 | } 36 | -------------------------------------------------------------------------------- /src/JsonModel/makeMigrations.js: -------------------------------------------------------------------------------- 1 | import {sql} from '../DB' 2 | 3 | export const cloneModelWithDb = (m, db) => { 4 | const model = Object.create(m) 5 | model.db = db 6 | model._set = model._makeSetFn() 7 | return model 8 | } 9 | 10 | export const makeMigrations = ({ 11 | name: tableName, 12 | idCol, 13 | columns, 14 | keepRowId, 15 | migrations, 16 | migrationOptions, 17 | }) => { 18 | const tableQuoted = sql.quoteId(tableName) 19 | const allMigrations = { 20 | ...migrations, 21 | // We make id a real column to allow foreign keys 22 | 0: async ({db}) => { 23 | const {quoted, type, autoIncrement} = columns[idCol] 24 | const isIntegerId = type === 'INTEGER' 25 | const addRowId = !isIntegerId && keepRowId 26 | // The NOT NULL is a SQLite bug, otherwise it allows NULL as id 27 | const rowIdCol = addRowId 28 | ? `"rowId" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, ` 29 | : '' 30 | const keySql = addRowId 31 | ? `${type} NOT NULL` 32 | : `${type} PRIMARY KEY ${ 33 | isIntegerId && autoIncrement ? 'AUTOINCREMENT' : '' 34 | } NOT NULL` 35 | 36 | await db.exec( 37 | `CREATE TABLE ${tableQuoted}(${rowIdCol}${quoted} ${keySql}, json JSON);` 38 | ) 39 | if (addRowId) { 40 | // implement the unique constraint with our own index 41 | await db.exec( 42 | `CREATE UNIQUE INDEX ${sql.quoteId( 43 | `${tableName}_${idCol}` 44 | )} ON ${tableQuoted}(${sql.quoteId(idCol)})` 45 | ) 46 | } 47 | }, 48 | } 49 | for (const [name, col] of Object.entries(columns)) { 50 | // We already added these, or it's an alias 51 | if (name === idCol || name === 'json' || name !== col.name) continue 52 | const expr = col.sql.replace('tbl.', '') 53 | // Make sure real columns are created before indexes on expressions 54 | allMigrations[`${col.real ? 
0 : 1}_${name}`] = ({db}) => 55 | db.exec( 56 | `${ 57 | col.type 58 | ? `ALTER TABLE ${tableQuoted} ADD COLUMN ${col.quoted} ${col.type};` 59 | : '' 60 | }${ 61 | col.index 62 | ? `CREATE ${ 63 | col.unique ? 'UNIQUE ' : '' 64 | }INDEX IF NOT EXISTS ${sql.quoteId( 65 | `${tableName}_${name}` 66 | )} ON ${tableQuoted}(${expr}) ${ 67 | col.ignoreNull ? `WHERE ${expr} IS NOT NULL` : '' 68 | };` 69 | : '' 70 | }` 71 | ) 72 | } 73 | // Wrap the migration functions to provide their arguments 74 | const wrappedMigrations = {} 75 | const wrap = fn => 76 | fn && 77 | (writeableDb => { 78 | if (!writeableDb.store.__madeWriteable) { 79 | const {store} = writeableDb 80 | const newStore = {__madeWriteable: true} 81 | // Create a patched version of all models that uses the migration db 82 | for (const m of Object.values(store)) { 83 | if (typeof m !== 'object') continue 84 | newStore[m.name] = cloneModelWithDb(m, writeableDb) 85 | } 86 | writeableDb.store = newStore 87 | } 88 | const model = writeableDb.store[tableName] 89 | return fn({...migrationOptions, db: writeableDb, model}) 90 | }) 91 | const wrapMigration = migration => wrap(migration.up || migration) 92 | 93 | for (const k of Object.keys(allMigrations)) { 94 | const m = allMigrations[k] 95 | if (m) wrappedMigrations[k] = wrapMigration(m) 96 | } 97 | return wrappedMigrations 98 | } 99 | -------------------------------------------------------------------------------- /src/JsonModel/normalizeColumn.js: -------------------------------------------------------------------------------- 1 | import {sql, valToSql} from '../DB' 2 | import {uniqueSlugId} from '../lib/slugify' 3 | import {get} from 'lodash' 4 | 5 | /** 6 | * @template 7 | * @template 8 | * @param {JMColumnDef} col 9 | * @param {JMColName} name 10 | */ 11 | 12 | export const normalizeColumn = (col, name) => { 13 | col.name = name 14 | col.quoted = sql.quoteId(name) 15 | if (col.type) col.real = true 16 | else if (col.real) col.type = col.falsyBool ? 
'INTEGER' : 'BLOB' 17 | if (col.get == null) col.get = !!col.real 18 | if (!col.path && name !== 'json') col.path = name 19 | col.parts = col.path === '' ? [] : col.path.split('.') 20 | if (col.index === 'ALL') col.ignoreNull = false 21 | if (col.index === 'SPARSE') col.ignoreNull = true 22 | if (col.unique) { 23 | if (!col.index) throw new TypeError(`${name}: unique requires index`) 24 | } else if (col.ignoreNull == null) { 25 | col.ignoreNull = true 26 | } 27 | if (col.autoIncrement && col.type !== 'INTEGER') 28 | throw new TypeError(`${name}: autoIncrement is only for type INTEGER`) 29 | if (col.slugValue) { 30 | if (col.value) 31 | throw new TypeError(`${name}: slugValue and value can't both be defined`) 32 | if (!col.index) throw new TypeError(`${name}: slugValue requires index`) 33 | col.value = async function (o) { 34 | if (o[name] != null) return o[name] 35 | return uniqueSlugId(this, await col.slugValue(o), name, o[this.idCol]) 36 | } 37 | } 38 | if (col.default != null) { 39 | col.ignoreNull = false 40 | const prev = col.value 41 | if (prev) { 42 | col.value = async function (o) { 43 | const r = await prev.call(this, o) 44 | return r == null ? col.default : r 45 | } 46 | } else if (col.sql) { 47 | col.sql = `ifNull(${col.sql},${valToSql(col.default)})` 48 | } else { 49 | col.value = o => { 50 | const v = get(o, col.path) 51 | return v == null ? col.default : v 52 | } 53 | } 54 | } 55 | if (col.required) { 56 | col.ignoreNull = false 57 | const prev = col.value 58 | col.value = prev 59 | ? async function (o) { 60 | const r = await prev.call(this, o) 61 | if (r == null) throw new Error(`${name}: value is required`) 62 | return r 63 | } 64 | : o => { 65 | const v = get(o, col.path) 66 | if (v == null) throw new Error(`${name}: value is required`) 67 | return v 68 | } 69 | } 70 | if (col.falsyBool) { 71 | const prev = col.value 72 | col.value = prev 73 | ? async function (o) { 74 | const r = await prev.call(this, o) 75 | return r ? 
true : undefined 76 | } 77 | : o => { 78 | const v = get(o, col.path) 79 | return v ? true : undefined 80 | } 81 | if (col.real) { 82 | if (col.parse) throw new TypeError(`${name}: falsyBool can't have parse`) 83 | col.parse = v => (v ? true : undefined) 84 | } 85 | } 86 | if (!col.real && col.stringify) 87 | throw new Error(`${name}: stringify only applies to real columns`) 88 | if (!col.get && col.parse) 89 | throw new Error(`${name}: parse only applies to get:true columns`) 90 | } 91 | -------------------------------------------------------------------------------- /src/JsonModel/prepareSqlCol.js: -------------------------------------------------------------------------------- 1 | import {sql} from '../DB' 2 | 3 | export const byPathLength = (a, b) => a.parts.length - b.parts.length 4 | export const byPathLengthDesc = (a, b) => b.parts.length - a.parts.length 5 | 6 | const stringifyJson = JSON.stringify 7 | export const stringifyJsonObject = obj => { 8 | const json = JSON.stringify(obj) 9 | return json === '{}' ? null : json 10 | } 11 | 12 | export const parseJson = v => (v == null ? v : JSON.parse(v)) 13 | const parseJsonObject = v => (v == null ? {} : JSON.parse(v)) 14 | 15 | const arrayToJson = v => (v && v.length ? [JSON.stringify(v)] : false) 16 | 17 | // Note: avoid where functions; that way, queries can be reused for different args 18 | 19 | export const prepareSqlCol = (col, name) => { 20 | if (col.type === 'JSON') { 21 | if (col.stringify === undefined) 22 | col.stringify = col.alwaysObject ? stringifyJsonObject : stringifyJson 23 | if (col.parse === undefined) 24 | col.parse = col.alwaysObject ? parseJsonObject : parseJson 25 | } else if (col.alwaysObject) 26 | throw new TypeError(`${name}: .alwaysObject only applies to JSON type`) 27 | if (col.falsyBool && !col.where) { 28 | col.where = (_, v) => (v ? `${col.sql} IS NOT NULL` : `${col.sql} IS NULL`) 29 | col.whereVal = () => [] 30 | } 31 | if (!col.sql) { 32 | col.sql = col.real 33 | ? 
`tbl.${col.quoted}` 34 | : `json_extract(tbl.${sql.quoteId(col.jsonCol)},'$.${col.jsonPath}')` 35 | } 36 | if (col.isAnyOfArray) { 37 | col.isArray = true 38 | col.in = true 39 | } 40 | if (col.isArray) { 41 | if (col.where || col.whereVal) 42 | throw new TypeError(`${name}: cannot mix isArray and where/whereVal`) 43 | if (col.textSearch) 44 | throw new TypeError(`${name}: Only one of isArray/textSearch allowed`) 45 | const eachSql = `json_each(${ 46 | col.real 47 | ? `tbl.${col.quoted}` 48 | : `tbl.${sql.quoteId(col.jsonCol)},'$.${col.jsonPath}'` 49 | })` 50 | if (col.in) { 51 | col.where = `EXISTS(SELECT 1 FROM ${eachSql} j WHERE j.value IN (SELECT value FROM json_each(?)))` 52 | col.whereVal = arrayToJson 53 | } else if (col.inAll) { 54 | col.where = `NOT EXISTS(SELECT 1 FROM json_each(?) j WHERE j.value NOT IN (SELECT value FROM ${eachSql}))` 55 | col.whereVal = arrayToJson 56 | } else { 57 | col.where = `? IN (SELECT value FROM ${eachSql})` 58 | } 59 | } else if (col.in) { 60 | if (col.where || col.whereVal) 61 | throw new TypeError(`${name}: cannot mix .in and where/whereVal`) 62 | if (col.textSearch) 63 | throw new TypeError(`${name}: Only one of in/textSearch allowed`) 64 | col.where = `${col.sql} IN (SELECT value FROM json_each(?))` 65 | col.whereVal = arrayToJson 66 | } else if (col.textSearch) { 67 | if (col.where || col.whereVal) 68 | throw new TypeError(`${name}: cannot mix textSearch and where/whereVal`) 69 | if (col.in) 70 | throw new TypeError(`${name}: Only one of in/textSearch allowed`) 71 | col.where = `${col.sql} LIKE ?` 72 | col.whereVal = v => { 73 | if (v == null) return 74 | const s = String(v) 75 | if (s) return [`%${s}%`] 76 | } 77 | } 78 | col.select = `${col.sql} AS ${col.alias}` 79 | if ( 80 | typeof col.where === 'string' && 81 | !col.whereVal && 82 | !col.where.includes('?') 83 | ) 84 | throw new Error( 85 | `${col.name}: .where "${col.where}" should include a ? 
when not passing .whereVal` 86 | ) 87 | if (!col.where) col.where = `${col.sql}=?` 88 | } 89 | -------------------------------------------------------------------------------- /src/JsonModel/verifyOptions.js: -------------------------------------------------------------------------------- 1 | import PropTypes from 'prop-types' 2 | 3 | /** 4 | * @type {Object} 5 | * 6 | * @typedef ColumnDef 7 | * @property {boolean} [real=!!type] - Is this a real table column. Default is 8 | * `!!type` 9 | * @property {string} [type] 10 | * 11 | * - Sql column type as accepted by {@link DB} 12 | * 13 | * @property {string} [path] 14 | * 15 | * - Path to the value in the object. 16 | * 17 | * @property {boolean} [autoIncrement] 18 | * 19 | * - INTEGER id column only: apply AUTOINCREMENT on the column. 20 | * 21 | * @property {string} [alias] 22 | * 23 | * - The alias to use in SELECT statements. 24 | * 25 | * @property {boolean} [get=true] - Should the column be included in search 26 | * results. Default is `true` 27 | * @property {function} [parse] 28 | * 29 | * - Process the value after getting from DB. 30 | * 31 | * @property {function} [stringify] 32 | * 33 | * - Process the value before putting into DB. 34 | * 35 | * @property {boolean} [alwaysObject] 36 | * 37 | * - The value is an object and must always be there. If this is a real column, a 38 | * NULL column value will be replaced by `{}` and vice versa. 39 | * 40 | * @property {function} [value] 41 | * 42 | * - Function getting object and returning the value for the column; this creates 43 | * a real column. Right now the column value is not regenerated for 44 | * existing rows. 45 | * 46 | * @property {function} [slugValue] 47 | * 48 | * - Same as value, but the result is used to generate a unique slug. 49 | * 50 | * @property {string} [sql] 51 | * 52 | * - Any sql expression to use in SELECT statements. 53 | * 54 | * @property {any} [default] 55 | * 56 | * - If the value is nullish, this will be stored instead. 
57 | * 58 | * @property {boolean} [required] 59 | * 60 | * - Throw when trying to store a NULL. 61 | * 62 | * @property {boolean} [falsyBool] 63 | * 64 | * - Store/retrieve this boolean value as either `true` or absent from the object. 65 | * 66 | * @property {boolean} [index] 67 | * 68 | * - Should it be indexed? If `unique` is false, NULLs are never indexed. 69 | * 70 | * @property {boolean} [ignoreNull=!unique] - Are null values ignored in the 71 | * index?. Default is `!unique` 72 | * @property {boolean} [unique] 73 | * 74 | * - Should the index enforce uniqueness? 75 | * 76 | * @property {function} [whereVal] 77 | * 78 | * - A function receiving `origVals` and returning the `vals` given to `where`. It 79 | * should return falsy or an array of values. 80 | * 81 | * @property {string | function} [where] 82 | * 83 | * - The where clause for querying, or a function returning one given `(vals, 84 | * origVals)` 85 | * 86 | * @property {boolean} [isArray] 87 | * 88 | * - This column contains an array of values. 89 | * 90 | * @property {boolean} [in] 91 | * 92 | * - To query, this column value must match one of the given array items. 93 | * 94 | * @property {boolean} [inAll] 95 | * 96 | * - [isArray only] to query, this column value must match all of the given array 97 | * items. 98 | * 99 | * @property {boolean} [textSearch] 100 | * 101 | * - Perform searches as substring search with LIKE. 102 | * 103 | * @property {boolean} [isAnyOfArray] 104 | * 105 | * - Alias for isArray+inAll. 106 | */ 107 | 108 | export const columnPropType = 109 | process.env.NODE_ENV === 'production' 110 | ? 
null 111 | : PropTypes.exact({ 112 | // === sql column === 113 | real: PropTypes.bool, 114 | // column type if real column 115 | type: PropTypes.oneOf([ 116 | 'TEXT', 117 | 'NUMERIC', 118 | 'INTEGER', 119 | 'REAL', 120 | 'BLOB', 121 | 'JSON', 122 | ]), 123 | path: PropTypes.string, 124 | autoIncrement: PropTypes.bool, 125 | alias: PropTypes.string, 126 | get: PropTypes.bool, 127 | parse: PropTypes.func, 128 | stringify: PropTypes.func, 129 | alwaysObject: PropTypes.bool, 130 | // === value related === 131 | slugValue: PropTypes.func, 132 | sql: PropTypes.string, 133 | value: PropTypes.func, 134 | default: PropTypes.any, 135 | required: PropTypes.bool, 136 | falsyBool: PropTypes.bool, 137 | // === index === 138 | // create index for this column 139 | index: PropTypes.oneOfType([PropTypes.bool, PropTypes.string]), 140 | ignoreNull: PropTypes.bool, 141 | unique: PropTypes.bool, 142 | // === queries === 143 | where: PropTypes.oneOfType([PropTypes.string, PropTypes.func]), 144 | whereVal: PropTypes.func, 145 | // === query helpers === 146 | in: PropTypes.bool, 147 | inAll: PropTypes.bool, 148 | isAnyOfArray: PropTypes.bool, 149 | isArray: PropTypes.bool, 150 | textSearch: PropTypes.bool, 151 | }) 152 | 153 | export const verifyColumn = (name, column) => { 154 | if (process.env.NODE_ENV !== 'production') { 155 | /* eslint-disable no-console */ 156 | const prevError = console.error 157 | console.error = message => { 158 | console.error = prevError 159 | throw new Error(message) 160 | } 161 | PropTypes.checkPropTypes( 162 | {column: columnPropType}, 163 | {column}, 164 | `column`, 165 | 'JsonModel' 166 | ) 167 | console.error = prevError 168 | /* eslint-enable no-console */ 169 | } 170 | } 171 | 172 | /** 173 | * @type {Object} 174 | * 175 | * @typedef JMOptions 176 | * @property {DB} db - A DB instance, normally passed by DB. 177 | * @property {string} name - The table name. 178 | * @property {Object} [migrations] - An object with migration functions. 
They 179 | * are run in alphabetical order. 180 | * @property {Object} [migrationOptions] - Free-form data passed to the 181 | * migration functions. 182 | * @property {Object} [columns] - The column definitions as {@link ColumnDef} 183 | * objects. Each value must be a columndef or a function returning a 184 | * columndef. 185 | * @property {function} [ItemClass] - An object class to use for results, must 186 | * be able to handle `Object.assign(item, result)` 187 | * @property {string} [idCol='id'] - The key of the ID column. Default is `'id'` 188 | * @property {boolean} [keepRowId] - Preserve row id after vacuum. 189 | */ 190 | const jmPropTypes = 191 | process.env.NODE_ENV === 'production' 192 | ? null 193 | : { 194 | options: PropTypes.exact({ 195 | db: PropTypes.object.isRequired, 196 | name: PropTypes.string.isRequired, 197 | migrations: PropTypes.objectOf( 198 | PropTypes.oneOfType([ 199 | PropTypes.oneOf([false]), 200 | PropTypes.func, 201 | PropTypes.exact({up: PropTypes.func, down: PropTypes.func}), 202 | ]) 203 | ), 204 | migrationOptions: PropTypes.object, 205 | columns: PropTypes.objectOf( 206 | PropTypes.oneOfType([PropTypes.func, columnPropType]) 207 | ), 208 | ItemClass: PropTypes.func, 209 | idCol: PropTypes.string, 210 | keepRowId: PropTypes.bool, 211 | // Harmless props passed by ESDB 212 | dispatch: PropTypes.any, 213 | emitter: PropTypes.any, 214 | }), 215 | } 216 | 217 | export const verifyOptions = options => { 218 | if (process.env.NODE_ENV !== 'production') { 219 | /* eslint-disable no-console */ 220 | const prevError = console.error 221 | console.error = message => { 222 | console.error = prevError 223 | throw new Error(message) 224 | } 225 | PropTypes.checkPropTypes(jmPropTypes, {options}, 'options', 'JsonModel') 226 | console.error = prevError 227 | /* eslint-enable no-console */ 228 | } 229 | } 230 | -------------------------------------------------------------------------------- /src/index.js: 
-------------------------------------------------------------------------------- 1 | export {default as DB, SQLite} from './DB' 2 | export {default as EventQueue} from './EventQueue' 3 | export { 4 | default as EventSourcingDB, 5 | applyResult, 6 | ESModel, 7 | } from './EventSourcingDB' 8 | export {default as JsonModel} from './JsonModel' 9 | -------------------------------------------------------------------------------- /src/lib/_test-helpers.js: -------------------------------------------------------------------------------- 1 | import DB from '../DB' 2 | import ESDB from '../EventSourcingDB' 3 | import EQ from '../EventQueue' 4 | import JsonModel from '../JsonModel' 5 | 6 | export {default as DB} from '../DB' 7 | export {default as JsonModel} from '../JsonModel' 8 | 9 | export const getModel = options => { 10 | const db = new DB() 11 | return db.addModel(JsonModel, { 12 | name: 'testing', 13 | keepRowId: false, 14 | ...options, 15 | }) 16 | } 17 | 18 | export const sharedSetup = getPromise => fn => { 19 | let promise 20 | return async () => { 21 | if (!promise) { 22 | promise = getPromise() 23 | } 24 | return fn(await promise) 25 | } 26 | } 27 | 28 | export const testModels = { 29 | count: { 30 | // shortName: 'c', 31 | columns: { 32 | total: {type: 'INTEGER'}, 33 | }, 34 | migrations: { 35 | init: { 36 | up({db, model, queue}) { 37 | expect(db).toBeTruthy() 38 | expect(queue).toBeTruthy() 39 | return model.set({id: 'count', total: 0, byType: {}}) 40 | }, 41 | }, 42 | }, 43 | preprocessor: async ({event}) => { 44 | if (event.type === 'error_pre') throw new Error('pre error for you') 45 | }, 46 | reducer: async ({model, event: {type}}) => { 47 | if (type === 'error_reduce') throw new Error('error for you') 48 | if (!model.get) return false 49 | const c = (await model.get('count')) || { 50 | id: 'count', 51 | total: 0, 52 | byType: {}, 53 | } 54 | c.total++ 55 | c.byType[type] = (c.byType[type] || 0) + 1 56 | return { 57 | set: [c], 58 | // audit: '', 59 | } 60 
| }, 61 | deriver: async ({event}) => { 62 | if (event.type === 'error_derive') throw new Error('post error for you') 63 | }, 64 | }, 65 | ignorer: { 66 | reducer: _args => {}, 67 | }, 68 | deriver: { 69 | deriver: async ({model, store, result, event}) => { 70 | if (result !== event.result[model.name]) { 71 | throw new Error('Expecting event.result as separate input') 72 | } 73 | if (event.result.count) { 74 | const currentCount = await store.count.get('count') 75 | await model.set({ 76 | id: 'descCount', 77 | desc: `Total: ${currentCount.total}, seen types: ${Object.keys( 78 | currentCount.byType 79 | )}`, 80 | }) 81 | } 82 | }, 83 | }, 84 | } 85 | 86 | const withDBs = async fn => { 87 | const db = new DB({name: 'D'}) 88 | const queue = new EQ({ 89 | db: new DB({name: 'Q'}), 90 | columns: {events: {type: 'JSON'}}, 91 | }) 92 | const ret = await fn(db, queue) 93 | await Promise.all([db.close(), queue.db.close()]) 94 | return ret 95 | } 96 | 97 | /** 98 | * @param {Record | function} modelsOrFn 99 | * @param {function} [fn] 100 | */ 101 | export const withESDB = (modelsOrFn, fn) => { 102 | let models 103 | if (typeof modelsOrFn === 'function') { 104 | if (fn) throw 'Use either .withESDB(fn) or .withESDB(models, fn)' 105 | fn = modelsOrFn 106 | } else { 107 | models = modelsOrFn 108 | } 109 | if (!models) models = testModels 110 | if (!fn) throw new Error('no fn passed to withESDB') 111 | return withDBs(async (db, queue) => { 112 | const eSDB = new ESDB({queue, models, name: 'E'}) 113 | const out = await fn(eSDB, queue) 114 | await eSDB.close() 115 | return out 116 | }) 117 | } 118 | -------------------------------------------------------------------------------- /src/lib/settleAll.js: -------------------------------------------------------------------------------- 1 | import {Sema} from 'async-sema' 2 | 3 | // Only throw after all items are processed 4 | /** 5 | * @template Item 6 | * @param {Item[]} items 7 | * @param {(item: Item) => Promise | void} fn Function to 
call on each 8 | * item. 9 | * @param {number} [maxConcurrent] Maximum functions running in parallel. 10 | */ 11 | export const settleAll = async (items, fn, maxConcurrent) => { 12 | let err, cb 13 | if (maxConcurrent) { 14 | const sema = new Sema(maxConcurrent) 15 | cb = async item => { 16 | await sema.acquire() 17 | try { 18 | return await fn(item) 19 | } finally { 20 | sema.release() 21 | } 22 | } 23 | } else { 24 | cb = fn 25 | } 26 | await Promise.all( 27 | items.map(async i => { 28 | try { 29 | await cb(i) 30 | } catch (error) { 31 | // last one wins 32 | err = error 33 | } 34 | }) 35 | ) 36 | if (err) throw err 37 | } 38 | -------------------------------------------------------------------------------- /src/lib/slugify.js: -------------------------------------------------------------------------------- 1 | import {deburr} from 'lodash' 2 | 3 | const abc = 'abcdefghijklmnopqrstuvwxyz0123456789' 4 | export const randomString = (/** @type {number} */ n) => 5 | // eslint-disable-next-line unicorn/no-new-array 6 | Array.apply(null, new Array(n)) 7 | .map(() => { 8 | return abc.charAt(Math.floor(Math.random() * abc.length)) 9 | }) 10 | .join('') 11 | 12 | export const slugifyString = (name, alwaysResult) => { 13 | // extract name from i18n objects 14 | const t = 15 | typeof name === 'string' 16 | ? name 17 | : typeof name === 'number' 18 | ? name.toString() 19 | : name && typeof name === 'object' 20 | ? 
Object.values(name).find(v => typeof v === 'string' && v) 21 | : null 22 | if (!t) { 23 | if (alwaysResult) return randomString(12) 24 | throw new Error(`Cannot slugify ${name}`) 25 | } 26 | return encodeURIComponent(deburr(t).trim()) 27 | .replaceAll(/(%..|['()_~])/g, '-') 28 | .replaceAll(/--+/g, '-') 29 | .toLowerCase() 30 | .replaceAll(/(^[^\da-z]+|[^\da-z]+$)/g, '') 31 | .slice(0, 30) 32 | } 33 | 34 | // This is not race-safe - only use for write-seldom things like backoffice or inside transactions 35 | export const uniqueSlugId = async (model, name, colName, currentId) => { 36 | const slug = slugifyString(name, true) 37 | let id = slug 38 | let i = 1 39 | const where = currentId && { 40 | [`${model.idColQ} IS NOT ?`]: [currentId], 41 | } 42 | 43 | while (await model.exists({[colName]: id}, {where})) { 44 | id = `${slug}-${++i}` 45 | } 46 | return id 47 | } 48 | -------------------------------------------------------------------------------- /src/lib/slugify.test.js: -------------------------------------------------------------------------------- 1 | import {randomString, slugifyString} from './slugify' 2 | 3 | test('randomString', () => { 4 | const values = {} 5 | let i = 0 6 | let duplicateCount = 0 7 | while (i < 100_000) { 8 | const val = randomString(8) 9 | expect(val).toHaveLength(8) 10 | if (values[val]) { 11 | duplicateCount++ 12 | } 13 | values[val] = 1 14 | i++ 15 | } 16 | expect(duplicateCount <= 1).toBe(true) 17 | }) 18 | 19 | test('slugifyString', () => { 20 | const cmp = (o, s) => expect(slugifyString(o)).toBe(s) 21 | cmp(' rostuLR"st wfpunq 🤗', 'rostulr-st-wfpunq') 22 | cmp('hi there', 'hi-there') 23 | cmp('hi there.', 'hi-there') 24 | // eslint-disable-next-line @typescript-eslint/no-loss-of-precision 25 | cmp(2_341_234_901_283_740_987, '2341234901283741000') 26 | cmp('1234567890!@#$`|&*()+-={}[];:\\\'"%^_~nice!', '1234567890!-*-nice') 27 | cmp({meep: 'yoyo'}, 'yoyo') 28 | cmp({a: true, meep: 'yoyo'}, 'yoyo') 29 | cmp({a: '', meep: 'yoyo'}, 
'yoyo') 30 | cmp(0, '0') 31 | expect(() => slugifyString()).toThrow() 32 | expect(() => slugifyString('')).toThrow() 33 | expect(() => slugifyString(null)).toThrow() 34 | expect(() => slugifyString([])).toThrow() 35 | expect(() => slugifyString(0, true)).not.toThrow() 36 | expect(slugifyString(null, true).length === 12).toBe(true) 37 | }) 38 | -------------------------------------------------------------------------------- /src/lib/warning.js: -------------------------------------------------------------------------------- 1 | export const DEV = process.env.NODE_ENV !== 'production' 2 | export let deprecated, unknown 3 | 4 | if (DEV) { 5 | const warned = {} 6 | const warner = type => (tag, msg, conditionFn) => { 7 | if (warned[tag]) return 8 | if (conditionFn && !conditionFn()) return 9 | warned[tag] = true 10 | // eslint-disable-next-line no-console 11 | console.warn(new Error(`!!! ${type} ${msg}`)) 12 | } 13 | deprecated = warner('DEPRECATED') 14 | unknown = warner('UNKNOWN') 15 | } else { 16 | deprecated = () => {} 17 | unknown = () => {} 18 | } 19 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | // Code generation, not actually used 4 | "target": "ES2017", 5 | "module": "ES2022", 6 | "incremental": true, 7 | "isolatedModules": true, 8 | "outDir": "tmp", 9 | "noEmit": true, 10 | 11 | // Available types 12 | "lib": ["ES2021"], 13 | "types": ["vite/client", "vitest/globals", "node"], 14 | 15 | // Module resolution 16 | "forceConsistentCasingInFileNames": true, 17 | "moduleResolution": "node", 18 | "esModuleInterop": true, 19 | "resolveJsonModule": true, 20 | 21 | // Strictness 22 | "strict": true, 23 | "noImplicitAny": false, 24 | "allowImportingTsExtensions": true, 25 | 26 | // Check JS files too 27 | "allowJs": true, 28 | "checkJs": true, 29 | 30 | // Make TS lots faster by skipping lib checks 31 | 
"skipLibCheck": true, 32 | 33 | // Uncomment for more details in errors 34 | // "extendedDiagnostics": true, 35 | // "noErrorTruncation": true 36 | 37 | "declaration": true, 38 | "declarationDir": "lib-types" 39 | }, 40 | "exclude": ["node_modules", "dist", "tmp", "coverage"] 41 | } 42 | -------------------------------------------------------------------------------- /vite.config.ts: -------------------------------------------------------------------------------- 1 | import {defineConfig, type UserConfig} from 'vite' 2 | import {configDefaults} from 'vitest/config' 3 | import pkg from './package.json' 4 | 5 | const {dependencies = {}, peerDependencies = {}} = pkg as any 6 | const makeRegex = (dep: string) => new RegExp(`^${dep}(/.*)?$`) 7 | const excludeAll = (obj: {[pkg: string]: string}) => 8 | Object.keys(obj).map(dep => makeRegex(dep)) 9 | 10 | export default defineConfig(() => { 11 | return { 12 | build: { 13 | // keep debugging readable 14 | minify: false, 15 | target: 'es2020', 16 | lib: { 17 | entry: ['./src/index.js'], 18 | formats: ['es', 'cjs'], 19 | }, 20 | rollupOptions: { 21 | preserveModules: true, 22 | // externalize deps that shouldn't be bundled into the library 23 | external: [ 24 | /^node:.*/, 25 | ...excludeAll(dependencies), 26 | ...excludeAll(peerDependencies), 27 | ], 28 | }, 29 | }, 30 | test: { 31 | globals: true, 32 | testTimeout: 20_000, 33 | exclude: [...configDefaults.exclude, 'dist/**', 'dist-types/**'], 34 | coverage: { 35 | exclude: [...configDefaults.coverage!.exclude!, 'package-scripts.cjs'], 36 | }, 37 | }, 38 | } as UserConfig 39 | }) 40 | --------------------------------------------------------------------------------